Compare commits
5 Commits
054aa9391e
...
e831e1f5b9
| Author | SHA1 | Date |
|---|---|---|
|
|
e831e1f5b9 | 2 weeks ago |
|
|
02d4fa85dd | 2 weeks ago |
|
|
4465a1074a | 2 weeks ago |
|
|
f9988077e6 | 2 weeks ago |
|
|
a8c9850b19 | 2 weeks ago |
55 changed files with 5821 additions and 3180 deletions
@ -0,0 +1,30 @@
@@ -0,0 +1,30 @@
|
||||
/** |
||||
* Shared utility for triggering repo polls |
||||
* This provides a consistent interface for triggering polls from anywhere in the codebase |
||||
*/ |
||||
|
||||
import { getRepoPollingService } from '../services/service-registry.js'; |
||||
import logger from '../services/logger.js'; |
||||
|
||||
/** |
||||
* Trigger a repo poll |
||||
* This is the single source of truth for triggering polls |
||||
* @param context Optional context string for logging (e.g., 'user-verification', 'manual-refresh') |
||||
* @returns Promise that resolves when poll is triggered (not when it completes) |
||||
*/ |
||||
export async function triggerRepoPoll(context?: string): Promise<void> { |
||||
const pollingService = getRepoPollingService(); |
||||
|
||||
if (!pollingService) { |
||||
logger.warn({ context }, 'Poll request received but polling service not initialized'); |
||||
throw new Error('Polling service not available'); |
||||
} |
||||
|
||||
// Trigger poll asynchronously (non-blocking)
|
||||
// The poll will complete in the background
|
||||
pollingService.triggerPoll().catch((err) => { |
||||
logger.error({ error: err, context }, 'Failed to trigger poll'); |
||||
}); |
||||
|
||||
logger.info({ context }, 'Repo poll triggered'); |
||||
} |
||||
@ -1,329 +0,0 @@
@@ -1,329 +0,0 @@
|
||||
/** |
||||
* API endpoint for global code search across all repositories |
||||
* Searches file contents across multiple repositories |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
import type { RequestHandler } from './$types'; |
||||
import { handleValidationError } from '$lib/utils/error-handler.js'; |
||||
import { extractRequestContext } from '$lib/utils/api-context.js'; |
||||
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js'; |
||||
import { NostrClient } from '$lib/services/nostr/nostr-client.js'; |
||||
import { KIND } from '$lib/types/nostr.js'; |
||||
import { eventCache } from '$lib/services/nostr/event-cache.js'; |
||||
import { fetchRepoAnnouncementsWithCache } from '$lib/utils/nostr-utils.js'; |
||||
import logger from '$lib/services/logger.js'; |
||||
import { readdir, stat } from 'fs/promises'; |
||||
import { join } from 'path'; |
||||
import { existsSync } from 'fs'; |
||||
import { simpleGit } from 'simple-git'; |
||||
import { fileManager } from '$lib/services/service-registry.js'; |
||||
|
||||
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT |
||||
? process.env.GIT_REPO_ROOT |
||||
: '/repos'; |
||||
|
||||
export interface GlobalCodeSearchResult { |
||||
repo: string; |
||||
npub: string; |
||||
file: string; |
||||
line: number; |
||||
content: string; |
||||
branch: string; |
||||
} |
||||
|
||||
export const GET: RequestHandler = async (event) => { |
||||
const query = event.url.searchParams.get('q'); |
||||
const repoFilter = event.url.searchParams.get('repo'); // Optional: filter by specific repo (npub/repo format)
|
||||
const limit = parseInt(event.url.searchParams.get('limit') || '100', 10); |
||||
|
||||
if (!query || query.trim().length < 2) { |
||||
throw handleValidationError('Query must be at least 2 characters', { operation: 'globalCodeSearch' }); |
||||
} |
||||
|
||||
const requestContext = extractRequestContext(event); |
||||
const results: GlobalCodeSearchResult[] = []; |
||||
|
||||
try { |
||||
// If repo filter is specified, search only that repo
|
||||
if (repoFilter) { |
||||
const [npub, repo] = repoFilter.split('/'); |
||||
if (npub && repo) { |
||||
const repoPath = join(repoRoot, npub, `${repo}.git`); |
||||
if (existsSync(repoPath)) { |
||||
const repoResults = await searchInRepo(npub, repo, query, limit); |
||||
results.push(...repoResults); |
||||
} |
||||
} |
||||
return json(results); |
||||
} |
||||
|
||||
// Search across all repositories
|
||||
// First, get list of all repos from filesystem
|
||||
if (!existsSync(repoRoot)) { |
||||
return json([]); |
||||
} |
||||
|
||||
const users = await readdir(repoRoot); |
||||
|
||||
for (const user of users) { |
||||
const userPath = join(repoRoot, user); |
||||
const userStat = await stat(userPath); |
||||
|
||||
if (!userStat.isDirectory()) { |
||||
continue; |
||||
} |
||||
|
||||
const repos = await readdir(userPath); |
||||
|
||||
for (const repo of repos) { |
||||
if (!repo.endsWith('.git')) { |
||||
continue; |
||||
} |
||||
|
||||
const repoName = repo.replace(/\.git$/, ''); |
||||
const repoPath = join(userPath, repo); |
||||
const repoStat = await stat(repoPath); |
||||
|
||||
if (!repoStat.isDirectory()) { |
||||
continue; |
||||
} |
||||
|
||||
// Check access for private repos
|
||||
try { |
||||
const { MaintainerService } = await import('$lib/services/nostr/maintainer-service.js'); |
||||
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS); |
||||
|
||||
// Decode npub to hex
|
||||
const { nip19 } = await import('nostr-tools'); |
||||
let repoOwnerPubkey: string; |
||||
try { |
||||
const decoded = nip19.decode(user); |
||||
if (decoded.type === 'npub') { |
||||
repoOwnerPubkey = decoded.data as string; |
||||
} else { |
||||
repoOwnerPubkey = user; // Assume it's already hex
|
||||
} |
||||
} catch { |
||||
repoOwnerPubkey = user; // Assume it's already hex
|
||||
} |
||||
|
||||
const canView = await maintainerService.canView( |
||||
requestContext.userPubkeyHex || null, |
||||
repoOwnerPubkey, |
||||
repoName |
||||
); |
||||
|
||||
if (!canView) { |
||||
continue; // Skip private repos user can't access
|
||||
} |
||||
} catch (accessErr) { |
||||
logger.debug({ error: accessErr, user, repo: repoName }, 'Error checking access, skipping repo'); |
||||
continue; |
||||
} |
||||
|
||||
// Search in this repo
|
||||
try { |
||||
const repoResults = await searchInRepo(user, repoName, query, limit - results.length); |
||||
results.push(...repoResults); |
||||
|
||||
if (results.length >= limit) { |
||||
break; |
||||
} |
||||
} catch (searchErr) { |
||||
logger.debug({ error: searchErr, user, repo: repoName }, 'Error searching repo, continuing'); |
||||
continue; |
||||
} |
||||
} |
||||
|
||||
if (results.length >= limit) { |
||||
break; |
||||
} |
||||
} |
||||
|
||||
return json(results.slice(0, limit)); |
||||
} catch (err) { |
||||
logger.error({ error: err, query }, 'Error performing global code search'); |
||||
throw err; |
||||
} |
||||
}; |
||||
|
||||
async function searchInRepo( |
||||
npub: string, |
||||
repo: string, |
||||
query: string, |
||||
limit: number |
||||
): Promise<GlobalCodeSearchResult[]> { |
||||
const repoPath = join(repoRoot, npub, `${repo}.git`); |
||||
|
||||
if (!existsSync(repoPath)) { |
||||
return []; |
||||
} |
||||
|
||||
const results: GlobalCodeSearchResult[] = []; |
||||
const git = simpleGit(repoPath); |
||||
|
||||
try { |
||||
// Get default branch
|
||||
let branch = 'HEAD'; |
||||
try { |
||||
const branches = await git.branchLocal(); |
||||
branch = branches.current || 'HEAD'; |
||||
// If no current branch, try common defaults
|
||||
if (!branch || branch === 'HEAD') { |
||||
const allBranches = branches.all.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, '')); |
||||
branch = allBranches.find(b => b === 'main') || allBranches.find(b => b === 'master') || allBranches[0] || 'main'; |
||||
} |
||||
} catch { |
||||
branch = 'main'; |
||||
} |
||||
|
||||
// For bare repositories, we need to use a worktree or search the index
|
||||
let worktreePath: string | null = null; |
||||
try { |
||||
// Get the actual branch name (resolve HEAD if needed)
|
||||
let actualBranch = branch; |
||||
if (branch === 'HEAD') { |
||||
actualBranch = 'main'; |
||||
} |
||||
|
||||
// Get or create worktree
|
||||
worktreePath = await fileManager.getWorktree(repoPath, actualBranch, npub, repo); |
||||
} catch (worktreeError) { |
||||
logger.debug({ error: worktreeError, npub, repo, branch }, 'Could not create worktree, trying git grep with tree reference'); |
||||
// Fall back to searching the index
|
||||
} |
||||
|
||||
const searchQuery = query.trim(); |
||||
|
||||
// If we have a worktree, search in the worktree
|
||||
if (worktreePath && existsSync(worktreePath)) { |
||||
try { |
||||
const worktreeGit = simpleGit(worktreePath); |
||||
const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery]; |
||||
const grepOutput = await worktreeGit.raw(gitArgs); |
||||
|
||||
if (!grepOutput || !grepOutput.trim()) { |
||||
return []; |
||||
} |
||||
|
||||
// Parse git grep output
|
||||
const lines = grepOutput.split('\n'); |
||||
let currentFile = ''; |
||||
|
||||
for (const line of lines) { |
||||
if (!line.trim()) { |
||||
continue; |
||||
} |
||||
|
||||
// Check if this is a filename (no colon)
|
||||
if (!line.includes(':')) { |
||||
currentFile = line.trim(); |
||||
continue; |
||||
} |
||||
|
||||
// Parse line:content format
|
||||
const colonIndex = line.indexOf(':'); |
||||
if (colonIndex > 0 && currentFile) { |
||||
const lineNumber = parseInt(line.substring(0, colonIndex), 10); |
||||
const content = line.substring(colonIndex + 1); |
||||
|
||||
if (!isNaN(lineNumber) && content) { |
||||
// Make file path relative to repo root
|
||||
const relativeFile = currentFile.replace(worktreePath + '/', '').replace(/^\.\//, ''); |
||||
results.push({ |
||||
repo, |
||||
npub, |
||||
file: relativeFile, |
||||
line: lineNumber, |
||||
content: content.trim(), |
||||
branch: branch === 'HEAD' ? 'HEAD' : branch |
||||
}); |
||||
|
||||
if (results.length >= limit) { |
||||
break; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} catch (grepError: any) { |
||||
// git grep returns exit code 1 when no matches found
|
||||
if (grepError.message && grepError.message.includes('exit code 1')) { |
||||
return []; |
||||
} |
||||
throw grepError; |
||||
} |
||||
} else { |
||||
// Fallback: search in the index using git grep with tree reference
|
||||
try { |
||||
// Get the tree for the branch
|
||||
let treeRef = branch; |
||||
if (branch === 'HEAD') { |
||||
try { |
||||
const branchInfo = await git.branch(['-a']); |
||||
treeRef = branchInfo.current || 'HEAD'; |
||||
} catch { |
||||
treeRef = 'HEAD'; |
||||
} |
||||
} |
||||
|
||||
// Use git grep with tree reference for bare repos
|
||||
const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery, treeRef]; |
||||
const grepOutput = await git.raw(gitArgs); |
||||
|
||||
if (!grepOutput || !grepOutput.trim()) { |
||||
return []; |
||||
} |
||||
|
||||
// Parse git grep output
|
||||
const lines = grepOutput.split('\n'); |
||||
let currentFile = ''; |
||||
|
||||
for (const line of lines) { |
||||
if (!line.trim()) { |
||||
continue; |
||||
} |
||||
|
||||
// Check if this is a filename (no colon)
|
||||
if (!line.includes(':')) { |
||||
currentFile = line.trim(); |
||||
continue; |
||||
} |
||||
|
||||
// Parse line:content format
|
||||
const colonIndex = line.indexOf(':'); |
||||
if (colonIndex > 0 && currentFile) { |
||||
const lineNumber = parseInt(line.substring(0, colonIndex), 10); |
||||
const content = line.substring(colonIndex + 1); |
||||
|
||||
if (!isNaN(lineNumber) && content) { |
||||
results.push({ |
||||
repo, |
||||
npub, |
||||
file: currentFile, |
||||
line: lineNumber, |
||||
content: content.trim(), |
||||
branch: branch === 'HEAD' ? 'HEAD' : branch |
||||
}); |
||||
|
||||
if (results.length >= limit) { |
||||
break; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} catch (grepError: any) { |
||||
// git grep returns exit code 1 when no matches found
|
||||
if (grepError.message && grepError.message.includes('exit code 1')) { |
||||
return []; |
||||
} |
||||
throw grepError; |
||||
} |
||||
} |
||||
} catch (err) { |
||||
logger.debug({ error: err, npub, repo, query }, 'Error searching in repo'); |
||||
return []; |
||||
} |
||||
|
||||
return results; |
||||
} |
||||
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,503 @@
@@ -0,0 +1,503 @@
|
||||
/** |
||||
* RESTful Repository Resource Endpoint |
||||
*
|
||||
* GET /api/repos/{npub}/{repo} # Get repository info (settings, metadata, access, verification) |
||||
* PUT /api/repos/{npub}/{repo} # Update repository (replace) |
||||
* PATCH /api/repos/{npub}/{repo} # Partial update (settings, description, etc.) |
||||
* DELETE /api/repos/{npub}/{repo} # Delete repository |
||||
*/ |
||||
|
||||
import { json, error } from '@sveltejs/kit'; |
||||
import type { RequestHandler } from './$types'; |
||||
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js'; |
||||
import { handleValidationError, handleApiError, handleAuthorizationError } from '$lib/utils/error-handler.js'; |
||||
import { eventCache } from '$lib/services/nostr/event-cache.js'; |
||||
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; |
||||
import { nostrClient, maintainerService } from '$lib/services/service-registry.js'; |
||||
import { getVisibility, getProjectRelays } from '$lib/utils/repo-visibility.js'; |
||||
import { KIND } from '$lib/types/nostr.js'; |
||||
import { nip19 } from 'nostr-tools'; |
||||
import { getPublicKeyWithNIP07, signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js'; |
||||
import { DEFAULT_NOSTR_RELAYS, combineRelays } from '$lib/config.js'; |
||||
import { getUserRelays } from '$lib/services/nostr/user-relays.js'; |
||||
import { NostrClient } from '$lib/services/nostr/nostr-client.js'; |
||||
import { DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js'; |
||||
import logger from '$lib/services/logger.js'; |
||||
import { rm } from 'fs/promises'; |
||||
import { join, resolve } from 'path'; |
||||
import { existsSync } from 'fs'; |
||||
import { auditLogger } from '$lib/services/security/audit-logger.js'; |
||||
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js'; |
||||
import { verifyRepositoryOwnership } from '$lib/services/nostr/repo-verification.js'; |
||||
|
||||
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT |
||||
? process.env.GIT_REPO_ROOT |
||||
: '/repos'; |
||||
|
||||
// Admin pubkeys (can be set via environment variable)
|
||||
const ADMIN_PUBKEYS = (typeof process !== 'undefined' && process.env?.ADMIN_PUBKEYS |
||||
? process.env.ADMIN_PUBKEYS.split(',').map(p => p.trim()).filter(p => p.length > 0) |
||||
: []) as string[]; |
||||
|
||||
function isAdmin(userPubkeyHex: string | null): boolean { |
||||
if (!userPubkeyHex) return false; |
||||
return ADMIN_PUBKEYS.some(adminPubkey => { |
||||
try { |
||||
const decoded = nip19.decode(adminPubkey); |
||||
if (decoded.type === 'npub') { |
||||
return decoded.data === userPubkeyHex; |
||||
} |
||||
} catch { |
||||
// Not an npub, compare as hex
|
||||
} |
||||
return adminPubkey.toLowerCase() === userPubkeyHex.toLowerCase(); |
||||
}); |
||||
} |
||||
|
||||
function isOwner(userPubkeyHex: string | null, repoOwnerPubkey: string): boolean { |
||||
if (!userPubkeyHex) return false; |
||||
return userPubkeyHex.toLowerCase() === repoOwnerPubkey.toLowerCase(); |
||||
} |
||||
|
||||
/**
 * GET: Get repository info
 * Query params: ?include=settings,maintainers,access,verification
 *
 * Each requested section is assembled independently; 'all' selects every
 * section. The response always contains npub/repo/owner.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const url = new URL(event.request.url);
    // Comma-separated section list; defaults to settings+access when omitted.
    const include = url.searchParams.get('include')?.split(',') || ['settings', 'access'];

    // Fetch repository announcement (cached where possible).
    const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
    const announcement = findRepoAnnouncement(allEvents, context.repo);

    // Base payload; sections below are added on demand.
    const result: any = {
      npub: context.npub,
      repo: context.repo,
      owner: context.npub
    };

    // Include settings: description/visibility/relays derived from announcement tags.
    if (include.includes('settings') || include.includes('all')) {
      if (announcement) {
        result.description = announcement.tags.find(t => t[0] === 'description')?.[1] || '';
        result.visibility = getVisibility(announcement);
        result.projectRelays = getProjectRelays(announcement);
        // 'private' is a derived convenience flag for restricted/private visibility.
        result.private = result.visibility === 'restricted' || result.visibility === 'private';
      } else {
        // No announcement found: fall back to public defaults.
        result.description = '';
        result.visibility = 'public';
        result.projectRelays = [];
        result.private = false;
      }
    }

    // Include maintainers (npub-encoded) plus the caller's role flags.
    if (include.includes('maintainers') || include.includes('all')) {
      const { maintainers, owner } = await maintainerService.getMaintainers(
        context.repoOwnerPubkey,
        context.repo
      );
      result.maintainers = maintainers.map(p => nip19.npubEncode(p));
      result.owner = nip19.npubEncode(owner);
      if (context.userPubkeyHex) {
        result.isMaintainer = maintainers.includes(context.userPubkeyHex);
        result.isOwner = context.userPubkeyHex === owner;
      }
    }

    // Include access: whether the caller can view and their role on the repo.
    if (include.includes('access') || include.includes('all')) {
      const { isPrivate, maintainers, owner } = await maintainerService.getMaintainers(
        context.repoOwnerPubkey,
        context.repo
      );
      const canView = await maintainerService.canView(
        context.userPubkeyHex || null,
        context.repoOwnerPubkey,
        context.repo
      );
      result.access = {
        canView,
        isPrivate,
        isMaintainer: context.userPubkeyHex ? maintainers.includes(context.userPubkeyHex) : false,
        isOwner: context.userPubkeyHex ? context.userPubkeyHex === owner : false
      };
    }

    // Include verification
    if (include.includes('verification') || include.includes('all')) {
      // Simplified verification check - full verification is in /verification endpoint
      const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
      result.verification = {
        exists: existsSync(repoPath),
        announcementFound: !!announcement
      };
    }

    return json(result);
  },
  // NOTE(review): repo existence/access are not enforced by the handler wrapper
  // (requireRepoExists/requireRepoAccess: false) — confirm the per-section
  // checks above are sufficient to avoid leaking private-repo details.
  { operation: 'getRepo', requireRepoExists: false, requireRepoAccess: false }
);
||||
|
||||
/** |
||||
* PUT: Replace repository (full update) |
||||
* PATCH: Partial update |
||||
*/ |
||||
export const PUT: RequestHandler = createRepoPostHandler( |
||||
async (context: RepoRequestContext, event: RequestEvent) => { |
||||
return updateRepository(context, event, true); // full update
|
||||
}, |
||||
{ operation: 'updateRepo', requireRepoExists: false } |
||||
); |
||||
|
||||
export const PATCH: RequestHandler = createRepoPostHandler( |
||||
async (context: RepoRequestContext, event: RequestEvent) => { |
||||
return updateRepository(context, event, false); // partial update
|
||||
}, |
||||
{ operation: 'updateRepo', requireRepoExists: false } |
||||
); |
||||
|
||||
/**
 * Shared implementation for PUT (full replace) and PATCH (partial update).
 *
 * Parses the update payload, rebuilds the announcement tag set, signs a new
 * REPO_ANNOUNCEMENT event, publishes it to the visibility-appropriate relays,
 * and saves it into the repository via AnnouncementManager.
 *
 * NOTE(review): getPublicKeyWithNIP07/signEventWithNIP07 are NIP-07 (browser
 * extension) signer APIs — confirm this code path runs where such a signer is
 * actually available.
 *
 * @param context      Repo request context (npub/repo/owner pubkey)
 * @param event        Incoming request whose body carries the update fields
 * @param isFullUpdate true = PUT semantics (tags rebuilt from scratch),
 *                     false = PATCH semantics (existing tags carried over)
 * @throws validation errors for bad JSON, missing announcement, bad pubkey,
 *         invalid visibility, or restricted visibility without project-relays
 */
async function updateRepository(
  context: RepoRequestContext,
  event: RequestEvent,
  isFullUpdate: boolean
) {
  let body: {
    description?: string;
    visibility?: string;
    projectRelays?: string[];
    private?: boolean;
    branchProtection?: any;
  };
  try {
    body = await event.request.json();
  } catch {
    throw handleValidationError('Invalid JSON in request body', {
      operation: 'updateRepo',
      npub: context.npub,
      repo: context.repo
    });
  }

  // Fetch current announcement; the update is expressed as a new event
  // derived from it, so it must exist.
  const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
  const announcement = findRepoAnnouncement(allEvents, context.repo);

  if (!announcement) {
    throw handleValidationError('Repository announcement not found', {
      operation: 'updateRepo',
      npub: context.npub,
      repo: context.repo
    });
  }

  // Get user's pubkey (required for signing). Accepts raw 64-char hex or an
  // npub, which is decoded to hex.
  const userPubkey = await getPublicKeyWithNIP07();
  let userPubkeyHex: string;
  if (typeof userPubkey === 'string' && userPubkey.length === 64) {
    userPubkeyHex = userPubkey;
  } else {
    const decoded = nip19.decode(userPubkey) as { type: string; data: unknown };
    if (decoded.type === 'npub' && typeof decoded.data === 'string') {
      userPubkeyHex = decoded.data;
    } else {
      throw handleValidationError('Invalid user pubkey format', { operation: 'updateRepo', npub: context.npub, repo: context.repo });
    }
  }

  // Verify user is maintainer before accepting any change.
  const isMaintainer = await maintainerService.isMaintainer(userPubkeyHex, context.repoOwnerPubkey, context.repo);
  if (!isMaintainer) {
    return error(403, 'Only maintainers can update repository');
  }

  // Build updated tags: PUT starts from an empty set, PATCH from the current tags.
  const tags: string[][] = isFullUpdate ? [] : [...announcement.tags];

  // Update description (replace in place, or append when non-empty).
  if (body.description !== undefined || isFullUpdate) {
    const descIndex = tags.findIndex(t => t[0] === 'description');
    const descValue = body.description !== undefined ? body.description : (isFullUpdate ? '' : announcement.tags.find(t => t[0] === 'description')?.[1] || '');
    if (descIndex >= 0) {
      tags[descIndex] = ['description', descValue];
    } else if (descValue) {
      tags.push(['description', descValue]);
    }
  }

  // Resolve new visibility: explicit `visibility` wins, then the legacy
  // `private` boolean, then PUT defaults to 'public'; otherwise keep current.
  let newVisibility: 'public' | 'unlisted' | 'restricted' | 'private' = getVisibility(announcement);
  if (body.visibility !== undefined) {
    const vis = body.visibility.toLowerCase();
    if (['public', 'unlisted', 'restricted', 'private'].includes(vis)) {
      newVisibility = vis as typeof newVisibility;
    } else {
      throw handleValidationError(`Invalid visibility: ${body.visibility}. Must be one of: public, unlisted, restricted, private`,
        { operation: 'updateRepo', npub: context.npub, repo: context.repo });
    }
  } else if (body.private !== undefined) {
    newVisibility = body.private ? 'restricted' : 'public';
  } else if (isFullUpdate) {
    newVisibility = 'public';
  }

  // Update visibility tag: 'public' is represented by the tag's absence.
  const visIndex = tags.findIndex(t => t[0] === 'visibility');
  if (newVisibility === 'public') {
    if (visIndex >= 0) {
      tags.splice(visIndex, 1);
    }
  } else {
    if (visIndex >= 0) {
      tags[visIndex] = ['visibility', newVisibility];
    } else {
      tags.push(['visibility', newVisibility]);
    }
  }

  // Update project-relay tags: drop all existing ones (reverse order so the
  // collected indices stay valid), then add the new websocket URLs.
  if (body.projectRelays !== undefined || isFullUpdate) {
    const projectRelayIndices: number[] = [];
    tags.forEach((tag, index) => {
      if (tag[0] === 'project-relay') {
        projectRelayIndices.push(index);
      }
    });
    for (let i = projectRelayIndices.length - 1; i >= 0; i--) {
      tags.splice(projectRelayIndices[i], 1);
    }
    // Add new project-relay tags (only ws:// or wss:// URLs are accepted).
    const relays = body.projectRelays || (isFullUpdate ? [] : getProjectRelays(announcement));
    for (const relay of relays) {
      if (relay && (relay.startsWith('ws://') || relay.startsWith('wss://'))) {
        tags.push(['project-relay', relay]);
      }
    }
  }

  // Validate: unlisted/restricted require at least one project-relay.
  if ((newVisibility === 'unlisted' || newVisibility === 'restricted')) {
    const hasProjectRelay = tags.some(t => t[0] === 'project-relay');
    if (!hasProjectRelay) {
      throw handleValidationError(
        `Visibility '${newVisibility}' requires at least one project-relay. Please provide project-relays in the request.`,
        { operation: 'updateRepo', npub: context.npub, repo: context.repo }
      );
    }
  }

  // Preserve essential tags from the original announcement.
  if (!isFullUpdate) {
    // Keep d-tag, name, clone tags, etc. (skipping exact duplicates).
    const essentialTags = ['d', 'name', 'clone'];
    essentialTags.forEach(tagName => {
      announcement.tags.forEach(tag => {
        if (tag[0] === tagName && !tags.some(t => t[0] === tagName && t[1] === tag[1])) {
          tags.push(tag);
        }
      });
    });
  } else {
    // For full update, we need the d-tag at minimum (replaceable-event identifier).
    const dTag = announcement.tags.find(t => t[0] === 'd');
    if (dTag) {
      tags.unshift(dTag);
    }
  }

  // Remove old private tag if present (legacy representations of "private").
  const privateIndex = tags.findIndex(t => (t[0] === 'private' && t[1] === 'true') || (t[0] === 't' && t[1] === 'private'));
  if (privateIndex >= 0) {
    tags.splice(privateIndex, 1);
  }

  // Create the updated announcement event, authored by the caller.
  const updatedEvent = {
    kind: KIND.REPO_ANNOUNCEMENT,
    pubkey: userPubkeyHex,
    created_at: Math.floor(Date.now() / 1000),
    content: announcement.content || '',
    tags
  };

  // Sign with NIP-07.
  const signedEvent = await signEventWithNIP07(updatedEvent);

  // Get user's relays for publishing (search + default relays, deduped).
  const allSearchRelays = Array.from(new Set([...DEFAULT_NOSTR_SEARCH_RELAYS, ...DEFAULT_NOSTR_RELAYS]));
  const fullRelayClient = new NostrClient(allSearchRelays);

  let userRelays: string[] = [];
  try {
    // Prefer the user's outbox relays, then inbox, then the defaults.
    const { inbox, outbox } = await getUserRelays(userPubkeyHex, fullRelayClient);
    if (outbox.length > 0) {
      userRelays = combineRelays(outbox, DEFAULT_NOSTR_RELAYS);
    } else if (inbox.length > 0) {
      userRelays = combineRelays(inbox, DEFAULT_NOSTR_RELAYS);
    } else {
      userRelays = DEFAULT_NOSTR_RELAYS;
    }
  } catch (err) {
    logger.warn({ error: err }, 'Failed to fetch user relays, using defaults');
    userRelays = DEFAULT_NOSTR_RELAYS;
  }

  // Determine which relays to publish to based on visibility; an empty list
  // from getRelaysForEventPublishing means "do not publish" (e.g. private).
  const { getRelaysForEventPublishing } = await import('$lib/utils/repo-visibility.js');
  const visibilityRelays = getRelaysForEventPublishing(signedEvent);
  const relaysToPublish = visibilityRelays.length > 0 ? combineRelays([...visibilityRelays, ...userRelays]) : [];

  // Publish to relays (if not private). Total failure is logged, not fatal.
  if (relaysToPublish.length > 0) {
    const publishResult = await nostrClient.publishEvent(signedEvent, relaysToPublish);
    if (publishResult.failed.length > 0 && publishResult.success.length === 0) {
      logger.warn({ npub: context.npub, repo: context.repo }, 'Failed to publish update to all relays');
    }
  }

  // Save the signed announcement into the repository on disk (best-effort:
  // a failure here is logged but does not fail the request).
  const { AnnouncementManager } = await import('$lib/services/git/announcement-manager.js');
  const repoPath = `${repoRoot}/${context.npub}/${context.repo}.git`;
  const announcementManager = new AnnouncementManager(repoRoot);
  try {
    await announcementManager.ensureAnnouncementInRepo(repoPath, signedEvent);
  } catch (err) {
    logger.error({ error: err, npub: context.npub, repo: context.repo }, 'Failed to save update to repository');
  }

  // Return the updated repository summary as seen after this change.
  return json({
    npub: context.npub,
    repo: context.repo,
    owner: context.npub,
    description: body.description !== undefined ? body.description : (announcement.tags.find(t => t[0] === 'description')?.[1] || ''),
    visibility: newVisibility,
    projectRelays: body.projectRelays !== undefined ? body.projectRelays : getProjectRelays(announcement),
    private: newVisibility === 'restricted' || newVisibility === 'private'
  });
}
||||
|
||||
/**
 * DELETE: Delete repository
 *
 * Only the repository owner or a configured admin may delete. Every decision
 * point (denied, failure, success) is recorded via auditLogger before the
 * response is returned.
 */
export const DELETE: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const { npub, repo, repoOwnerPubkey, userPubkeyHex, clientIp } = context;

    // Check permissions: must be owner or admin. Anonymous callers are
    // rejected (and audited) first.
    if (!userPubkeyHex) {
      auditLogger.log({
        user: undefined,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'denied',
        error: 'Authentication required'
      });
      return handleAuthorizationError('Authentication required to delete repositories');
    }

    const userIsOwner = isOwner(userPubkeyHex, repoOwnerPubkey);
    const userIsAdmin = isAdmin(userPubkeyHex);

    if (!userIsOwner && !userIsAdmin) {
      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'denied',
        error: 'Insufficient permissions'
      });
      return handleAuthorizationError('Only repository owners or admins can delete repositories');
    }

    // Get repository path
    const repoPath = join(repoRoot, npub, `${repo}.git`);

    // Security: Ensure resolved path is within repoRoot (path-traversal
    // guard; backslashes normalized for Windows-style separators).
    const resolvedPath = resolve(repoPath).replace(/\\/g, '/');
    const resolvedRoot = resolve(repoRoot).replace(/\\/g, '/');
    if (!resolvedPath.startsWith(resolvedRoot + '/')) {
      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'denied',
        error: 'Invalid repository path'
      });
      return error(403, 'Invalid repository path');
    }

    // Check if repo exists
    if (!existsSync(repoPath)) {
      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'failure',
        error: 'Repository not found'
      });
      return error(404, 'Repository not found');
    }

    try {
      // Delete the repository directory (recursive + force: no error if
      // contents are read-only or partially missing).
      await rm(repoPath, { recursive: true, force: true });

      // Clear cache so existence checks don't report the deleted repo.
      repoCache.delete(RepoCache.repoExistsKey(npub, repo));

      // Log successful deletion
      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'success',
        metadata: {
          isOwner: userIsOwner,
          isAdmin: userIsAdmin
        }
      });

      logger.info({
        user: userPubkeyHex,
        npub,
        repo,
        isOwner: userIsOwner,
        isAdmin: userIsAdmin
      }, 'Repository deleted');

      return json({
        success: true,
        message: 'Repository deleted successfully'
      });
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : 'Unknown error';

      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'failure',
        error: errorMessage
      });

      return handleApiError(err, { operation: 'deleteRepo', npub, repo }, 'Failed to delete repository');
    }
  },
  {
    operation: 'deleteRepo',
    requireRepoExists: true,
    requireRepoAccess: false,
    requireMaintainer: false
  }
);
||||
@ -1,8 +1,13 @@
@@ -1,8 +1,13 @@
|
||||
/** |
||||
* API endpoint for getting the default branch of a repository |
||||
* RESTful Default Branch Endpoint |
||||
*
|
||||
* GET /api/repos/{npub}/{repo}/branches/default |
||||
*
|
||||
* Returns the default branch of the repository |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
// @ts-ignore - SvelteKit generates this type
|
||||
import type { RequestHandler } from './$types'; |
||||
import { fileManager } from '$lib/services/service-registry.js'; |
||||
import { createRepoGetHandler } from '$lib/utils/api-handlers.js'; |
||||
@ -0,0 +1,125 @@
@@ -0,0 +1,125 @@
|
||||
/** |
||||
* RESTful Clone URLs Resource Endpoint |
||||
*
|
||||
* GET /api/repos/{npub}/{repo}/clone-urls # List clone URLs |
||||
* POST /api/repos/{npub}/{repo}/clone-urls # Check reachability (body: {urls: [...]}) |
||||
*/ |
||||
|
||||
import { json, error } from '@sveltejs/kit'; |
||||
import type { RequestHandler } from './$types'; |
||||
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js'; |
||||
import { handleApiError } from '$lib/utils/error-handler.js'; |
||||
import { getCloneUrlsReachability } from '$lib/services/git/clone-url-reachability.js'; |
||||
import { extractCloneUrls } from '$lib/utils/nostr-utils.js'; |
||||
import { NostrClient } from '$lib/services/nostr/nostr-client.js'; |
||||
import { DEFAULT_NOSTR_RELAYS, DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js'; |
||||
import { eventCache } from '$lib/services/nostr/event-cache.js'; |
||||
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; |
||||
import { nostrClient } from '$lib/services/service-registry.js'; |
||||
import logger from '$lib/services/logger.js'; |
||||
|
||||
/** |
||||
* GET: List clone URLs |
||||
* Query params: |
||||
* - includeReachability: boolean (optional) - Include reachability status |
||||
* - forceRefresh: boolean (optional) - Force refresh reachability cache |
||||
*/ |
||||
export const GET: RequestHandler = createRepoGetHandler( |
||||
async (context: RepoRequestContext, event: RequestEvent) => { |
||||
try { |
||||
const url = new URL(event.request.url); |
||||
const includeReachability = url.searchParams.get('includeReachability') === 'true'; |
||||
const forceRefresh = url.searchParams.get('forceRefresh') === 'true'; |
||||
|
||||
// Fetch repository announcement (case-insensitive) with caching
|
||||
let allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); |
||||
let announcement = findRepoAnnouncement(allEvents, context.repo); |
||||
|
||||
// If no events found in cache/default relays, try all relays
|
||||
if (!announcement) { |
||||
const allRelays = [...new Set([...DEFAULT_NOSTR_RELAYS, ...DEFAULT_NOSTR_SEARCH_RELAYS])]; |
||||
if (allRelays.length > DEFAULT_NOSTR_RELAYS.length) { |
||||
const allRelaysClient = new NostrClient(allRelays); |
||||
allEvents = await fetchRepoAnnouncementsWithCache(allRelaysClient, context.repoOwnerPubkey, eventCache); |
||||
announcement = findRepoAnnouncement(allEvents, context.repo); |
||||
} |
||||
} |
||||
|
||||
if (!announcement) { |
||||
logger.warn({ npub: context.npub, repo: context.repo }, 'Repository announcement not found for clone URLs'); |
||||
return error(404, 'Repository announcement not found'); |
||||
} |
||||
|
||||
// Extract clone URLs
|
||||
const cloneUrls = extractCloneUrls(announcement, false); |
||||
|
||||
if (!includeReachability) { |
||||
return json({
|
||||
cloneUrls, |
||||
count: cloneUrls.length |
||||
}); |
||||
} |
||||
|
||||
// Extract relay URLs from relays tag (for proper GRASP server detection)
|
||||
const relayUrls: string[] = []; |
||||
for (const tag of announcement.tags) { |
||||
if (tag[0] === 'relays') { |
||||
for (let i = 1; i < tag.length; i++) { |
||||
const relayUrl = tag[i]; |
||||
if (relayUrl && typeof relayUrl === 'string' && (relayUrl.startsWith('ws://') || relayUrl.startsWith('wss://'))) { |
||||
relayUrls.push(relayUrl); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Get reachability for all clone URLs
|
||||
const reachabilityResults = await getCloneUrlsReachability( |
||||
cloneUrls,
|
||||
5000,
|
||||
forceRefresh,
|
||||
relayUrls.length > 0 ? relayUrls : undefined |
||||
); |
||||
|
||||
return json({
|
||||
cloneUrls, |
||||
count: cloneUrls.length, |
||||
reachability: reachabilityResults |
||||
}); |
||||
} catch (err) { |
||||
return handleApiError(err, { operation: 'getCloneUrls', npub: context.npub, repo: context.repo }, 'Failed to get clone URLs'); |
||||
} |
||||
}, |
||||
{ operation: 'getCloneUrls', requireRepoExists: false, requireRepoAccess: false } |
||||
); |
||||
|
||||
/** |
||||
* POST: Check reachability of clone URLs |
||||
* Body: { urls: string[], forceRefresh?: boolean } |
||||
*/ |
||||
export const POST: RequestHandler = createRepoPostHandler( |
||||
async (context: RepoRequestContext, event: RequestEvent) => { |
||||
try { |
||||
const body = await event.request.json(); |
||||
const { urls, forceRefresh = false } = body; |
||||
|
||||
if (!Array.isArray(urls) || urls.length === 0) { |
||||
return error(400, 'urls must be a non-empty array'); |
||||
} |
||||
|
||||
// Validate URLs are strings
|
||||
if (!urls.every(url => typeof url === 'string')) { |
||||
return error(400, 'All URLs must be strings'); |
||||
} |
||||
|
||||
// Get reachability for specified URLs
|
||||
const results = await getCloneUrlsReachability(urls, 5000, forceRefresh); |
||||
|
||||
return json({ results }); |
||||
} catch (err) { |
||||
return handleApiError(err, { operation: 'checkReachability', npub: context.npub, repo: context.repo }, 'Failed to check clone URL reachability'); |
||||
} |
||||
}, |
||||
{ operation: 'checkReachability', requireRepoExists: false, requireRepoAccess: false } |
||||
); |
||||
@ -1,210 +0,0 @@
@@ -1,210 +0,0 @@
|
||||
/** |
||||
* API endpoint for code search within repositories |
||||
* Searches file contents across repositories |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
import type { RequestHandler } from './$types'; |
||||
import { fileManager, nostrClient } from '$lib/services/service-registry.js'; |
||||
import { createRepoGetHandler } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js'; |
||||
import { handleValidationError } from '$lib/utils/error-handler.js'; |
||||
import { join } from 'path'; |
||||
import { existsSync } from 'fs'; |
||||
import logger from '$lib/services/logger.js'; |
||||
import { simpleGit } from 'simple-git'; |
||||
import { readFile } from 'fs/promises'; |
||||
|
||||
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT |
||||
? process.env.GIT_REPO_ROOT |
||||
: '/repos'; |
||||
|
||||
export interface CodeSearchResult { |
||||
file: string; |
||||
line: number; |
||||
content: string; |
||||
branch: string; |
||||
commit?: string; |
||||
} |
||||
|
||||
export const GET: RequestHandler = createRepoGetHandler( |
||||
async (context: RepoRequestContext, event: RequestEvent) => { |
||||
const query = event.url.searchParams.get('q'); |
||||
const branch = event.url.searchParams.get('branch') || 'HEAD'; |
||||
const limit = parseInt(event.url.searchParams.get('limit') || '100', 10); |
||||
|
||||
if (!query || query.trim().length < 2) { |
||||
throw handleValidationError('Query must be at least 2 characters', { operation: 'codeSearch', npub: context.npub, repo: context.repo }); |
||||
} |
||||
|
||||
const repoPath = join(repoRoot, context.npub, `${context.repo}.git`); |
||||
|
||||
// Check if repo exists
|
||||
if (!existsSync(repoPath)) { |
||||
logger.debug({ npub: context.npub, repo: context.repo, query }, 'Code search requested for non-existent repo'); |
||||
return json([]); |
||||
} |
||||
|
||||
try { |
||||
const git = simpleGit(repoPath); |
||||
const results: CodeSearchResult[] = []; |
||||
|
||||
// For bare repositories, we need to use a worktree or search the index
|
||||
// First, try to get or create a worktree for the branch
|
||||
let worktreePath: string | null = null; |
||||
try { |
||||
// Get the actual branch name (resolve HEAD if needed)
|
||||
let actualBranch = branch; |
||||
if (branch === 'HEAD') { |
||||
try { |
||||
const branchInfo = await git.branch(['-a']); |
||||
actualBranch = branchInfo.current || 'main'; |
||||
// If no current branch, try common defaults
|
||||
if (!actualBranch || actualBranch === 'HEAD') { |
||||
const allBranches = branchInfo.all.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, '')); |
||||
actualBranch = allBranches.find(b => b === 'main') || allBranches.find(b => b === 'master') || allBranches[0] || 'main'; |
||||
} |
||||
} catch { |
||||
actualBranch = 'main'; |
||||
} |
||||
} |
||||
|
||||
// Get or create worktree
|
||||
worktreePath = await fileManager.getWorktree(repoPath, actualBranch, context.npub, context.repo); |
||||
} catch (worktreeError) { |
||||
logger.debug({ error: worktreeError, npub: context.npub, repo: context.repo, branch }, 'Could not create worktree, trying git grep with --cached'); |
||||
// Fall back to searching the index
|
||||
} |
||||
|
||||
const searchQuery = query.trim(); |
||||
|
||||
// If we have a worktree, search in the worktree
|
||||
if (worktreePath && existsSync(worktreePath)) { |
||||
try { |
||||
const worktreeGit = simpleGit(worktreePath); |
||||
const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery]; |
||||
const grepOutput = await worktreeGit.raw(gitArgs); |
||||
|
||||
if (!grepOutput || !grepOutput.trim()) { |
||||
return json([]); |
||||
} |
||||
|
||||
// Parse git grep output
|
||||
const lines = grepOutput.split('\n'); |
||||
let currentFile = ''; |
||||
|
||||
for (const line of lines) { |
||||
if (!line.trim()) { |
||||
continue; |
||||
} |
||||
|
||||
// Check if this is a filename (no colon)
|
||||
if (!line.includes(':')) { |
||||
currentFile = line.trim(); |
||||
continue; |
||||
} |
||||
|
||||
// Parse line:content format
|
||||
const colonIndex = line.indexOf(':'); |
||||
if (colonIndex > 0 && currentFile) { |
||||
const lineNumber = parseInt(line.substring(0, colonIndex), 10); |
||||
const content = line.substring(colonIndex + 1); |
||||
|
||||
if (!isNaN(lineNumber) && content) { |
||||
// Make file path relative to repo root
|
||||
const relativeFile = currentFile.replace(worktreePath + '/', '').replace(/^\.\//, ''); |
||||
results.push({ |
||||
file: relativeFile, |
||||
line: lineNumber, |
||||
content: content.trim(), |
||||
branch: branch === 'HEAD' ? 'HEAD' : branch |
||||
}); |
||||
|
||||
if (results.length >= limit) { |
||||
break; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} catch (grepError: any) { |
||||
// git grep returns exit code 1 when no matches found
|
||||
if (grepError.message && grepError.message.includes('exit code 1')) { |
||||
return json([]); |
||||
} |
||||
throw grepError; |
||||
} |
||||
} else { |
||||
// Fallback: search in the index using git grep --cached
|
||||
try { |
||||
// Get the tree for the branch
|
||||
let treeRef = branch; |
||||
if (branch === 'HEAD') { |
||||
try { |
||||
const branchInfo = await git.branch(['-a']); |
||||
treeRef = branchInfo.current || 'HEAD'; |
||||
} catch { |
||||
treeRef = 'HEAD'; |
||||
} |
||||
} |
||||
|
||||
// Use git grep with --cached to search the index
|
||||
// For bare repos, we can search a specific tree
|
||||
const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery, treeRef]; |
||||
const grepOutput = await git.raw(gitArgs); |
||||
|
||||
if (!grepOutput || !grepOutput.trim()) { |
||||
return json([]); |
||||
} |
||||
|
||||
// Parse git grep output
|
||||
const lines = grepOutput.split('\n'); |
||||
let currentFile = ''; |
||||
|
||||
for (const line of lines) { |
||||
if (!line.trim()) { |
||||
continue; |
||||
} |
||||
|
||||
// Check if this is a filename (no colon)
|
||||
if (!line.includes(':')) { |
||||
currentFile = line.trim(); |
||||
continue; |
||||
} |
||||
|
||||
// Parse line:content format
|
||||
const colonIndex = line.indexOf(':'); |
||||
if (colonIndex > 0 && currentFile) { |
||||
const lineNumber = parseInt(line.substring(0, colonIndex), 10); |
||||
const content = line.substring(colonIndex + 1); |
||||
|
||||
if (!isNaN(lineNumber) && content) { |
||||
results.push({ |
||||
file: currentFile, |
||||
line: lineNumber, |
||||
content: content.trim(), |
||||
branch: branch === 'HEAD' ? 'HEAD' : branch |
||||
}); |
||||
|
||||
if (results.length >= limit) { |
||||
break; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} catch (grepError: any) { |
||||
// git grep returns exit code 1 when no matches found
|
||||
if (grepError.message && grepError.message.includes('exit code 1')) { |
||||
return json([]); |
||||
} |
||||
throw grepError; |
||||
} |
||||
} |
||||
|
||||
return json(results); |
||||
} catch (err) { |
||||
logger.error({ error: err, npub: context.npub, repo: context.repo, query }, 'Error performing code search'); |
||||
throw err; |
||||
} |
||||
}, |
||||
{ operation: 'codeSearch', requireRepoExists: false, requireRepoAccess: true } |
||||
); |
||||
@ -1,5 +1,9 @@
@@ -1,5 +1,9 @@
|
||||
/** |
||||
* API endpoint for verifying commit signatures |
||||
* RESTful Commit Verification Endpoint |
||||
*
|
||||
* GET /api/repos/{npub}/{repo}/commits/{hash}/verification |
||||
*
|
||||
* Verifies the signature of a commit |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
@ -1,8 +1,16 @@
@@ -1,8 +1,16 @@
|
||||
/** |
||||
* API endpoint for getting diffs |
||||
* RESTful Diffs Endpoint |
||||
*
|
||||
* GET /api/repos/{npub}/{repo}/diffs?from=...&to=...&path=... |
||||
*
|
||||
* Query parameters: |
||||
* - from - Source ref (required) |
||||
* - to - Target ref (default: HEAD) |
||||
* - path - Optional file path to diff |
||||
*/ |
||||
|
||||
import { json, error } from '@sveltejs/kit'; |
||||
import { json } from '@sveltejs/kit'; |
||||
// @ts-ignore - SvelteKit generates this type
|
||||
import type { RequestHandler } from './$types'; |
||||
import { fileManager, nostrClient } from '$lib/services/service-registry.js'; |
||||
import { createRepoGetHandler } from '$lib/utils/api-handlers.js'; |
||||
@ -1,573 +0,0 @@
@@ -1,573 +0,0 @@
|
||||
/** |
||||
* API endpoint for reading and writing files in a repository |
||||
*/ |
||||
|
||||
import { json, error } from '@sveltejs/kit'; |
||||
// @ts-ignore - SvelteKit generates this type
|
||||
import type { RequestHandler } from './$types'; |
||||
import { fileManager, repoManager, nostrClient } from '$lib/services/service-registry.js'; |
||||
import { MaintainerService } from '$lib/services/nostr/maintainer-service.js'; |
||||
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js'; |
||||
import { nip19 } from 'nostr-tools'; |
||||
import { verifyNIP98Auth } from '$lib/services/nostr/nip98-auth.js'; |
||||
import { auditLogger } from '$lib/services/security/audit-logger.js'; |
||||
import logger from '$lib/services/logger.js'; |
||||
import type { NostrEvent } from '$lib/types/nostr.js'; |
||||
import { requireNpubHex, decodeNpubToHex } from '$lib/utils/npub-utils.js'; |
||||
import { handleApiError, handleValidationError, handleNotFoundError } from '$lib/utils/error-handler.js'; |
||||
import { KIND } from '$lib/types/nostr.js'; |
||||
import { join } from 'path'; |
||||
import { existsSync } from 'fs'; |
||||
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js'; |
||||
import { extractRequestContext } from '$lib/utils/api-context.js'; |
||||
import { fetchUserEmail, fetchUserName } from '$lib/utils/user-profile.js'; |
||||
import { eventCache } from '$lib/services/nostr/event-cache.js'; |
||||
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; |
||||
|
||||
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT |
||||
? process.env.GIT_REPO_ROOT |
||||
: '/repos'; |
||||
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS); |
||||
|
||||
export const GET: RequestHandler = async (event) => { |
||||
const { params, url, request } = event; |
||||
const { npub, repo } = params; |
||||
const filePath = url.searchParams.get('path'); |
||||
let ref = url.searchParams.get('ref') || 'HEAD'; |
||||
|
||||
// Extract user pubkey using the same method as other endpoints
|
||||
const requestContext = extractRequestContext(event); |
||||
const userPubkey = requestContext.userPubkey; |
||||
const userPubkeyHex = requestContext.userPubkeyHex; |
||||
|
||||
// Debug logging for file endpoint
|
||||
logger.debug({
|
||||
hasUserPubkey: !!userPubkey,
|
||||
hasUserPubkeyHex: !!userPubkeyHex, |
||||
userPubkeyHex: userPubkeyHex ? userPubkeyHex.substring(0, 16) + '...' : null, |
||||
npub,
|
||||
repo,
|
||||
filePath
|
||||
}, 'File endpoint - extracted user context'); |
||||
|
||||
if (!npub || !repo || !filePath) { |
||||
return error(400, 'Missing npub, repo, or path parameter'); |
||||
} |
||||
|
||||
try { |
||||
const repoPath = join(repoRoot, npub, `${repo}.git`); |
||||
|
||||
// If repo doesn't exist, try to fetch it on-demand
|
||||
if (!existsSync(repoPath)) { |
||||
try { |
||||
// Get repo owner pubkey
|
||||
let repoOwnerPubkey: string; |
||||
try { |
||||
repoOwnerPubkey = requireNpubHex(npub); |
||||
} catch { |
||||
return error(400, 'Invalid npub format'); |
||||
} |
||||
|
||||
// Fetch repository announcement (case-insensitive) with caching
|
||||
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allEvents, repo); |
||||
|
||||
if (announcement) { |
||||
// Try API-based fetching first (no cloning)
|
||||
try { |
||||
const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js'); |
||||
const fileContent = await tryApiFetchFile(announcement, npub, repo, filePath, ref); |
||||
|
||||
if (fileContent && fileContent.content) { |
||||
logger.debug({ npub, repo, filePath, ref }, 'Successfully fetched file via API fallback'); |
||||
return json(fileContent); |
||||
} |
||||
} catch (apiErr) { |
||||
// Log the error but don't throw - we'll return a helpful error message below
|
||||
logger.debug({ error: apiErr, npub, repo, filePath, ref }, 'API file fetch failed, will return 404'); |
||||
} |
||||
|
||||
// API fetch failed - repo is not cloned and API fetch didn't work
|
||||
// Check if announcement has clone URLs to provide better error message
|
||||
const { extractCloneUrls } = await import('$lib/utils/nostr-utils.js'); |
||||
const cloneUrls = extractCloneUrls(announcement); |
||||
const hasCloneUrls = cloneUrls.length > 0; |
||||
|
||||
logger.debug({ npub, repo, filePath, hasCloneUrls, cloneUrlCount: cloneUrls.length }, 'API fallback failed or no clone URLs available'); |
||||
|
||||
return error(404, hasCloneUrls
|
||||
? 'Repository is not cloned locally and could not fetch file via API. Privileged users can clone this repository using the "Clone to Server" button.' |
||||
: 'Repository is not cloned locally and has no external clone URLs for API fallback. Privileged users can clone this repository using the "Clone to Server" button.'); |
||||
} else { |
||||
return error(404, 'Repository announcement not found in Nostr'); |
||||
} |
||||
} catch (err) { |
||||
logger.error({ error: err, npub, repo, filePath }, 'Error in on-demand file fetch'); |
||||
// Check if repo was created by another concurrent request
|
||||
if (existsSync(repoPath)) { |
||||
// Repo exists now, clear cache and continue with normal flow
|
||||
repoCache.delete(RepoCache.repoExistsKey(npub, repo)); |
||||
} else { |
||||
// If fetching fails, return 404
|
||||
return error(404, 'Repository not found'); |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Double-check repo exists (should be true if we got here)
|
||||
if (!existsSync(repoPath)) { |
||||
return error(404, 'Repository not found'); |
||||
} |
||||
|
||||
// Get repo owner pubkey for access check (already validated above if we did on-demand fetch)
|
||||
let repoOwnerPubkey: string; |
||||
try { |
||||
repoOwnerPubkey = requireNpubHex(npub); |
||||
} catch { |
||||
return error(400, 'Invalid npub format'); |
||||
} |
||||
|
||||
// If ref is a branch name, validate it exists or use default branch
|
||||
if (ref !== 'HEAD' && !ref.startsWith('refs/')) { |
||||
try { |
||||
const branches = await fileManager.getBranches(npub, repo); |
||||
if (!branches.includes(ref)) { |
||||
// Branch doesn't exist, try to get default branch
|
||||
try { |
||||
ref = await fileManager.getDefaultBranch(npub, repo); |
||||
logger.debug({ npub, repo, originalRef: url.searchParams.get('ref'), newRef: ref }, 'Branch not found, using default branch'); |
||||
} catch (defaultBranchErr) { |
||||
// If we can't get default branch, fall back to HEAD
|
||||
logger.warn({ error: defaultBranchErr, npub, repo, ref }, 'Could not get default branch, falling back to HEAD'); |
||||
ref = 'HEAD'; |
||||
} |
||||
} |
||||
} catch (branchErr) { |
||||
// If we can't get branches, fall back to HEAD
|
||||
logger.warn({ error: branchErr, npub, repo, ref }, 'Could not get branches, falling back to HEAD'); |
||||
ref = 'HEAD'; |
||||
} |
||||
} |
||||
|
||||
// Check repository privacy (repoOwnerPubkey already declared above)
|
||||
logger.debug({
|
||||
userPubkeyHex: userPubkeyHex ? userPubkeyHex.substring(0, 16) + '...' : null, |
||||
repoOwnerPubkey: repoOwnerPubkey.substring(0, 16) + '...', |
||||
repo
|
||||
}, 'File endpoint - checking canView before access check'); |
||||
|
||||
const canView = await maintainerService.canView(userPubkeyHex || null, repoOwnerPubkey, repo); |
||||
|
||||
logger.debug({
|
||||
canView,
|
||||
userPubkeyHex: userPubkeyHex ? userPubkeyHex.substring(0, 16) + '...' : null, |
||||
repoOwnerPubkey: repoOwnerPubkey.substring(0, 16) + '...', |
||||
repo
|
||||
}, 'File endpoint - canView result'); |
||||
|
||||
if (!canView) { |
||||
auditLogger.logFileOperation( |
||||
userPubkeyHex || null, |
||||
requestContext.clientIp, |
||||
'read', |
||||
`${npub}/${repo}`, |
||||
filePath, |
||||
'denied', |
||||
'Insufficient permissions' |
||||
); |
||||
return error(403, 'This repository is private. Only owners and maintainers can view it.'); |
||||
} |
||||
try { |
||||
// Log what we're trying to do
|
||||
logger.debug({ npub, repo, filePath, ref }, 'Attempting to read file from cloned repository'); |
||||
|
||||
let fileContent; |
||||
try { |
||||
fileContent = await fileManager.getFileContent(npub, repo, filePath, ref); |
||||
} catch (firstErr) { |
||||
// If the first attempt fails and ref is not HEAD, try with HEAD as fallback
|
||||
if (ref !== 'HEAD' && !ref.startsWith('refs/')) { |
||||
logger.warn({
|
||||
error: firstErr,
|
||||
npub,
|
||||
repo,
|
||||
filePath,
|
||||
originalRef: ref
|
||||
}, 'Failed to read file with specified ref, trying HEAD as fallback'); |
||||
try { |
||||
fileContent = await fileManager.getFileContent(npub, repo, filePath, 'HEAD'); |
||||
ref = 'HEAD'; // Update ref for logging
|
||||
} catch (headErr) { |
||||
// If HEAD also fails, try API fallback before throwing
|
||||
logger.debug({ error: headErr, npub, repo, filePath }, 'Failed to read file from local repo, attempting API fallback'); |
||||
|
||||
try { |
||||
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allEvents, repo); |
||||
|
||||
if (announcement) { |
||||
const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js'); |
||||
// Use the original ref, or 'main' as fallback
|
||||
const apiRef = url.searchParams.get('ref') || 'main'; |
||||
const apiFileContent = await tryApiFetchFile(announcement, npub, repo, filePath, apiRef); |
||||
|
||||
if (apiFileContent && apiFileContent.content) { |
||||
logger.info({ npub, repo, filePath, ref: apiRef }, 'Successfully fetched file via API fallback for empty repo'); |
||||
auditLogger.logFileOperation( |
||||
userPubkeyHex || null, |
||||
requestContext.clientIp, |
||||
'read', |
||||
`${npub}/${repo}`, |
||||
filePath, |
||||
'success' |
||||
); |
||||
return json(apiFileContent); |
||||
} |
||||
} |
||||
} catch (apiErr) { |
||||
logger.debug({ error: apiErr, npub, repo, filePath }, 'API fallback failed for file'); |
||||
} |
||||
|
||||
// If API fallback also fails, throw the original error
|
||||
throw firstErr; |
||||
} |
||||
} else { |
||||
// Try API fallback before throwing
|
||||
logger.debug({ error: firstErr, npub, repo, filePath }, 'Failed to read file from local repo, attempting API fallback'); |
||||
|
||||
try { |
||||
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allEvents, repo); |
||||
|
||||
if (announcement) { |
||||
const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js'); |
||||
const apiRef = ref === 'HEAD' ? 'main' : ref; |
||||
const apiFileContent = await tryApiFetchFile(announcement, npub, repo, filePath, apiRef); |
||||
|
||||
if (apiFileContent && apiFileContent.content) { |
||||
logger.info({ npub, repo, filePath, ref: apiRef }, 'Successfully fetched file via API fallback for empty repo'); |
||||
auditLogger.logFileOperation( |
||||
userPubkeyHex || null, |
||||
requestContext.clientIp, |
||||
'read', |
||||
`${npub}/${repo}`, |
||||
filePath, |
||||
'success' |
||||
); |
||||
return json(apiFileContent); |
||||
} |
||||
} |
||||
} catch (apiErr) { |
||||
logger.debug({ error: apiErr, npub, repo, filePath }, 'API fallback failed for file'); |
||||
} |
||||
|
||||
throw firstErr; |
||||
} |
||||
} |
||||
|
||||
auditLogger.logFileOperation( |
||||
userPubkeyHex || null, |
||||
requestContext.clientIp, |
||||
'read', |
||||
`${npub}/${repo}`, |
||||
filePath, |
||||
'success' |
||||
); |
||||
return json(fileContent); |
||||
} catch (err) { |
||||
const errorMessage = err instanceof Error ? err.message : String(err); |
||||
const errorLower = errorMessage.toLowerCase(); |
||||
const errorStack = err instanceof Error ? err.stack : undefined; |
||||
|
||||
logger.error({
|
||||
error: err,
|
||||
errorStack, |
||||
npub,
|
||||
repo,
|
||||
filePath,
|
||||
ref, |
||||
repoExists: existsSync(repoPath), |
||||
errorMessage |
||||
}, 'Error reading file from cloned repository'); |
||||
auditLogger.logFileOperation( |
||||
userPubkeyHex || null, |
||||
requestContext.clientIp, |
||||
'read', |
||||
`${npub}/${repo}`, |
||||
filePath, |
||||
'failure', |
||||
errorMessage |
||||
); |
||||
// If file not found or path doesn't exist, return 404 instead of 500
|
||||
if (errorLower.includes('not found') ||
|
||||
errorLower.includes('no such file') ||
|
||||
errorLower.includes('does not exist') || |
||||
errorLower.includes('fatal:') || |
||||
errorMessage.includes('pathspec')) { |
||||
return error(404, `File not found: ${filePath} at ref ${ref}`); |
||||
} |
||||
// For other errors, return 500 with a more helpful message
|
||||
return error(500, `Failed to read file: ${errorMessage}`); |
||||
} |
||||
} catch (err) { |
||||
// This catch block handles errors that occur outside the file reading try-catch
|
||||
// (e.g., in branch validation, access checks, etc.)
|
||||
|
||||
// If it's already a Response (from error handlers), return it
|
||||
if (err instanceof Response) { |
||||
return err; |
||||
} |
||||
|
||||
// If it's a SvelteKit HttpError (from error() function), re-throw it
|
||||
// SvelteKit errors have a status property and body property
|
||||
if (err && typeof err === 'object' && 'status' in err && 'body' in err) { |
||||
throw err; |
||||
} |
||||
|
||||
const errorMessage = err instanceof Error ? err.message : String(err); |
||||
const errorStack = err instanceof Error ? err.stack : undefined; |
||||
|
||||
logger.error({
|
||||
error: err,
|
||||
errorStack, |
||||
npub,
|
||||
repo,
|
||||
filePath, |
||||
ref: url.searchParams.get('ref'), |
||||
errorMessage |
||||
}, 'Unexpected error in file endpoint (outside file reading block)'); |
||||
|
||||
// Check if it's a "not found" type error
|
||||
const errorLower = errorMessage.toLowerCase(); |
||||
if (errorLower.includes('not found') ||
|
||||
errorLower.includes('repository not found')) { |
||||
return error(404, errorMessage); |
||||
} |
||||
|
||||
return handleApiError(err, { operation: 'readFile', npub, repo, filePath }, 'Failed to read file'); |
||||
} |
||||
}; |
||||
|
||||
export const POST: RequestHandler = async ({ params, url, request }: { params: { npub?: string; repo?: string }; url: URL; request: Request }) => { |
||||
const { npub, repo } = params; |
||||
|
||||
if (!npub || !repo) { |
||||
return error(400, 'Missing npub or repo parameter'); |
||||
} |
||||
|
||||
let path: string | undefined; |
||||
try { |
||||
const body = await request.json(); |
||||
path = body.path; |
||||
const { content, commitMessage, authorName, authorEmail, branch, action, userPubkey, useNIP07, nsecKey, commitSignatureEvent } = body; |
||||
|
||||
// Check for NIP-98 authentication (for git operations)
|
||||
const authHeader = request.headers.get('Authorization'); |
||||
let nip98Event = null; |
||||
if (authHeader && authHeader.startsWith('Nostr ')) { |
||||
const requestUrl = `${request.headers.get('x-forwarded-proto') || (url.protocol === 'https:' ? 'https' : 'http')}://${request.headers.get('host') || url.host}${url.pathname}${url.search}`; |
||||
const authResult = verifyNIP98Auth(authHeader, requestUrl, request.method); |
||||
if (authResult.valid && authResult.event) { |
||||
nip98Event = authResult.event; |
||||
} |
||||
} |
||||
|
||||
if (!path || !commitMessage) { |
||||
return error(400, 'Missing required fields: path, commitMessage'); |
||||
} |
||||
|
||||
// Fetch authorName and authorEmail from kind 0 event if not provided
|
||||
let finalAuthorName = authorName; |
||||
let finalAuthorEmail = authorEmail; |
||||
|
||||
if (!finalAuthorName || !finalAuthorEmail) { |
||||
if (!userPubkey) { |
||||
return error(400, 'Missing userPubkey. Cannot fetch author information without userPubkey.'); |
||||
} |
||||
|
||||
const userPubkeyHexForProfile = decodeNpubToHex(userPubkey) || userPubkey; |
||||
|
||||
try { |
||||
if (!finalAuthorName) { |
||||
finalAuthorName = await fetchUserName(userPubkeyHexForProfile, userPubkey, DEFAULT_NOSTR_RELAYS); |
||||
} |
||||
if (!finalAuthorEmail) { |
||||
finalAuthorEmail = await fetchUserEmail(userPubkeyHexForProfile, userPubkey, DEFAULT_NOSTR_RELAYS); |
||||
} |
||||
} catch (err) { |
||||
logger.warn({ error: err, userPubkey }, 'Failed to fetch user profile for author info, using fallbacks'); |
||||
// Use fallbacks if fetch fails
|
||||
if (!finalAuthorName) { |
||||
const npub = userPubkey.startsWith('npub') ? userPubkey : nip19.npubEncode(userPubkeyHexForProfile); |
||||
finalAuthorName = npub.substring(0, 20); |
||||
} |
||||
if (!finalAuthorEmail) { |
||||
const npub = userPubkey.startsWith('npub') ? userPubkey : nip19.npubEncode(userPubkeyHexForProfile); |
||||
finalAuthorEmail = `${npub.substring(0, 20)}@gitrepublic.web`; |
||||
} |
||||
} |
||||
} |
||||
|
||||
if (!userPubkey) { |
||||
return error(401, 'Authentication required. Please provide userPubkey.'); |
||||
} |
||||
|
||||
// Check if repo exists locally
|
||||
if (!fileManager.repoExists(npub, repo)) { |
||||
// Try to fetch announcement to see if repo exists in Nostr
|
||||
let repoOwnerPubkey: string; |
||||
try { |
||||
repoOwnerPubkey = requireNpubHex(npub); |
||||
} catch { |
||||
return error(400, 'Invalid npub format'); |
||||
} |
||||
|
||||
// Fetch repository announcement (case-insensitive) with caching
|
||||
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allEvents, repo); |
||||
|
||||
if (announcement) { |
||||
// Repository exists in Nostr but is not cloned locally
|
||||
// For file editing, we need a local clone
|
||||
return error(404, 'Repository is not cloned locally. To edit files, the repository must be cloned to the server first. Please use the "Clone to Server" button if you have unlimited access, or contact a server administrator.'); |
||||
} else { |
||||
return error(404, 'Repository not found'); |
||||
} |
||||
} |
||||
|
||||
// Check if user is a maintainer
|
||||
let repoOwnerPubkey: string; |
||||
try { |
||||
repoOwnerPubkey = requireNpubHex(npub); |
||||
} catch { |
||||
return error(400, 'Invalid npub format'); |
||||
} |
||||
|
||||
// Convert userPubkey to hex if needed
|
||||
const userPubkeyHex = decodeNpubToHex(userPubkey) || userPubkey; |
||||
|
||||
const isMaintainer = await maintainerService.isMaintainer(userPubkeyHex, repoOwnerPubkey, repo); |
||||
if (!isMaintainer) { |
||||
return error(403, 'Only repository maintainers can edit files directly. Please submit a pull request instead.'); |
||||
} |
||||
|
||||
// Prepare signing options
|
||||
// NOTE: nsecKey is intentionally NOT supported from client requests for security reasons.
|
||||
// Clients should use NIP-07 (browser extension) or NIP-98 (HTTP auth) instead.
|
||||
// nsecKey is only for server-side use via environment variables.
|
||||
const signingOptions: { |
||||
useNIP07?: boolean; |
||||
nip98Event?: NostrEvent; |
||||
nsecKey?: string; |
||||
commitSignatureEvent?: NostrEvent; |
||||
} = {}; |
||||
|
||||
// If client sent a pre-signed commit signature event (from NIP-07), use it
|
||||
if (commitSignatureEvent && commitSignatureEvent.sig && commitSignatureEvent.id) { |
||||
signingOptions.commitSignatureEvent = commitSignatureEvent; |
||||
} else if (nip98Event) { |
||||
signingOptions.nip98Event = nip98Event; |
||||
} |
||||
// Note: useNIP07 is no longer used since signing happens client-side
|
||||
// Explicitly ignore nsecKey from client requests - it's a security risk
|
||||
// Server-side signing is not recommended - commits should be signed by their authors
|
||||
if (nsecKey) { |
||||
// Security: Log warning but never log the actual key value
|
||||
const clientIp = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown'; |
||||
logger.warn({ clientIp, npub, repo }, '[SECURITY] Client attempted to send nsecKey in request. This is not allowed for security reasons.'); |
||||
auditLogger.log({ |
||||
user: userPubkeyHex || undefined, |
||||
ip: clientIp, |
||||
action: 'auth_attempt', |
||||
resource: 'file_operation', |
||||
result: 'failure', |
||||
error: 'Client attempted to send private key in request body', |
||||
metadata: { reason: 'security_violation' } |
||||
}); |
||||
} |
||||
|
||||
const clientIp = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown'; |
||||
|
||||
if (action === 'delete') { |
||||
try { |
||||
// Get default branch if not provided
|
||||
const targetBranch = branch || await fileManager.getDefaultBranch(npub, repo); |
||||
|
||||
await fileManager.deleteFile( |
||||
npub, |
||||
repo, |
||||
path, |
||||
commitMessage, |
||||
finalAuthorName, |
||||
finalAuthorEmail, |
||||
targetBranch, |
||||
Object.keys(signingOptions).length > 0 ? signingOptions : undefined |
||||
); |
||||
auditLogger.logFileOperation( |
||||
userPubkeyHex, |
||||
clientIp, |
||||
'delete', |
||||
`${npub}/${repo}`, |
||||
path, |
||||
'success' |
||||
); |
||||
return json({ success: true, message: 'File deleted and committed' }); |
||||
} catch (err) { |
||||
auditLogger.logFileOperation( |
||||
userPubkeyHex, |
||||
clientIp, |
||||
'delete', |
||||
`${npub}/${repo}`, |
||||
path, |
||||
'failure', |
||||
err instanceof Error ? err.message : String(err) |
||||
); |
||||
throw err; |
||||
} |
||||
} else if (action === 'create' || content !== undefined) { |
||||
if (content === undefined) { |
||||
return error(400, 'Content is required for create/update operations'); |
||||
} |
||||
try { |
||||
// Get default branch if not provided
|
||||
const targetBranch = branch || await fileManager.getDefaultBranch(npub, repo); |
||||
|
||||
await fileManager.writeFile( |
||||
npub, |
||||
repo, |
||||
path, |
||||
content, |
||||
commitMessage, |
||||
finalAuthorName, |
||||
finalAuthorEmail, |
||||
targetBranch, |
||||
Object.keys(signingOptions).length > 0 ? signingOptions : undefined |
||||
); |
||||
auditLogger.logFileOperation( |
||||
userPubkeyHex, |
||||
clientIp, |
||||
action === 'create' ? 'create' : 'write', |
||||
`${npub}/${repo}`, |
||||
path, |
||||
'success' |
||||
); |
||||
return json({ success: true, message: 'File saved and committed' }); |
||||
} catch (err) { |
||||
auditLogger.logFileOperation( |
||||
userPubkeyHex, |
||||
clientIp, |
||||
action === 'create' ? 'create' : 'write', |
||||
`${npub}/${repo}`, |
||||
path, |
||||
'failure', |
||||
err instanceof Error ? err.message : String(err) |
||||
); |
||||
throw err; |
||||
} |
||||
} else { |
||||
return error(400, 'Invalid action or missing content'); |
||||
} |
||||
} catch (err) { |
||||
return handleApiError(err, { operation: 'writeFile', npub, repo, filePath: path }, 'Failed to write file'); |
||||
} |
||||
}; |
||||
File diff suppressed because it is too large
Load Diff
@ -1,543 +0,0 @@
@@ -1,543 +0,0 @@
|
||||
/** |
||||
* API endpoint for forking repositories |
||||
*/ |
||||
|
||||
import { json, error } from '@sveltejs/kit'; |
||||
import type { RequestHandler } from './$types'; |
||||
import { DEFAULT_NOSTR_RELAYS, combineRelays, getGitUrl } from '$lib/config.js'; |
||||
import { getUserRelays } from '$lib/services/nostr/user-relays.js'; |
||||
import { NostrClient } from '$lib/services/nostr/nostr-client.js'; |
||||
import { KIND, type NostrEvent } from '$lib/types/nostr.js'; |
||||
import { getVisibility, getProjectRelays } from '$lib/utils/repo-visibility.js'; |
||||
import { nip19 } from 'nostr-tools'; |
||||
import { signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js'; |
||||
import { requireNpubHex, decodeNpubToHex } from '$lib/utils/npub-utils.js'; |
||||
import { OwnershipTransferService } from '$lib/services/nostr/ownership-transfer-service.js'; |
||||
import { existsSync } from 'fs'; |
||||
import { rm } from 'fs/promises'; |
||||
import { join, resolve } from 'path'; |
||||
import simpleGit from 'simple-git'; |
||||
import { isValidBranchName, validateRepoPath } from '$lib/utils/security.js'; |
||||
import { ResourceLimits } from '$lib/services/security/resource-limits.js'; |
||||
import { auditLogger } from '$lib/services/security/audit-logger.js'; |
||||
import { ForkCountService } from '$lib/services/nostr/fork-count-service.js'; |
||||
import { getCachedUserLevel } from '$lib/services/security/user-level-cache.js'; |
||||
import { hasUnlimitedAccess } from '$lib/utils/user-access.js'; |
||||
import logger from '$lib/services/logger.js'; |
||||
import { handleApiError, handleValidationError, handleNotFoundError, handleAuthorizationError } from '$lib/utils/error-handler.js'; |
||||
import { eventCache } from '$lib/services/nostr/event-cache.js'; |
||||
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; |
||||
|
||||
import { repoManager, nostrClient, forkCountService } from '$lib/services/service-registry.js'; |
||||
|
||||
// Resolve GIT_REPO_ROOT to an absolute path (handles both relative and absolute values).
// Falls back to '/repos' when the environment variable is unset.
const repoRootEnv = process.env.GIT_REPO_ROOT || '/repos';
const repoRoot = resolve(repoRootEnv);
// Used to check whether a user may create another repository under repoRoot
// (see resourceLimits.canCreateRepo in the POST handler below).
const resourceLimits = new ResourceLimits(repoRoot);
||||
|
||||
/** |
||||
* Retry publishing an event with exponential backoff |
||||
* Attempts up to 3 times with delays: 1s, 2s, 4s |
||||
*/ |
||||
async function publishEventWithRetry( |
||||
event: NostrEvent, |
||||
relays: string[], |
||||
eventName: string, |
||||
maxAttempts: number = 3, |
||||
context?: string |
||||
): Promise<{ success: string[]; failed: Array<{ relay: string; error: string }> }> { |
||||
let lastResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null; |
||||
|
||||
// Extract context from event if available (for better logging)
|
||||
const eventId = event.id.slice(0, 8); |
||||
const logContext = context || `[event:${eventId}]`; |
||||
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) { |
||||
logger.info({ logContext, eventName, attempt, maxAttempts }, `[Fork] Publishing ${eventName} - Attempt ${attempt}/${maxAttempts}...`); |
||||
|
||||
lastResult = await nostrClient.publishEvent(event, relays); |
||||
|
||||
if (lastResult.success.length > 0) { |
||||
logger.info({ logContext, eventName, successCount: lastResult.success.length, relays: lastResult.success }, `[Fork] ${eventName} published successfully`); |
||||
if (lastResult.failed.length > 0) { |
||||
logger.warn({ logContext, eventName, failed: lastResult.failed }, `[Fork] Some relays failed`); |
||||
} |
||||
return lastResult; |
||||
} |
||||
|
||||
if (attempt < maxAttempts) { |
||||
const delayMs = Math.pow(2, attempt - 1) * 1000; // 1s, 2s, 4s
|
||||
logger.warn({ logContext, eventName, attempt, delayMs, failed: lastResult.failed }, `[Fork] ${eventName} failed on attempt ${attempt}. Retrying...`); |
||||
await new Promise(resolve => setTimeout(resolve, delayMs)); |
||||
} |
||||
} |
||||
|
||||
// All attempts failed
|
||||
logger.error({ logContext, eventName, maxAttempts, failed: lastResult?.failed }, `[Fork] ${eventName} failed after ${maxAttempts} attempts`); |
||||
return lastResult!; |
||||
} |
||||
|
||||
/** |
||||
* POST - Fork a repository |
||||
* Body: { userPubkey, forkName? } |
||||
*/ |
||||
export const POST: RequestHandler = async ({ params, request }) => { |
||||
const { npub, repo } = params; |
||||
|
||||
if (!npub || !repo) { |
||||
return error(400, 'Missing npub or repo parameter'); |
||||
} |
||||
|
||||
try { |
||||
const body = await request.json(); |
||||
const { userPubkey, forkName, localOnly } = body; |
||||
|
||||
if (!userPubkey) { |
||||
return error(401, 'Authentication required. Please provide userPubkey.'); |
||||
} |
||||
|
||||
// Validate localOnly parameter
|
||||
const isLocalOnly = localOnly === true; |
||||
|
||||
// Decode original repo owner npub
|
||||
let originalOwnerPubkey: string; |
||||
try { |
||||
originalOwnerPubkey = requireNpubHex(npub); |
||||
} catch { |
||||
return error(400, 'Invalid npub format'); |
||||
} |
||||
|
||||
// Decode user pubkey if needed (must be done before using it)
|
||||
const userPubkeyHex = decodeNpubToHex(userPubkey) || userPubkey; |
||||
|
||||
// Convert to npub for resource check and path construction
|
||||
const userNpub = nip19.npubEncode(userPubkeyHex); |
||||
|
||||
// Determine fork name (use original name if not specified)
|
||||
const forkRepoName = forkName || repo; |
||||
|
||||
// Check if user has unlimited access (required for storing repos locally)
|
||||
const userLevel = getCachedUserLevel(userPubkeyHex); |
||||
if (!hasUnlimitedAccess(userLevel?.level)) { |
||||
const clientIp = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown'; |
||||
auditLogger.logRepoFork( |
||||
userPubkeyHex, |
||||
`${npub}/${repo}`, |
||||
`${userNpub}/${forkRepoName}`, |
||||
'failure', |
||||
'User does not have unlimited access' |
||||
); |
||||
return error(403, 'Repository creation requires unlimited access. Please verify you can write to at least one default Nostr relay.'); |
||||
} |
||||
|
||||
// Check resource limits before forking
|
||||
const resourceCheck = await resourceLimits.canCreateRepo(userNpub); |
||||
if (!resourceCheck.allowed) { |
||||
const clientIp = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown'; |
||||
auditLogger.logRepoFork( |
||||
userPubkeyHex, |
||||
`${npub}/${repo}`, |
||||
`${userNpub}/${forkRepoName}`, |
||||
'failure', |
||||
resourceCheck.reason |
||||
); |
||||
return error(403, resourceCheck.reason || 'Resource limit exceeded'); |
||||
} |
||||
|
||||
// Check if original repo exists
|
||||
const originalRepoPath = join(repoRoot, npub, `${repo}.git`); |
||||
// Security: Ensure resolved path is within repoRoot
|
||||
const originalPathValidation = validateRepoPath(originalRepoPath, repoRoot); |
||||
if (!originalPathValidation.valid) { |
||||
return error(403, originalPathValidation.error || 'Invalid repository path'); |
||||
} |
||||
if (!existsSync(originalRepoPath)) { |
||||
return error(404, 'Original repository not found'); |
||||
} |
||||
|
||||
// Get original repo announcement (case-insensitive) with caching
|
||||
const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, originalOwnerPubkey, eventCache); |
||||
const originalAnnouncement = findRepoAnnouncement(allAnnouncements, repo); |
||||
|
||||
if (!originalAnnouncement) { |
||||
return error(404, 'Original repository announcement not found'); |
||||
} |
||||
|
||||
// Check if fork already exists
|
||||
const forkRepoPath = join(repoRoot, userNpub, `${forkRepoName}.git`); |
||||
// Security: Ensure resolved path is within repoRoot
|
||||
const forkPathValidation = validateRepoPath(forkRepoPath, repoRoot); |
||||
if (!forkPathValidation.valid) { |
||||
return error(403, forkPathValidation.error || 'Invalid fork repository path'); |
||||
} |
||||
if (existsSync(forkRepoPath)) { |
||||
return error(409, 'Fork already exists'); |
||||
} |
||||
|
||||
// Clone the repository using simple-git (safer than shell commands)
|
||||
const clientIp = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown'; |
||||
auditLogger.logRepoFork( |
||||
userPubkeyHex, |
||||
`${npub}/${repo}`, |
||||
`${userNpub}/${forkRepoName}`, |
||||
'success' |
||||
); |
||||
|
||||
const git = simpleGit(); |
||||
await git.clone(originalRepoPath, forkRepoPath, ['--bare']); |
||||
|
||||
// Invalidate resource limit cache after creating repo
|
||||
resourceLimits.invalidateCache(userNpub); |
||||
|
||||
// Create fork announcement
|
||||
const gitDomain = process.env.GIT_DOMAIN || 'localhost:6543'; |
||||
const isLocalhost = gitDomain.startsWith('localhost') || gitDomain.startsWith('127.0.0.1'); |
||||
const protocol = isLocalhost ? 'http' : 'https'; |
||||
const forkGitUrl = `${protocol}://${gitDomain}/${userNpub}/${forkRepoName}.git`; |
||||
|
||||
// Get Tor .onion URL if available
|
||||
const { getTorGitUrl } = await import('$lib/services/tor/hidden-service.js'); |
||||
const torOnionUrl = await getTorGitUrl(userNpub, forkRepoName); |
||||
|
||||
// Extract original clone URLs and earliest unique commit
|
||||
const originalCloneUrls = originalAnnouncement.tags |
||||
.filter(t => t[0] === 'clone') |
||||
.flatMap(t => t.slice(1)) |
||||
.filter(url => url && typeof url === 'string') |
||||
.filter(url => { |
||||
// Exclude our domain and .onion URLs (we'll add our own if available)
|
||||
if (url.includes(gitDomain)) return false; |
||||
if (url.includes('.onion')) return false; |
||||
return true; |
||||
}) as string[]; |
||||
|
||||
const earliestCommitTag = originalAnnouncement.tags.find(t => t[0] === 'r' && t[2] === 'euc'); |
||||
const earliestCommit = earliestCommitTag?.[1]; |
||||
|
||||
// Get original repo name and description
|
||||
const originalName = originalAnnouncement.tags.find(t => t[0] === 'name')?.[1] || repo; |
||||
const originalDescription = originalAnnouncement.tags.find(t => t[0] === 'description')?.[1] || ''; |
||||
|
||||
// Build clone URLs for fork - NEVER include localhost, only include public domain or Tor .onion
|
||||
const forkCloneUrls: string[] = []; |
||||
|
||||
// Add our domain URL only if it's NOT localhost (explicitly check the URL)
|
||||
if (!isLocalhost && !forkGitUrl.includes('localhost') && !forkGitUrl.includes('127.0.0.1')) { |
||||
forkCloneUrls.push(forkGitUrl); |
||||
} |
||||
|
||||
// Add Tor .onion URL if available
|
||||
if (torOnionUrl) { |
||||
forkCloneUrls.push(torOnionUrl); |
||||
} |
||||
|
||||
// Add original clone URLs
|
||||
forkCloneUrls.push(...originalCloneUrls); |
||||
|
||||
// Validate: If using localhost, require either Tor .onion URL or at least one other clone URL
|
||||
if (isLocalhost && !torOnionUrl && originalCloneUrls.length === 0) { |
||||
return error(400, 'Cannot create fork with only localhost. The original repository must have at least one public clone URL, or you need to configure a Tor .onion address.'); |
||||
} |
||||
|
||||
// Preserve visibility and project-relay from original repo
|
||||
const originalVisibility = getVisibility(originalAnnouncement); |
||||
const originalProjectRelays = getProjectRelays(originalAnnouncement); |
||||
|
||||
// Build fork announcement tags
|
||||
// Use standardized fork tag: ['fork', '30617:pubkey:d-tag']
|
||||
const originalRepoTag = `${KIND.REPO_ANNOUNCEMENT}:${originalOwnerPubkey}:${repo}`; |
||||
const tags: string[][] = [ |
||||
['d', forkRepoName], |
||||
['name', `${originalName} (fork)`], |
||||
['description', `Fork of ${originalName}${originalDescription ? `: ${originalDescription}` : ''}`], |
||||
['clone', ...forkCloneUrls], |
||||
['relays', ...DEFAULT_NOSTR_RELAYS], |
||||
['fork', originalRepoTag], // Standardized fork tag format
|
||||
['p', originalOwnerPubkey], // Original owner
|
||||
]; |
||||
|
||||
// Local-only forks are always private and marked as synthetic
|
||||
if (isLocalOnly) { |
||||
tags.push(['visibility', 'private']); |
||||
tags.push(['local-only', 'true']); // Mark as synthetic/local-only
|
||||
} else { |
||||
// Preserve visibility from original repo (defaults to public if not set)
|
||||
if (originalVisibility !== 'public') { |
||||
tags.push(['visibility', originalVisibility]); |
||||
} |
||||
} |
||||
|
||||
// Preserve project-relay tags from original repo
|
||||
for (const relay of originalProjectRelays) { |
||||
tags.push(['project-relay', relay]); |
||||
} |
||||
|
||||
// Add earliest unique commit if available
|
||||
if (earliestCommit) { |
||||
tags.push(['r', earliestCommit, 'euc']); |
||||
} |
||||
|
||||
// Create fork announcement event
|
||||
const forkAnnouncementTemplate = { |
||||
kind: KIND.REPO_ANNOUNCEMENT, |
||||
pubkey: userPubkeyHex, |
||||
created_at: Math.floor(Date.now() / 1000), |
||||
content: '', |
||||
tags |
||||
}; |
||||
|
||||
// Sign fork announcement
|
||||
const signedForkAnnouncement = await signEventWithNIP07(forkAnnouncementTemplate); |
||||
|
||||
// Security: Truncate npub in logs and create context (must be before use)
|
||||
const truncatedNpub = userNpub.length > 16 ? `${userNpub.slice(0, 12)}...` : userNpub; |
||||
const truncatedOriginalNpub = npub.length > 16 ? `${npub.slice(0, 12)}...` : npub; |
||||
const context = `[${truncatedOriginalNpub}/${repo} → ${truncatedNpub}/${forkRepoName}]`; |
||||
|
||||
let publishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null; |
||||
let ownershipPublishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null; |
||||
let signedOwnershipEvent: NostrEvent | null = null; |
||||
|
||||
if (isLocalOnly) { |
||||
// Local-only fork: Skip publishing to Nostr relays
|
||||
logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: true }, 'Creating local-only fork (not publishing to Nostr)'); |
||||
publishResult = { success: [], failed: [] }; |
||||
ownershipPublishResult = { success: [], failed: [] }; |
||||
|
||||
// For local-only forks, create a synthetic ownership event (not published)
|
||||
const ownershipService = new OwnershipTransferService([]); |
||||
const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName); |
||||
signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent); |
||||
logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Created synthetic ownership event for local-only fork'); |
||||
} else { |
||||
// Public fork: Publish to Nostr relays
|
||||
const { outbox } = await getUserRelays(userPubkeyHex, nostrClient); |
||||
const combinedRelays = combineRelays(outbox); |
||||
|
||||
logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, relayCount: combinedRelays.length, relays: combinedRelays }, 'Starting fork process'); |
||||
|
||||
publishResult = await publishEventWithRetry( |
||||
signedForkAnnouncement, |
||||
combinedRelays, |
||||
'fork announcement', |
||||
3, |
||||
context |
||||
); |
||||
|
||||
if (publishResult.success.length === 0) { |
||||
// Clean up repo if announcement failed
|
||||
logger.error({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: publishResult.failed }, 'Fork announcement failed after all retries. Cleaning up repository.'); |
||||
await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {}); |
||||
const errorDetails = `All relays failed: ${publishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}`; |
||||
return json({ |
||||
success: false, |
||||
error: 'Failed to publish fork announcement to relays after 3 attempts', |
||||
details: errorDetails, |
||||
eventName: 'fork announcement' |
||||
}, { status: 500 }); |
||||
} |
||||
|
||||
// Create and publish initial ownership proof (self-transfer event)
|
||||
// This MUST succeed for the fork to be valid - without it, there's no proof of ownership on Nostr
|
||||
const ownershipService = new OwnershipTransferService(combinedRelays); |
||||
const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName); |
||||
signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent); |
||||
|
||||
ownershipPublishResult = await publishEventWithRetry( |
||||
signedOwnershipEvent, |
||||
combinedRelays, |
||||
'ownership transfer event', |
||||
3, |
||||
context |
||||
); |
||||
|
||||
if (ownershipPublishResult.success.length === 0) { |
||||
// Clean up repo if ownership proof failed
|
||||
logger.error({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: ownershipPublishResult.failed }, 'Ownership transfer event failed after all retries. Cleaning up repository and publishing deletion request.'); |
||||
await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {}); |
||||
|
||||
// Publish deletion request (NIP-09) for the announcement since it's invalid without ownership proof
|
||||
logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Publishing deletion request for invalid fork announcement...'); |
||||
const deletionRequest = { |
||||
kind: KIND.DELETION_REQUEST, // NIP-09: Event Deletion Request
|
||||
pubkey: userPubkeyHex, |
||||
created_at: Math.floor(Date.now() / 1000), |
||||
content: 'Fork failed: ownership transfer event could not be published after 3 attempts. This announcement is invalid.', |
||||
tags: [ |
||||
['a', `${KIND.REPO_ANNOUNCEMENT}:${userPubkeyHex}:${forkRepoName}`], // Reference to the repo announcement
|
||||
['k', KIND.REPO_ANNOUNCEMENT.toString()] // Kind of event being deleted
|
||||
] |
||||
}; |
||||
|
||||
const signedDeletionRequest = await signEventWithNIP07(deletionRequest); |
||||
const deletionResult = await publishEventWithRetry( |
||||
signedDeletionRequest, |
||||
combinedRelays, |
||||
'deletion request', |
||||
3, |
||||
context |
||||
); |
||||
|
||||
if (deletionResult.success.length > 0) { |
||||
logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Deletion request published successfully'); |
||||
} else { |
||||
logger.error({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: deletionResult.failed }, 'Failed to publish deletion request'); |
||||
} |
||||
|
||||
const errorDetails = `Fork is invalid without ownership proof. All relays failed: ${ownershipPublishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}. Deletion request ${deletionResult.success.length > 0 ? 'published' : 'failed to publish'}.`; |
||||
return json({ |
||||
success: false, |
||||
error: 'Failed to publish ownership transfer event to relays after 3 attempts', |
||||
details: errorDetails, |
||||
eventName: 'ownership transfer event' |
||||
}, { status: 500 }); |
||||
} |
||||
} |
||||
|
||||
// Provision the fork repo (this will create verification file and include self-transfer)
|
||||
logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: isLocalOnly }, 'Provisioning fork repository...'); |
||||
await repoManager.provisionRepo(signedForkAnnouncement, signedOwnershipEvent || undefined, false); |
||||
|
||||
// Save fork announcement to repo (offline papertrail) in nostr/repo-events.jsonl
|
||||
try { |
||||
const { fileManager } = await import('$lib/services/service-registry.js'); |
||||
|
||||
// Save to repo if it exists locally (should exist after provisioning)
|
||||
if (fileManager.repoExists(userNpub, forkRepoName)) { |
||||
// Get worktree to save to repo-events.jsonl
|
||||
const defaultBranch = await fileManager.getDefaultBranch(userNpub, forkRepoName).catch(() => 'main'); |
||||
const repoPath = fileManager.getRepoPath(userNpub, forkRepoName); |
||||
const workDir = await fileManager.getWorktree(repoPath, defaultBranch, userNpub, forkRepoName); |
||||
|
||||
// Save to repo-events.jsonl
|
||||
await fileManager.saveRepoEventToWorktree(workDir, signedForkAnnouncement as NostrEvent, 'announcement').catch(err => { |
||||
logger.debug({ error: err }, 'Failed to save fork announcement to repo-events.jsonl'); |
||||
}); |
||||
|
||||
// Stage and commit the file
|
||||
const workGit = simpleGit(workDir); |
||||
await workGit.add(['nostr/repo-events.jsonl']); |
||||
await workGit.commit( |
||||
`Add fork repository announcement: ${signedForkAnnouncement.id.slice(0, 16)}...`, |
||||
['nostr/repo-events.jsonl'], |
||||
{ |
||||
'--author': `Nostr <${userPubkeyHex}@nostr>` |
||||
} |
||||
); |
||||
|
||||
// Clean up worktree
|
||||
await fileManager.removeWorktree(repoPath, workDir).catch(err => { |
||||
logger.debug({ error: err }, 'Failed to remove worktree after saving fork announcement'); |
||||
}); |
||||
} |
||||
} catch (err) { |
||||
// Log but don't fail - publishing to relays is more important
|
||||
logger.warn({ error: err, npub: userNpub, repo: forkRepoName }, 'Failed to save fork announcement to repo'); |
||||
} |
||||
|
||||
logger.info({ |
||||
operation: 'fork', |
||||
originalRepo: `${npub}/${repo}`, |
||||
forkRepo: `${userNpub}/${forkRepoName}`, |
||||
localOnly: isLocalOnly, |
||||
announcementId: signedForkAnnouncement.id, |
||||
ownershipTransferId: signedOwnershipEvent?.id, |
||||
announcementRelays: publishResult?.success.length || 0, |
||||
ownershipRelays: ownershipPublishResult?.success.length || 0 |
||||
}, 'Fork completed successfully'); |
||||
|
||||
const message = isLocalOnly |
||||
? 'Local-only fork created successfully! This fork is private and only exists on this server.' |
||||
: `Repository forked successfully! Published to ${publishResult?.success.length || 0} relay(s) for announcement and ${ownershipPublishResult?.success.length || 0} relay(s) for ownership proof.`; |
||||
|
||||
return json({ |
||||
success: true, |
||||
fork: { |
||||
npub: userNpub, |
||||
repo: forkRepoName, |
||||
url: forkGitUrl, |
||||
localOnly: isLocalOnly, |
||||
announcementId: signedForkAnnouncement.id, |
||||
ownershipTransferId: signedOwnershipEvent?.id, |
||||
publishedTo: isLocalOnly ? null : { |
||||
announcement: publishResult?.success.length || 0, |
||||
ownershipTransfer: ownershipPublishResult?.success.length || 0 |
||||
} |
||||
}, |
||||
message |
||||
}); |
||||
} catch (err) { |
||||
return handleApiError(err, { operation: 'fork', npub, repo }, 'Failed to fork repository'); |
||||
} |
||||
}; |
||||
|
||||
/** |
||||
* GET - Get fork information |
||||
* Returns whether this is a fork and what it's forked from |
||||
*/ |
||||
export const GET: RequestHandler = async ({ params }) => { |
||||
const { npub, repo } = params; |
||||
|
||||
if (!npub || !repo) { |
||||
return error(400, 'Missing npub or repo parameter'); |
||||
} |
||||
|
||||
try { |
||||
// Decode repo owner npub
|
||||
let ownerPubkey: string; |
||||
try { |
||||
ownerPubkey = requireNpubHex(npub); |
||||
} catch { |
||||
return error(400, 'Invalid npub format'); |
||||
} |
||||
|
||||
// Get repo announcement (case-insensitive) with caching
|
||||
const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, ownerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allAnnouncements, repo); |
||||
|
||||
if (!announcement) { |
||||
return error(404, 'Repository announcement not found'); |
||||
} |
||||
|
||||
// announcement is already set above
|
||||
const isFork = announcement.tags.some(t => t[0] === 't' && t[1] === 'fork'); |
||||
|
||||
// Get original repo reference
|
||||
const originalRepoTag = announcement.tags.find(t => t[0] === 'a' && t[1]?.startsWith(`${KIND.REPO_ANNOUNCEMENT}:`)); |
||||
const originalOwnerTag = announcement.tags.find(t => t[0] === 'p' && t[1] !== ownerPubkey); |
||||
|
||||
let originalRepo: { npub: string; repo: string } | null = null; |
||||
if (originalRepoTag && originalRepoTag[1]) { |
||||
const match = originalRepoTag[1].match(new RegExp(`^${KIND.REPO_ANNOUNCEMENT}:([a-f0-9]{64}):(.+)$`)); |
||||
if (match) { |
||||
const [, originalOwnerPubkey, originalRepoName] = match; |
||||
try { |
||||
const originalNpub = nip19.npubEncode(originalOwnerPubkey); |
||||
originalRepo = { npub: originalNpub, repo: originalRepoName }; |
||||
} catch { |
||||
// Invalid pubkey
|
||||
} |
||||
} |
||||
} |
||||
|
||||
// Get fork count for this repo
|
||||
let forkCount = 0; |
||||
if (!isFork && ownerPubkey && repo) { |
||||
try { |
||||
forkCount = await forkCountService.getForkCount(ownerPubkey, repo); |
||||
} catch (err) { |
||||
// Log but don't fail the request
|
||||
const context = npub && repo ? `[${npub}/${repo}]` : '[unknown]'; |
||||
logger.warn({ error: err, npub, repo }, `[Fork] ${context} Failed to get fork count`); |
||||
} |
||||
} |
||||
|
||||
return json({ |
||||
isFork, |
||||
originalRepo, |
||||
forkCount |
||||
}); |
||||
} catch (err) { |
||||
return handleApiError(err, { operation: 'getForkInfo', npub, repo }, 'Failed to get fork information'); |
||||
} |
||||
}; |
||||
@ -0,0 +1,457 @@
@@ -0,0 +1,457 @@
|
||||
/** |
||||
* RESTful Forks Resource Endpoint |
||||
*
|
||||
* GET /api/repos/{npub}/{repo}/forks # List forks / Get fork info |
||||
* POST /api/repos/{npub}/{repo}/forks # Create fork (fork this repo) |
||||
*/ |
||||
|
||||
import { json, error } from '@sveltejs/kit'; |
||||
import type { RequestHandler } from './$types'; |
||||
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js'; |
||||
import { handleApiError } from '$lib/utils/error-handler.js'; |
||||
import { DEFAULT_NOSTR_RELAYS, combineRelays } from '$lib/config.js'; |
||||
import { getUserRelays } from '$lib/services/nostr/user-relays.js'; |
||||
import { NostrClient } from '$lib/services/nostr/nostr-client.js'; |
||||
import { KIND, type NostrEvent } from '$lib/types/nostr.js'; |
||||
import { getVisibility, getProjectRelays } from '$lib/utils/repo-visibility.js'; |
||||
import { nip19 } from 'nostr-tools'; |
||||
import { signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js'; |
||||
import { requireNpubHex, decodeNpubToHex } from '$lib/utils/npub-utils.js'; |
||||
import { OwnershipTransferService } from '$lib/services/nostr/ownership-transfer-service.js'; |
||||
import { existsSync } from 'fs'; |
||||
import { rm } from 'fs/promises'; |
||||
import { join, resolve } from 'path'; |
||||
import simpleGit from 'simple-git'; |
||||
import { validateRepoPath } from '$lib/utils/security.js'; |
||||
import { ResourceLimits } from '$lib/services/security/resource-limits.js'; |
||||
import { auditLogger } from '$lib/services/security/audit-logger.js'; |
||||
import { ForkCountService } from '$lib/services/nostr/fork-count-service.js'; |
||||
import { getCachedUserLevel } from '$lib/services/security/user-level-cache.js'; |
||||
import { hasUnlimitedAccess } from '$lib/utils/user-access.js'; |
||||
import logger from '$lib/services/logger.js'; |
||||
import { eventCache } from '$lib/services/nostr/event-cache.js'; |
||||
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; |
||||
import { repoManager, nostrClient, forkCountService } from '$lib/services/service-registry.js'; |
||||
|
||||
// Resolve GIT_REPO_ROOT to absolute path
|
||||
const repoRootEnv = process.env.GIT_REPO_ROOT || '/repos'; |
||||
const repoRoot = resolve(repoRootEnv); |
||||
const resourceLimits = new ResourceLimits(repoRoot); |
||||
|
||||
/** |
||||
* Retry publishing an event with exponential backoff |
||||
*/ |
||||
async function publishEventWithRetry( |
||||
event: NostrEvent, |
||||
relays: string[], |
||||
eventName: string, |
||||
maxAttempts: number = 3, |
||||
context?: string |
||||
): Promise<{ success: string[]; failed: Array<{ relay: string; error: string }> }> { |
||||
let lastResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null; |
||||
const eventId = event.id.slice(0, 8); |
||||
const logContext = context || `[event:${eventId}]`; |
||||
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) { |
||||
logger.info({ logContext, eventName, attempt, maxAttempts }, `[Fork] Publishing ${eventName} - Attempt ${attempt}/${maxAttempts}...`); |
||||
|
||||
lastResult = await nostrClient.publishEvent(event, relays); |
||||
|
||||
if (lastResult.success.length > 0) { |
||||
logger.info({ logContext, eventName, successCount: lastResult.success.length }, `[Fork] ${eventName} published successfully`); |
||||
return lastResult; |
||||
} |
||||
|
||||
if (attempt < maxAttempts) { |
||||
const delayMs = Math.pow(2, attempt - 1) * 1000; |
||||
logger.warn({ logContext, eventName, attempt, delayMs }, `[Fork] ${eventName} failed on attempt ${attempt}. Retrying...`); |
||||
await new Promise(resolve => setTimeout(resolve, delayMs)); |
||||
} |
||||
} |
||||
|
||||
logger.error({ logContext, eventName, maxAttempts }, `[Fork] ${eventName} failed after ${maxAttempts} attempts`); |
||||
return lastResult!; |
||||
} |
||||
|
||||
/** |
||||
* GET: Get fork information |
||||
* Returns whether this repo is a fork and original repo info |
||||
*/ |
||||
export const GET: RequestHandler = createRepoGetHandler( |
||||
async (context: RepoRequestContext) => { |
||||
try { |
||||
// Get repo announcement (case-insensitive) with caching
|
||||
const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allAnnouncements, context.repo); |
||||
|
||||
if (!announcement) { |
||||
return error(404, 'Repository announcement not found'); |
||||
} |
||||
|
||||
// Check if this is a fork
|
||||
const isFork = announcement.tags.some(t => t[0] === 'fork'); |
||||
|
||||
// Get original repo reference
|
||||
const originalRepoTag = announcement.tags.find(t => t[0] === 'fork'); |
||||
let originalRepo: { npub: string; repo: string } | null = null; |
||||
|
||||
if (originalRepoTag && originalRepoTag[1]) { |
||||
const match = originalRepoTag[1].match(new RegExp(`^${KIND.REPO_ANNOUNCEMENT}:([a-f0-9]{64}):(.+)$`)); |
||||
if (match) { |
||||
const [, originalOwnerPubkey, originalRepoName] = match; |
||||
try { |
||||
const originalNpub = nip19.npubEncode(originalOwnerPubkey); |
||||
originalRepo = { npub: originalNpub, repo: originalRepoName }; |
||||
} catch { |
||||
// Invalid pubkey
|
||||
} |
||||
} |
||||
} |
||||
|
||||
// Get fork count for this repo (if not a fork itself)
|
||||
let forkCount = 0; |
||||
if (!isFork && context.repoOwnerPubkey && context.repo) { |
||||
try { |
||||
forkCount = await forkCountService.getForkCount(context.repoOwnerPubkey, context.repo); |
||||
} catch (err) { |
||||
logger.warn({ error: err, npub: context.npub, repo: context.repo }, 'Failed to get fork count'); |
||||
} |
||||
} |
||||
|
||||
return json({ |
||||
isFork, |
||||
originalRepo, |
||||
forkCount |
||||
}); |
||||
} catch (err) { |
||||
return handleApiError(err, { operation: 'getForkInfo', npub: context.npub, repo: context.repo }, 'Failed to get fork information'); |
||||
} |
||||
}, |
||||
{ operation: 'getForkInfo', requireRepoExists: false, requireRepoAccess: false } |
||||
); |
||||
|
||||
/** |
||||
* POST: Create fork |
||||
* Body: { userPubkey, forkName?, localOnly? } |
||||
*/ |
||||
export const POST: RequestHandler = createRepoPostHandler( |
||||
async (context: RepoRequestContext, event: RequestEvent) => { |
||||
try { |
||||
const body = await event.request.json(); |
||||
const { userPubkey, forkName, localOnly } = body; |
||||
|
||||
if (!userPubkey) { |
||||
return error(401, 'Authentication required. Please provide userPubkey.'); |
||||
} |
||||
|
||||
const isLocalOnly = localOnly === true; |
||||
const originalOwnerPubkey = context.repoOwnerPubkey; |
||||
|
||||
// Decode user pubkey
|
||||
const userPubkeyHex = decodeNpubToHex(userPubkey) || userPubkey; |
||||
const userNpub = nip19.npubEncode(userPubkeyHex); |
||||
|
||||
// Determine fork name
|
||||
const forkRepoName = forkName || context.repo; |
||||
|
||||
// Check if user has unlimited access
|
||||
const userLevel = getCachedUserLevel(userPubkeyHex); |
||||
if (!hasUnlimitedAccess(userLevel?.level)) { |
||||
const clientIp = event.request.headers.get('x-forwarded-for') || event.request.headers.get('x-real-ip') || 'unknown'; |
||||
auditLogger.logRepoFork( |
||||
userPubkeyHex, |
||||
`${context.npub}/${context.repo}`, |
||||
`${userNpub}/${forkRepoName}`, |
||||
'failure', |
||||
'User does not have unlimited access' |
||||
); |
||||
return error(403, 'Repository creation requires unlimited access. Please verify you can write to at least one default Nostr relay.'); |
||||
} |
||||
|
||||
// Check resource limits
|
||||
const resourceCheck = await resourceLimits.canCreateRepo(userNpub); |
||||
if (!resourceCheck.allowed) { |
||||
const clientIp = event.request.headers.get('x-forwarded-for') || event.request.headers.get('x-real-ip') || 'unknown'; |
||||
auditLogger.logRepoFork( |
||||
userPubkeyHex, |
||||
`${context.npub}/${context.repo}`, |
||||
`${userNpub}/${forkRepoName}`, |
||||
'failure', |
||||
resourceCheck.reason |
||||
); |
||||
return error(403, resourceCheck.reason || 'Resource limit exceeded'); |
||||
} |
||||
|
||||
// Check if original repo exists
|
||||
const originalRepoPath = join(repoRoot, context.npub, `${context.repo}.git`); |
||||
const originalPathValidation = validateRepoPath(originalRepoPath, repoRoot); |
||||
if (!originalPathValidation.valid) { |
||||
return error(403, originalPathValidation.error || 'Invalid repository path'); |
||||
} |
||||
if (!existsSync(originalRepoPath)) { |
||||
return error(404, 'Original repository not found'); |
||||
} |
||||
|
||||
// Get original repo announcement
|
||||
const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, originalOwnerPubkey, eventCache); |
||||
const originalAnnouncement = findRepoAnnouncement(allAnnouncements, context.repo); |
||||
|
||||
if (!originalAnnouncement) { |
||||
return error(404, 'Original repository announcement not found'); |
||||
} |
||||
|
||||
// Check if fork already exists
|
||||
const forkRepoPath = join(repoRoot, userNpub, `${forkRepoName}.git`); |
||||
const forkPathValidation = validateRepoPath(forkRepoPath, repoRoot); |
||||
if (!forkPathValidation.valid) { |
||||
return error(403, forkPathValidation.error || 'Invalid fork repository path'); |
||||
} |
||||
if (existsSync(forkRepoPath)) { |
||||
return error(409, 'Fork already exists'); |
||||
} |
||||
|
||||
// Clone the repository
|
||||
const clientIp = event.request.headers.get('x-forwarded-for') || event.request.headers.get('x-real-ip') || 'unknown'; |
||||
auditLogger.logRepoFork( |
||||
userPubkeyHex, |
||||
`${context.npub}/${context.repo}`, |
||||
`${userNpub}/${forkRepoName}`, |
||||
'success' |
||||
); |
||||
|
||||
const git = simpleGit(); |
||||
await git.clone(originalRepoPath, forkRepoPath, ['--bare']); |
||||
|
||||
// Invalidate resource limit cache
|
||||
resourceLimits.invalidateCache(userNpub); |
||||
|
||||
// Create fork announcement
|
||||
const gitDomain = process.env.GIT_DOMAIN || 'localhost:6543'; |
||||
const isLocalhost = gitDomain.startsWith('localhost') || gitDomain.startsWith('127.0.0.1'); |
||||
const protocol = isLocalhost ? 'http' : 'https'; |
||||
const forkGitUrl = `${protocol}://${gitDomain}/${userNpub}/${forkRepoName}.git`; |
||||
|
||||
// Get Tor .onion URL if available
|
||||
const { getTorGitUrl } = await import('$lib/services/tor/hidden-service.js'); |
||||
const torOnionUrl = await getTorGitUrl(userNpub, forkRepoName); |
||||
|
||||
// Extract original clone URLs
|
||||
const originalCloneUrls = originalAnnouncement.tags |
||||
.filter(t => t[0] === 'clone') |
||||
.flatMap(t => t.slice(1)) |
||||
.filter(url => url && typeof url === 'string') |
||||
.filter(url => { |
||||
if (url.includes(gitDomain)) return false; |
||||
if (url.includes('.onion')) return false; |
||||
return true; |
||||
}) as string[]; |
||||
|
||||
const earliestCommitTag = originalAnnouncement.tags.find(t => t[0] === 'r' && t[2] === 'euc'); |
||||
const earliestCommit = earliestCommitTag?.[1]; |
||||
|
||||
// Get original repo name and description
|
||||
const originalName = originalAnnouncement.tags.find(t => t[0] === 'name')?.[1] || context.repo; |
||||
const originalDescription = originalAnnouncement.tags.find(t => t[0] === 'description')?.[1] || ''; |
||||
|
||||
// Build clone URLs for fork
|
||||
const forkCloneUrls: string[] = []; |
||||
|
||||
if (!isLocalhost && !forkGitUrl.includes('localhost') && !forkGitUrl.includes('127.0.0.1')) { |
||||
forkCloneUrls.push(forkGitUrl); |
||||
} |
||||
|
||||
if (torOnionUrl) { |
||||
forkCloneUrls.push(torOnionUrl); |
||||
} |
||||
|
||||
forkCloneUrls.push(...originalCloneUrls); |
||||
|
||||
// Validate: If using localhost, require either Tor .onion URL or at least one other clone URL
|
||||
if (isLocalhost && !torOnionUrl && originalCloneUrls.length === 0) { |
||||
return error(400, 'Cannot create fork with only localhost. The original repository must have at least one public clone URL, or you need to configure a Tor .onion address.'); |
||||
} |
||||
|
||||
// Preserve visibility and project-relay from original repo
|
||||
const originalVisibility = getVisibility(originalAnnouncement); |
||||
const originalProjectRelays = getProjectRelays(originalAnnouncement); |
||||
|
||||
// Build fork announcement tags
|
||||
const originalRepoTag = `${KIND.REPO_ANNOUNCEMENT}:${originalOwnerPubkey}:${context.repo}`; |
||||
const tags: string[][] = [ |
||||
['d', forkRepoName], |
||||
['name', `${originalName} (fork)`], |
||||
['description', `Fork of ${originalName}${originalDescription ? `: ${originalDescription}` : ''}`], |
||||
['clone', ...forkCloneUrls], |
||||
['relays', ...DEFAULT_NOSTR_RELAYS], |
||||
['fork', originalRepoTag], |
||||
['p', originalOwnerPubkey], |
||||
]; |
||||
|
||||
// Local-only forks are always private
|
||||
if (isLocalOnly) { |
||||
tags.push(['visibility', 'private']); |
||||
tags.push(['local-only', 'true']); |
||||
} else { |
||||
if (originalVisibility !== 'public') { |
||||
tags.push(['visibility', originalVisibility]); |
||||
} |
||||
} |
||||
|
||||
// Preserve project-relay tags
|
||||
for (const relay of originalProjectRelays) { |
||||
tags.push(['project-relay', relay]); |
||||
} |
||||
|
||||
// Add earliest unique commit if available
|
||||
if (earliestCommit) { |
||||
tags.push(['r', earliestCommit, 'euc']); |
||||
} |
||||
|
||||
// Create fork announcement event
|
||||
const forkAnnouncementTemplate = { |
||||
kind: KIND.REPO_ANNOUNCEMENT, |
||||
pubkey: userPubkeyHex, |
||||
created_at: Math.floor(Date.now() / 1000), |
||||
content: '', |
||||
tags |
||||
}; |
||||
|
||||
// Sign fork announcement
|
||||
const signedForkAnnouncement = await signEventWithNIP07(forkAnnouncementTemplate); |
||||
|
||||
const truncatedNpub = userNpub.length > 16 ? `${userNpub.slice(0, 12)}...` : userNpub; |
||||
const truncatedOriginalNpub = context.npub.length > 16 ? `${context.npub.slice(0, 12)}...` : context.npub; |
||||
const logContext = `[${truncatedOriginalNpub}/${context.repo} → ${truncatedNpub}/${forkRepoName}]`; |
||||
|
||||
let publishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null; |
||||
let ownershipPublishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null; |
||||
let signedOwnershipEvent: NostrEvent | null = null; |
||||
|
||||
if (isLocalOnly) { |
||||
// Local-only fork: Skip publishing to Nostr relays
|
||||
logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: true }, 'Creating local-only fork (not publishing to Nostr)'); |
||||
publishResult = { success: [], failed: [] }; |
||||
ownershipPublishResult = { success: [], failed: [] }; |
||||
|
||||
// Create synthetic ownership event
|
||||
const ownershipService = new OwnershipTransferService([]); |
||||
const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName); |
||||
signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent); |
||||
} else { |
||||
// Public fork: Publish to Nostr relays
|
||||
const { outbox } = await getUserRelays(userPubkeyHex, nostrClient); |
||||
const combinedRelays = combineRelays(outbox); |
||||
|
||||
logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, relayCount: combinedRelays.length }, 'Starting fork process'); |
||||
|
||||
publishResult = await publishEventWithRetry( |
||||
signedForkAnnouncement, |
||||
combinedRelays, |
||||
'fork announcement', |
||||
3, |
||||
logContext |
||||
); |
||||
|
||||
if (publishResult.success.length === 0) { |
||||
logger.error({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: publishResult.failed }, 'Fork announcement failed after all retries. Cleaning up repository.'); |
||||
await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {}); |
||||
const errorDetails = `All relays failed: ${publishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}`; |
||||
return json({ |
||||
success: false, |
||||
error: 'Failed to publish fork announcement to relays after 3 attempts', |
||||
details: errorDetails, |
||||
eventName: 'fork announcement' |
||||
}, { status: 500 }); |
||||
} |
||||
|
||||
// Create and publish initial ownership proof
|
||||
const ownershipService = new OwnershipTransferService(combinedRelays); |
||||
const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName); |
||||
signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent); |
||||
|
||||
ownershipPublishResult = await publishEventWithRetry( |
||||
signedOwnershipEvent, |
||||
combinedRelays, |
||||
'ownership transfer event', |
||||
3, |
||||
logContext |
||||
); |
||||
|
||||
if (ownershipPublishResult.success.length === 0) { |
||||
logger.error({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: ownershipPublishResult.failed }, 'Ownership transfer event failed after all retries. Cleaning up repository.'); |
||||
await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {}); |
||||
|
||||
// Publish deletion request (NIP-09)
|
||||
logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Publishing deletion request for invalid fork announcement...'); |
||||
const deletionRequest = { |
||||
kind: KIND.DELETION_REQUEST, |
||||
pubkey: userPubkeyHex, |
||||
created_at: Math.floor(Date.now() / 1000), |
||||
content: 'Fork failed: ownership transfer event could not be published after 3 attempts. This announcement is invalid.', |
||||
tags: [ |
||||
['a', `${KIND.REPO_ANNOUNCEMENT}:${userPubkeyHex}:${forkRepoName}`], |
||||
['k', KIND.REPO_ANNOUNCEMENT.toString()] |
||||
] |
||||
}; |
||||
|
||||
const signedDeletionRequest = await signEventWithNIP07(deletionRequest); |
||||
const deletionResult = await publishEventWithRetry( |
||||
signedDeletionRequest, |
||||
combinedRelays, |
||||
'deletion request', |
||||
3, |
||||
logContext |
||||
); |
||||
|
||||
const errorDetails = `Fork is invalid without ownership proof. All relays failed: ${ownershipPublishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}. Deletion request ${deletionResult.success.length > 0 ? 'published' : 'failed to publish'}.`; |
||||
return json({ |
||||
success: false, |
||||
error: 'Failed to publish ownership transfer event to relays after 3 attempts', |
||||
details: errorDetails, |
||||
eventName: 'ownership transfer event' |
||||
}, { status: 500 }); |
||||
} |
||||
} |
||||
|
||||
// Provision the fork repo
|
||||
logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: isLocalOnly }, 'Provisioning fork repository...'); |
||||
await repoManager.provisionRepo(signedForkAnnouncement, signedOwnershipEvent || undefined, false); |
||||
|
||||
logger.info({ |
||||
operation: 'fork', |
||||
originalRepo: `${context.npub}/${context.repo}`, |
||||
forkRepo: `${userNpub}/${forkRepoName}`, |
||||
localOnly: isLocalOnly, |
||||
announcementId: signedForkAnnouncement.id, |
||||
ownershipTransferId: signedOwnershipEvent?.id, |
||||
announcementRelays: publishResult?.success.length || 0, |
||||
ownershipRelays: ownershipPublishResult?.success.length || 0 |
||||
}, 'Fork completed successfully'); |
||||
|
||||
const message = isLocalOnly |
||||
? 'Local-only fork created successfully! This fork is private and only exists on this server.' |
||||
: `Repository forked successfully! Published to ${publishResult?.success.length || 0} relay(s) for announcement and ${ownershipPublishResult?.success.length || 0} relay(s) for ownership proof.`; |
||||
|
||||
return json({ |
||||
success: true, |
||||
fork: { |
||||
npub: userNpub, |
||||
repo: forkRepoName, |
||||
url: forkGitUrl, |
||||
localOnly: isLocalOnly, |
||||
announcementId: signedForkAnnouncement.id, |
||||
ownershipTransferId: signedOwnershipEvent?.id, |
||||
publishedTo: isLocalOnly ? null : { |
||||
announcement: publishResult?.success.length || 0, |
||||
ownershipTransfer: ownershipPublishResult?.success.length || 0 |
||||
} |
||||
}, |
||||
message |
||||
}); |
||||
} catch (err) { |
||||
return handleApiError(err, { operation: 'createFork', npub: context.npub, repo: context.repo }, 'Failed to fork repository'); |
||||
} |
||||
}, |
||||
{ operation: 'createFork', requireRepoExists: false } |
||||
); |
||||
@ -1,31 +1,380 @@
@@ -1,31 +1,380 @@
|
||||
/** |
||||
* API endpoint for checking maintainer status |
||||
* RESTful Maintainers Resource Endpoint |
||||
*
|
||||
* GET /api/repos/{npub}/{repo}/maintainers # List maintainers |
||||
* POST /api/repos/{npub}/{repo}/maintainers # Add maintainer |
||||
* DELETE /api/repos/{npub}/{repo}/maintainers/{npub} # Remove maintainer |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
// @ts-ignore - SvelteKit generates this type
|
||||
import { json, error } from '@sveltejs/kit'; |
||||
import type { RequestHandler } from './$types'; |
||||
import { maintainerService } from '$lib/services/service-registry.js'; |
||||
import { createRepoGetHandler } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext } from '$lib/utils/api-context.js'; |
||||
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js'; |
||||
import { handleValidationError, handleAuthorizationError } from '$lib/utils/error-handler.js'; |
||||
import { nip19 } from 'nostr-tools'; |
||||
import { getPublicKeyWithNIP07, signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js'; |
||||
import { eventCache } from '$lib/services/nostr/event-cache.js'; |
||||
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; |
||||
import { nostrClient } from '$lib/services/service-registry.js'; |
||||
import { KIND } from '$lib/types/nostr.js'; |
||||
import { DEFAULT_NOSTR_RELAYS, combineRelays } from '$lib/config.js'; |
||||
import { getUserRelays } from '$lib/services/nostr/user-relays.js'; |
||||
import { NostrClient } from '$lib/services/nostr/nostr-client.js'; |
||||
import { DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js'; |
||||
import logger from '$lib/services/logger.js'; |
||||
import { getRelaysForEventPublishing } from '$lib/utils/repo-visibility.js'; |
||||
import { AnnouncementManager } from '$lib/services/git/announcement-manager.js'; |
||||
|
||||
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT |
||||
? process.env.GIT_REPO_ROOT |
||||
: '/repos'; |
||||
|
||||
/** |
||||
* GET: List maintainers |
||||
*/ |
||||
export const GET: RequestHandler = createRepoGetHandler( |
||||
async (context: RepoRequestContext) => { |
||||
const { maintainers, owner } = await maintainerService.getMaintainers(context.repoOwnerPubkey, context.repo); |
||||
|
||||
// Convert hex pubkeys to npubs for response
|
||||
const maintainerNpubs = maintainers.map(p => nip19.npubEncode(p)); |
||||
const ownerNpub = nip19.npubEncode(owner); |
||||
|
||||
// If userPubkey provided, check if they're a maintainer
|
||||
// SECURITY: Do NOT leak userPubkey in response - only return boolean status
|
||||
if (context.userPubkeyHex) { |
||||
const isMaintainer = maintainers.includes(context.userPubkeyHex); |
||||
return json({
|
||||
maintainers,
|
||||
owner,
|
||||
maintainers: maintainerNpubs, |
||||
owner: ownerNpub, |
||||
isMaintainer |
||||
// SECURITY: Removed userPubkey leak - client already knows their own pubkey
|
||||
}); |
||||
} |
||||
|
||||
return json({ maintainers, owner }); |
||||
return json({
|
||||
maintainers: maintainerNpubs,
|
||||
owner: ownerNpub
|
||||
}); |
||||
}, |
||||
{ operation: 'getMaintainers', requireRepoExists: false, requireRepoAccess: false } |
||||
); |
||||
|
||||
/** |
||||
* POST: Add maintainer |
||||
* Body: { maintainer: "npub..." } |
||||
*/ |
||||
export const POST: RequestHandler = createRepoPostHandler( |
||||
async (context: RepoRequestContext, event: RequestEvent) => { |
||||
if (!context.userPubkeyHex) { |
||||
return error(401, 'Authentication required'); |
||||
} |
||||
|
||||
// Verify user is owner or maintainer
|
||||
const isMaintainer = await maintainerService.isMaintainer(context.userPubkeyHex, context.repoOwnerPubkey, context.repo); |
||||
if (!isMaintainer) { |
||||
return error(403, 'Only maintainers can add maintainers'); |
||||
} |
||||
|
||||
const body = await event.request.json(); |
||||
const { maintainer } = body; |
||||
|
||||
if (!maintainer) { |
||||
throw handleValidationError('Missing maintainer in request body', {
|
||||
operation: 'addMaintainer',
|
||||
npub: context.npub,
|
||||
repo: context.repo
|
||||
}); |
||||
} |
||||
|
||||
// Decode maintainer npub to hex
|
||||
let maintainerHex: string; |
||||
|
||||
// Try as hex first (most common case)
|
||||
if (/^[0-9a-f]{64}$/i.test(maintainer)) { |
||||
maintainerHex = maintainer.toLowerCase(); |
||||
} else { |
||||
// Try decoding as npub
|
||||
try { |
||||
const decoded = nip19.decode(maintainer) as { type: string; data: unknown }; |
||||
if (decoded.type !== 'npub' || typeof decoded.data !== 'string') { |
||||
throw handleValidationError('Invalid maintainer format. Must be npub or hex pubkey', {
|
||||
operation: 'addMaintainer',
|
||||
npub: context.npub,
|
||||
repo: context.repo
|
||||
}); |
||||
} |
||||
maintainerHex = decoded.data; |
||||
} catch (err) { |
||||
if (err instanceof Error && err.message.includes('Invalid maintainer format')) { |
||||
throw err; |
||||
} |
||||
throw handleValidationError('Invalid maintainer format. Must be npub or hex pubkey', {
|
||||
operation: 'addMaintainer',
|
||||
npub: context.npub,
|
||||
repo: context.repo
|
||||
}); |
||||
} |
||||
} |
||||
|
||||
// Get current announcement
|
||||
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allEvents, context.repo); |
||||
|
||||
if (!announcement) { |
||||
throw handleValidationError('Repository announcement not found', {
|
||||
operation: 'addMaintainer',
|
||||
npub: context.npub,
|
||||
repo: context.repo
|
||||
}); |
||||
} |
||||
|
||||
// Get current maintainers
|
||||
const { maintainers: currentMaintainers } = await maintainerService.getMaintainers( |
||||
context.repoOwnerPubkey, |
||||
context.repo |
||||
); |
||||
|
||||
// Check if already a maintainer
|
||||
if (currentMaintainers.includes(maintainerHex)) { |
||||
return json({
|
||||
success: true,
|
||||
message: 'Maintainer already exists', |
||||
maintainer: maintainer |
||||
}); |
||||
} |
||||
|
||||
// Build updated tags
|
||||
const tags: string[][] = [...announcement.tags]; |
||||
|
||||
// Remove existing maintainers tags
|
||||
const maintainerTagIndices: number[] = []; |
||||
tags.forEach((tag, index) => { |
||||
if (tag[0] === 'maintainers') { |
||||
maintainerTagIndices.push(index); |
||||
} |
||||
}); |
||||
for (let i = maintainerTagIndices.length - 1; i >= 0; i--) { |
||||
tags.splice(maintainerTagIndices[i], 1); |
||||
} |
||||
|
||||
// Add all maintainers (including new one)
|
||||
const allMaintainers = [...currentMaintainers, maintainerHex]; |
||||
if (allMaintainers.length > 0) { |
||||
tags.push(['maintainers', ...allMaintainers]); |
||||
} |
||||
|
||||
// Create updated event
|
||||
const updatedEvent = { |
||||
kind: KIND.REPO_ANNOUNCEMENT, |
||||
pubkey: context.userPubkeyHex, |
||||
created_at: Math.floor(Date.now() / 1000), |
||||
content: announcement.content || '', |
||||
tags |
||||
}; |
||||
|
||||
// Sign and publish
|
||||
const signedEvent = await signEventWithNIP07(updatedEvent); |
||||
|
||||
// Get user's relays
|
||||
const allSearchRelays = [...new Set([...DEFAULT_NOSTR_SEARCH_RELAYS, ...DEFAULT_NOSTR_RELAYS])]; |
||||
const fullRelayClient = new NostrClient(allSearchRelays); |
||||
|
||||
let userRelays: string[] = []; |
||||
try { |
||||
const { inbox, outbox } = await getUserRelays(context.userPubkeyHex, fullRelayClient); |
||||
if (outbox.length > 0) { |
||||
userRelays = combineRelays(outbox, DEFAULT_NOSTR_RELAYS); |
||||
} else if (inbox.length > 0) { |
||||
userRelays = combineRelays(inbox, DEFAULT_NOSTR_RELAYS); |
||||
} else { |
||||
userRelays = DEFAULT_NOSTR_RELAYS; |
||||
} |
||||
} catch (err) { |
||||
logger.warn({ error: err }, 'Failed to fetch user relays, using defaults'); |
||||
userRelays = DEFAULT_NOSTR_RELAYS; |
||||
} |
||||
|
||||
const visibilityRelays = getRelaysForEventPublishing(signedEvent); |
||||
const relaysToPublish = visibilityRelays.length > 0 ? combineRelays([...visibilityRelays, ...userRelays]) : []; |
||||
|
||||
if (relaysToPublish.length > 0) { |
||||
await nostrClient.publishEvent(signedEvent, relaysToPublish); |
||||
} |
||||
|
||||
// Save to repository
|
||||
const repoPath = `${repoRoot}/${context.npub}/${context.repo}.git`; |
||||
const announcementManager = new AnnouncementManager(repoRoot); |
||||
try { |
||||
await announcementManager.ensureAnnouncementInRepo(repoPath, signedEvent); |
||||
} catch (err) { |
||||
logger.error({ error: err }, 'Failed to save maintainer update to repository'); |
||||
} |
||||
|
||||
return json({
|
||||
success: true,
|
||||
maintainer: maintainer, |
||||
message: 'Maintainer added successfully' |
||||
}); |
||||
}, |
||||
{ operation: 'addMaintainer', requireRepoExists: false } |
||||
); |
||||
|
||||
/** |
||||
* DELETE: Remove maintainer |
||||
* Path: /api/repos/{npub}/{repo}/maintainers/{maintainerNpub} |
||||
*/ |
||||
export const DELETE: RequestHandler = createRepoGetHandler( |
||||
async (context: RepoRequestContext, event: RequestEvent) => { |
||||
if (!context.userPubkeyHex) { |
||||
return error(401, 'Authentication required'); |
||||
} |
||||
|
||||
// Get maintainer npub from path
|
||||
const url = new URL(event.request.url); |
||||
const pathParts = url.pathname.split('/'); |
||||
const maintainerNpub = pathParts[pathParts.length - 1]; // Last part of path
|
||||
|
||||
if (!maintainerNpub || maintainerNpub === 'maintainers') { |
||||
throw handleValidationError('Missing maintainer npub in path', {
|
||||
operation: 'removeMaintainer',
|
||||
npub: context.npub,
|
||||
repo: context.repo
|
||||
}); |
||||
} |
||||
|
||||
// Verify user is owner or maintainer
|
||||
const isMaintainer = await maintainerService.isMaintainer(context.userPubkeyHex, context.repoOwnerPubkey, context.repo); |
||||
if (!isMaintainer) { |
||||
return error(403, 'Only maintainers can remove maintainers'); |
||||
} |
||||
|
||||
// Decode maintainer npub to hex
|
||||
let maintainerHex: string; |
||||
try { |
||||
const decoded = nip19.decode(maintainerNpub) as { type: string; data: unknown }; |
||||
if (decoded.type !== 'npub' || typeof decoded.data !== 'string') { |
||||
throw handleValidationError('Invalid maintainer npub format', {
|
||||
operation: 'removeMaintainer',
|
||||
npub: context.npub,
|
||||
repo: context.repo
|
||||
}); |
||||
} |
||||
maintainerHex = decoded.data; |
||||
} catch (err) { |
||||
if (err instanceof Error && err.message.includes('Invalid maintainer')) { |
||||
throw err; |
||||
} |
||||
throw handleValidationError('Invalid maintainer npub format', {
|
||||
operation: 'removeMaintainer',
|
||||
npub: context.npub,
|
||||
repo: context.repo
|
||||
}); |
||||
} |
||||
|
||||
// Get current maintainers
|
||||
const { maintainers: currentMaintainers, owner } = await maintainerService.getMaintainers( |
||||
context.repoOwnerPubkey, |
||||
context.repo |
||||
); |
||||
|
||||
// Cannot remove owner
|
||||
if (maintainerHex === owner) { |
||||
return error(403, 'Cannot remove repository owner from maintainers'); |
||||
} |
||||
|
||||
// Check if maintainer exists
|
||||
if (!currentMaintainers.includes(maintainerHex)) { |
||||
return json({
|
||||
success: true,
|
||||
message: 'Maintainer not found (may have already been removed)', |
||||
maintainer: maintainerNpub |
||||
}); |
||||
} |
||||
|
||||
// Get current announcement
|
||||
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allEvents, context.repo); |
||||
|
||||
if (!announcement) { |
||||
throw handleValidationError('Repository announcement not found', {
|
||||
operation: 'removeMaintainer',
|
||||
npub: context.npub,
|
||||
repo: context.repo
|
||||
}); |
||||
} |
||||
|
||||
// Build updated tags
|
||||
const tags: string[][] = [...announcement.tags]; |
||||
|
||||
// Remove existing maintainers tags
|
||||
const maintainerTagIndices: number[] = []; |
||||
tags.forEach((tag, index) => { |
||||
if (tag[0] === 'maintainers') { |
||||
maintainerTagIndices.push(index); |
||||
} |
||||
}); |
||||
for (let i = maintainerTagIndices.length - 1; i >= 0; i--) { |
||||
tags.splice(maintainerTagIndices[i], 1); |
||||
} |
||||
|
||||
// Add all maintainers except the one being removed
|
||||
const remainingMaintainers = currentMaintainers.filter(m => m !== maintainerHex); |
||||
if (remainingMaintainers.length > 0) { |
||||
tags.push(['maintainers', ...remainingMaintainers]); |
||||
} |
||||
|
||||
// Create updated event
|
||||
const updatedEvent = { |
||||
kind: KIND.REPO_ANNOUNCEMENT, |
||||
pubkey: context.userPubkeyHex, |
||||
created_at: Math.floor(Date.now() / 1000), |
||||
content: announcement.content || '', |
||||
tags |
||||
}; |
||||
|
||||
// Sign and publish
|
||||
const signedEvent = await signEventWithNIP07(updatedEvent); |
||||
|
||||
// Get user's relays
|
||||
const allSearchRelays = [...new Set([...DEFAULT_NOSTR_SEARCH_RELAYS, ...DEFAULT_NOSTR_RELAYS])]; |
||||
const fullRelayClient = new NostrClient(allSearchRelays); |
||||
|
||||
let userRelays: string[] = []; |
||||
try { |
||||
const { inbox, outbox } = await getUserRelays(context.userPubkeyHex, fullRelayClient); |
||||
if (outbox.length > 0) { |
||||
userRelays = combineRelays(outbox, DEFAULT_NOSTR_RELAYS); |
||||
} else if (inbox.length > 0) { |
||||
userRelays = combineRelays(inbox, DEFAULT_NOSTR_RELAYS); |
||||
} else { |
||||
userRelays = DEFAULT_NOSTR_RELAYS; |
||||
} |
||||
} catch (err) { |
||||
logger.warn({ error: err }, 'Failed to fetch user relays, using defaults'); |
||||
userRelays = DEFAULT_NOSTR_RELAYS; |
||||
} |
||||
|
||||
const visibilityRelays = getRelaysForEventPublishing(signedEvent); |
||||
const relaysToPublish = visibilityRelays.length > 0 ? combineRelays([...visibilityRelays, ...userRelays]) : []; |
||||
|
||||
if (relaysToPublish.length > 0) { |
||||
await nostrClient.publishEvent(signedEvent, relaysToPublish); |
||||
} |
||||
|
||||
// Save to repository
|
||||
const repoPath = `${repoRoot}/${context.npub}/${context.repo}.git`; |
||||
const announcementManager = new AnnouncementManager(repoRoot); |
||||
try { |
||||
await announcementManager.ensureAnnouncementInRepo(repoPath, signedEvent); |
||||
} catch (err) { |
||||
logger.error({ error: err }, 'Failed to save maintainer update to repository'); |
||||
} |
||||
|
||||
return json({
|
||||
success: true,
|
||||
maintainer: maintainerNpub, |
||||
message: 'Maintainer removed successfully' |
||||
}); |
||||
}, |
||||
{ operation: 'getMaintainers', requireRepoExists: false, requireRepoAccess: false } // Maintainer list is public info, doesn't need repo to exist
|
||||
{ operation: 'removeMaintainer', requireRepoExists: false } |
||||
); |
||||
|
||||
@ -1,111 +0,0 @@
@@ -1,111 +0,0 @@
|
||||
/** |
||||
* API endpoint for merging Pull Requests |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
// @ts-ignore - SvelteKit generates this type
|
||||
import type { RequestHandler } from './$types'; |
||||
import { withRepoValidation } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext } from '$lib/utils/api-context.js'; |
||||
import { handleValidationError, handleApiError } from '$lib/utils/error-handler.js'; |
||||
import { prsService, repoManager, fileManager, maintainerService } from '$lib/services/service-registry.js'; |
||||
import { simpleGit } from 'simple-git'; |
||||
import { join } from 'path'; |
||||
import { existsSync } from 'fs'; |
||||
import logger from '$lib/services/logger.js'; |
||||
import { isValidBranchName } from '$lib/utils/security.js'; |
||||
import { validatePubkey } from '$lib/utils/input-validation.js'; |
||||
|
||||
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT |
||||
? process.env.GIT_REPO_ROOT |
||||
: '/repos'; |
||||
|
||||
export const POST: RequestHandler = withRepoValidation( |
||||
async ({ repoContext, requestContext, event }) => { |
||||
const body = await event.request.json(); |
||||
const { prId, prAuthor, prCommitId, targetBranch = 'main', mergeMessage } = body; |
||||
|
||||
// Validate required fields
|
||||
if (!prId || typeof prId !== 'string' || prId.length !== 64) { |
||||
throw handleValidationError('Invalid prId: must be a 64-character hex string', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }); |
||||
} |
||||
|
||||
if (!prAuthor || typeof prAuthor !== 'string') { |
||||
throw handleValidationError('Invalid prAuthor: must be a string', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }); |
||||
} |
||||
|
||||
// Validate pubkey format
|
||||
const pubkeyValidation = validatePubkey(prAuthor); |
||||
if (!pubkeyValidation.valid) { |
||||
throw handleValidationError(`Invalid prAuthor: ${pubkeyValidation.error}`, { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }); |
||||
} |
||||
|
||||
if (!prCommitId || typeof prCommitId !== 'string' || prCommitId.length !== 40) { |
||||
throw handleValidationError('Invalid prCommitId: must be a 40-character commit hash', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }); |
||||
} |
||||
|
||||
// Validate branch name
|
||||
if (!isValidBranchName(targetBranch)) { |
||||
throw handleValidationError(`Invalid branch name: ${targetBranch}`, { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }); |
||||
} |
||||
|
||||
// Validate merge message if provided
|
||||
if (mergeMessage && (typeof mergeMessage !== 'string' || mergeMessage.length > 10000)) { |
||||
throw handleValidationError('Invalid mergeMessage: must be a string with max 10000 characters', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }); |
||||
} |
||||
|
||||
// Check if user is maintainer
|
||||
const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo); |
||||
|
||||
if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) { |
||||
throw handleApiError(new Error('Only repository owners and maintainers can merge PRs'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized'); |
||||
} |
||||
|
||||
// Check if repo exists locally
|
||||
const repoPath = join(repoRoot, repoContext.npub, `${repoContext.repo}.git`); |
||||
if (!existsSync(repoPath)) { |
||||
throw handleApiError(new Error('Repository not cloned locally. Please clone the repository first.'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Repository not found'); |
||||
} |
||||
|
||||
// Get user info for commit
|
||||
const authorName = requestContext.userName || 'GitRepublic User'; |
||||
const authorEmail = requestContext.userEmail || `${requestContext.userPubkeyHex?.slice(0, 20)}@gitrepublic.web`; |
||||
|
||||
try { |
||||
const git = simpleGit(repoPath); |
||||
|
||||
// Fetch latest changes
|
||||
await git.fetch(['origin']).catch(() => {}); // Ignore errors if no remote
|
||||
|
||||
// Checkout target branch
|
||||
await git.checkout(targetBranch); |
||||
|
||||
// Merge the PR commit
|
||||
const mergeMessageText = mergeMessage || `Merge pull request ${prId.slice(0, 7)}`; |
||||
await git.merge([prCommitId, '--no-ff', '-m', mergeMessageText]); |
||||
|
||||
// Get the merge commit ID
|
||||
const mergeCommitId = (await git.revparse(['HEAD'])).trim(); |
||||
|
||||
// Update PR status to merged
|
||||
const statusEvent = await prsService.updatePRStatus( |
||||
prId, |
||||
prAuthor, |
||||
repoContext.repoOwnerPubkey, |
||||
repoContext.repo, |
||||
'merged', |
||||
mergeCommitId |
||||
); |
||||
|
||||
return json({
|
||||
success: true,
|
||||
mergeCommitId, |
||||
statusEvent
|
||||
}); |
||||
} catch (err) { |
||||
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, prId, prCommitId }, 'Error merging PR'); |
||||
throw handleApiError(err instanceof Error ? err : new Error('Failed to merge PR'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Failed to merge pull request'); |
||||
} |
||||
}, |
||||
{ operation: 'mergePR', requireRepoAccess: true } |
||||
); |
||||
@ -1,43 +0,0 @@
@@ -1,43 +0,0 @@
|
||||
/** |
||||
* API endpoint for updating Pull Requests (kind 1619) |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
// @ts-ignore - SvelteKit generates this type
|
||||
import type { RequestHandler } from './$types'; |
||||
import { withRepoValidation } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext } from '$lib/utils/api-context.js'; |
||||
import { handleValidationError, handleApiError } from '$lib/utils/error-handler.js'; |
||||
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js'; |
||||
import { prsService } from '$lib/services/service-registry.js'; |
||||
import { getGitUrl } from '$lib/config.js'; |
||||
|
||||
export const POST: RequestHandler = withRepoValidation( |
||||
async ({ repoContext, requestContext, event }) => { |
||||
const body = await event.request.json(); |
||||
const { prId, prAuthor, newCommitId, mergeBase } = body; |
||||
|
||||
if (!prId || !prAuthor || !newCommitId) { |
||||
throw handleValidationError('Missing required fields: prId, prAuthor, newCommitId', { operation: 'updatePR', npub: repoContext.npub, repo: repoContext.repo }); |
||||
} |
||||
|
||||
// Only PR author can update their PR
|
||||
if (requestContext.userPubkeyHex !== prAuthor) { |
||||
throw handleApiError(new Error('Only the PR author can update the PR'), { operation: 'updatePR', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized'); |
||||
} |
||||
|
||||
const cloneUrl = getGitUrl(repoContext.npub, repoContext.repo); |
||||
const updateEvent = await prsService.updatePullRequest( |
||||
prId, |
||||
prAuthor, |
||||
repoContext.repoOwnerPubkey, |
||||
repoContext.repo, |
||||
newCommitId, |
||||
cloneUrl, |
||||
mergeBase |
||||
); |
||||
|
||||
return json({ success: true, event: updateEvent }); |
||||
}, |
||||
{ operation: 'updatePR', requireRepoAccess: false } |
||||
); |
||||
@ -0,0 +1,136 @@
@@ -0,0 +1,136 @@
|
||||
/** |
||||
* RESTful Pull Request Individual Resource Endpoint |
||||
*
|
||||
* GET /api/repos/{npub}/{repo}/pull-requests/{id} # Get pull request |
||||
* PATCH /api/repos/{npub}/{repo}/pull-requests/{id} # Update pull request status |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
// @ts-ignore - SvelteKit generates this type
|
||||
import type { RequestHandler } from './$types'; |
||||
import { prsService, nostrClient } from '$lib/services/service-registry.js'; |
||||
import { createRepoGetHandler, withRepoValidation } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js'; |
||||
import { handleValidationError, handleApiError } from '$lib/utils/error-handler.js'; |
||||
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js'; |
||||
import { getRelaysForEventPublishing } from '$lib/utils/repo-visibility.js'; |
||||
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; |
||||
import { eventCache } from '$lib/services/nostr/event-cache.js'; |
||||
import { KIND } from '$lib/types/nostr.js'; |
||||
|
||||
export const GET: RequestHandler = createRepoGetHandler( |
||||
async (context: RepoRequestContext, event: RequestEvent) => { |
||||
const id = (event.params as any).id; |
||||
|
||||
if (!id) { |
||||
throw handleValidationError('Missing pull request ID', { operation: 'getPullRequest', npub: context.npub, repo: context.repo }); |
||||
} |
||||
|
||||
try { |
||||
// Fetch the PR event
|
||||
const prEvents = await nostrClient.fetchEvents([ |
||||
{ |
||||
kinds: [KIND.PULL_REQUEST], |
||||
ids: [id], |
||||
limit: 1 |
||||
} |
||||
]); |
||||
|
||||
if (prEvents.length === 0) { |
||||
throw handleApiError(new Error('Pull request not found'), { operation: 'getPullRequest', npub: context.npub, repo: context.repo }, 'Pull request not found'); |
||||
} |
||||
|
||||
return json(prEvents[0]); |
||||
} catch (err) { |
||||
throw handleApiError(err, { operation: 'getPullRequest', npub: context.npub, repo: context.repo }, 'Failed to get pull request'); |
||||
} |
||||
}, |
||||
{ operation: 'getPullRequest', requireRepoExists: false, requireRepoAccess: false } |
||||
); |
||||
|
||||
/**
 * PATCH handler: update a pull request, in one of two mutually exclusive ways.
 *
 * - body.status      → set the PR status (owner/maintainer only); a status
 *                      event is published to visibility-appropriate relays.
 * - body.newCommitId → update the PR head commit (PR author only).
 *
 * If neither field is present, a validation error is thrown.
 */
export const PATCH: RequestHandler = withRepoValidation(
  async ({ repoContext, requestContext, event }) => {
    const id = (event.params as any).id;
    const body = await event.request.json();
    const { status, mergeCommitId, newCommitId, mergeBase } = body;

    if (!id) {
      throw handleValidationError('Missing pull request ID', { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo });
    }

    // Fetch the PR event first — its pubkey identifies the PR author,
    // which both authorization paths below depend on.
    const prEvents = await nostrClient.fetchEvents([
      {
        kinds: [KIND.PULL_REQUEST],
        ids: [id],
        limit: 1
      }
    ]);

    if (prEvents.length === 0) {
      throw handleApiError(new Error('Pull request not found'), { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Pull request not found');
    }

    const prEvent = prEvents[0];
    const prAuthor = prEvent.pubkey;

    // --- Path 1: status update (owner/maintainer only) -------------------
    if (status !== undefined) {
      // NOTE(review): MaintainerService is dynamically imported and
      // constructed per request here, unlike other endpoints that use the
      // shared service registry — presumably to avoid a circular import;
      // confirm, and consider reusing a shared instance.
      const { MaintainerService } = await import('$lib/services/nostr/maintainer-service.js');
      const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
      const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo);

      if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) {
        throw handleApiError(new Error('Only repository owners and maintainers can update PR status'), { operation: 'updatePullRequestStatus', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
      }

      // Rejects defined-but-falsy statuses ('' / null / 0 / false); the
      // undefined case was already excluded by the branch condition above.
      if (!status) {
        throw handleValidationError('Missing required field: status', { operation: 'updatePullRequestStatus', npub: repoContext.npub, repo: repoContext.repo });
      }

      // Get the repository announcement to determine visibility, which in
      // turn decides the relay set to publish the status event to.
      const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoContext.repoOwnerPubkey, eventCache);
      const announcement = findRepoAnnouncement(allEvents, repoContext.repo);

      // No announcement → fall back to the default relay set.
      const relaysToPublish = announcement ? getRelaysForEventPublishing(announcement) : DEFAULT_NOSTR_RELAYS;

      // Publish the status event (mergeCommitId may be undefined for
      // non-merge statuses).
      const statusEvent = await prsService.updatePRStatus(
        id,
        prAuthor,
        repoContext.repoOwnerPubkey,
        repoContext.repo,
        status,
        mergeCommitId,
        relaysToPublish
      );

      return json({ success: true, event: statusEvent });
    }

    // --- Path 2: head-commit update (PR author only) ---------------------
    if (newCommitId !== undefined) {
      if (requestContext.userPubkeyHex !== prAuthor) {
        throw handleApiError(new Error('Only the PR author can update the PR commit'), { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
      }

      const { getGitUrl } = await import('$lib/config.js');
      const cloneUrl = getGitUrl(repoContext.npub, repoContext.repo);
      const updateEvent = await prsService.updatePullRequest(
        id,
        prAuthor,
        repoContext.repoOwnerPubkey,
        repoContext.repo,
        newCommitId,
        cloneUrl,
        mergeBase
      );

      return json({ success: true, event: updateEvent });
    }

    // Neither update field was supplied.
    throw handleValidationError('Missing required field: status or newCommitId', { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo });
  },
  { operation: 'updatePullRequest', requireRepoAccess: false }
);
||||
@ -1,153 +0,0 @@
@@ -1,153 +0,0 @@
|
||||
/** |
||||
* API endpoint for raw file access |
||||
*/ |
||||
|
||||
import type { RequestHandler } from './$types'; |
||||
import { fileManager, repoManager } from '$lib/services/service-registry.js'; |
||||
import { createRepoGetHandler } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js'; |
||||
import { handleValidationError } from '$lib/utils/error-handler.js'; |
||||
import { spawn } from 'child_process'; |
||||
import { join } from 'path'; |
||||
import { promisify } from 'util'; |
||||
|
||||
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT |
||||
? process.env.GIT_REPO_ROOT |
||||
: '/repos'; |
||||
|
||||
// Check if a file extension is a binary image type
|
||||
function isBinaryImage(ext: string): boolean { |
||||
const binaryImageExtensions = ['png', 'jpg', 'jpeg', 'gif', 'webp', 'bmp', 'ico', 'apng', 'avif']; |
||||
return binaryImageExtensions.includes(ext.toLowerCase()); |
||||
} |
||||
|
||||
/**
 * GET handler: serve a raw file from a repository at a given ref.
 *
 * Binary images are streamed byte-exact via `git ls-tree` + `git cat-file`;
 * all other files (including SVG, which is text) go through
 * fileManager.getFileContent. Responses carry a best-effort Content-Type
 * derived from the file extension, an inline Content-Disposition, and a
 * 1-hour public cache header.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    // Path/ref come from the route context, falling back to query params.
    const filePath = context.path || event.url.searchParams.get('path');
    const ref = context.ref || event.url.searchParams.get('ref') || 'HEAD';

    if (!filePath) {
      throw handleValidationError('Missing path parameter', { operation: 'getRawFile', npub: context.npub, repo: context.repo });
    }

    // Determine content type based on file extension (default text/plain).
    const ext = filePath.split('.').pop()?.toLowerCase();
    const contentTypeMap: Record<string, string> = {
      'js': 'application/javascript',
      'ts': 'application/typescript',
      'json': 'application/json',
      'css': 'text/css',
      'html': 'text/html',
      'xml': 'application/xml',
      'svg': 'image/svg+xml',
      'png': 'image/png',
      'jpg': 'image/jpeg',
      'jpeg': 'image/jpeg',
      'gif': 'image/gif',
      'webp': 'image/webp',
      'bmp': 'image/bmp',
      'ico': 'image/x-icon',
      'pdf': 'application/pdf',
      'txt': 'text/plain',
      'md': 'text/markdown',
      'yml': 'text/yaml',
      'yaml': 'text/yaml',
    };

    const contentType = contentTypeMap[ext || ''] || 'text/plain';

    // For binary image files, use git plumbing to get the raw bytes —
    // going through a string-based file reader would corrupt binary data.
    if (ext && isBinaryImage(ext)) {
      const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);

      // NOTE(review): ref and filePath originate from the URL and are passed
      // to git as argv entries (spawn with an args array, so no shell
      // interpolation) — but a value starting with '-' could still be parsed
      // as a git option. Consider validating them or inserting a '--'
      // separator before the path.
      return new Promise<Response>((resolve, reject) => {
        // Step 1: resolve the blob hash for (ref, filePath) via git ls-tree.
        const lsTreeProcess = spawn('git', ['ls-tree', ref, filePath], {
          cwd: repoPath,
          stdio: ['ignore', 'pipe', 'pipe']
        });

        let lsTreeOutput = '';
        let lsTreeError = '';

        lsTreeProcess.stdout.on('data', (data: Buffer) => {
          lsTreeOutput += data.toString();
        });

        lsTreeProcess.stderr.on('data', (data: Buffer) => {
          lsTreeError += data.toString();
        });

        lsTreeProcess.on('close', (code) => {
          if (code !== 0) {
            reject(new Error(`Failed to get file hash: ${lsTreeError || 'Unknown error'}`));
            return;
          }

          // Parse the output: format is "mode type hash\tpath".
          const match = lsTreeOutput.match(/^\d+\s+\w+\s+([a-f0-9]{40})\s+/);
          if (!match) {
            reject(new Error('Failed to parse file hash from git ls-tree output'));
            return;
          }

          const blobHash = match[1];

          // Step 2: stream the blob's raw content via git cat-file. The
          // child process is intentionally spawned only after ls-tree
          // succeeded.
          const catFileProcess = spawn('git', ['cat-file', 'blob', blobHash], {
            cwd: repoPath,
            stdio: ['ignore', 'pipe', 'pipe']
          });

          // Collect stdout chunks as Buffers to preserve binary data.
          const chunks: Buffer[] = [];
          let catFileError = '';

          catFileProcess.stdout.on('data', (data: Buffer) => {
            chunks.push(data);
          });

          catFileProcess.stderr.on('data', (data: Buffer) => {
            catFileError += data.toString();
          });

          catFileProcess.on('close', (code) => {
            if (code !== 0) {
              reject(new Error(`Failed to get file content: ${catFileError || 'Unknown error'}`));
              return;
            }

            const binaryContent = Buffer.concat(chunks);
            resolve(new Response(binaryContent, {
              headers: {
                'Content-Type': contentType,
                'Content-Disposition': `inline; filename="${filePath.split('/').pop()}"`,
                'Cache-Control': 'public, max-age=3600'
              }
            }));
          });

          // Spawn failure of cat-file itself (e.g. git not on PATH).
          catFileProcess.on('error', (err) => {
            reject(new Error(`Failed to execute git cat-file: ${err.message}`));
          });
        });

        // Spawn failure of ls-tree itself.
        lsTreeProcess.on('error', (err) => {
          reject(new Error(`Failed to execute git ls-tree: ${err.message}`));
        });
      });
    } else {
      // For text files (including SVG), use the existing string-based path.
      const fileData = await fileManager.getFileContent(context.npub, context.repo, filePath, ref);

      return new Response(fileData.content, {
        headers: {
          'Content-Type': contentType,
          'Content-Disposition': `inline; filename="${filePath.split('/').pop()}"`,
          'Cache-Control': 'public, max-age=3600'
        }
      });
    }
  },
  { operation: 'getRawFile' }
);
); |
||||
@ -1,327 +0,0 @@
@@ -1,327 +0,0 @@
|
||||
/** |
||||
* API endpoint for listing files and directories in a repository |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
import type { RequestHandler } from './$types'; |
||||
import { fileManager, repoManager, nostrClient } from '$lib/services/service-registry.js'; |
||||
import { createRepoGetHandler } from '$lib/utils/api-handlers.js'; |
||||
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js'; |
||||
import { handleApiError, handleNotFoundError } from '$lib/utils/error-handler.js'; |
||||
import { KIND } from '$lib/types/nostr.js'; |
||||
import { join, resolve } from 'path'; |
||||
import { existsSync } from 'fs'; |
||||
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js'; |
||||
import logger from '$lib/services/logger.js'; |
||||
import { eventCache } from '$lib/services/nostr/event-cache.js'; |
||||
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; |
||||
|
||||
// Resolve GIT_REPO_ROOT to absolute path (handles both relative and absolute paths)
|
||||
const repoRootEnv = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT |
||||
? process.env.GIT_REPO_ROOT |
||||
: '/repos'; |
||||
const repoRoot = resolve(repoRootEnv); |
||||
|
||||
export const GET: RequestHandler = createRepoGetHandler( |
||||
async (context: RepoRequestContext) => { |
||||
const repoPath = join(repoRoot, context.npub, `${context.repo}.git`); |
||||
|
||||
// If repo doesn't exist, try to fetch it on-demand
|
||||
if (!existsSync(repoPath)) { |
||||
try { |
||||
// Fetch repository announcement from Nostr (case-insensitive) with caching
|
||||
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allEvents, context.repo); |
||||
|
||||
if (announcement) { |
||||
// Try API-based fetching first (no cloning)
|
||||
const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js'); |
||||
const { extractCloneUrls: extractCloneUrlsHelper } = await import('$lib/utils/nostr-utils.js'); |
||||
const cloneUrlsForLogging = extractCloneUrlsHelper(announcement); |
||||
|
||||
logger.debug({ npub: context.npub, repo: context.repo, cloneUrlCount: cloneUrlsForLogging.length, cloneUrls: cloneUrlsForLogging, path: context.path }, 'Attempting API fallback for tree'); |
||||
|
||||
const apiData = await tryApiFetch(announcement, context.npub, context.repo); |
||||
|
||||
if (apiData && apiData.files !== undefined) { |
||||
// Return empty array if no files (legitimate for empty repos)
|
||||
// Only proceed if we have files to filter
|
||||
if (apiData.files.length === 0) { |
||||
logger.debug({ npub: context.npub, repo: context.repo, path: context.path }, 'API fallback returned empty files array (repo may be empty)'); |
||||
return json([]); |
||||
} |
||||
logger.debug({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback'); |
||||
// Return API data directly without cloning
|
||||
const path = context.path || ''; |
||||
// Filter files by path if specified
|
||||
let filteredFiles: typeof apiData.files; |
||||
if (path) { |
||||
// Normalize path: ensure it ends with / for directory matching
|
||||
const normalizedPath = path.endsWith('/') ? path : `${path}/`; |
||||
// Filter files that are directly in this directory (not in subdirectories)
|
||||
filteredFiles = apiData.files.filter(f => { |
||||
// File must start with the normalized path
|
||||
if (!f.path.startsWith(normalizedPath)) { |
||||
return false; |
||||
} |
||||
// Get the relative path after the directory prefix
|
||||
const relativePath = f.path.slice(normalizedPath.length); |
||||
// If relative path is empty, skip (this would be the directory itself)
|
||||
if (!relativePath) { |
||||
return false; |
||||
} |
||||
// Remove trailing slash from relative path for directories
|
||||
const cleanRelativePath = relativePath.endsWith('/') ? relativePath.slice(0, -1) : relativePath; |
||||
// Check if it's directly in this directory (no additional / in the relative path)
|
||||
// This works for both files (e.g., "icon.svg") and directories (e.g., "subfolder")
|
||||
return !cleanRelativePath.includes('/'); |
||||
}); |
||||
} else { |
||||
// Root directory: show only files and directories in root
|
||||
filteredFiles = apiData.files.filter(f => { |
||||
// Remove trailing slash for directories
|
||||
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path; |
||||
const pathParts = cleanPath.split('/'); |
||||
// Include only items in root (single path segment)
|
||||
return pathParts.length === 1; |
||||
}); |
||||
} |
||||
|
||||
// Normalize type: API returns 'dir' but frontend expects 'directory'
|
||||
// Also update name to be just the filename/dirname for display
|
||||
const normalizedFiles = filteredFiles.map(f => { |
||||
// Extract display name from path
|
||||
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path; |
||||
const pathParts = cleanPath.split('/'); |
||||
const displayName = pathParts[pathParts.length - 1] || f.name; |
||||
return { |
||||
name: displayName, |
||||
path: f.path, |
||||
type: (f.type === 'dir' ? 'directory' : 'file') as 'file' | 'directory', |
||||
size: f.size |
||||
}; |
||||
}); |
||||
|
||||
return json(normalizedFiles); |
||||
} |
||||
|
||||
// API fetch failed - repo is not cloned and API fetch didn't work
|
||||
// Check if announcement has clone URLs to provide better error message
|
||||
const { extractCloneUrls } = await import('$lib/utils/nostr-utils.js'); |
||||
const cloneUrls = extractCloneUrls(announcement); |
||||
const hasCloneUrls = cloneUrls.length > 0; |
||||
|
||||
logger.debug({ npub: context.npub, repo: context.repo, hasCloneUrls, cloneUrlCount: cloneUrls.length }, 'API fallback failed or no clone URLs available'); |
||||
|
||||
throw handleNotFoundError( |
||||
hasCloneUrls
|
||||
? 'Repository is not cloned locally and could not be fetched via API. Privileged users can clone this repository using the "Clone to Server" button.' |
||||
: 'Repository is not cloned locally and has no external clone URLs for API fallback. Privileged users can clone this repository using the "Clone to Server" button.', |
||||
{ operation: 'listFiles', npub: context.npub, repo: context.repo } |
||||
); |
||||
} else { |
||||
throw handleNotFoundError( |
||||
'Repository announcement not found in Nostr', |
||||
{ operation: 'listFiles', npub: context.npub, repo: context.repo } |
||||
); |
||||
} |
||||
} catch (err) { |
||||
// Check if repo was created by another concurrent request
|
||||
if (existsSync(repoPath)) { |
||||
// Repo exists now, clear cache and continue with normal flow
|
||||
repoCache.delete(RepoCache.repoExistsKey(context.npub, context.repo)); |
||||
} else { |
||||
// If fetching fails, return 404
|
||||
throw handleNotFoundError( |
||||
'Repository not found', |
||||
{ operation: 'listFiles', npub: context.npub, repo: context.repo } |
||||
); |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Double-check repo exists (should be true if we got here)
|
||||
if (!existsSync(repoPath)) { |
||||
throw handleNotFoundError( |
||||
'Repository not found', |
||||
{ operation: 'listFiles', npub: context.npub, repo: context.repo } |
||||
); |
||||
} |
||||
|
||||
// Get default branch if no ref specified
|
||||
let ref = context.ref || 'HEAD'; |
||||
// If ref is a branch name, validate it exists or use default branch
|
||||
if (ref !== 'HEAD' && !ref.startsWith('refs/')) { |
||||
try { |
||||
const branches = await fileManager.getBranches(context.npub, context.repo); |
||||
if (!branches.includes(ref)) { |
||||
// Branch doesn't exist, use default branch
|
||||
ref = await fileManager.getDefaultBranch(context.npub, context.repo); |
||||
} |
||||
} catch { |
||||
// If we can't get branches, fall back to HEAD
|
||||
ref = 'HEAD'; |
||||
} |
||||
} |
||||
const path = context.path || ''; |
||||
|
||||
try { |
||||
const files = await fileManager.listFiles(context.npub, context.repo, ref, path); |
||||
|
||||
// If repo exists but has no files (empty repo), try API fallback
|
||||
if (files.length === 0) { |
||||
logger.debug({ npub: context.npub, repo: context.repo, path, ref }, 'Repo exists but is empty, attempting API fallback for tree'); |
||||
|
||||
try { |
||||
// Fetch repository announcement for API fallback
|
||||
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allEvents, context.repo); |
||||
|
||||
if (announcement) { |
||||
const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js'); |
||||
const apiData = await tryApiFetch(announcement, context.npub, context.repo); |
||||
|
||||
if (apiData && apiData.files && apiData.files.length > 0) { |
||||
logger.info({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback for empty repo'); |
||||
|
||||
// Filter files by path if specified (same logic as above)
|
||||
let filteredFiles: typeof apiData.files; |
||||
if (path) { |
||||
const normalizedPath = path.endsWith('/') ? path : `${path}/`; |
||||
filteredFiles = apiData.files.filter(f => { |
||||
if (!f.path.startsWith(normalizedPath)) { |
||||
return false; |
||||
} |
||||
const relativePath = f.path.slice(normalizedPath.length); |
||||
if (!relativePath) { |
||||
return false; |
||||
} |
||||
const cleanRelativePath = relativePath.endsWith('/') ? relativePath.slice(0, -1) : relativePath; |
||||
return !cleanRelativePath.includes('/'); |
||||
}); |
||||
} else { |
||||
filteredFiles = apiData.files.filter(f => { |
||||
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path; |
||||
const pathParts = cleanPath.split('/'); |
||||
return pathParts.length === 1; |
||||
}); |
||||
} |
||||
|
||||
// Normalize type and name
|
||||
const normalizedFiles = filteredFiles.map(f => { |
||||
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path; |
||||
const pathParts = cleanPath.split('/'); |
||||
const displayName = pathParts[pathParts.length - 1] || f.name; |
||||
return { |
||||
name: displayName, |
||||
path: f.path, |
||||
type: (f.type === 'dir' ? 'directory' : 'file') as 'file' | 'directory', |
||||
size: f.size |
||||
}; |
||||
}); |
||||
|
||||
return json(normalizedFiles); |
||||
} |
||||
} |
||||
} catch (apiErr) { |
||||
logger.debug({ error: apiErr, npub: context.npub, repo: context.repo }, 'API fallback failed for empty repo, returning empty files'); |
||||
} |
||||
} |
||||
|
||||
// Debug logging to help diagnose missing files
|
||||
logger.debug({
|
||||
npub: context.npub,
|
||||
repo: context.repo,
|
||||
path,
|
||||
ref,
|
||||
fileCount: files.length, |
||||
files: files.map(f => ({ name: f.name, path: f.path, type: f.type })) |
||||
}, '[Tree] Returning files from fileManager.listFiles'); |
||||
return json(files); |
||||
} catch (err) { |
||||
// If error occurs, try API fallback before giving up
|
||||
logger.debug({ error: err, npub: context.npub, repo: context.repo }, '[Tree] Error listing files, attempting API fallback'); |
||||
|
||||
try { |
||||
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); |
||||
const announcement = findRepoAnnouncement(allEvents, context.repo); |
||||
|
||||
if (announcement) { |
||||
const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js'); |
||||
const apiData = await tryApiFetch(announcement, context.npub, context.repo); |
||||
|
||||
if (apiData && apiData.files && apiData.files.length > 0) { |
||||
logger.info({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback after error'); |
||||
|
||||
// Filter and normalize files (same logic as above)
|
||||
const path = context.path || ''; |
||||
let filteredFiles: typeof apiData.files; |
||||
if (path) { |
||||
const normalizedPath = path.endsWith('/') ? path : `${path}/`; |
||||
filteredFiles = apiData.files.filter(f => { |
||||
if (!f.path.startsWith(normalizedPath)) return false; |
||||
const relativePath = f.path.slice(normalizedPath.length); |
||||
if (!relativePath) return false; |
||||
const cleanRelativePath = relativePath.endsWith('/') ? relativePath.slice(0, -1) : relativePath; |
||||
return !cleanRelativePath.includes('/'); |
||||
}); |
||||
} else { |
||||
filteredFiles = apiData.files.filter(f => { |
||||
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path; |
||||
return cleanPath.split('/').length === 1; |
||||
}); |
||||
} |
||||
|
||||
const normalizedFiles = filteredFiles.map(f => { |
||||
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path; |
||||
const pathParts = cleanPath.split('/'); |
||||
const displayName = pathParts[pathParts.length - 1] || f.name; |
||||
return { |
||||
name: displayName, |
||||
path: f.path, |
||||
type: (f.type === 'dir' ? 'directory' : 'file') as 'file' | 'directory', |
||||
size: f.size |
||||
}; |
||||
}); |
||||
|
||||
return json(normalizedFiles); |
||||
} |
||||
} |
||||
} catch (apiErr) { |
||||
logger.debug({ error: apiErr, npub: context.npub, repo: context.repo }, 'API fallback failed after error'); |
||||
} |
||||
|
||||
// Log the actual error for debugging
|
||||
logger.error({ error: err, npub: context.npub, repo: context.repo, path: context.path }, '[Tree] Error listing files'); |
||||
|
||||
// For optional paths (like "docs"), return empty array instead of 404
|
||||
// This allows components to gracefully handle missing directories
|
||||
const optionalPaths = ['docs']; |
||||
if (context.path && optionalPaths.includes(context.path.toLowerCase())) { |
||||
logger.debug({ npub: context.npub, repo: context.repo, path: context.path }, '[Tree] Optional path not found, returning empty array'); |
||||
return json([]); |
||||
} |
||||
|
||||
// Check if it's a "not found" error for the repo itself
|
||||
if (err instanceof Error && (err.message.includes('Repository not found') || err.message.includes('not cloned'))) { |
||||
throw handleNotFoundError( |
||||
err.message, |
||||
{ operation: 'listFiles', npub: context.npub, repo: context.repo } |
||||
); |
||||
} |
||||
|
||||
// For other errors with optional paths, return empty array
|
||||
if (context.path && optionalPaths.includes(context.path.toLowerCase())) { |
||||
return json([]); |
||||
} |
||||
|
||||
// Otherwise, it's a server error
|
||||
throw handleApiError( |
||||
err, |
||||
{ operation: 'listFiles', npub: context.npub, repo: context.repo }, |
||||
'Failed to list files' |
||||
); |
||||
} |
||||
}, |
||||
{ operation: 'listFiles', requireRepoExists: false, requireRepoAccess: false } // Tree listing should be publicly accessible for public repos
|
||||
); |
||||
@ -0,0 +1,33 @@
@@ -0,0 +1,33 @@
|
||||
/** |
||||
* API endpoint for manually triggering a repo poll |
||||
* This allows users to refresh the repo list and trigger provisioning of new repos |
||||
*
|
||||
* This is the public API interface for triggering polls. |
||||
* All poll triggers should go through this endpoint or the shared triggerRepoPoll utility. |
||||
*/ |
||||
|
||||
import { json } from '@sveltejs/kit'; |
||||
import type { RequestHandler } from './$types'; |
||||
import { triggerRepoPoll } from '$lib/utils/repo-poll-trigger.js'; |
||||
import { extractRequestContext } from '$lib/utils/api-context.js'; |
||||
|
||||
export const POST: RequestHandler = async (event) => { |
||||
const requestContext = extractRequestContext(event); |
||||
const clientIp = requestContext.clientIp || 'unknown'; |
||||
|
||||
try { |
||||
await triggerRepoPoll('api-endpoint'); |
||||
|
||||
return json({
|
||||
success: true, |
||||
message: 'Poll triggered successfully' |
||||
}); |
||||
} catch (err) { |
||||
const errorMessage = err instanceof Error ? err.message : String(err); |
||||
|
||||
return json({
|
||||
success: false,
|
||||
error: errorMessage
|
||||
}, { status: err instanceof Error && errorMessage.includes('not available') ? 503 : 500 }); |
||||
} |
||||
}; |
||||
Loading…
Reference in new issue