Browse Source

refactor API

Nostr-Signature: 934f8809638cea0bc7b8158fca959bc60880e0cae9ab8ff653687313adcd2f57 573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc c9d8e5b821ae8182f8d39599c50fd0a4db6040ead1d8d83730a608a1d94d5078770a6ccbfc525a98691e98fabd9f9d24f0298680fb564c6b76c2f34bed9889b5
main
Silberengel 2 weeks ago
parent
commit
f9988077e6
  1. 1
      nostr/commit-signatures.jsonl
  2. 329
      src/routes/api/code-search/+server.ts
  3. 43
      src/routes/api/openapi.json/openapi.json
  4. 503
      src/routes/api/repos/[npub]/[repo]/+server.ts
  5. 22
      src/routes/api/repos/[npub]/[repo]/archive/+server.ts
  6. 7
      src/routes/api/repos/[npub]/[repo]/branches/default/+server.ts
  7. 125
      src/routes/api/repos/[npub]/[repo]/clone-urls/+server.ts
  8. 210
      src/routes/api/repos/[npub]/[repo]/code-search/+server.ts
  9. 6
      src/routes/api/repos/[npub]/[repo]/commits/[hash]/verification/+server.ts
  10. 12
      src/routes/api/repos/[npub]/[repo]/diffs/+server.ts
  11. 573
      src/routes/api/repos/[npub]/[repo]/file/+server.ts
  12. 1194
      src/routes/api/repos/[npub]/[repo]/files/+server.ts
  13. 457
      src/routes/api/repos/[npub]/[repo]/forks/+server.ts
  14. 371
      src/routes/api/repos/[npub]/[repo]/maintainers/+server.ts
  15. 23
      src/routes/api/repos/[npub]/[repo]/patches/[id]/application/+server.ts
  16. 111
      src/routes/api/repos/[npub]/[repo]/prs/merge/+server.ts
  17. 43
      src/routes/api/repos/[npub]/[repo]/prs/update/+server.ts
  18. 56
      src/routes/api/repos/[npub]/[repo]/pull-requests/+server.ts
  19. 136
      src/routes/api/repos/[npub]/[repo]/pull-requests/[id]/+server.ts
  20. 38
      src/routes/api/repos/[npub]/[repo]/pull-requests/[id]/merge/+server.ts
  21. 153
      src/routes/api/repos/[npub]/[repo]/raw/+server.ts
  22. 169
      src/routes/api/repos/[npub]/[repo]/transfers/+server.ts
  23. 327
      src/routes/api/repos/[npub]/[repo]/tree/+server.ts
  24. 329
      src/routes/api/repos/[npub]/[repo]/verification/+server.ts
  25. 328
      src/routes/api/search/+server.ts

1
nostr/commit-signatures.jsonl

@@ -115,3 +115,4 @@
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772226191,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"20be97351d2b05fa7ad9e161b2619e9babaaffc6a8090057c1a3ac50a0f08d6a","sig":"a174c7dd39f613dd88260ef5c111b943df381b0acae20d048596e11ef1a6b0e3c1bfb9a8858af3df0f8858c4c79d1e2d03ad248a0608ac5d5cded6a81e99af77"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772227102,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fix"]],"content":"Signed commit: bug-fix","id":"0f366a0cc7c003f74e375f40e7c322781746d12829943df1287bf67f36e1330a","sig":"167177ccfeb053cd645e50e7d00450b847ecd65c305165777bcfbe39fd3f48ccc86b57fdd183d2a4b138d94d27d11e4f1c121d702b295d94b9aee0a8dc81a744"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772261455,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","fix zombie spawning on polling\nmake announcement commits non-blocking on repo provision"]],"content":"Signed commit: fix zombie spawning on polling\nmake announcement commits non-blocking on repo provision","id":"b0da119e7477b46f5d82be831693a92e117f25379476488f19351e2bac8f88b8","sig":"b8ca18e8215a9f5b3fc877ce113936c582353d44f8d03cdccd9f9ee70fb3e6fdd64db7cc6a3ca15339fb21b9ca87ea8471a38b587721a594a189d97cc2964ad9"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772264490,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","polling update"]],"content":"Signed commit: polling update","id":"42c1a2a63a4568c65d82d78701451b3b4363bdf9c8c57e804535b5f3f0d7b6fc","sig":"8e5f32ecb79da876ac41eba04c3b1541b21d039ae50d1b9fefa630d35f31c97dd29af64e4b695742fa7d4eaec17db8f4a066b4db99ce628aed596971975d4a87"}

329
src/routes/api/code-search/+server.ts

@@ -1,329 +0,0 @@
/**
* API endpoint for global code search across all repositories
* Searches file contents across multiple repositories
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { handleValidationError } from '$lib/utils/error-handler.js';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { KIND } from '$lib/types/nostr.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache } from '$lib/utils/nostr-utils.js';
import logger from '$lib/services/logger.js';
import { readdir, stat } from 'fs/promises';
import { join } from 'path';
import { existsSync } from 'fs';
import { simpleGit } from 'simple-git';
import { fileManager } from '$lib/services/service-registry.js';
/** Root directory for bare git repositories; overridable via GIT_REPO_ROOT. */
const repoRoot =
(typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT) || '/repos';
/**
 * A single match from a global (cross-repository) code search.
 */
export interface GlobalCodeSearchResult {
/** Repository name, without the trailing `.git` suffix. */
repo: string;
/** Repository owner's directory name as stored on disk (npub or hex pubkey). */
npub: string;
/** File path of the match, relative to the repository root. */
file: string;
/** 1-based line number of the match, as reported by `git grep -n`. */
line: number;
/** Text of the matching line, trimmed of surrounding whitespace. */
content: string;
/** Branch that was searched ('HEAD' when no branch could be resolved). */
branch: string;
}
/**
 * GET /api/code-search?q=<query>&repo=<npub>/<repo>&limit=<n>
 *
 * Global code search across repositories hosted under `repoRoot`.
 * - `q` (required): search string, minimum 2 characters after trimming.
 * - `repo` (optional): restrict the search to a single "npub/repo".
 * - `limit` (optional): maximum number of results (default 100).
 *
 * Private repositories the requesting user cannot view are skipped.
 * Returns a JSON array of GlobalCodeSearchResult.
 */
export const GET: RequestHandler = async (event) => {
  const query = event.url.searchParams.get('q');
  const repoFilter = event.url.searchParams.get('repo'); // Optional: filter by specific repo (npub/repo format)
  // Fix: validate the limit. parseInt may yield NaN (e.g. ?limit=abc) or a
  // non-positive number; either would defeat the "results.length >= limit"
  // cap below, and results.slice(0, NaN) would return an empty array.
  const parsedLimit = parseInt(event.url.searchParams.get('limit') || '100', 10);
  const limit = Number.isFinite(parsedLimit) && parsedLimit > 0 ? parsedLimit : 100;
  if (!query || query.trim().length < 2) {
    throw handleValidationError('Query must be at least 2 characters', { operation: 'globalCodeSearch' });
  }
  const requestContext = extractRequestContext(event);
  const results: GlobalCodeSearchResult[] = [];
  try {
    // If repo filter is specified, search only that repo
    if (repoFilter) {
      const [npub, repo] = repoFilter.split('/');
      if (npub && repo) {
        const repoPath = join(repoRoot, npub, `${repo}.git`);
        if (existsSync(repoPath)) {
          const repoResults = await searchInRepo(npub, repo, query, limit);
          results.push(...repoResults);
        }
      }
      return json(results);
    }
    // Search across all repositories found on disk
    // (directory layout: repoRoot/<npub>/<repo>.git)
    if (!existsSync(repoRoot)) {
      return json([]);
    }
    const users = await readdir(repoRoot);
    for (const user of users) {
      const userPath = join(repoRoot, user);
      const userStat = await stat(userPath);
      if (!userStat.isDirectory()) {
        continue;
      }
      const repos = await readdir(userPath);
      for (const repo of repos) {
        // Only bare repo directories named "<repo>.git" are considered.
        if (!repo.endsWith('.git')) {
          continue;
        }
        const repoName = repo.replace(/\.git$/, '');
        const repoPath = join(userPath, repo);
        const repoStat = await stat(repoPath);
        if (!repoStat.isDirectory()) {
          continue;
        }
        // Check access for private repos
        try {
          const { MaintainerService } = await import('$lib/services/nostr/maintainer-service.js');
          const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
          // Decode npub to hex; directory names may be npub- or hex-encoded.
          const { nip19 } = await import('nostr-tools');
          let repoOwnerPubkey: string;
          try {
            const decoded = nip19.decode(user);
            if (decoded.type === 'npub') {
              repoOwnerPubkey = decoded.data as string;
            } else {
              repoOwnerPubkey = user; // Assume it's already hex
            }
          } catch {
            repoOwnerPubkey = user; // Assume it's already hex
          }
          const canView = await maintainerService.canView(
            requestContext.userPubkeyHex || null,
            repoOwnerPubkey,
            repoName
          );
          if (!canView) {
            continue; // Skip private repos user can't access
          }
        } catch (accessErr) {
          // Fail closed: if the access check itself errors, skip the repo
          // rather than risk exposing private content.
          logger.debug({ error: accessErr, user, repo: repoName }, 'Error checking access, skipping repo');
          continue;
        }
        // Search in this repo with the remaining result budget.
        try {
          const repoResults = await searchInRepo(user, repoName, query, limit - results.length);
          results.push(...repoResults);
          if (results.length >= limit) {
            break;
          }
        } catch (searchErr) {
          logger.debug({ error: searchErr, user, repo: repoName }, 'Error searching repo, continuing');
          continue;
        }
      }
      if (results.length >= limit) {
        break;
      }
    }
    return json(results.slice(0, limit));
  } catch (err) {
    logger.error({ error: err, query }, 'Error performing global code search');
    throw err;
  }
};
/**
 * Run a text search inside a single bare repository using `git grep`.
 *
 * Strategy: resolve a branch to search, then prefer searching a checked-out
 * worktree (obtained via fileManager.getWorktree); if no worktree can be
 * created, fall back to running `git grep <tree-ref>` against the bare repo.
 *
 * @param npub - Repository owner directory name (npub or hex).
 * @param repo - Repository name without the `.git` suffix.
 * @param query - Search string, passed to `git grep` as the pattern.
 * @param limit - Maximum number of results to return.
 * @returns Matches found; [] when the repo is missing, has no matches, or an
 *   error occurs (errors are logged at debug level and swallowed).
 */
async function searchInRepo(
  npub: string,
  repo: string,
  query: string,
  limit: number
): Promise<GlobalCodeSearchResult[]> {
  const repoPath = join(repoRoot, npub, `${repo}.git`);
  if (!existsSync(repoPath)) {
    return [];
  }
  const results: GlobalCodeSearchResult[] = [];
  const git = simpleGit(repoPath);
  try {
    // Get default branch
    let branch = 'HEAD';
    try {
      const branches = await git.branchLocal();
      branch = branches.current || 'HEAD';
      // If no current branch, try common defaults
      if (!branch || branch === 'HEAD') {
        // Strip remote prefixes so remote-only branches can match by name.
        const allBranches = branches.all.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, ''));
        branch = allBranches.find(b => b === 'main') || allBranches.find(b => b === 'master') || allBranches[0] || 'main';
      }
    } catch {
      branch = 'main';
    }
    // For bare repositories, we need to use a worktree or search the index
    let worktreePath: string | null = null;
    try {
      // Get the actual branch name (resolve HEAD if needed)
      let actualBranch = branch;
      if (branch === 'HEAD') {
        actualBranch = 'main';
      }
      // Get or create worktree
      worktreePath = await fileManager.getWorktree(repoPath, actualBranch, npub, repo);
    } catch (worktreeError) {
      logger.debug({ error: worktreeError, npub, repo, branch }, 'Could not create worktree, trying git grep with tree reference');
      // Fall back to searching the index
    }
    const searchQuery = query.trim();
    // If we have a worktree, search in the worktree
    if (worktreePath && existsSync(worktreePath)) {
      try {
        const worktreeGit = simpleGit(worktreePath);
        // -n: line numbers; -I: skip binary files; --break/--heading: group
        // matches under a filename heading line, separated by blank lines.
        const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery];
        const grepOutput = await worktreeGit.raw(gitArgs);
        if (!grepOutput || !grepOutput.trim()) {
          return [];
        }
        // Parse git grep output: a heading line (the filename) followed by
        // "line:content" lines; blank separator lines are skipped below.
        const lines = grepOutput.split('\n');
        let currentFile = '';
        for (const line of lines) {
          if (!line.trim()) {
            continue;
          }
          // Check if this is a filename (no colon)
          // NOTE(review): a filename containing ':' would be misclassified
          // as a match line here — acceptable for typical repos, but verify.
          if (!line.includes(':')) {
            currentFile = line.trim();
            continue;
          }
          // Parse line:content format
          const colonIndex = line.indexOf(':');
          if (colonIndex > 0 && currentFile) {
            const lineNumber = parseInt(line.substring(0, colonIndex), 10);
            const content = line.substring(colonIndex + 1);
            if (!isNaN(lineNumber) && content) {
              // Make file path relative to repo root
              // NOTE(review): git grep run inside a worktree already emits
              // worktree-relative paths, so this replace is usually a no-op.
              const relativeFile = currentFile.replace(worktreePath + '/', '').replace(/^\.\//, '');
              results.push({
                repo,
                npub,
                file: relativeFile,
                line: lineNumber,
                content: content.trim(),
                branch: branch === 'HEAD' ? 'HEAD' : branch
              });
              if (results.length >= limit) {
                break;
              }
            }
          }
        }
      } catch (grepError: any) {
        // git grep returns exit code 1 when no matches found
        if (grepError.message && grepError.message.includes('exit code 1')) {
          return [];
        }
        throw grepError;
      }
    } else {
      // Fallback: search in the index using git grep with tree reference
      try {
        // Get the tree for the branch
        let treeRef = branch;
        if (branch === 'HEAD') {
          try {
            const branchInfo = await git.branch(['-a']);
            treeRef = branchInfo.current || 'HEAD';
          } catch {
            treeRef = 'HEAD';
          }
        }
        // Use git grep with tree reference for bare repos
        // NOTE(review): with a tree ref, `git grep --heading` emits headings
        // of the form "<ref>:<path>", which DO contain a colon — the
        // "no colon == filename" check below may then misclassify headings
        // and drop matches. TODO: confirm against real git output and adjust.
        const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery, treeRef];
        const grepOutput = await git.raw(gitArgs);
        if (!grepOutput || !grepOutput.trim()) {
          return [];
        }
        // Parse git grep output (same format handling as the worktree path)
        const lines = grepOutput.split('\n');
        let currentFile = '';
        for (const line of lines) {
          if (!line.trim()) {
            continue;
          }
          // Check if this is a filename (no colon)
          if (!line.includes(':')) {
            currentFile = line.trim();
            continue;
          }
          // Parse line:content format
          const colonIndex = line.indexOf(':');
          if (colonIndex > 0 && currentFile) {
            const lineNumber = parseInt(line.substring(0, colonIndex), 10);
            const content = line.substring(colonIndex + 1);
            if (!isNaN(lineNumber) && content) {
              results.push({
                repo,
                npub,
                file: currentFile,
                line: lineNumber,
                content: content.trim(),
                branch: branch === 'HEAD' ? 'HEAD' : branch
              });
              if (results.length >= limit) {
                break;
              }
            }
          }
        }
      } catch (grepError: any) {
        // git grep returns exit code 1 when no matches found
        if (grepError.message && grepError.message.includes('exit code 1')) {
          return [];
        }
        throw grepError;
      }
    }
  } catch (err) {
    // Best-effort: any unexpected failure yields an empty result set so the
    // global search can continue with other repositories.
    logger.debug({ error: err, npub, repo, query }, 'Error searching in repo');
    return [];
  }
  return results;
}

43
src/routes/api/openapi.json/openapi.json

@@ -286,6 +286,49 @@
}
}
},
"/api/repos/poll": {
"post": {
"summary": "Trigger repository polling",
"description": "Manually trigger repository polling to provision new repos from Nostr announcements. This endpoint fetches NIP-34 repo announcements from relays and provisions repositories that list this server's domain in their clone URLs. The poll runs asynchronously and does not block the request.",
"tags": ["Infrastructure"],
"responses": {
"200": {
"description": "Poll triggered successfully",
"content": {
"application/json": {
"schema": {
"type": "object",
"properties": {
"success": {"type": "boolean"},
"message": {"type": "string"}
}
}
}
}
},
"503": {
"description": "Polling service not available",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Error"
}
}
}
},
"500": {
"description": "Error triggering poll",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Error"
}
}
}
}
}
}
},
"/api/repos/{npub}/{repo}/file": {
"get": {
"summary": "Get file content",

503
src/routes/api/repos/[npub]/[repo]/+server.ts

@@ -0,0 +1,503 @@
/**
* RESTful Repository Resource Endpoint
*
* GET /api/repos/{npub}/{repo} # Get repository info (settings, metadata, access, verification)
* PUT /api/repos/{npub}/{repo} # Update repository (replace)
* PATCH /api/repos/{npub}/{repo} # Partial update (settings, description, etc.)
* DELETE /api/repos/{npub}/{repo} # Delete repository
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError, handleApiError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { nostrClient, maintainerService } from '$lib/services/service-registry.js';
import { getVisibility, getProjectRelays } from '$lib/utils/repo-visibility.js';
import { KIND } from '$lib/types/nostr.js';
import { nip19 } from 'nostr-tools';
import { getPublicKeyWithNIP07, signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js';
import { DEFAULT_NOSTR_RELAYS, combineRelays } from '$lib/config.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js';
import logger from '$lib/services/logger.js';
import { rm } from 'fs/promises';
import { join, resolve } from 'path';
import { existsSync } from 'fs';
import { auditLogger } from '$lib/services/security/audit-logger.js';
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js';
import { verifyRepositoryOwnership } from '$lib/services/nostr/repo-verification.js';
// Root directory for bare git repositories (override with GIT_REPO_ROOT).
const repoRoot =
(typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT) || '/repos';
// Admin pubkeys from the ADMIN_PUBKEYS env var: comma-separated npub or hex
// values, whitespace-trimmed, with empty entries dropped.
const ADMIN_PUBKEYS: string[] =
typeof process !== 'undefined' && process.env?.ADMIN_PUBKEYS
  ? process.env.ADMIN_PUBKEYS.split(',')
      .map((entry) => entry.trim())
      .filter((entry) => entry.length > 0)
  : [];
/**
 * Check whether the given user (hex pubkey) is a configured admin.
 *
 * Each ADMIN_PUBKEYS entry may be an npub (decoded via nip19) or a raw hex
 * pubkey. Comparison is case-insensitive in both branches.
 */
function isAdmin(userPubkeyHex: string | null): boolean {
  if (!userPubkeyHex) return false;
  return ADMIN_PUBKEYS.some(adminPubkey => {
    try {
      const decoded = nip19.decode(adminPubkey);
      if (decoded.type === 'npub') {
        // Fix: compare case-insensitively here too, matching the hex branch
        // below (decoded npub data is lowercase hex, but callers may supply
        // an uppercase hex pubkey).
        return (decoded.data as string).toLowerCase() === userPubkeyHex.toLowerCase();
      }
    } catch {
      // Not an npub, compare as hex
    }
    return adminPubkey.toLowerCase() === userPubkeyHex.toLowerCase();
  });
}
/**
 * True when the authenticated user's hex pubkey matches the repository
 * owner's pubkey (case-insensitive). Unauthenticated users never match.
 */
function isOwner(userPubkeyHex: string | null, repoOwnerPubkey: string): boolean {
  if (!userPubkeyHex) {
    return false;
  }
  const user = userPubkeyHex.toLowerCase();
  const owner = repoOwnerPubkey.toLowerCase();
  return user === owner;
}
/**
 * GET /api/repos/{npub}/{repo}
 *
 * Returns repository info. The `include` query parameter selects which
 * sections are computed, as comma-separated values from: `settings`,
 * `maintainers`, `access`, `verification`, or `all`.
 * Defaults to `settings,access`.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const url = new URL(event.request.url);
    // Sections to include; defaults cover the common case.
    const include = url.searchParams.get('include')?.split(',') || ['settings', 'access'];
    // Fetch repository announcement
    const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
    const announcement = findRepoAnnouncement(allEvents, context.repo);
    const result: any = {
      npub: context.npub,
      repo: context.repo,
      owner: context.npub
    };
    // Include settings (description / visibility / relays from the announcement)
    if (include.includes('settings') || include.includes('all')) {
      if (announcement) {
        result.description = announcement.tags.find(t => t[0] === 'description')?.[1] || '';
        result.visibility = getVisibility(announcement);
        result.projectRelays = getProjectRelays(announcement);
        // Both 'restricted' and 'private' surface as private to clients.
        result.private = result.visibility === 'restricted' || result.visibility === 'private';
      } else {
        // No announcement found: fall back to public defaults.
        result.description = '';
        result.visibility = 'public';
        result.projectRelays = [];
        result.private = false;
      }
    }
    // Include maintainers (npub-encoded; requester flags only when signed in)
    if (include.includes('maintainers') || include.includes('all')) {
      const { maintainers, owner } = await maintainerService.getMaintainers(
        context.repoOwnerPubkey,
        context.repo
      );
      result.maintainers = maintainers.map(p => nip19.npubEncode(p));
      // Overwrites the base-result npub with the owner reported by the
      // maintainer service.
      result.owner = nip19.npubEncode(owner);
      if (context.userPubkeyHex) {
        result.isMaintainer = maintainers.includes(context.userPubkeyHex);
        result.isOwner = context.userPubkeyHex === owner;
      }
    }
    // Include access flags for the requesting user (anonymous allowed)
    if (include.includes('access') || include.includes('all')) {
      const { isPrivate, maintainers, owner } = await maintainerService.getMaintainers(
        context.repoOwnerPubkey,
        context.repo
      );
      const canView = await maintainerService.canView(
        context.userPubkeyHex || null,
        context.repoOwnerPubkey,
        context.repo
      );
      result.access = {
        canView,
        isPrivate,
        isMaintainer: context.userPubkeyHex ? maintainers.includes(context.userPubkeyHex) : false,
        isOwner: context.userPubkeyHex ? context.userPubkeyHex === owner : false
      };
    }
    // Include verification (on-disk existence + announcement presence only)
    if (include.includes('verification') || include.includes('all')) {
      // Simplified verification check - full verification is in /verification endpoint
      const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
      result.verification = {
        exists: existsSync(repoPath),
        announcementFound: !!announcement
      };
    }
    return json(result);
  },
  { operation: 'getRepo', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * PUT /api/repos/{npub}/{repo}
 *
 * Replace the repository's mutable metadata (full update): fields omitted
 * from the request body are reset to their defaults.
 */
export const PUT: RequestHandler = createRepoPostHandler(
  (context: RepoRequestContext, event: RequestEvent) =>
    updateRepository(context, event, /* isFullUpdate */ true),
  { operation: 'updateRepo', requireRepoExists: false }
);
/**
 * PATCH /api/repos/{npub}/{repo}
 *
 * Partial update: only the fields present in the request body change;
 * all other repository metadata is preserved.
 */
export const PATCH: RequestHandler = createRepoPostHandler(
  (context: RepoRequestContext, event: RequestEvent) =>
    updateRepository(context, event, /* isFullUpdate */ false),
  { operation: 'updateRepo', requireRepoExists: false }
);
/**
 * Shared implementation behind PUT (full replace) and PATCH (partial update).
 *
 * Flow: parse body -> load the current NIP-34 announcement -> verify the
 * signed-in user is a maintainer -> rebuild the tag list -> sign the
 * replacement event via NIP-07 -> publish it to visibility-appropriate
 * relays -> persist the event into the repository -> return the updated
 * metadata.
 *
 * @param context - Repo request context (npub, repo, owner pubkey).
 * @param event - Incoming request; JSON body with optional description,
 *   visibility, projectRelays, private, and branchProtection fields.
 * @param isFullUpdate - true (PUT): tags are rebuilt from scratch and omitted
 *   fields reset to defaults; false (PATCH): existing tags are the starting
 *   point and only supplied fields change.
 * @throws Validation errors for bad JSON, missing announcement, bad pubkey,
 *   or invalid visibility; 403 when the user is not a maintainer.
 */
async function updateRepository(
  context: RepoRequestContext,
  event: RequestEvent,
  isFullUpdate: boolean
) {
  let body: {
    description?: string;
    visibility?: string;
    projectRelays?: string[];
    private?: boolean;
    branchProtection?: any; // accepted but not referenced again in this function
  };
  try {
    body = await event.request.json();
  } catch {
    throw handleValidationError('Invalid JSON in request body', {
      operation: 'updateRepo',
      npub: context.npub,
      repo: context.repo
    });
  }
  // Fetch current announcement; the update is expressed as a replacement event.
  const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
  const announcement = findRepoAnnouncement(allEvents, context.repo);
  if (!announcement) {
    throw handleValidationError('Repository announcement not found', {
      operation: 'updateRepo',
      npub: context.npub,
      repo: context.repo
    });
  }
  // Get user's pubkey (required for signing)
  // NOTE(review): getPublicKeyWithNIP07 is a NIP-07 (browser-extension) API;
  // confirm a server-side shim exists for this route.
  const userPubkey = await getPublicKeyWithNIP07();
  let userPubkeyHex: string;
  if (typeof userPubkey === 'string' && userPubkey.length === 64) {
    // Already a 64-character hex pubkey.
    userPubkeyHex = userPubkey;
  } else {
    const decoded = nip19.decode(userPubkey) as { type: string; data: unknown };
    if (decoded.type === 'npub' && typeof decoded.data === 'string') {
      userPubkeyHex = decoded.data;
    } else {
      throw handleValidationError('Invalid user pubkey format', { operation: 'updateRepo', npub: context.npub, repo: context.repo });
    }
  }
  // Verify user is maintainer
  const isMaintainer = await maintainerService.isMaintainer(userPubkeyHex, context.repoOwnerPubkey, context.repo);
  if (!isMaintainer) {
    return error(403, 'Only maintainers can update repository');
  }
  // Build updated tags: PUT starts empty, PATCH starts from the current tags.
  const tags: string[][] = isFullUpdate ? [] : [...announcement.tags];
  // Update description (PUT without a description resets it to '')
  if (body.description !== undefined || isFullUpdate) {
    const descIndex = tags.findIndex(t => t[0] === 'description');
    const descValue = body.description !== undefined ? body.description : (isFullUpdate ? '' : announcement.tags.find(t => t[0] === 'description')?.[1] || '');
    if (descIndex >= 0) {
      tags[descIndex] = ['description', descValue];
    } else if (descValue) {
      tags.push(['description', descValue]);
    }
  }
  // Resolve the new visibility: explicit `visibility` wins, then the legacy
  // boolean `private`, then PUT defaults to 'public'; PATCH keeps current.
  let newVisibility: 'public' | 'unlisted' | 'restricted' | 'private' = getVisibility(announcement);
  if (body.visibility !== undefined) {
    const vis = body.visibility.toLowerCase();
    if (['public', 'unlisted', 'restricted', 'private'].includes(vis)) {
      newVisibility = vis as typeof newVisibility;
    } else {
      throw handleValidationError(`Invalid visibility: ${body.visibility}. Must be one of: public, unlisted, restricted, private`,
        { operation: 'updateRepo', npub: context.npub, repo: context.repo });
    }
  } else if (body.private !== undefined) {
    newVisibility = body.private ? 'restricted' : 'public';
  } else if (isFullUpdate) {
    newVisibility = 'public';
  }
  // Update visibility tag ('public' is the default and carries no tag)
  const visIndex = tags.findIndex(t => t[0] === 'visibility');
  if (newVisibility === 'public') {
    if (visIndex >= 0) {
      tags.splice(visIndex, 1);
    }
  } else {
    if (visIndex >= 0) {
      tags[visIndex] = ['visibility', newVisibility];
    } else {
      tags.push(['visibility', newVisibility]);
    }
  }
  // Update project-relay tags
  if (body.projectRelays !== undefined || isFullUpdate) {
    // Remove existing project-relay tags. Indices are collected first and
    // spliced in reverse so earlier removals don't shift later indices.
    const projectRelayIndices: number[] = [];
    tags.forEach((tag, index) => {
      if (tag[0] === 'project-relay') {
        projectRelayIndices.push(index);
      }
    });
    for (let i = projectRelayIndices.length - 1; i >= 0; i--) {
      tags.splice(projectRelayIndices[i], 1);
    }
    // Add new project-relay tags (only well-formed websocket URLs)
    const relays = body.projectRelays || (isFullUpdate ? [] : getProjectRelays(announcement));
    for (const relay of relays) {
      if (relay && (relay.startsWith('ws://') || relay.startsWith('wss://'))) {
        tags.push(['project-relay', relay]);
      }
    }
  }
  // Validate: unlisted/restricted require project-relay
  if ((newVisibility === 'unlisted' || newVisibility === 'restricted')) {
    const hasProjectRelay = tags.some(t => t[0] === 'project-relay');
    if (!hasProjectRelay) {
      throw handleValidationError(
        `Visibility '${newVisibility}' requires at least one project-relay. Please provide project-relays in the request.`,
        { operation: 'updateRepo', npub: context.npub, repo: context.repo }
      );
    }
  }
  // Preserve essential tags
  if (!isFullUpdate) {
    // Keep d-tag, name, clone tags, etc.
    const essentialTags = ['d', 'name', 'clone'];
    essentialTags.forEach(tagName => {
      announcement.tags.forEach(tag => {
        if (tag[0] === tagName && !tags.some(t => t[0] === tagName && t[1] === tag[1])) {
          tags.push(tag);
        }
      });
    });
  } else {
    // For full update, we need d-tag at minimum
    const dTag = announcement.tags.find(t => t[0] === 'd');
    if (dTag) {
      tags.unshift(dTag);
    }
  }
  // Remove old private tag if present (legacy representation of privacy)
  const privateIndex = tags.findIndex(t => (t[0] === 'private' && t[1] === 'true') || (t[0] === 't' && t[1] === 'private'));
  if (privateIndex >= 0) {
    tags.splice(privateIndex, 1);
  }
  // Create updated event (same kind/content; fresh timestamp and tags)
  const updatedEvent = {
    kind: KIND.REPO_ANNOUNCEMENT,
    pubkey: userPubkeyHex,
    created_at: Math.floor(Date.now() / 1000),
    content: announcement.content || '',
    tags
  };
  // Sign with NIP-07
  const signedEvent = await signEventWithNIP07(updatedEvent);
  // Get user's relays for publishing (outbox preferred, then inbox, then defaults)
  const allSearchRelays = Array.from(new Set([...DEFAULT_NOSTR_SEARCH_RELAYS, ...DEFAULT_NOSTR_RELAYS]));
  const fullRelayClient = new NostrClient(allSearchRelays);
  let userRelays: string[] = [];
  try {
    const { inbox, outbox } = await getUserRelays(userPubkeyHex, fullRelayClient);
    if (outbox.length > 0) {
      userRelays = combineRelays(outbox, DEFAULT_NOSTR_RELAYS);
    } else if (inbox.length > 0) {
      userRelays = combineRelays(inbox, DEFAULT_NOSTR_RELAYS);
    } else {
      userRelays = DEFAULT_NOSTR_RELAYS;
    }
  } catch (err) {
    logger.warn({ error: err }, 'Failed to fetch user relays, using defaults');
    userRelays = DEFAULT_NOSTR_RELAYS;
  }
  // Determine which relays to publish to based on visibility
  const { getRelaysForEventPublishing } = await import('$lib/utils/repo-visibility.js');
  const visibilityRelays = getRelaysForEventPublishing(signedEvent);
  const relaysToPublish = visibilityRelays.length > 0 ? combineRelays([...visibilityRelays, ...userRelays]) : [];
  // Publish to relays (if not private)
  if (relaysToPublish.length > 0) {
    const publishResult = await nostrClient.publishEvent(signedEvent, relaysToPublish);
    if (publishResult.failed.length > 0 && publishResult.success.length === 0) {
      logger.warn({ npub: context.npub, repo: context.repo }, 'Failed to publish update to all relays');
    }
  }
  // Save to repository (non-fatal: failure is logged but the update succeeds)
  const { AnnouncementManager } = await import('$lib/services/git/announcement-manager.js');
  const repoPath = `${repoRoot}/${context.npub}/${context.repo}.git`;
  const announcementManager = new AnnouncementManager(repoRoot);
  try {
    await announcementManager.ensureAnnouncementInRepo(repoPath, signedEvent);
  } catch (err) {
    logger.error({ error: err, npub: context.npub, repo: context.repo }, 'Failed to save update to repository');
  }
  // Return updated repository
  return json({
    npub: context.npub,
    repo: context.repo,
    owner: context.npub,
    description: body.description !== undefined ? body.description : (announcement.tags.find(t => t[0] === 'description')?.[1] || ''),
    visibility: newVisibility,
    projectRelays: body.projectRelays !== undefined ? body.projectRelays : getProjectRelays(announcement),
    private: newVisibility === 'restricted' || newVisibility === 'private'
  });
}
/**
 * DELETE /api/repos/{npub}/{repo}
 *
 * Permanently deletes the repository directory from disk. Restricted to the
 * repository owner or a configured admin; every attempt (denied, failed, or
 * successful) is recorded in the audit log together with the client IP.
 */
export const DELETE: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const { npub, repo, repoOwnerPubkey, userPubkeyHex, clientIp } = context;
    // Check permissions: must be owner or admin
    if (!userPubkeyHex) {
      auditLogger.log({
        user: undefined,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'denied',
        error: 'Authentication required'
      });
      return handleAuthorizationError('Authentication required to delete repositories');
    }
    const userIsOwner = isOwner(userPubkeyHex, repoOwnerPubkey);
    const userIsAdmin = isAdmin(userPubkeyHex);
    if (!userIsOwner && !userIsAdmin) {
      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'denied',
        error: 'Insufficient permissions'
      });
      return handleAuthorizationError('Only repository owners or admins can delete repositories');
    }
    // Get repository path
    const repoPath = join(repoRoot, npub, `${repo}.git`);
    // Security: Ensure resolved path is within repoRoot (guards against path
    // traversal via crafted npub/repo values); backslashes are normalized so
    // the prefix check also covers Windows-style separators.
    const resolvedPath = resolve(repoPath).replace(/\\/g, '/');
    const resolvedRoot = resolve(repoRoot).replace(/\\/g, '/');
    if (!resolvedPath.startsWith(resolvedRoot + '/')) {
      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'denied',
        error: 'Invalid repository path'
      });
      return error(403, 'Invalid repository path');
    }
    // Check if repo exists
    if (!existsSync(repoPath)) {
      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'failure',
        error: 'Repository not found'
      });
      return error(404, 'Repository not found');
    }
    try {
      // Delete the repository directory
      await rm(repoPath, { recursive: true, force: true });
      // Clear cache so later existence checks don't return a stale hit
      repoCache.delete(RepoCache.repoExistsKey(npub, repo));
      // Log successful deletion
      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'success',
        metadata: {
          isOwner: userIsOwner,
          isAdmin: userIsAdmin
        }
      });
      logger.info({
        user: userPubkeyHex,
        npub,
        repo,
        isOwner: userIsOwner,
        isAdmin: userIsAdmin
      }, 'Repository deleted');
      return json({
        success: true,
        message: 'Repository deleted successfully'
      });
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : 'Unknown error';
      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'failure',
        error: errorMessage
      });
      return handleApiError(err, { operation: 'deleteRepo', npub, repo }, 'Failed to delete repository');
    }
  },
  {
    operation: 'deleteRepo',
    requireRepoExists: true,
    requireRepoAccess: false,
    requireMaintainer: false
  }
);

22
src/routes/api/repos/[npub]/[repo]/download/+server.ts → src/routes/api/repos/[npub]/[repo]/archive/+server.ts

@@ -1,9 +1,15 @@
/**
* API endpoint for downloading repository as ZIP or TAR.GZ
* Refactored for better error handling and reliability
* RESTful Archive Endpoint
*
* GET /api/repos/{npub}/{repo}/archive?format=zip|tar.gz&ref=...
*
* Query parameters:
* - format - Archive format: 'zip' or 'tar.gz' (default: 'zip')
* - ref - Branch, tag, or commit hash (default: 'HEAD')
*/
import { error } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
@@ -55,7 +61,7 @@ async function createTempClone(
return null;
}
logger.info({ npub: context.npub, repo: context.repo }, 'Creating temporary clone for download');
logger.info({ npub: context.npub, repo: context.repo }, 'Creating temporary clone for archive');
// Setup temp clone directory
const tempDir = resolve(join(repoRoot, '..', 'temp-clones'));
@@ -275,7 +281,7 @@ function createTarGzArchive(workDir: string, archivePath: string): Promise<void>
}
/**
* Main download handler
* Main archive handler
*/
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
@@ -302,7 +308,7 @@ export const GET: RequestHandler = createRepoGetHandler(
} else {
throw handleNotFoundError(
'Repository not found',
{ operation: 'download', npub: context.npub, repo: context.repo }
{ operation: 'archive', npub: context.npub, repo: context.repo }
);
}
}
@@ -312,7 +318,7 @@ export const GET: RequestHandler = createRepoGetHandler(
if (!existsSync(sourceRepoPath)) {
throw handleNotFoundError(
'Repository not found',
{ operation: 'download', npub: context.npub, repo: context.repo }
{ operation: 'archive', npub: context.npub, repo: context.repo }
);
}
@@ -441,7 +447,7 @@ export const GET: RequestHandler = createRepoGetHandler(
}
// Return archive
logger.info({ npub: context.npub, repo: context.repo, ref, format, size: archiveBuffer.length }, 'Download completed successfully');
logger.info({ npub: context.npub, repo: context.repo, ref, format, size: archiveBuffer.length }, 'Archive created successfully');
return new Response(archiveBuffer, {
headers: {
@ -490,5 +496,5 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -490,5 +496,5 @@ export const GET: RequestHandler = createRepoGetHandler(
throw error(500, `Failed to create archive: ${err instanceof Error ? err.message : String(err)}`);
}
},
{ operation: 'download', requireRepoExists: false, requireRepoAccess: true }
{ operation: 'archive', requireRepoExists: false, requireRepoAccess: true }
);

7
src/routes/api/repos/[npub]/[repo]/default-branch/+server.ts → src/routes/api/repos/[npub]/[repo]/branches/default/+server.ts

@ -1,8 +1,13 @@ @@ -1,8 +1,13 @@
/**
* API endpoint for getting the default branch of a repository
* RESTful Default Branch Endpoint
*
* GET /api/repos/{npub}/{repo}/branches/default
*
* Returns the default branch of the repository
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';

125
src/routes/api/repos/[npub]/[repo]/clone-urls/+server.ts

@ -0,0 +1,125 @@ @@ -0,0 +1,125 @@
/**
* RESTful Clone URLs Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/clone-urls # List clone URLs
* POST /api/repos/{npub}/{repo}/clone-urls # Check reachability (body: {urls: [...]})
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError } from '$lib/utils/error-handler.js';
import { getCloneUrlsReachability } from '$lib/services/git/clone-url-reachability.js';
import { extractCloneUrls } from '$lib/utils/nostr-utils.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { DEFAULT_NOSTR_RELAYS, DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { nostrClient } from '$lib/services/service-registry.js';
import logger from '$lib/services/logger.js';
/**
* GET: List clone URLs
* Query params:
* - includeReachability: boolean (optional) - Include reachability status
* - forceRefresh: boolean (optional) - Force refresh reachability cache
*/
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
try {
const url = new URL(event.request.url);
const includeReachability = url.searchParams.get('includeReachability') === 'true';
const forceRefresh = url.searchParams.get('forceRefresh') === 'true';
// Fetch repository announcement (case-insensitive) with caching
let allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
let announcement = findRepoAnnouncement(allEvents, context.repo);
// If no events found in cache/default relays, try all relays
if (!announcement) {
const allRelays = [...new Set([...DEFAULT_NOSTR_RELAYS, ...DEFAULT_NOSTR_SEARCH_RELAYS])];
if (allRelays.length > DEFAULT_NOSTR_RELAYS.length) {
const allRelaysClient = new NostrClient(allRelays);
allEvents = await fetchRepoAnnouncementsWithCache(allRelaysClient, context.repoOwnerPubkey, eventCache);
announcement = findRepoAnnouncement(allEvents, context.repo);
}
}
if (!announcement) {
logger.warn({ npub: context.npub, repo: context.repo }, 'Repository announcement not found for clone URLs');
return error(404, 'Repository announcement not found');
}
// Extract clone URLs
const cloneUrls = extractCloneUrls(announcement, false);
if (!includeReachability) {
return json({
cloneUrls,
count: cloneUrls.length
});
}
// Extract relay URLs from relays tag (for proper GRASP server detection)
const relayUrls: string[] = [];
for (const tag of announcement.tags) {
if (tag[0] === 'relays') {
for (let i = 1; i < tag.length; i++) {
const relayUrl = tag[i];
if (relayUrl && typeof relayUrl === 'string' && (relayUrl.startsWith('ws://') || relayUrl.startsWith('wss://'))) {
relayUrls.push(relayUrl);
}
}
}
}
// Get reachability for all clone URLs
const reachabilityResults = await getCloneUrlsReachability(
cloneUrls,
5000,
forceRefresh,
relayUrls.length > 0 ? relayUrls : undefined
);
return json({
cloneUrls,
count: cloneUrls.length,
reachability: reachabilityResults
});
} catch (err) {
return handleApiError(err, { operation: 'getCloneUrls', npub: context.npub, repo: context.repo }, 'Failed to get clone URLs');
}
},
{ operation: 'getCloneUrls', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * POST: check reachability of caller-supplied clone URLs.
 *
 * Body: { urls: string[], forceRefresh?: boolean }
 *
 * Returns { results } from the reachability probe (5s timeout per URL).
 * Throws 400 for malformed JSON or an invalid `urls` value.
 */
export const POST: RequestHandler = createRepoPostHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    // Parse and validate OUTSIDE the generic error wrapper: error() throws
    // an HttpError, and wrapping these in the catch-all would rewrap the
    // intended 400s (and JSON parse failures) as generic 500 responses.
    let body: { urls?: unknown; forceRefresh?: boolean };
    try {
      body = await event.request.json();
    } catch {
      throw error(400, 'urls must be a non-empty array');
    }
    const { urls, forceRefresh = false } = body;
    if (!Array.isArray(urls) || urls.length === 0) {
      throw error(400, 'urls must be a non-empty array');
    }
    // Validate URLs are strings
    if (!urls.every((u) => typeof u === 'string')) {
      throw error(400, 'All URLs must be strings');
    }
    try {
      // Get reachability for specified URLs
      const results = await getCloneUrlsReachability(urls, 5000, forceRefresh);
      return json({ results });
    } catch (err) {
      return handleApiError(err, { operation: 'checkReachability', npub: context.npub, repo: context.repo }, 'Failed to check clone URL reachability');
    }
  },
  { operation: 'checkReachability', requireRepoExists: false, requireRepoAccess: false }
);

210
src/routes/api/repos/[npub]/[repo]/code-search/+server.ts

@ -1,210 +0,0 @@ @@ -1,210 +0,0 @@
/**
* API endpoint for code search within repositories
* Searches file contents across repositories
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { fileManager, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError } from '$lib/utils/error-handler.js';
import { join } from 'path';
import { existsSync } from 'fs';
import logger from '$lib/services/logger.js';
import { simpleGit } from 'simple-git';
import { readFile } from 'fs/promises';
// Root directory holding bare git repositories; overridable via the
// GIT_REPO_ROOT environment variable. The `typeof process` guard keeps
// this module importable in environments where `process` is undefined.
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
  ? process.env.GIT_REPO_ROOT
  : '/repos';

/** A single match returned by the code-search endpoint. */
export interface CodeSearchResult {
  // File path relative to the repository root.
  file: string;
  // 1-based line number of the match (as reported by `git grep -n`).
  line: number;
  // Matched line content, trimmed of surrounding whitespace.
  content: string;
  // Branch/ref that was searched ('HEAD' when none was requested).
  branch: string;
  // Commit hash when known; not populated by this endpoint's handler.
  commit?: string;
}
/**
 * Parse the output of `git grep -n -I --break --heading <query>`.
 *
 * With `--heading`, git prints each matching file name on its own line
 * (no colon), followed by `line:content` lines for matches in that file.
 *
 * NOTE(review): when grepping a tree ref (bare-repo fallback), headings
 * are printed as `ref:path`, which contains a colon and would not be
 * recognized as a heading here — verify against real git output.
 *
 * @param output      Raw stdout from git grep.
 * @param branch      Branch label attached to each result.
 * @param limit       Maximum number of results to collect.
 * @param stripPrefix Optional path prefix (e.g. a worktree directory) to
 *                    remove so file paths are relative to the repo root.
 */
function parseGrepOutput(
  output: string,
  branch: string,
  limit: number,
  stripPrefix?: string
): CodeSearchResult[] {
  const results: CodeSearchResult[] = [];
  let currentFile = '';
  for (const line of output.split('\n')) {
    if (!line.trim()) {
      continue;
    }
    // A line without a colon is a file heading (due to --heading).
    if (!line.includes(':')) {
      currentFile = line.trim();
      continue;
    }
    // Parse line:content format
    const colonIndex = line.indexOf(':');
    if (colonIndex > 0 && currentFile) {
      const lineNumber = parseInt(line.substring(0, colonIndex), 10);
      const content = line.substring(colonIndex + 1);
      if (!isNaN(lineNumber) && content) {
        const file = stripPrefix
          ? currentFile.replace(stripPrefix + '/', '').replace(/^\.\//, '')
          : currentFile;
        results.push({
          file,
          line: lineNumber,
          content: content.trim(),
          branch
        });
        if (results.length >= limit) {
          break;
        }
      }
    }
  }
  return results;
}

/**
 * GET: search file contents within a repository.
 *
 * Query params:
 * - q      (required) - search query, at least 2 characters
 * - branch (optional) - branch to search (default: 'HEAD')
 * - limit  (optional) - maximum number of results (default: 100)
 *
 * For bare repositories a worktree is created when possible; otherwise
 * `git grep` is run against the tree ref directly.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const query = event.url.searchParams.get('q');
    const branch = event.url.searchParams.get('branch') || 'HEAD';
    // Guard against non-numeric `limit`: parseInt yields NaN, and
    // `results.length >= NaN` is always false, which would disable the cap.
    const parsedLimit = parseInt(event.url.searchParams.get('limit') || '100', 10);
    const limit = Number.isNaN(parsedLimit) ? 100 : parsedLimit;
    if (!query || query.trim().length < 2) {
      throw handleValidationError('Query must be at least 2 characters', { operation: 'codeSearch', npub: context.npub, repo: context.repo });
    }
    const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
    // A missing repo yields an empty result set, not an error.
    if (!existsSync(repoPath)) {
      logger.debug({ npub: context.npub, repo: context.repo, query }, 'Code search requested for non-existent repo');
      return json([]);
    }
    try {
      const git = simpleGit(repoPath);
      // Bare repositories have no working tree to grep, so try to obtain
      // a worktree for the requested branch first.
      let worktreePath: string | null = null;
      try {
        // Resolve HEAD to a concrete branch name when needed.
        let actualBranch = branch;
        if (branch === 'HEAD') {
          try {
            const branchInfo = await git.branch(['-a']);
            actualBranch = branchInfo.current || 'main';
            // If no current branch, try common defaults.
            if (!actualBranch || actualBranch === 'HEAD') {
              const allBranches = branchInfo.all.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, ''));
              actualBranch = allBranches.find(b => b === 'main') || allBranches.find(b => b === 'master') || allBranches[0] || 'main';
            }
          } catch {
            actualBranch = 'main';
          }
        }
        worktreePath = await fileManager.getWorktree(repoPath, actualBranch, context.npub, context.repo);
      } catch (worktreeError) {
        logger.debug({ error: worktreeError, npub: context.npub, repo: context.repo, branch }, 'Could not create worktree, trying git grep with --cached');
        // Fall through to searching the tree ref directly.
      }
      const searchQuery = query.trim();
      let grepOutput: string;
      let stripPrefix: string | undefined;
      try {
        if (worktreePath && existsSync(worktreePath)) {
          // Search the checked-out worktree; returned paths may carry the
          // worktree prefix, which the parser strips.
          const worktreeGit = simpleGit(worktreePath);
          grepOutput = await worktreeGit.raw(['grep', '-n', '-I', '--break', '--heading', searchQuery]);
          stripPrefix = worktreePath;
        } else {
          // Fallback for bare repos: grep a specific tree ref.
          let treeRef = branch;
          if (branch === 'HEAD') {
            try {
              const branchInfo = await git.branch(['-a']);
              treeRef = branchInfo.current || 'HEAD';
            } catch {
              treeRef = 'HEAD';
            }
          }
          grepOutput = await git.raw(['grep', '-n', '-I', '--break', '--heading', searchQuery, treeRef]);
        }
      } catch (grepError: any) {
        // git grep returns exit code 1 when no matches were found.
        if (grepError.message && grepError.message.includes('exit code 1')) {
          return json([]);
        }
        throw grepError;
      }
      if (!grepOutput || !grepOutput.trim()) {
        return json([]);
      }
      return json(parseGrepOutput(grepOutput, branch, limit, stripPrefix));
    } catch (err) {
      logger.error({ error: err, npub: context.npub, repo: context.repo, query }, 'Error performing code search');
      throw err;
    }
  },
  { operation: 'codeSearch', requireRepoExists: false, requireRepoAccess: true }
);

6
src/routes/api/repos/[npub]/[repo]/commits/[hash]/verify/+server.ts → src/routes/api/repos/[npub]/[repo]/commits/[hash]/verification/+server.ts

@ -1,5 +1,9 @@ @@ -1,5 +1,9 @@
/**
* API endpoint for verifying commit signatures
* RESTful Commit Verification Endpoint
*
* GET /api/repos/{npub}/{repo}/commits/{hash}/verification
*
* Verifies the signature of a commit
*/
import { json } from '@sveltejs/kit';

12
src/routes/api/repos/[npub]/[repo]/diff/+server.ts → src/routes/api/repos/[npub]/[repo]/diffs/+server.ts

@ -1,8 +1,16 @@ @@ -1,8 +1,16 @@
/**
* API endpoint for getting diffs
* RESTful Diffs Endpoint
*
* GET /api/repos/{npub}/{repo}/diffs?from=...&to=...&path=...
*
* Query parameters:
* - from - Source ref (required)
* - to - Target ref (default: HEAD)
* - path - Optional file path to diff
*/
import { json, error } from '@sveltejs/kit';
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';

573
src/routes/api/repos/[npub]/[repo]/file/+server.ts

@ -1,573 +0,0 @@ @@ -1,573 +0,0 @@
/**
* API endpoint for reading and writing files in a repository
*/
import { json, error } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, repoManager, nostrClient } from '$lib/services/service-registry.js';
import { MaintainerService } from '$lib/services/nostr/maintainer-service.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { nip19 } from 'nostr-tools';
import { verifyNIP98Auth } from '$lib/services/nostr/nip98-auth.js';
import { auditLogger } from '$lib/services/security/audit-logger.js';
import logger from '$lib/services/logger.js';
import type { NostrEvent } from '$lib/types/nostr.js';
import { requireNpubHex, decodeNpubToHex } from '$lib/utils/npub-utils.js';
import { handleApiError, handleValidationError, handleNotFoundError } from '$lib/utils/error-handler.js';
import { KIND } from '$lib/types/nostr.js';
import { join } from 'path';
import { existsSync } from 'fs';
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { fetchUserEmail, fetchUserName } from '$lib/utils/user-profile.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
// Root directory holding bare git repositories; overridable via the
// GIT_REPO_ROOT environment variable. The `typeof process` guard keeps
// this module importable where `process` is undefined.
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
  ? process.env.GIT_REPO_ROOT
  : '/repos';
// Shared service for maintainer/visibility checks, bound to the default relays.
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
/**
 * GET: read a file from a repository at a given ref.
 *
 * Query params:
 * - path (required) - file path within the repository
 * - ref  (optional) - branch, tag, or commit (default: 'HEAD')
 *
 * Resolution order:
 * 1. If the repo is not cloned locally, attempt an API-based fetch using
 *    the repository's Nostr announcement (no clone created).
 * 2. Validate the requested ref against local branches, falling back to
 *    the default branch and then HEAD.
 * 3. Enforce visibility (private repos readable only by owners and
 *    maintainers); all read attempts are recorded in the audit log.
 * 4. Read from the local clone, with API fallbacks when the local read
 *    fails (e.g. empty repository).
 *
 * NOTE(review): this handler mixes `return error(...)` with throw-style
 * handling; in SvelteKit 2 `error()` throws, so an inner `return error`
 * can be intercepted by the enclosing try — verify against the project's
 * SvelteKit version.
 */
export const GET: RequestHandler = async (event) => {
  const { params, url, request } = event;
  const { npub, repo } = params;
  const filePath = url.searchParams.get('path');
  // `ref` is mutable: it may be rewritten to the default branch or HEAD below.
  let ref = url.searchParams.get('ref') || 'HEAD';
  // Extract user pubkey using the same method as other endpoints
  const requestContext = extractRequestContext(event);
  const userPubkey = requestContext.userPubkey;
  const userPubkeyHex = requestContext.userPubkeyHex;
  // Debug logging for file endpoint
  logger.debug({
    hasUserPubkey: !!userPubkey,
    hasUserPubkeyHex: !!userPubkeyHex,
    userPubkeyHex: userPubkeyHex ? userPubkeyHex.substring(0, 16) + '...' : null,
    npub,
    repo,
    filePath
  }, 'File endpoint - extracted user context');
  if (!npub || !repo || !filePath) {
    return error(400, 'Missing npub, repo, or path parameter');
  }
  try {
    const repoPath = join(repoRoot, npub, `${repo}.git`);
    // If repo doesn't exist, try to fetch it on-demand
    if (!existsSync(repoPath)) {
      try {
        // Get repo owner pubkey
        let repoOwnerPubkey: string;
        try {
          repoOwnerPubkey = requireNpubHex(npub);
        } catch {
          return error(400, 'Invalid npub format');
        }
        // Fetch repository announcement (case-insensitive) with caching
        const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
        const announcement = findRepoAnnouncement(allEvents, repo);
        if (announcement) {
          // Try API-based fetching first (no cloning)
          try {
            const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js');
            const fileContent = await tryApiFetchFile(announcement, npub, repo, filePath, ref);
            if (fileContent && fileContent.content) {
              logger.debug({ npub, repo, filePath, ref }, 'Successfully fetched file via API fallback');
              return json(fileContent);
            }
          } catch (apiErr) {
            // Log the error but don't throw - we'll return a helpful error message below
            logger.debug({ error: apiErr, npub, repo, filePath, ref }, 'API file fetch failed, will return 404');
          }
          // API fetch failed - repo is not cloned and API fetch didn't work
          // Check if announcement has clone URLs to provide better error message
          const { extractCloneUrls } = await import('$lib/utils/nostr-utils.js');
          const cloneUrls = extractCloneUrls(announcement);
          const hasCloneUrls = cloneUrls.length > 0;
          logger.debug({ npub, repo, filePath, hasCloneUrls, cloneUrlCount: cloneUrls.length }, 'API fallback failed or no clone URLs available');
          return error(404, hasCloneUrls
            ? 'Repository is not cloned locally and could not fetch file via API. Privileged users can clone this repository using the "Clone to Server" button.'
            : 'Repository is not cloned locally and has no external clone URLs for API fallback. Privileged users can clone this repository using the "Clone to Server" button.');
        } else {
          return error(404, 'Repository announcement not found in Nostr');
        }
      } catch (err) {
        logger.error({ error: err, npub, repo, filePath }, 'Error in on-demand file fetch');
        // Check if repo was created by another concurrent request
        if (existsSync(repoPath)) {
          // Repo exists now, clear cache and continue with normal flow
          repoCache.delete(RepoCache.repoExistsKey(npub, repo));
        } else {
          // If fetching fails, return 404
          return error(404, 'Repository not found');
        }
      }
    }
    // Double-check repo exists (should be true if we got here)
    if (!existsSync(repoPath)) {
      return error(404, 'Repository not found');
    }
    // Get repo owner pubkey for access check (already validated above if we did on-demand fetch)
    let repoOwnerPubkey: string;
    try {
      repoOwnerPubkey = requireNpubHex(npub);
    } catch {
      return error(400, 'Invalid npub format');
    }
    // If ref is a branch name, validate it exists or use default branch
    if (ref !== 'HEAD' && !ref.startsWith('refs/')) {
      try {
        const branches = await fileManager.getBranches(npub, repo);
        if (!branches.includes(ref)) {
          // Branch doesn't exist, try to get default branch
          try {
            ref = await fileManager.getDefaultBranch(npub, repo);
            logger.debug({ npub, repo, originalRef: url.searchParams.get('ref'), newRef: ref }, 'Branch not found, using default branch');
          } catch (defaultBranchErr) {
            // If we can't get default branch, fall back to HEAD
            logger.warn({ error: defaultBranchErr, npub, repo, ref }, 'Could not get default branch, falling back to HEAD');
            ref = 'HEAD';
          }
        }
      } catch (branchErr) {
        // If we can't get branches, fall back to HEAD
        logger.warn({ error: branchErr, npub, repo, ref }, 'Could not get branches, falling back to HEAD');
        ref = 'HEAD';
      }
    }
    // Check repository privacy (repoOwnerPubkey already declared above)
    logger.debug({
      userPubkeyHex: userPubkeyHex ? userPubkeyHex.substring(0, 16) + '...' : null,
      repoOwnerPubkey: repoOwnerPubkey.substring(0, 16) + '...',
      repo
    }, 'File endpoint - checking canView before access check');
    const canView = await maintainerService.canView(userPubkeyHex || null, repoOwnerPubkey, repo);
    logger.debug({
      canView,
      userPubkeyHex: userPubkeyHex ? userPubkeyHex.substring(0, 16) + '...' : null,
      repoOwnerPubkey: repoOwnerPubkey.substring(0, 16) + '...',
      repo
    }, 'File endpoint - canView result');
    if (!canView) {
      // Denied reads are audited with the reason.
      auditLogger.logFileOperation(
        userPubkeyHex || null,
        requestContext.clientIp,
        'read',
        `${npub}/${repo}`,
        filePath,
        'denied',
        'Insufficient permissions'
      );
      return error(403, 'This repository is private. Only owners and maintainers can view it.');
    }
    try {
      // Log what we're trying to do
      logger.debug({ npub, repo, filePath, ref }, 'Attempting to read file from cloned repository');
      let fileContent;
      try {
        fileContent = await fileManager.getFileContent(npub, repo, filePath, ref);
      } catch (firstErr) {
        // If the first attempt fails and ref is not HEAD, try with HEAD as fallback
        if (ref !== 'HEAD' && !ref.startsWith('refs/')) {
          logger.warn({
            error: firstErr,
            npub,
            repo,
            filePath,
            originalRef: ref
          }, 'Failed to read file with specified ref, trying HEAD as fallback');
          try {
            fileContent = await fileManager.getFileContent(npub, repo, filePath, 'HEAD');
            ref = 'HEAD'; // Update ref for logging
          } catch (headErr) {
            // If HEAD also fails, try API fallback before throwing
            logger.debug({ error: headErr, npub, repo, filePath }, 'Failed to read file from local repo, attempting API fallback');
            try {
              const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
              const announcement = findRepoAnnouncement(allEvents, repo);
              if (announcement) {
                const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js');
                // Use the original ref, or 'main' as fallback
                const apiRef = url.searchParams.get('ref') || 'main';
                const apiFileContent = await tryApiFetchFile(announcement, npub, repo, filePath, apiRef);
                if (apiFileContent && apiFileContent.content) {
                  logger.info({ npub, repo, filePath, ref: apiRef }, 'Successfully fetched file via API fallback for empty repo');
                  auditLogger.logFileOperation(
                    userPubkeyHex || null,
                    requestContext.clientIp,
                    'read',
                    `${npub}/${repo}`,
                    filePath,
                    'success'
                  );
                  return json(apiFileContent);
                }
              }
            } catch (apiErr) {
              logger.debug({ error: apiErr, npub, repo, filePath }, 'API fallback failed for file');
            }
            // If API fallback also fails, throw the original error
            throw firstErr;
          }
        } else {
          // Try API fallback before throwing
          logger.debug({ error: firstErr, npub, repo, filePath }, 'Failed to read file from local repo, attempting API fallback');
          try {
            const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
            const announcement = findRepoAnnouncement(allEvents, repo);
            if (announcement) {
              const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js');
              const apiRef = ref === 'HEAD' ? 'main' : ref;
              const apiFileContent = await tryApiFetchFile(announcement, npub, repo, filePath, apiRef);
              if (apiFileContent && apiFileContent.content) {
                logger.info({ npub, repo, filePath, ref: apiRef }, 'Successfully fetched file via API fallback for empty repo');
                auditLogger.logFileOperation(
                  userPubkeyHex || null,
                  requestContext.clientIp,
                  'read',
                  `${npub}/${repo}`,
                  filePath,
                  'success'
                );
                return json(apiFileContent);
              }
            }
          } catch (apiErr) {
            logger.debug({ error: apiErr, npub, repo, filePath }, 'API fallback failed for file');
          }
          throw firstErr;
        }
      }
      auditLogger.logFileOperation(
        userPubkeyHex || null,
        requestContext.clientIp,
        'read',
        `${npub}/${repo}`,
        filePath,
        'success'
      );
      return json(fileContent);
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : String(err);
      const errorLower = errorMessage.toLowerCase();
      const errorStack = err instanceof Error ? err.stack : undefined;
      logger.error({
        error: err,
        errorStack,
        npub,
        repo,
        filePath,
        ref,
        repoExists: existsSync(repoPath),
        errorMessage
      }, 'Error reading file from cloned repository');
      auditLogger.logFileOperation(
        userPubkeyHex || null,
        requestContext.clientIp,
        'read',
        `${npub}/${repo}`,
        filePath,
        'failure',
        errorMessage
      );
      // If file not found or path doesn't exist, return 404 instead of 500
      // (heuristic match on common git error strings).
      if (errorLower.includes('not found') ||
        errorLower.includes('no such file') ||
        errorLower.includes('does not exist') ||
        errorLower.includes('fatal:') ||
        errorMessage.includes('pathspec')) {
        return error(404, `File not found: ${filePath} at ref ${ref}`);
      }
      // For other errors, return 500 with a more helpful message
      return error(500, `Failed to read file: ${errorMessage}`);
    }
  } catch (err) {
    // This catch block handles errors that occur outside the file reading try-catch
    // (e.g., in branch validation, access checks, etc.)
    // If it's already a Response (from error handlers), return it
    if (err instanceof Response) {
      return err;
    }
    // If it's a SvelteKit HttpError (from error() function), re-throw it
    // SvelteKit errors have a status property and body property
    if (err && typeof err === 'object' && 'status' in err && 'body' in err) {
      throw err;
    }
    const errorMessage = err instanceof Error ? err.message : String(err);
    const errorStack = err instanceof Error ? err.stack : undefined;
    logger.error({
      error: err,
      errorStack,
      npub,
      repo,
      filePath,
      ref: url.searchParams.get('ref'),
      errorMessage
    }, 'Unexpected error in file endpoint (outside file reading block)');
    // Check if it's a "not found" type error
    const errorLower = errorMessage.toLowerCase();
    if (errorLower.includes('not found') ||
      errorLower.includes('repository not found')) {
      return error(404, errorMessage);
    }
    return handleApiError(err, { operation: 'readFile', npub, repo, filePath }, 'Failed to read file');
  }
};
export const POST: RequestHandler = async ({ params, url, request }: { params: { npub?: string; repo?: string }; url: URL; request: Request }) => {
const { npub, repo } = params;
if (!npub || !repo) {
return error(400, 'Missing npub or repo parameter');
}
let path: string | undefined;
try {
const body = await request.json();
path = body.path;
const { content, commitMessage, authorName, authorEmail, branch, action, userPubkey, useNIP07, nsecKey, commitSignatureEvent } = body;
// Check for NIP-98 authentication (for git operations)
const authHeader = request.headers.get('Authorization');
let nip98Event = null;
if (authHeader && authHeader.startsWith('Nostr ')) {
const requestUrl = `${request.headers.get('x-forwarded-proto') || (url.protocol === 'https:' ? 'https' : 'http')}://${request.headers.get('host') || url.host}${url.pathname}${url.search}`;
const authResult = verifyNIP98Auth(authHeader, requestUrl, request.method);
if (authResult.valid && authResult.event) {
nip98Event = authResult.event;
}
}
if (!path || !commitMessage) {
return error(400, 'Missing required fields: path, commitMessage');
}
// Fetch authorName and authorEmail from kind 0 event if not provided
let finalAuthorName = authorName;
let finalAuthorEmail = authorEmail;
if (!finalAuthorName || !finalAuthorEmail) {
if (!userPubkey) {
return error(400, 'Missing userPubkey. Cannot fetch author information without userPubkey.');
}
const userPubkeyHexForProfile = decodeNpubToHex(userPubkey) || userPubkey;
try {
if (!finalAuthorName) {
finalAuthorName = await fetchUserName(userPubkeyHexForProfile, userPubkey, DEFAULT_NOSTR_RELAYS);
}
if (!finalAuthorEmail) {
finalAuthorEmail = await fetchUserEmail(userPubkeyHexForProfile, userPubkey, DEFAULT_NOSTR_RELAYS);
}
} catch (err) {
logger.warn({ error: err, userPubkey }, 'Failed to fetch user profile for author info, using fallbacks');
// Use fallbacks if fetch fails
if (!finalAuthorName) {
const npub = userPubkey.startsWith('npub') ? userPubkey : nip19.npubEncode(userPubkeyHexForProfile);
finalAuthorName = npub.substring(0, 20);
}
if (!finalAuthorEmail) {
const npub = userPubkey.startsWith('npub') ? userPubkey : nip19.npubEncode(userPubkeyHexForProfile);
finalAuthorEmail = `${npub.substring(0, 20)}@gitrepublic.web`;
}
}
}
if (!userPubkey) {
return error(401, 'Authentication required. Please provide userPubkey.');
}
// Check if repo exists locally
if (!fileManager.repoExists(npub, repo)) {
// Try to fetch announcement to see if repo exists in Nostr
let repoOwnerPubkey: string;
try {
repoOwnerPubkey = requireNpubHex(npub);
} catch {
return error(400, 'Invalid npub format');
}
// Fetch repository announcement (case-insensitive) with caching
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, repo);
if (announcement) {
// Repository exists in Nostr but is not cloned locally
// For file editing, we need a local clone
return error(404, 'Repository is not cloned locally. To edit files, the repository must be cloned to the server first. Please use the "Clone to Server" button if you have unlimited access, or contact a server administrator.');
} else {
return error(404, 'Repository not found');
}
}
// Check if user is a maintainer
let repoOwnerPubkey: string;
try {
repoOwnerPubkey = requireNpubHex(npub);
} catch {
return error(400, 'Invalid npub format');
}
// Convert userPubkey to hex if needed
const userPubkeyHex = decodeNpubToHex(userPubkey) || userPubkey;
const isMaintainer = await maintainerService.isMaintainer(userPubkeyHex, repoOwnerPubkey, repo);
if (!isMaintainer) {
return error(403, 'Only repository maintainers can edit files directly. Please submit a pull request instead.');
}
// Prepare signing options
// NOTE: nsecKey is intentionally NOT supported from client requests for security reasons.
// Clients should use NIP-07 (browser extension) or NIP-98 (HTTP auth) instead.
// nsecKey is only for server-side use via environment variables.
const signingOptions: {
useNIP07?: boolean;
nip98Event?: NostrEvent;
nsecKey?: string;
commitSignatureEvent?: NostrEvent;
} = {};
// If client sent a pre-signed commit signature event (from NIP-07), use it
if (commitSignatureEvent && commitSignatureEvent.sig && commitSignatureEvent.id) {
signingOptions.commitSignatureEvent = commitSignatureEvent;
} else if (nip98Event) {
signingOptions.nip98Event = nip98Event;
}
// Note: useNIP07 is no longer used since signing happens client-side
// Explicitly ignore nsecKey from client requests - it's a security risk
// Server-side signing is not recommended - commits should be signed by their authors
if (nsecKey) {
// Security: Log warning but never log the actual key value
const clientIp = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown';
logger.warn({ clientIp, npub, repo }, '[SECURITY] Client attempted to send nsecKey in request. This is not allowed for security reasons.');
auditLogger.log({
user: userPubkeyHex || undefined,
ip: clientIp,
action: 'auth_attempt',
resource: 'file_operation',
result: 'failure',
error: 'Client attempted to send private key in request body',
metadata: { reason: 'security_violation' }
});
}
const clientIp = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown';
if (action === 'delete') {
try {
// Get default branch if not provided
const targetBranch = branch || await fileManager.getDefaultBranch(npub, repo);
await fileManager.deleteFile(
npub,
repo,
path,
commitMessage,
finalAuthorName,
finalAuthorEmail,
targetBranch,
Object.keys(signingOptions).length > 0 ? signingOptions : undefined
);
auditLogger.logFileOperation(
userPubkeyHex,
clientIp,
'delete',
`${npub}/${repo}`,
path,
'success'
);
return json({ success: true, message: 'File deleted and committed' });
} catch (err) {
auditLogger.logFileOperation(
userPubkeyHex,
clientIp,
'delete',
`${npub}/${repo}`,
path,
'failure',
err instanceof Error ? err.message : String(err)
);
throw err;
}
} else if (action === 'create' || content !== undefined) {
if (content === undefined) {
return error(400, 'Content is required for create/update operations');
}
try {
// Get default branch if not provided
const targetBranch = branch || await fileManager.getDefaultBranch(npub, repo);
await fileManager.writeFile(
npub,
repo,
path,
content,
commitMessage,
finalAuthorName,
finalAuthorEmail,
targetBranch,
Object.keys(signingOptions).length > 0 ? signingOptions : undefined
);
auditLogger.logFileOperation(
userPubkeyHex,
clientIp,
action === 'create' ? 'create' : 'write',
`${npub}/${repo}`,
path,
'success'
);
return json({ success: true, message: 'File saved and committed' });
} catch (err) {
auditLogger.logFileOperation(
userPubkeyHex,
clientIp,
action === 'create' ? 'create' : 'write',
`${npub}/${repo}`,
path,
'failure',
err instanceof Error ? err.message : String(err)
);
throw err;
}
} else {
return error(400, 'Invalid action or missing content');
}
} catch (err) {
return handleApiError(err, { operation: 'writeFile', npub, repo, filePath: path }, 'Failed to write file');
}
};

1194
src/routes/api/repos/[npub]/[repo]/files/+server.ts

File diff suppressed because it is too large Load Diff

457
src/routes/api/repos/[npub]/[repo]/forks/+server.ts

@@ -0,0 +1,457 @@
/**
* RESTful Forks Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/forks # List forks / Get fork info
* POST /api/repos/{npub}/{repo}/forks # Create fork (fork this repo)
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError } from '$lib/utils/error-handler.js';
import { DEFAULT_NOSTR_RELAYS, combineRelays } from '$lib/config.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { KIND, type NostrEvent } from '$lib/types/nostr.js';
import { getVisibility, getProjectRelays } from '$lib/utils/repo-visibility.js';
import { nip19 } from 'nostr-tools';
import { signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js';
import { requireNpubHex, decodeNpubToHex } from '$lib/utils/npub-utils.js';
import { OwnershipTransferService } from '$lib/services/nostr/ownership-transfer-service.js';
import { existsSync } from 'fs';
import { rm } from 'fs/promises';
import { join, resolve } from 'path';
import simpleGit from 'simple-git';
import { validateRepoPath } from '$lib/utils/security.js';
import { ResourceLimits } from '$lib/services/security/resource-limits.js';
import { auditLogger } from '$lib/services/security/audit-logger.js';
import { ForkCountService } from '$lib/services/nostr/fork-count-service.js';
import { getCachedUserLevel } from '$lib/services/security/user-level-cache.js';
import { hasUnlimitedAccess } from '$lib/utils/user-access.js';
import logger from '$lib/services/logger.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { repoManager, nostrClient, forkCountService } from '$lib/services/service-registry.js';
// Resolve GIT_REPO_ROOT to absolute path (defaults to /repos when unset).
// `resourceLimits` is rooted here and is consulted below via
// `canCreateRepo(userNpub)` before a fork clone is allowed.
const repoRootEnv = process.env.GIT_REPO_ROOT || '/repos';
const repoRoot = resolve(repoRootEnv);
const resourceLimits = new ResourceLimits(repoRoot);
/**
 * Retry publishing an event with exponential backoff.
 *
 * Attempts to publish `event` to `relays` up to `maxAttempts` times, waiting
 * 1s, 2s, 4s, ... between failed attempts. Returns as soon as at least one
 * relay accepts the event; otherwise returns the result of the last attempt.
 *
 * @param event - Signed Nostr event to publish
 * @param relays - Relay URLs to publish to
 * @param eventName - Human-readable event name used in log messages
 * @param maxAttempts - Maximum number of publish attempts (default 3)
 * @param context - Optional log-context prefix; defaults to the event id
 * @returns Per-relay success/failure lists from the last publish attempt
 *          (empty lists when maxAttempts < 1, i.e. no attempt was made)
 */
async function publishEventWithRetry(
  event: NostrEvent,
  relays: string[],
  eventName: string,
  maxAttempts: number = 3,
  context?: string
): Promise<{ success: string[]; failed: Array<{ relay: string; error: string }> }> {
  // FIX: initialize the accumulator instead of `let lastResult = null` +
  // `return lastResult!` — the non-null assertion returned null (and crashed
  // callers reading `.success`) whenever the loop body never ran.
  let lastResult: { success: string[]; failed: Array<{ relay: string; error: string }> } = { success: [], failed: [] };
  const eventId = event.id.slice(0, 8);
  const logContext = context || `[event:${eventId}]`;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    logger.info({ logContext, eventName, attempt, maxAttempts }, `[Fork] Publishing ${eventName} - Attempt ${attempt}/${maxAttempts}...`);
    lastResult = await nostrClient.publishEvent(event, relays);
    if (lastResult.success.length > 0) {
      logger.info({ logContext, eventName, successCount: lastResult.success.length }, `[Fork] ${eventName} published successfully`);
      return lastResult;
    }
    if (attempt < maxAttempts) {
      // Exponential backoff: 1s, 2s, 4s, ...
      const delayMs = Math.pow(2, attempt - 1) * 1000;
      logger.warn({ logContext, eventName, attempt, delayMs }, `[Fork] ${eventName} failed on attempt ${attempt}. Retrying...`);
      // `done` avoids shadowing the `resolve` imported from 'path'.
      await new Promise(done => setTimeout(done, delayMs));
    }
  }
  logger.error({ logContext, eventName, maxAttempts }, `[Fork] ${eventName} failed after ${maxAttempts} attempts`);
  return lastResult;
}
/**
 * GET: Get fork information.
 *
 * Reports whether this repository is itself a fork (presence of a 'fork' tag
 * on its announcement), the original repo it was forked from (when parseable),
 * and — for non-fork repos — how many forks it has.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext) => {
    try {
      // Look up the repo announcement (case-insensitive match, cached).
      const announcements = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
      const announcement = findRepoAnnouncement(announcements, context.repo);
      if (!announcement) {
        return error(404, 'Repository announcement not found');
      }
      // A 'fork' tag marks this repo as a fork and carries the original's address.
      const forkTag = announcement.tags.find(t => t[0] === 'fork');
      const isFork = forkTag !== undefined;
      // Parse the original repo address: "<kind>:<owner hex pubkey>:<repo>".
      let originalRepo: { npub: string; repo: string } | null = null;
      const forkRef = forkTag?.[1];
      if (forkRef) {
        const addressPattern = new RegExp(`^${KIND.REPO_ANNOUNCEMENT}:([a-f0-9]{64}):(.+)$`);
        const match = forkRef.match(addressPattern);
        if (match) {
          try {
            originalRepo = { npub: nip19.npubEncode(match[1]), repo: match[2] };
          } catch {
            // Invalid pubkey — leave originalRepo null.
          }
        }
      }
      // Only count forks for non-fork (origin) repositories.
      let forkCount = 0;
      if (!isFork && context.repoOwnerPubkey && context.repo) {
        try {
          forkCount = await forkCountService.getForkCount(context.repoOwnerPubkey, context.repo);
        } catch (err) {
          logger.warn({ error: err, npub: context.npub, repo: context.repo }, 'Failed to get fork count');
        }
      }
      return json({ isFork, originalRepo, forkCount });
    } catch (err) {
      return handleApiError(err, { operation: 'getForkInfo', npub: context.npub, repo: context.repo }, 'Failed to get fork information');
    }
  },
  { operation: 'getForkInfo', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * POST: Create fork.
 * Body: { userPubkey, forkName?, localOnly? }
 *
 * Clones this repository (bare) into the requesting user's namespace. For
 * public forks the handler then publishes a fork announcement and an initial
 * ownership-transfer event to Nostr relays, deleting the clone (and, when the
 * announcement already went out, issuing a NIP-09 deletion request) if
 * publishing fails. Local-only forks skip relay publishing and are always
 * marked private.
 */
export const POST: RequestHandler = createRepoPostHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    try {
      const body = await event.request.json();
      const { userPubkey, forkName, localOnly } = body;
      if (!userPubkey) {
        return error(401, 'Authentication required. Please provide userPubkey.');
      }
      const isLocalOnly = localOnly === true;
      const originalOwnerPubkey = context.repoOwnerPubkey;
      // Decode user pubkey (accepts an npub; falls through to raw hex).
      const userPubkeyHex = decodeNpubToHex(userPubkey) || userPubkey;
      const userNpub = nip19.npubEncode(userPubkeyHex);
      // Determine fork name (defaults to the original repository name).
      const forkRepoName = forkName || context.repo;
      // Check if user has unlimited access.
      const userLevel = getCachedUserLevel(userPubkeyHex);
      if (!hasUnlimitedAccess(userLevel?.level)) {
        auditLogger.logRepoFork(
          userPubkeyHex,
          `${context.npub}/${context.repo}`,
          `${userNpub}/${forkRepoName}`,
          'failure',
          'User does not have unlimited access'
        );
        return error(403, 'Repository creation requires unlimited access. Please verify you can write to at least one default Nostr relay.');
      }
      // Check resource limits for repo creation.
      const resourceCheck = await resourceLimits.canCreateRepo(userNpub);
      if (!resourceCheck.allowed) {
        auditLogger.logRepoFork(
          userPubkeyHex,
          `${context.npub}/${context.repo}`,
          `${userNpub}/${forkRepoName}`,
          'failure',
          resourceCheck.reason
        );
        return error(403, resourceCheck.reason || 'Resource limit exceeded');
      }
      // Check if original repo exists (path validated against traversal).
      const originalRepoPath = join(repoRoot, context.npub, `${context.repo}.git`);
      const originalPathValidation = validateRepoPath(originalRepoPath, repoRoot);
      if (!originalPathValidation.valid) {
        return error(403, originalPathValidation.error || 'Invalid repository path');
      }
      if (!existsSync(originalRepoPath)) {
        return error(404, 'Original repository not found');
      }
      // Get original repo announcement (cached, case-insensitive lookup).
      const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, originalOwnerPubkey, eventCache);
      const originalAnnouncement = findRepoAnnouncement(allAnnouncements, context.repo);
      if (!originalAnnouncement) {
        return error(404, 'Original repository announcement not found');
      }
      // Check if fork already exists.
      const forkRepoPath = join(repoRoot, userNpub, `${forkRepoName}.git`);
      const forkPathValidation = validateRepoPath(forkRepoPath, repoRoot);
      if (!forkPathValidation.valid) {
        return error(403, forkPathValidation.error || 'Invalid fork repository path');
      }
      if (existsSync(forkRepoPath)) {
        return error(409, 'Fork already exists');
      }
      // Clone the repository as a bare repo into the user's namespace.
      // FIX: the 'success' audit entry was previously written here, before the
      // clone/publish/provision steps had run, so failed forks were audited as
      // successes. It is now recorded at the end of the happy path. The unused
      // `clientIp` locals (computed but never passed to the audit logger) were
      // also removed.
      const git = simpleGit();
      await git.clone(originalRepoPath, forkRepoPath, ['--bare']);
      // Invalidate resource limit cache so the new repo counts immediately.
      resourceLimits.invalidateCache(userNpub);
      // Create fork announcement.
      const gitDomain = process.env.GIT_DOMAIN || 'localhost:6543';
      const isLocalhost = gitDomain.startsWith('localhost') || gitDomain.startsWith('127.0.0.1');
      const protocol = isLocalhost ? 'http' : 'https';
      const forkGitUrl = `${protocol}://${gitDomain}/${userNpub}/${forkRepoName}.git`;
      // Get Tor .onion URL if available.
      const { getTorGitUrl } = await import('$lib/services/tor/hidden-service.js');
      const torOnionUrl = await getTorGitUrl(userNpub, forkRepoName);
      // Extract original clone URLs, dropping this server's own domain and
      // any .onion addresses (the fork gets its own).
      const originalCloneUrls = originalAnnouncement.tags
        .filter(t => t[0] === 'clone')
        .flatMap(t => t.slice(1))
        .filter(url => url && typeof url === 'string')
        .filter(url => {
          if (url.includes(gitDomain)) return false;
          if (url.includes('.onion')) return false;
          return true;
        }) as string[];
      // Earliest-unique-commit reference ('r' tag with 'euc' marker).
      const earliestCommitTag = originalAnnouncement.tags.find(t => t[0] === 'r' && t[2] === 'euc');
      const earliestCommit = earliestCommitTag?.[1];
      // Get original repo name and description.
      const originalName = originalAnnouncement.tags.find(t => t[0] === 'name')?.[1] || context.repo;
      const originalDescription = originalAnnouncement.tags.find(t => t[0] === 'description')?.[1] || '';
      // Build clone URLs for fork.
      const forkCloneUrls: string[] = [];
      if (!isLocalhost && !forkGitUrl.includes('localhost') && !forkGitUrl.includes('127.0.0.1')) {
        forkCloneUrls.push(forkGitUrl);
      }
      if (torOnionUrl) {
        forkCloneUrls.push(torOnionUrl);
      }
      forkCloneUrls.push(...originalCloneUrls);
      // Validate: if using localhost, require either a Tor .onion URL or at
      // least one other clone URL so the announcement is publicly reachable.
      if (isLocalhost && !torOnionUrl && originalCloneUrls.length === 0) {
        return error(400, 'Cannot create fork with only localhost. The original repository must have at least one public clone URL, or you need to configure a Tor .onion address.');
      }
      // Preserve visibility and project-relay from the original repo.
      const originalVisibility = getVisibility(originalAnnouncement);
      const originalProjectRelays = getProjectRelays(originalAnnouncement);
      // Build fork announcement tags.
      const originalRepoTag = `${KIND.REPO_ANNOUNCEMENT}:${originalOwnerPubkey}:${context.repo}`;
      const tags: string[][] = [
        ['d', forkRepoName],
        ['name', `${originalName} (fork)`],
        ['description', `Fork of ${originalName}${originalDescription ? `: ${originalDescription}` : ''}`],
        ['clone', ...forkCloneUrls],
        ['relays', ...DEFAULT_NOSTR_RELAYS],
        ['fork', originalRepoTag],
        ['p', originalOwnerPubkey],
      ];
      // Local-only forks are always private.
      if (isLocalOnly) {
        tags.push(['visibility', 'private']);
        tags.push(['local-only', 'true']);
      } else {
        if (originalVisibility !== 'public') {
          tags.push(['visibility', originalVisibility]);
        }
      }
      // Preserve project-relay tags.
      for (const relay of originalProjectRelays) {
        tags.push(['project-relay', relay]);
      }
      // Add earliest unique commit if available.
      if (earliestCommit) {
        tags.push(['r', earliestCommit, 'euc']);
      }
      // Create fork announcement event.
      const forkAnnouncementTemplate = {
        kind: KIND.REPO_ANNOUNCEMENT,
        pubkey: userPubkeyHex,
        created_at: Math.floor(Date.now() / 1000),
        content: '',
        tags
      };
      // Sign fork announcement.
      const signedForkAnnouncement = await signEventWithNIP07(forkAnnouncementTemplate);
      const truncatedNpub = userNpub.length > 16 ? `${userNpub.slice(0, 12)}...` : userNpub;
      const truncatedOriginalNpub = context.npub.length > 16 ? `${context.npub.slice(0, 12)}...` : context.npub;
      // FIX: separate the source and destination identifiers with an arrow —
      // they were fused into one unreadable token in the log context.
      const logContext = `[${truncatedOriginalNpub}/${context.repo} -> ${truncatedNpub}/${forkRepoName}]`;
      let publishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null;
      let ownershipPublishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null;
      let signedOwnershipEvent: NostrEvent | null = null;
      if (isLocalOnly) {
        // Local-only fork: skip publishing to Nostr relays.
        logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: true }, 'Creating local-only fork (not publishing to Nostr)');
        publishResult = { success: [], failed: [] };
        ownershipPublishResult = { success: [], failed: [] };
        // Create synthetic ownership event (signed but never published).
        const ownershipService = new OwnershipTransferService([]);
        const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName);
        signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent);
      } else {
        // Public fork: publish to the user's outbox relays (combined with defaults).
        const { outbox } = await getUserRelays(userPubkeyHex, nostrClient);
        const combinedRelays = combineRelays(outbox);
        logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, relayCount: combinedRelays.length }, 'Starting fork process');
        publishResult = await publishEventWithRetry(
          signedForkAnnouncement,
          combinedRelays,
          'fork announcement',
          3,
          logContext
        );
        if (publishResult.success.length === 0) {
          // No relay accepted the announcement: roll back the clone.
          logger.error({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: publishResult.failed }, 'Fork announcement failed after all retries. Cleaning up repository.');
          await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {});
          const errorDetails = `All relays failed: ${publishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}`;
          return json({
            success: false,
            error: 'Failed to publish fork announcement to relays after 3 attempts',
            details: errorDetails,
            eventName: 'fork announcement'
          }, { status: 500 });
        }
        // Create and publish initial ownership proof.
        const ownershipService = new OwnershipTransferService(combinedRelays);
        const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName);
        signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent);
        ownershipPublishResult = await publishEventWithRetry(
          signedOwnershipEvent,
          combinedRelays,
          'ownership transfer event',
          3,
          logContext
        );
        if (ownershipPublishResult.success.length === 0) {
          // Announcement went out but the ownership proof did not: roll back
          // the clone and ask relays to delete the now-invalid announcement.
          logger.error({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: ownershipPublishResult.failed }, 'Ownership transfer event failed after all retries. Cleaning up repository.');
          await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {});
          // Publish deletion request (NIP-09).
          logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Publishing deletion request for invalid fork announcement...');
          const deletionRequest = {
            kind: KIND.DELETION_REQUEST,
            pubkey: userPubkeyHex,
            created_at: Math.floor(Date.now() / 1000),
            content: 'Fork failed: ownership transfer event could not be published after 3 attempts. This announcement is invalid.',
            tags: [
              ['a', `${KIND.REPO_ANNOUNCEMENT}:${userPubkeyHex}:${forkRepoName}`],
              ['k', KIND.REPO_ANNOUNCEMENT.toString()]
            ]
          };
          const signedDeletionRequest = await signEventWithNIP07(deletionRequest);
          const deletionResult = await publishEventWithRetry(
            signedDeletionRequest,
            combinedRelays,
            'deletion request',
            3,
            logContext
          );
          const errorDetails = `Fork is invalid without ownership proof. All relays failed: ${ownershipPublishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}. Deletion request ${deletionResult.success.length > 0 ? 'published' : 'failed to publish'}.`;
          return json({
            success: false,
            error: 'Failed to publish ownership transfer event to relays after 3 attempts',
            details: errorDetails,
            eventName: 'ownership transfer event'
          }, { status: 500 });
        }
      }
      // Provision the fork repo.
      logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: isLocalOnly }, 'Provisioning fork repository...');
      await repoManager.provisionRepo(signedForkAnnouncement, signedOwnershipEvent || undefined, false);
      logger.info({
        operation: 'fork',
        originalRepo: `${context.npub}/${context.repo}`,
        forkRepo: `${userNpub}/${forkRepoName}`,
        localOnly: isLocalOnly,
        announcementId: signedForkAnnouncement.id,
        ownershipTransferId: signedOwnershipEvent?.id,
        announcementRelays: publishResult?.success.length || 0,
        ownershipRelays: ownershipPublishResult?.success.length || 0
      }, 'Fork completed successfully');
      // FIX: record the success audit entry only after every step succeeded.
      auditLogger.logRepoFork(
        userPubkeyHex,
        `${context.npub}/${context.repo}`,
        `${userNpub}/${forkRepoName}`,
        'success'
      );
      const message = isLocalOnly
        ? 'Local-only fork created successfully! This fork is private and only exists on this server.'
        : `Repository forked successfully! Published to ${publishResult?.success.length || 0} relay(s) for announcement and ${ownershipPublishResult?.success.length || 0} relay(s) for ownership proof.`;
      return json({
        success: true,
        fork: {
          npub: userNpub,
          repo: forkRepoName,
          url: forkGitUrl,
          localOnly: isLocalOnly,
          announcementId: signedForkAnnouncement.id,
          ownershipTransferId: signedOwnershipEvent?.id,
          publishedTo: isLocalOnly ? null : {
            announcement: publishResult?.success.length || 0,
            ownershipTransfer: ownershipPublishResult?.success.length || 0
          }
        },
        message
      });
    } catch (err) {
      return handleApiError(err, { operation: 'createFork', npub: context.npub, repo: context.repo }, 'Failed to fork repository');
    }
  },
  { operation: 'createFork', requireRepoExists: false }
);

371
src/routes/api/repos/[npub]/[repo]/maintainers/+server.ts

@@ -1,31 +1,380 @@
/**
* API endpoint for checking maintainer status
* RESTful Maintainers Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/maintainers # List maintainers
* POST /api/repos/{npub}/{repo}/maintainers # Add maintainer
* DELETE /api/repos/{npub}/{repo}/maintainers/{npub} # Remove maintainer
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { maintainerService } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext } from '$lib/utils/api-context.js';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import { nip19 } from 'nostr-tools';
import { getPublicKeyWithNIP07, signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { nostrClient } from '$lib/services/service-registry.js';
import { KIND } from '$lib/types/nostr.js';
import { DEFAULT_NOSTR_RELAYS, combineRelays } from '$lib/config.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js';
import logger from '$lib/services/logger.js';
import { getRelaysForEventPublishing } from '$lib/utils/repo-visibility.js';
import { AnnouncementManager } from '$lib/services/git/announcement-manager.js';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
/**
* GET: List maintainers
*/
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext) => {
const { maintainers, owner } = await maintainerService.getMaintainers(context.repoOwnerPubkey, context.repo);
// Convert hex pubkeys to npubs for response
const maintainerNpubs = maintainers.map(p => nip19.npubEncode(p));
const ownerNpub = nip19.npubEncode(owner);
// If userPubkey provided, check if they're a maintainer
// SECURITY: Do NOT leak userPubkey in response - only return boolean status
if (context.userPubkeyHex) {
const isMaintainer = maintainers.includes(context.userPubkeyHex);
return json({
maintainers,
owner,
maintainers: maintainerNpubs,
owner: ownerNpub,
isMaintainer
// SECURITY: Removed userPubkey leak - client already knows their own pubkey
});
}
return json({ maintainers, owner });
return json({
maintainers: maintainerNpubs,
owner: ownerNpub
});
},
{ operation: 'getMaintainers', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * POST: Add maintainer
 * Body: { maintainer: "npub..." }
 *
 * Only an existing owner/maintainer may add maintainers. The handler rewrites
 * the repo announcement's 'maintainers' tag, signs the updated announcement
 * via NIP-07, publishes it (subject to visibility rules), and best-effort
 * saves it into the on-disk repository.
 */
export const POST: RequestHandler = createRepoPostHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    // Caller identity is mandatory for this mutation.
    if (!context.userPubkeyHex) {
      return error(401, 'Authentication required');
    }
    // Verify user is owner or maintainer
    const isMaintainer = await maintainerService.isMaintainer(context.userPubkeyHex, context.repoOwnerPubkey, context.repo);
    if (!isMaintainer) {
      return error(403, 'Only maintainers can add maintainers');
    }
    const body = await event.request.json();
    const { maintainer } = body;
    if (!maintainer) {
      throw handleValidationError('Missing maintainer in request body', {
        operation: 'addMaintainer',
        npub: context.npub,
        repo: context.repo
      });
    }
    // Decode maintainer npub to hex
    let maintainerHex: string;
    // Try as hex first (most common case)
    if (/^[0-9a-f]{64}$/i.test(maintainer)) {
      maintainerHex = maintainer.toLowerCase();
    } else {
      // Try decoding as npub
      try {
        const decoded = nip19.decode(maintainer) as { type: string; data: unknown };
        if (decoded.type !== 'npub' || typeof decoded.data !== 'string') {
          throw handleValidationError('Invalid maintainer format. Must be npub or hex pubkey', {
            operation: 'addMaintainer',
            npub: context.npub,
            repo: context.repo
          });
        }
        maintainerHex = decoded.data;
      } catch (err) {
        // Re-throw our own validation error unchanged; wrap nip19 decode
        // failures in the same validation error.
        if (err instanceof Error && err.message.includes('Invalid maintainer format')) {
          throw err;
        }
        throw handleValidationError('Invalid maintainer format. Must be npub or hex pubkey', {
          operation: 'addMaintainer',
          npub: context.npub,
          repo: context.repo
        });
      }
    }
    // Get current announcement
    const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
    const announcement = findRepoAnnouncement(allEvents, context.repo);
    if (!announcement) {
      throw handleValidationError('Repository announcement not found', {
        operation: 'addMaintainer',
        npub: context.npub,
        repo: context.repo
      });
    }
    // Get current maintainers
    const { maintainers: currentMaintainers } = await maintainerService.getMaintainers(
      context.repoOwnerPubkey,
      context.repo
    );
    // Check if already a maintainer — idempotent success; nothing is re-signed
    // or republished in that case.
    if (currentMaintainers.includes(maintainerHex)) {
      return json({
        success: true,
        message: 'Maintainer already exists',
        maintainer: maintainer
      });
    }
    // Build updated tags
    const tags: string[][] = [...announcement.tags];
    // Remove existing maintainers tags: collect indices first, then splice
    // from the end so earlier indices remain valid.
    const maintainerTagIndices: number[] = [];
    tags.forEach((tag, index) => {
      if (tag[0] === 'maintainers') {
        maintainerTagIndices.push(index);
      }
    });
    for (let i = maintainerTagIndices.length - 1; i >= 0; i--) {
      tags.splice(maintainerTagIndices[i], 1);
    }
    // Add all maintainers (including new one) as a single 'maintainers' tag.
    const allMaintainers = [...currentMaintainers, maintainerHex];
    if (allMaintainers.length > 0) {
      tags.push(['maintainers', ...allMaintainers]);
    }
    // Create updated event (signed by the caller, not the repo owner —
    // NOTE(review): confirm downstream consumers accept maintainer-signed
    // announcement updates).
    const updatedEvent = {
      kind: KIND.REPO_ANNOUNCEMENT,
      pubkey: context.userPubkeyHex,
      created_at: Math.floor(Date.now() / 1000),
      content: announcement.content || '',
      tags
    };
    // Sign and publish
    const signedEvent = await signEventWithNIP07(updatedEvent);
    // Get user's relays, querying across both search and default relay sets.
    const allSearchRelays = [...new Set([...DEFAULT_NOSTR_SEARCH_RELAYS, ...DEFAULT_NOSTR_RELAYS])];
    const fullRelayClient = new NostrClient(allSearchRelays);
    let userRelays: string[] = [];
    try {
      // Prefer the user's outbox relays, then inbox, then defaults.
      const { inbox, outbox } = await getUserRelays(context.userPubkeyHex, fullRelayClient);
      if (outbox.length > 0) {
        userRelays = combineRelays(outbox, DEFAULT_NOSTR_RELAYS);
      } else if (inbox.length > 0) {
        userRelays = combineRelays(inbox, DEFAULT_NOSTR_RELAYS);
      } else {
        userRelays = DEFAULT_NOSTR_RELAYS;
      }
    } catch (err) {
      logger.warn({ error: err }, 'Failed to fetch user relays, using defaults');
      userRelays = DEFAULT_NOSTR_RELAYS;
    }
    // Visibility gate: when no visibility relays are returned, publish nowhere.
    const visibilityRelays = getRelaysForEventPublishing(signedEvent);
    const relaysToPublish = visibilityRelays.length > 0 ? combineRelays([...visibilityRelays, ...userRelays]) : [];
    if (relaysToPublish.length > 0) {
      await nostrClient.publishEvent(signedEvent, relaysToPublish);
    }
    // Save to repository (best effort — failure is logged, not fatal).
    const repoPath = `${repoRoot}/${context.npub}/${context.repo}.git`;
    const announcementManager = new AnnouncementManager(repoRoot);
    try {
      await announcementManager.ensureAnnouncementInRepo(repoPath, signedEvent);
    } catch (err) {
      logger.error({ error: err }, 'Failed to save maintainer update to repository');
    }
    return json({
      success: true,
      maintainer: maintainer,
      message: 'Maintainer added successfully'
    });
  },
  { operation: 'addMaintainer', requireRepoExists: false }
);
/**
* DELETE: Remove maintainer
* Path: /api/repos/{npub}/{repo}/maintainers/{maintainerNpub}
*/
export const DELETE: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
if (!context.userPubkeyHex) {
return error(401, 'Authentication required');
}
// Get maintainer npub from path
const url = new URL(event.request.url);
const pathParts = url.pathname.split('/');
const maintainerNpub = pathParts[pathParts.length - 1]; // Last part of path
if (!maintainerNpub || maintainerNpub === 'maintainers') {
throw handleValidationError('Missing maintainer npub in path', {
operation: 'removeMaintainer',
npub: context.npub,
repo: context.repo
});
}
// Verify user is owner or maintainer
const isMaintainer = await maintainerService.isMaintainer(context.userPubkeyHex, context.repoOwnerPubkey, context.repo);
if (!isMaintainer) {
return error(403, 'Only maintainers can remove maintainers');
}
// Decode maintainer npub to hex
let maintainerHex: string;
try {
const decoded = nip19.decode(maintainerNpub) as { type: string; data: unknown };
if (decoded.type !== 'npub' || typeof decoded.data !== 'string') {
throw handleValidationError('Invalid maintainer npub format', {
operation: 'removeMaintainer',
npub: context.npub,
repo: context.repo
});
}
maintainerHex = decoded.data;
} catch (err) {
if (err instanceof Error && err.message.includes('Invalid maintainer')) {
throw err;
}
throw handleValidationError('Invalid maintainer npub format', {
operation: 'removeMaintainer',
npub: context.npub,
repo: context.repo
});
}
// Get current maintainers
const { maintainers: currentMaintainers, owner } = await maintainerService.getMaintainers(
context.repoOwnerPubkey,
context.repo
);
// Cannot remove owner
if (maintainerHex === owner) {
return error(403, 'Cannot remove repository owner from maintainers');
}
// Check if maintainer exists
if (!currentMaintainers.includes(maintainerHex)) {
return json({
success: true,
message: 'Maintainer not found (may have already been removed)',
maintainer: maintainerNpub
});
}
// Get current announcement
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, context.repo);
if (!announcement) {
throw handleValidationError('Repository announcement not found', {
operation: 'removeMaintainer',
npub: context.npub,
repo: context.repo
});
}
// Build updated tags
const tags: string[][] = [...announcement.tags];
// Remove existing maintainers tags
const maintainerTagIndices: number[] = [];
tags.forEach((tag, index) => {
if (tag[0] === 'maintainers') {
maintainerTagIndices.push(index);
}
});
for (let i = maintainerTagIndices.length - 1; i >= 0; i--) {
tags.splice(maintainerTagIndices[i], 1);
}
// Add all maintainers except the one being removed
const remainingMaintainers = currentMaintainers.filter(m => m !== maintainerHex);
if (remainingMaintainers.length > 0) {
tags.push(['maintainers', ...remainingMaintainers]);
}
// Create updated event
const updatedEvent = {
kind: KIND.REPO_ANNOUNCEMENT,
pubkey: context.userPubkeyHex,
created_at: Math.floor(Date.now() / 1000),
content: announcement.content || '',
tags
};
// Sign and publish
const signedEvent = await signEventWithNIP07(updatedEvent);
// Get user's relays
const allSearchRelays = [...new Set([...DEFAULT_NOSTR_SEARCH_RELAYS, ...DEFAULT_NOSTR_RELAYS])];
const fullRelayClient = new NostrClient(allSearchRelays);
let userRelays: string[] = [];
try {
const { inbox, outbox } = await getUserRelays(context.userPubkeyHex, fullRelayClient);
if (outbox.length > 0) {
userRelays = combineRelays(outbox, DEFAULT_NOSTR_RELAYS);
} else if (inbox.length > 0) {
userRelays = combineRelays(inbox, DEFAULT_NOSTR_RELAYS);
} else {
userRelays = DEFAULT_NOSTR_RELAYS;
}
} catch (err) {
logger.warn({ error: err }, 'Failed to fetch user relays, using defaults');
userRelays = DEFAULT_NOSTR_RELAYS;
}
const visibilityRelays = getRelaysForEventPublishing(signedEvent);
const relaysToPublish = visibilityRelays.length > 0 ? combineRelays([...visibilityRelays, ...userRelays]) : [];
if (relaysToPublish.length > 0) {
await nostrClient.publishEvent(signedEvent, relaysToPublish);
}
// Save to repository
const repoPath = `${repoRoot}/${context.npub}/${context.repo}.git`;
const announcementManager = new AnnouncementManager(repoRoot);
try {
await announcementManager.ensureAnnouncementInRepo(repoPath, signedEvent);
} catch (err) {
logger.error({ error: err }, 'Failed to save maintainer update to repository');
}
return json({
success: true,
maintainer: maintainerNpub,
message: 'Maintainer removed successfully'
});
},
{ operation: 'getMaintainers', requireRepoExists: false, requireRepoAccess: false } // Maintainer list is public info, doesn't need repo to exist
{ operation: 'removeMaintainer', requireRepoExists: false }
);

23
src/routes/api/repos/[npub]/[repo]/patches/[patchId]/apply/+server.ts → src/routes/api/repos/[npub]/[repo]/patches/[id]/application/+server.ts

@ -1,9 +1,13 @@ @@ -1,9 +1,13 @@
/**
* API endpoint for applying patches
* Only maintainers and owners can apply patches
* RESTful Patch Application Endpoint
*
* POST /api/repos/{npub}/{repo}/patches/{id}/application
*
* Applies a patch to the repository. Only maintainers and owners can apply patches.
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, nostrClient } from '$lib/services/service-registry.js';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
@ -19,7 +23,6 @@ import { writeFile, unlink } from 'fs/promises'; @@ -19,7 +23,6 @@ import { writeFile, unlink } from 'fs/promises';
import { tmpdir } from 'os';
import { join as pathJoin } from 'path';
import { spawn } from 'child_process';
import { promisify } from 'util';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
@ -27,12 +30,12 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT @@ -27,12 +30,12 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
export const POST: RequestHandler = withRepoValidation(
async ({ repoContext, requestContext, event }) => {
const { patchId } = event.params;
const id = (event.params as any).id;
const body = await event.request.json();
const { branch = 'main', commitMessage } = body;
if (!patchId) {
throw handleValidationError('Missing patchId', { operation: 'applyPatch', npub: repoContext.npub, repo: repoContext.repo });
if (!id) {
throw handleValidationError('Missing patch ID', { operation: 'applyPatch', npub: repoContext.npub, repo: repoContext.repo });
}
// Check if user is maintainer or owner
@ -54,7 +57,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -54,7 +57,7 @@ export const POST: RequestHandler = withRepoValidation(
const patchEvents = await nostrClient.fetchEvents([
{
kinds: [KIND.PATCH],
ids: [patchId],
ids: [id],
limit: 1
}
]);
@ -71,7 +74,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -71,7 +74,7 @@ export const POST: RequestHandler = withRepoValidation(
}
// Create temporary patch file
const tmpPatchFile = pathJoin(tmpdir(), `patch-${patchId}-${Date.now()}.patch`);
const tmpPatchFile = pathJoin(tmpdir(), `patch-${id}-${Date.now()}.patch`);
await writeFile(tmpPatchFile, patchContent, 'utf-8');
try {
@ -132,7 +135,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -132,7 +135,7 @@ export const POST: RequestHandler = withRepoValidation(
await git.add('.');
// Commit the changes
const finalCommitMessage = commitMessage || `Apply patch ${patchId.substring(0, 8)}`;
const finalCommitMessage = commitMessage || `Apply patch ${id.substring(0, 8)}`;
await git.commit(finalCommitMessage);
// Get the commit hash
@ -152,7 +155,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -152,7 +155,7 @@ export const POST: RequestHandler = withRepoValidation(
}
}
} catch (err) {
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, patchId }, 'Error applying patch');
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, id }, 'Error applying patch');
throw err;
}
},

111
src/routes/api/repos/[npub]/[repo]/prs/merge/+server.ts

@ -1,111 +0,0 @@ @@ -1,111 +0,0 @@
/**
* API endpoint for merging Pull Requests
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext } from '$lib/utils/api-context.js';
import { handleValidationError, handleApiError } from '$lib/utils/error-handler.js';
import { prsService, repoManager, fileManager, maintainerService } from '$lib/services/service-registry.js';
import { simpleGit } from 'simple-git';
import { join } from 'path';
import { existsSync } from 'fs';
import logger from '$lib/services/logger.js';
import { isValidBranchName } from '$lib/utils/security.js';
import { validatePubkey } from '$lib/utils/input-validation.js';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
/**
 * POST: merge a pull request commit into a target branch of the local bare repo,
 * then publish a kind-1621 style "merged" status update for the PR.
 *
 * Body: { prId, prAuthor, prCommitId, targetBranch?, mergeMessage? }
 * Authorization: repo owner or maintainer only.
 */
export const POST: RequestHandler = withRepoValidation(
async ({ repoContext, requestContext, event }) => {
const body = await event.request.json();
const { prId, prAuthor, prCommitId, targetBranch = 'main', mergeMessage } = body;
// Validate required fields
// prId is a Nostr event id: 64 hex characters.
if (!prId || typeof prId !== 'string' || prId.length !== 64) {
throw handleValidationError('Invalid prId: must be a 64-character hex string', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
}
if (!prAuthor || typeof prAuthor !== 'string') {
throw handleValidationError('Invalid prAuthor: must be a string', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
}
// Validate pubkey format
const pubkeyValidation = validatePubkey(prAuthor);
if (!pubkeyValidation.valid) {
throw handleValidationError(`Invalid prAuthor: ${pubkeyValidation.error}`, { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
}
// prCommitId is a full git SHA-1: 40 hex characters.
if (!prCommitId || typeof prCommitId !== 'string' || prCommitId.length !== 40) {
throw handleValidationError('Invalid prCommitId: must be a 40-character commit hash', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
}
// Validate branch name
if (!isValidBranchName(targetBranch)) {
throw handleValidationError(`Invalid branch name: ${targetBranch}`, { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
}
// Validate merge message if provided
if (mergeMessage && (typeof mergeMessage !== 'string' || mergeMessage.length > 10000)) {
throw handleValidationError('Invalid mergeMessage: must be a string with max 10000 characters', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
}
// Check if user is maintainer
const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo);
if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) {
throw handleApiError(new Error('Only repository owners and maintainers can merge PRs'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
}
// Check if repo exists locally
const repoPath = join(repoRoot, repoContext.npub, `${repoContext.repo}.git`);
if (!existsSync(repoPath)) {
throw handleApiError(new Error('Repository not cloned locally. Please clone the repository first.'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Repository not found');
}
// Get user info for commit
// NOTE(review): authorName/authorEmail are computed but never used below —
// presumably intended to be passed to git as commit author config; verify.
const authorName = requestContext.userName || 'GitRepublic User';
const authorEmail = requestContext.userEmail || `${requestContext.userPubkeyHex?.slice(0, 20)}@gitrepublic.web`;
try {
const git = simpleGit(repoPath);
// Fetch latest changes
await git.fetch(['origin']).catch(() => {}); // Ignore errors if no remote
// Checkout target branch
await git.checkout(targetBranch);
// Merge the PR commit
// --no-ff forces a merge commit so the PR remains visible in history.
const mergeMessageText = mergeMessage || `Merge pull request ${prId.slice(0, 7)}`;
await git.merge([prCommitId, '--no-ff', '-m', mergeMessageText]);
// Get the merge commit ID
const mergeCommitId = (await git.revparse(['HEAD'])).trim();
// Update PR status to merged
// Publishes the status event to Nostr so other clients see the merge.
const statusEvent = await prsService.updatePRStatus(
prId,
prAuthor,
repoContext.repoOwnerPubkey,
repoContext.repo,
'merged',
mergeCommitId
);
return json({
success: true,
mergeCommitId,
statusEvent
});
} catch (err) {
// Any git failure (conflict, missing commit, bad branch) lands here.
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, prId, prCommitId }, 'Error merging PR');
throw handleApiError(err instanceof Error ? err : new Error('Failed to merge PR'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Failed to merge pull request');
}
},
{ operation: 'mergePR', requireRepoAccess: true }
);

43
src/routes/api/repos/[npub]/[repo]/prs/update/+server.ts

@ -1,43 +0,0 @@ @@ -1,43 +0,0 @@
/**
* API endpoint for updating Pull Requests (kind 1619)
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext } from '$lib/utils/api-context.js';
import { handleValidationError, handleApiError } from '$lib/utils/error-handler.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { prsService } from '$lib/services/service-registry.js';
import { getGitUrl } from '$lib/config.js';
/**
 * POST: publish an update to an existing pull request (NIP-34 kind 1619).
 *
 * Body: { prId, prAuthor, newCommitId, mergeBase? }
 * Authorization: only the PR's original author may update it.
 */
export const POST: RequestHandler = withRepoValidation(
  async ({ repoContext, requestContext, event }) => {
    const payload = await event.request.json();
    const { prId, prAuthor, newCommitId, mergeBase } = payload;
    const errorContext = { operation: 'updatePR', npub: repoContext.npub, repo: repoContext.repo };
    // All three identifiers are mandatory; mergeBase is optional.
    if (!prId || !prAuthor || !newCommitId) {
      throw handleValidationError('Missing required fields: prId, prAuthor, newCommitId', errorContext);
    }
    // Only PR author can update their PR
    if (requestContext.userPubkeyHex !== prAuthor) {
      throw handleApiError(new Error('Only the PR author can update the PR'), errorContext, 'Unauthorized');
    }
    const repoCloneUrl = getGitUrl(repoContext.npub, repoContext.repo);
    const publishedEvent = await prsService.updatePullRequest(
      prId,
      prAuthor,
      repoContext.repoOwnerPubkey,
      repoContext.repo,
      newCommitId,
      repoCloneUrl,
      mergeBase
    );
    return json({ success: true, event: publishedEvent });
  },
  { operation: 'updatePR', requireRepoAccess: false }
);

56
src/routes/api/repos/[npub]/[repo]/prs/+server.ts → src/routes/api/repos/[npub]/[repo]/pull-requests/+server.ts

@ -1,5 +1,8 @@ @@ -1,5 +1,8 @@
/**
* API endpoint for Pull Requests (NIP-34 kind 1618)
* RESTful Pull Requests Collection Endpoint
*
* GET /api/repos/{npub}/{repo}/pull-requests # List pull requests
* POST /api/repos/{npub}/{repo}/pull-requests # Create pull request
*/
import { json } from '@sveltejs/kit';
@ -21,7 +24,7 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -21,7 +24,7 @@ export const GET: RequestHandler = createRepoGetHandler(
const prs = await prsService.getPullRequests(context.repoOwnerPubkey, context.repo);
return json(prs);
},
{ operation: 'getPRs', requireRepoExists: false, requireRepoAccess: false } // PRs are stored in Nostr, don't require local repo
{ operation: 'getPullRequests', requireRepoExists: false, requireRepoAccess: false } // PRs are stored in Nostr, don't require local repo
);
export const POST: RequestHandler = withRepoValidation(
@ -30,12 +33,12 @@ export const POST: RequestHandler = withRepoValidation( @@ -30,12 +33,12 @@ export const POST: RequestHandler = withRepoValidation(
const { event: prEvent } = body;
if (!prEvent) {
throw handleValidationError('Missing event in request body', { operation: 'createPR', npub: repoContext.npub, repo: repoContext.repo });
throw handleValidationError('Missing event in request body', { operation: 'createPullRequest', npub: repoContext.npub, repo: repoContext.repo });
}
// Verify the event is properly signed
if (!prEvent.sig || !prEvent.id) {
throw handleValidationError('Invalid event: missing signature or ID', { operation: 'createPR', npub: repoContext.npub, repo: repoContext.repo });
throw handleValidationError('Invalid event: missing signature or ID', { operation: 'createPullRequest', npub: repoContext.npub, repo: repoContext.repo });
}
// Get repository announcement to determine visibility and relay publishing
@ -51,7 +54,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -51,7 +54,7 @@ export const POST: RequestHandler = withRepoValidation(
: { success: [], failed: [] };
if (result.failed.length > 0 && result.success.length === 0) {
throw handleApiError(new Error('Failed to publish pull request to all relays'), { operation: 'createPR', npub: repoContext.npub, repo: repoContext.repo }, 'Failed to publish pull request to all relays');
throw handleApiError(new Error('Failed to publish pull request to all relays'), { operation: 'createPullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Failed to publish pull request to all relays');
}
// Forward to messaging platforms if user has unlimited access and preferences configured
@ -65,46 +68,5 @@ export const POST: RequestHandler = withRepoValidation( @@ -65,46 +68,5 @@ export const POST: RequestHandler = withRepoValidation(
return json({ success: true, event: prEvent, published: result });
},
{ operation: 'createPR', requireRepoAccess: false } // PRs can be created by anyone with access
);
export const PATCH: RequestHandler = withRepoValidation(
async ({ repoContext, requestContext, event }) => {
const body = await event.request.json();
const { prId, prAuthor, status, mergeCommitId } = body;
if (!prId || !prAuthor || !status) {
throw handleValidationError('Missing required fields: prId, prAuthor, status', { operation: 'updatePRStatus', npub: repoContext.npub, repo: repoContext.repo });
}
// Check if user is maintainer
const { MaintainerService } = await import('$lib/services/nostr/maintainer-service.js');
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo);
if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) {
throw handleApiError(new Error('Only repository owners and maintainers can update PR status'), { operation: 'updatePRStatus', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
}
// Get repository announcement to determine visibility and relay publishing
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoContext.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, repoContext.repo);
// Determine which relays to publish to based on visibility
const relaysToPublish = announcement ? getRelaysForEventPublishing(announcement) : DEFAULT_NOSTR_RELAYS;
// Update PR status with visibility-based relays
const statusEvent = await prsService.updatePRStatus(
prId,
prAuthor,
repoContext.repoOwnerPubkey,
repoContext.repo,
status,
mergeCommitId,
relaysToPublish
);
return json({ success: true, event: statusEvent });
},
{ operation: 'updatePRStatus', requireRepoAccess: false }
{ operation: 'createPullRequest', requireRepoAccess: false } // PRs can be created by anyone with access
);

136
src/routes/api/repos/[npub]/[repo]/pull-requests/[id]/+server.ts

@ -0,0 +1,136 @@ @@ -0,0 +1,136 @@
/**
* RESTful Pull Request Individual Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/pull-requests/{id} # Get pull request
* PATCH /api/repos/{npub}/{repo}/pull-requests/{id} # Update pull request status
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { prsService, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler, withRepoValidation } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError, handleApiError } from '$lib/utils/error-handler.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { getRelaysForEventPublishing } from '$lib/utils/repo-visibility.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { KIND } from '$lib/types/nostr.js';
/**
 * GET: fetch a single pull request event by its Nostr event id.
 *
 * Returns the raw PR event (kind PULL_REQUEST) as JSON.
 * 404-style error when no event matches the id.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const id = (event.params as any).id;
    if (!id) {
      throw handleValidationError('Missing pull request ID', { operation: 'getPullRequest', npub: context.npub, repo: context.repo });
    }
    // Fetch the PR event. Only the relay fetch itself is wrapped; previously the
    // "not found" error was thrown inside this try and immediately re-caught by
    // its own catch, which re-wrapped it as the generic 'Failed to get pull
    // request' and lost the 404 semantics.
    let prEvents;
    try {
      prEvents = await nostrClient.fetchEvents([
        {
          kinds: [KIND.PULL_REQUEST],
          ids: [id],
          limit: 1
        }
      ]);
    } catch (err) {
      throw handleApiError(err, { operation: 'getPullRequest', npub: context.npub, repo: context.repo }, 'Failed to get pull request');
    }
    if (prEvents.length === 0) {
      throw handleApiError(new Error('Pull request not found'), { operation: 'getPullRequest', npub: context.npub, repo: context.repo }, 'Pull request not found');
    }
    return json(prEvents[0]);
  },
  { operation: 'getPullRequest', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * PATCH: update a pull request — either its status (maintainer/owner only)
 * or its head commit (PR author only).
 *
 * Body: { status?, mergeCommitId?, newCommitId?, mergeBase? }
 * Exactly one of `status` or `newCommitId` drives the update; `status` wins
 * if both are present.
 */
export const PATCH: RequestHandler = withRepoValidation(
async ({ repoContext, requestContext, event }) => {
const id = (event.params as any).id;
const body = await event.request.json();
const { status, mergeCommitId, newCommitId, mergeBase } = body;
if (!id) {
throw handleValidationError('Missing pull request ID', { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo });
}
// Fetch the PR to get the author
const prEvents = await nostrClient.fetchEvents([
{
kinds: [KIND.PULL_REQUEST],
ids: [id],
limit: 1
}
]);
if (prEvents.length === 0) {
throw handleApiError(new Error('Pull request not found'), { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Pull request not found');
}
const prEvent = prEvents[0];
const prAuthor = prEvent.pubkey;
// If updating status, check if user is maintainer
// NOTE(review): a present-but-falsy status (e.g. '' or null) enters this
// branch, runs the maintainer check first, then fails the `!status`
// validation below — confirm that ordering is intended.
if (status !== undefined) {
// Lazily imported so the service is only constructed on the status path.
const { MaintainerService } = await import('$lib/services/nostr/maintainer-service.js');
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo);
if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) {
throw handleApiError(new Error('Only repository owners and maintainers can update PR status'), { operation: 'updatePullRequestStatus', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
}
if (!status) {
throw handleValidationError('Missing required field: status', { operation: 'updatePullRequestStatus', npub: repoContext.npub, repo: repoContext.repo });
}
// Get repository announcement to determine visibility and relay publishing
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoContext.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, repoContext.repo);
// Determine which relays to publish to based on visibility
// Falls back to the default relay set when no announcement is found.
const relaysToPublish = announcement ? getRelaysForEventPublishing(announcement) : DEFAULT_NOSTR_RELAYS;
// Update PR status with visibility-based relays
const statusEvent = await prsService.updatePRStatus(
id,
prAuthor,
repoContext.repoOwnerPubkey,
repoContext.repo,
status,
mergeCommitId,
relaysToPublish
);
return json({ success: true, event: statusEvent });
}
// If updating commit, only PR author can update
if (newCommitId !== undefined) {
if (requestContext.userPubkeyHex !== prAuthor) {
throw handleApiError(new Error('Only the PR author can update the PR commit'), { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
}
const { getGitUrl } = await import('$lib/config.js');
const cloneUrl = getGitUrl(repoContext.npub, repoContext.repo);
const updateEvent = await prsService.updatePullRequest(
id,
prAuthor,
repoContext.repoOwnerPubkey,
repoContext.repo,
newCommitId,
cloneUrl,
mergeBase
);
return json({ success: true, event: updateEvent });
}
// Neither status nor newCommitId was supplied — nothing to update.
throw handleValidationError('Missing required field: status or newCommitId', { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo });
},
{ operation: 'updatePullRequest', requireRepoAccess: false }
);

38
src/routes/api/repos/[npub]/[repo]/prs/[prId]/merge/+server.ts → src/routes/api/repos/[npub]/[repo]/pull-requests/[id]/merge/+server.ts

@ -1,9 +1,13 @@ @@ -1,9 +1,13 @@
/**
* API endpoint for merging pull requests
* Only maintainers and owners can merge PRs
* RESTful Pull Request Merge Endpoint
*
* POST /api/repos/{npub}/{repo}/pull-requests/{id}/merge
*
* Merges a pull request. Only maintainers and owners can merge PRs.
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, nostrClient, prsService } from '$lib/services/service-registry.js';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
@ -23,12 +27,12 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT @@ -23,12 +27,12 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
export const POST: RequestHandler = withRepoValidation(
async ({ repoContext, requestContext, event }) => {
const { prId } = event.params;
const id = (event.params as any).id;
const body = await event.request.json();
const { targetBranch = 'main', mergeCommitMessage, mergeStrategy = 'merge' } = body;
if (!prId) {
throw handleValidationError('Missing prId', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
if (!id) {
throw handleValidationError('Missing pull request ID', { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo });
}
// Check if user is maintainer or owner
@ -36,13 +40,13 @@ export const POST: RequestHandler = withRepoValidation( @@ -36,13 +40,13 @@ export const POST: RequestHandler = withRepoValidation(
const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo);
if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) {
throw handleApiError(new Error('Only repository owners and maintainers can merge pull requests'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
throw handleApiError(new Error('Only repository owners and maintainers can merge pull requests'), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
}
const repoPath = join(repoRoot, repoContext.npub, `${repoContext.repo}.git`);
if (!existsSync(repoPath)) {
throw handleApiError(new Error('Repository not found locally'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Repository not found');
throw handleApiError(new Error('Repository not found locally'), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Repository not found');
}
try {
@ -50,13 +54,13 @@ export const POST: RequestHandler = withRepoValidation( @@ -50,13 +54,13 @@ export const POST: RequestHandler = withRepoValidation(
const prEvents = await nostrClient.fetchEvents([
{
kinds: [KIND.PULL_REQUEST],
ids: [prId],
ids: [id],
limit: 1
}
]);
if (prEvents.length === 0) {
throw handleApiError(new Error('Pull request not found'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Pull request not found');
throw handleApiError(new Error('Pull request not found'), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Pull request not found');
}
const prEvent = prEvents[0];
@ -64,14 +68,14 @@ export const POST: RequestHandler = withRepoValidation( @@ -64,14 +68,14 @@ export const POST: RequestHandler = withRepoValidation(
// Get commit ID from PR
const commitTag = prEvent.tags.find(t => t[0] === 'c');
if (!commitTag || !commitTag[1]) {
throw handleApiError(new Error('Pull request does not have a commit ID'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Invalid pull request');
throw handleApiError(new Error('Pull request does not have a commit ID'), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Invalid pull request');
}
const commitId = commitTag[1];
// Get branch name if available
const branchTag = prEvent.tags.find(t => t[0] === 'branch-name');
const sourceBranch = branchTag?.[1] || `pr-${prId.substring(0, 8)}`;
const sourceBranch = branchTag?.[1] || `pr-${id.substring(0, 8)}`;
const git = simpleGit(repoPath);
@ -89,7 +93,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -89,7 +93,7 @@ export const POST: RequestHandler = withRepoValidation(
try {
await git.show([commitId]);
} catch (showErr) {
throw handleApiError(new Error(`Commit ${commitId} not found in repository`), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Commit not found');
throw handleApiError(new Error(`Commit ${commitId} not found in repository`), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Commit not found');
}
let mergeCommitHash: string;
@ -99,7 +103,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -99,7 +103,7 @@ export const POST: RequestHandler = withRepoValidation(
await git.raw(['merge', '--squash', commitId]);
await git.add('.');
const finalMessage = mergeCommitMessage || `Merge PR ${prId.substring(0, 8)}\n\n${prEvent.content || ''}`;
const finalMessage = mergeCommitMessage || `Merge PR ${id.substring(0, 8)}\n\n${prEvent.content || ''}`;
await git.commit(finalMessage);
mergeCommitHash = (await git.revparse(['HEAD'])).trim();
@ -126,7 +130,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -126,7 +130,7 @@ export const POST: RequestHandler = withRepoValidation(
}
} else {
// Regular merge
const finalMessage = mergeCommitMessage || `Merge PR ${prId.substring(0, 8)}`;
const finalMessage = mergeCommitMessage || `Merge PR ${id.substring(0, 8)}`;
await git.merge([commitId, '-m', finalMessage]);
mergeCommitHash = (await git.revparse(['HEAD'])).trim();
}
@ -134,7 +138,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -134,7 +138,7 @@ export const POST: RequestHandler = withRepoValidation(
// Update PR status to merged
const prAuthor = prEvent.pubkey;
await prsService.updatePRStatus(
prId,
id,
prAuthor,
repoContext.repoOwnerPubkey,
repoContext.repo,
@ -148,9 +152,9 @@ export const POST: RequestHandler = withRepoValidation( @@ -148,9 +152,9 @@ export const POST: RequestHandler = withRepoValidation(
message: 'Pull request merged successfully'
});
} catch (err) {
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, prId }, 'Error merging pull request');
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, id }, 'Error merging pull request');
throw err;
}
},
{ operation: 'mergePR', requireRepoExists: true, requireRepoAccess: true }
{ operation: 'mergePullRequest', requireRepoExists: true, requireRepoAccess: true }
);

153
src/routes/api/repos/[npub]/[repo]/raw/+server.ts

@ -1,153 +0,0 @@ @@ -1,153 +0,0 @@
/**
* API endpoint for raw file access
*/
import type { RequestHandler } from './$types';
import { fileManager, repoManager } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError } from '$lib/utils/error-handler.js';
import { spawn } from 'child_process';
import { join } from 'path';
import { promisify } from 'util';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
/** Returns true when the given file extension (any case) names a raster image format that must be served as raw binary. */
function isBinaryImage(ext: string): boolean {
  const rasterFormats = new Set(['png', 'jpg', 'jpeg', 'gif', 'webp', 'bmp', 'ico', 'apng', 'avif']);
  return rasterFormats.has(ext.toLowerCase());
}
/**
 * GET: serve a file's raw bytes at a given ref.
 *
 * Query/context: `path` (required), `ref` (defaults to HEAD).
 * Text files go through fileManager; binary images are read out of the bare
 * repo with `git ls-tree` + `git cat-file` to avoid text-decoding corruption.
 */
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
const filePath = context.path || event.url.searchParams.get('path');
const ref = context.ref || event.url.searchParams.get('ref') || 'HEAD';
if (!filePath) {
throw handleValidationError('Missing path parameter', { operation: 'getRawFile', npub: context.npub, repo: context.repo });
}
// Determine content type based on file extension
const ext = filePath.split('.').pop()?.toLowerCase();
const contentTypeMap: Record<string, string> = {
'js': 'application/javascript',
'ts': 'application/typescript',
'json': 'application/json',
'css': 'text/css',
'html': 'text/html',
'xml': 'application/xml',
'svg': 'image/svg+xml',
'png': 'image/png',
'jpg': 'image/jpeg',
'jpeg': 'image/jpeg',
'gif': 'image/gif',
'webp': 'image/webp',
'bmp': 'image/bmp',
'ico': 'image/x-icon',
'pdf': 'application/pdf',
'txt': 'text/plain',
'md': 'text/markdown',
'yml': 'text/yaml',
'yaml': 'text/yaml',
};
// Unknown extensions fall back to text/plain.
const contentType = contentTypeMap[ext || ''] || 'text/plain';
// For binary image files, use git cat-file to get raw binary data
if (ext && isBinaryImage(ext)) {
const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
// Get the blob hash for the file
return new Promise<Response>((resolve, reject) => {
// First, get the object hash using git ls-tree
// NOTE(review): filePath is passed positionally with no `--` separator —
// a path beginning with '-' could be parsed as an option; verify callers
// can't supply such a path, or add '--' before it.
const lsTreeProcess = spawn('git', ['ls-tree', ref, filePath], {
cwd: repoPath,
stdio: ['ignore', 'pipe', 'pipe']
});
let lsTreeOutput = '';
let lsTreeError = '';
lsTreeProcess.stdout.on('data', (data: Buffer) => {
lsTreeOutput += data.toString();
});
lsTreeProcess.stderr.on('data', (data: Buffer) => {
lsTreeError += data.toString();
});
lsTreeProcess.on('close', (code) => {
if (code !== 0) {
reject(new Error(`Failed to get file hash: ${lsTreeError || 'Unknown error'}`));
return;
}
// Parse the output: format is "mode type hash\tpath"
// NOTE(review): a nonexistent path makes ls-tree exit 0 with empty output,
// so a missing file surfaces as this parse error rather than a 404.
const match = lsTreeOutput.match(/^\d+\s+\w+\s+([a-f0-9]{40})\s+/);
if (!match) {
reject(new Error('Failed to parse file hash from git ls-tree output'));
return;
}
const blobHash = match[1];
// Now get the binary content using git cat-file
const catFileProcess = spawn('git', ['cat-file', 'blob', blobHash], {
cwd: repoPath,
stdio: ['ignore', 'pipe', 'pipe']
});
// Accumulate raw chunks; concatenated at the end to preserve binary data.
const chunks: Buffer[] = [];
let catFileError = '';
catFileProcess.stdout.on('data', (data: Buffer) => {
chunks.push(data);
});
catFileProcess.stderr.on('data', (data: Buffer) => {
catFileError += data.toString();
});
catFileProcess.on('close', (code) => {
if (code !== 0) {
reject(new Error(`Failed to get file content: ${catFileError || 'Unknown error'}`));
return;
}
const binaryContent = Buffer.concat(chunks);
resolve(new Response(binaryContent, {
headers: {
'Content-Type': contentType,
'Content-Disposition': `inline; filename="${filePath.split('/').pop()}"`,
'Cache-Control': 'public, max-age=3600'
}
}));
});
catFileProcess.on('error', (err) => {
reject(new Error(`Failed to execute git cat-file: ${err.message}`));
});
});
lsTreeProcess.on('error', (err) => {
reject(new Error(`Failed to execute git ls-tree: ${err.message}`));
});
});
} else {
// For text files (including SVG), use the existing method
const fileData = await fileManager.getFileContent(context.npub, context.repo, filePath, ref);
return new Response(fileData.content, {
headers: {
'Content-Type': contentType,
'Content-Disposition': `inline; filename="${filePath.split('/').pop()}"`,
'Cache-Control': 'public, max-age=3600'
}
});
}
},
{ operation: 'getRawFile' }
);

169
src/routes/api/repos/[npub]/[repo]/transfers/+server.ts

@ -0,0 +1,169 @@ @@ -0,0 +1,169 @@
/**
* RESTful Transfers Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/transfers # Get transfer history
* POST /api/repos/{npub}/{repo}/transfers # Transfer ownership
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError, handleValidationError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import { verifyEvent } from 'nostr-tools';
import { KIND } from '$lib/types/nostr.js';
import { ownershipTransferService, nostrClient, fileManager } from '$lib/services/service-registry.js';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
import { combineRelays } from '$lib/config.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import type { NostrEvent } from '$lib/types/nostr.js';
import logger from '$lib/services/logger.js';
/**
 * GET: Get transfer history
 *
 * Returns the original (announcing) owner, the current owner — which may
 * differ after an ownership transfer — and the list of transfer events,
 * newest first.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext) => {
    try {
      // Get current owner (may be different if transferred)
      const currentOwner = await ownershipTransferService.getCurrentOwner(context.repoOwnerPubkey, context.repo);
      // Fetch transfer events for history
      const repoTag = `${KIND.REPO_ANNOUNCEMENT}:${context.repoOwnerPubkey}:${context.repo}`;
      const transferEvents = await nostrClient.fetchEvents([
        {
          kinds: [KIND.OWNERSHIP_TRANSFER],
          '#a': [repoTag],
          limit: 100
        }
      ]);
      // Sort by created_at descending (newest transfer first)
      transferEvents.sort((a, b) => b.created_at - a.created_at);
      return json({
        originalOwner: context.repoOwnerPubkey,
        currentOwner,
        transferred: currentOwner !== context.repoOwnerPubkey,
        transfers: transferEvents.map(event => {
          // The 'p' tag names the transfer recipient.
          const pTag = event.tags.find(t => t[0] === 'p');
          return {
            eventId: event.id,
            from: event.pubkey,
            to: pTag?.[1] || 'unknown',
            timestamp: event.created_at,
            createdAt: new Date(event.created_at * 1000).toISOString()
          };
        })
      });
    } catch (err) {
      // FIX: was `return handleApiError(...)`. Every other handler in this API
      // throws the value produced by handleApiError; returning it here would
      // hand the error value back as the route's response instead of raising it.
      throw handleApiError(err, { operation: 'getTransferHistory', npub: context.npub, repo: context.repo }, 'Failed to get transfer history');
    }
  },
  { operation: 'getTransferHistory', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * POST: Transfer ownership
 * Body: { transferEvent }
 *
 * Validates that the caller is the current owner and that `transferEvent` is
 * a properly signed OWNERSHIP_TRANSFER event referencing this repository,
 * then publishes it to the caller's relays and appends it (best-effort) to
 * the repo's offline papertrail in nostr/repo-events.jsonl.
 */
export const POST: RequestHandler = withRepoValidation(
  async ({ repoContext, requestContext, event }) => {
    // Shared error-context for every failure path in this handler
    const errCtx = { operation: 'transferOwnership', npub: repoContext.npub, repo: repoContext.repo };
    if (!requestContext.userPubkeyHex) {
      throw handleApiError(new Error('Authentication required'), errCtx, 'Authentication required');
    }
    const body = await event.request.json();
    const { transferEvent } = body;
    if (!transferEvent) {
      // Thrown (not returned) so this path behaves like the other
      // validation failures below, which all throw the same helper.
      throw handleValidationError('Missing transferEvent in request body', errCtx);
    }
    // Verify the event is properly signed
    if (!transferEvent.sig || !transferEvent.id) {
      throw handleValidationError('Invalid event: missing signature or ID', errCtx);
    }
    if (!verifyEvent(transferEvent)) {
      throw handleValidationError('Invalid event signature', errCtx);
    }
    // Verify user is the current owner
    const canTransfer = await ownershipTransferService.canTransfer(
      requestContext.userPubkeyHex,
      repoContext.repoOwnerPubkey,
      repoContext.repo
    );
    if (!canTransfer) {
      throw handleAuthorizationError('Only the current repository owner can transfer ownership', errCtx);
    }
    // Verify the transfer event is from the current owner
    if (transferEvent.pubkey !== requestContext.userPubkeyHex) {
      throw handleAuthorizationError('Transfer event must be signed by the current owner', errCtx);
    }
    // Verify it's an ownership transfer event
    if (transferEvent.kind !== KIND.OWNERSHIP_TRANSFER) {
      throw handleValidationError(`Event must be kind ${KIND.OWNERSHIP_TRANSFER} (ownership transfer)`, errCtx);
    }
    // Verify the 'a' tag references this repo
    const aTag = transferEvent.tags.find(t => t[0] === 'a');
    const expectedRepoTag = `${KIND.REPO_ANNOUNCEMENT}:${repoContext.repoOwnerPubkey}:${repoContext.repo}`;
    if (!aTag || aTag[1] !== expectedRepoTag) {
      throw handleValidationError(`Transfer event must reference this repository: ${expectedRepoTag}`, errCtx);
    }
    // Get user's relays and publish
    const { outbox } = await getUserRelays(requestContext.userPubkeyHex, nostrClient);
    const combinedRelays = combineRelays(outbox);
    const result = await nostrClient.publishEvent(transferEvent as NostrEvent, combinedRelays);
    if (result.success.length === 0) {
      throw handleApiError(new Error('Failed to publish transfer event to any relays'), errCtx, 'Failed to publish transfer event to any relays');
    }
    // Save transfer event to repo (offline papertrail) — best-effort only;
    // publishing succeeded, so failures here are logged and not surfaced.
    try {
      // Save to repo if it exists locally
      if (fileManager.repoExists(repoContext.npub, repoContext.repo)) {
        const defaultBranch = await fileManager.getDefaultBranch(repoContext.npub, repoContext.repo).catch(() => 'main');
        const repoPath = fileManager.getRepoPath(repoContext.npub, repoContext.repo);
        const workDir = await fileManager.getWorktree(repoPath, defaultBranch, repoContext.npub, repoContext.repo);
        // Save to repo-events.jsonl
        await fileManager.saveRepoEventToWorktree(workDir, transferEvent as NostrEvent, 'transfer').catch(err => {
          logger.debug({ error: err }, 'Failed to save transfer event to repo-events.jsonl');
        });
        // Clean up worktree
        await fileManager.removeWorktree(repoPath, workDir).catch(err => {
          logger.debug({ error: err }, 'Failed to remove worktree after saving transfer event');
        });
      } else {
        logger.debug({ npub: repoContext.npub, repo: repoContext.repo }, 'Repo does not exist locally, skipping transfer event save to repo');
      }
    } catch (err) {
      logger.warn({ error: err, npub: repoContext.npub, repo: repoContext.repo }, 'Failed to save transfer event to repo');
    }
    // Clear cache so new owner is recognized immediately
    ownershipTransferService.clearCache(repoContext.repoOwnerPubkey, repoContext.repo);
    // The recipient is carried in the event's 'p' tag (the GET handler above
    // maps it the same way); the 'a' tag's extra positions are relay hints,
    // so the previous `aTag[2]` reported a relay URL as the new owner.
    const pTag = transferEvent.tags.find(t => t[0] === 'p');
    return json({
      success: true,
      event: transferEvent,
      published: result,
      message: 'Ownership transfer initiated successfully',
      transferEvent: {
        id: transferEvent.id,
        from: transferEvent.pubkey,
        to: pTag?.[1] || 'unknown'
      }
    });
  }
);

327
src/routes/api/repos/[npub]/[repo]/tree/+server.ts

@@ -1,327 +0,0 @@
/**
* API endpoint for listing files and directories in a repository
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { fileManager, repoManager, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError, handleNotFoundError } from '$lib/utils/error-handler.js';
import { KIND } from '$lib/types/nostr.js';
import { join, resolve } from 'path';
import { existsSync } from 'fs';
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js';
import logger from '$lib/services/logger.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
// Resolve GIT_REPO_ROOT to an absolute path (relative values resolve against
// the process CWD); defaults to /repos when the variable is unset or empty.
const envRoot = typeof process !== 'undefined' ? process.env?.GIT_REPO_ROOT : undefined;
const repoRootEnv = envRoot || '/repos';
const repoRoot = resolve(repoRootEnv);
/** Shape of one file entry returned by tryApiFetch. */
interface ApiFileEntry {
  name: string;
  path: string;
  type: string;
  size?: number;
}

/**
 * Keep only entries that live directly inside `path` (or in the repo root
 * when `path` is empty): the entry's path relative to the requested directory
 * must contain no further '/' separators. Directory entries may carry a
 * trailing slash. (This logic was previously triplicated inline below.)
 */
function filterApiFilesByPath(files: ApiFileEntry[], path: string): ApiFileEntry[] {
  if (path) {
    // Normalize path: ensure it ends with / for directory matching
    const normalizedPath = path.endsWith('/') ? path : `${path}/`;
    return files.filter(f => {
      // File must start with the normalized path
      if (!f.path.startsWith(normalizedPath)) {
        return false;
      }
      const relativePath = f.path.slice(normalizedPath.length);
      // Empty relative path would be the directory itself — skip it
      if (!relativePath) {
        return false;
      }
      // Remove trailing slash from relative path for directories
      const cleanRelativePath = relativePath.endsWith('/') ? relativePath.slice(0, -1) : relativePath;
      // Directly in this directory when no additional '/' remains
      return !cleanRelativePath.includes('/');
    });
  }
  // Root directory: include only single-segment paths
  return files.filter(f => {
    const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path;
    return cleanPath.split('/').length === 1;
  });
}

/**
 * Normalize API entries for the frontend: the API reports type 'dir' but the
 * frontend expects 'directory', and `name` becomes the last path segment.
 */
function normalizeApiFiles(files: ApiFileEntry[]): Array<{ name: string; path: string; type: 'file' | 'directory'; size?: number }> {
  return files.map(f => {
    const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path;
    const pathParts = cleanPath.split('/');
    const displayName = pathParts[pathParts.length - 1] || f.name;
    return {
      name: displayName,
      path: f.path,
      type: (f.type === 'dir' ? 'directory' : 'file') as 'file' | 'directory',
      size: f.size
    };
  });
}

/**
 * GET: List files and directories at `context.path` for `context.ref`.
 *
 * When the repository is not cloned locally (or is empty, or listing fails),
 * falls back to fetching the tree via the announcement's external API before
 * giving up.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext) => {
    const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
    // If repo doesn't exist, try to fetch it on-demand
    if (!existsSync(repoPath)) {
      try {
        // Fetch repository announcement from Nostr (case-insensitive) with caching
        const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
        const announcement = findRepoAnnouncement(allEvents, context.repo);
        if (announcement) {
          // Try API-based fetching first (no cloning)
          const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js');
          const { extractCloneUrls: extractCloneUrlsHelper } = await import('$lib/utils/nostr-utils.js');
          const cloneUrlsForLogging = extractCloneUrlsHelper(announcement);
          logger.debug({ npub: context.npub, repo: context.repo, cloneUrlCount: cloneUrlsForLogging.length, cloneUrls: cloneUrlsForLogging, path: context.path }, 'Attempting API fallback for tree');
          const apiData = await tryApiFetch(announcement, context.npub, context.repo);
          if (apiData && apiData.files !== undefined) {
            // Empty array is legitimate for empty repos
            if (apiData.files.length === 0) {
              logger.debug({ npub: context.npub, repo: context.repo, path: context.path }, 'API fallback returned empty files array (repo may be empty)');
              return json([]);
            }
            logger.debug({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback');
            // Return API data directly without cloning
            return json(normalizeApiFiles(filterApiFilesByPath(apiData.files, context.path || '')));
          }
          // API fetch failed - repo is not cloned and API fetch didn't work.
          // Check if announcement has clone URLs to provide better error message.
          const { extractCloneUrls } = await import('$lib/utils/nostr-utils.js');
          const cloneUrls = extractCloneUrls(announcement);
          const hasCloneUrls = cloneUrls.length > 0;
          logger.debug({ npub: context.npub, repo: context.repo, hasCloneUrls, cloneUrlCount: cloneUrls.length }, 'API fallback failed or no clone URLs available');
          throw handleNotFoundError(
            hasCloneUrls
              ? 'Repository is not cloned locally and could not be fetched via API. Privileged users can clone this repository using the "Clone to Server" button.'
              : 'Repository is not cloned locally and has no external clone URLs for API fallback. Privileged users can clone this repository using the "Clone to Server" button.',
            { operation: 'listFiles', npub: context.npub, repo: context.repo }
          );
        } else {
          throw handleNotFoundError(
            'Repository announcement not found in Nostr',
            { operation: 'listFiles', npub: context.npub, repo: context.repo }
          );
        }
      } catch (err) {
        // Check if repo was created by another concurrent request
        if (existsSync(repoPath)) {
          // Repo exists now, clear cache and continue with normal flow
          repoCache.delete(RepoCache.repoExistsKey(context.npub, context.repo));
        } else {
          // NOTE(review): this catch also intercepts the specific 404s thrown
          // just above and replaces them with a generic message — confirm
          // whether the detailed messages should be re-thrown instead.
          throw handleNotFoundError(
            'Repository not found',
            { operation: 'listFiles', npub: context.npub, repo: context.repo }
          );
        }
      }
    }
    // Double-check repo exists (should be true if we got here)
    if (!existsSync(repoPath)) {
      throw handleNotFoundError(
        'Repository not found',
        { operation: 'listFiles', npub: context.npub, repo: context.repo }
      );
    }
    // Get default branch if no ref specified
    let ref = context.ref || 'HEAD';
    // If ref is a branch name, validate it exists or use default branch
    if (ref !== 'HEAD' && !ref.startsWith('refs/')) {
      try {
        const branches = await fileManager.getBranches(context.npub, context.repo);
        if (!branches.includes(ref)) {
          // Branch doesn't exist, use default branch
          ref = await fileManager.getDefaultBranch(context.npub, context.repo);
        }
      } catch {
        // If we can't get branches, fall back to HEAD
        ref = 'HEAD';
      }
    }
    const path = context.path || '';
    try {
      const files = await fileManager.listFiles(context.npub, context.repo, ref, path);
      // If repo exists but has no files (empty repo), try API fallback
      if (files.length === 0) {
        logger.debug({ npub: context.npub, repo: context.repo, path, ref }, 'Repo exists but is empty, attempting API fallback for tree');
        try {
          // Fetch repository announcement for API fallback
          const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
          const announcement = findRepoAnnouncement(allEvents, context.repo);
          if (announcement) {
            const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js');
            const apiData = await tryApiFetch(announcement, context.npub, context.repo);
            if (apiData && apiData.files && apiData.files.length > 0) {
              logger.info({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback for empty repo');
              return json(normalizeApiFiles(filterApiFilesByPath(apiData.files, path)));
            }
          }
        } catch (apiErr) {
          logger.debug({ error: apiErr, npub: context.npub, repo: context.repo }, 'API fallback failed for empty repo, returning empty files');
        }
      }
      // Debug logging to help diagnose missing files
      logger.debug({
        npub: context.npub,
        repo: context.repo,
        path,
        ref,
        fileCount: files.length,
        files: files.map(f => ({ name: f.name, path: f.path, type: f.type }))
      }, '[Tree] Returning files from fileManager.listFiles');
      return json(files);
    } catch (err) {
      // If error occurs, try API fallback before giving up
      logger.debug({ error: err, npub: context.npub, repo: context.repo }, '[Tree] Error listing files, attempting API fallback');
      try {
        const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
        const announcement = findRepoAnnouncement(allEvents, context.repo);
        if (announcement) {
          const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js');
          const apiData = await tryApiFetch(announcement, context.npub, context.repo);
          if (apiData && apiData.files && apiData.files.length > 0) {
            logger.info({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback after error');
            return json(normalizeApiFiles(filterApiFilesByPath(apiData.files, context.path || '')));
          }
        }
      } catch (apiErr) {
        logger.debug({ error: apiErr, npub: context.npub, repo: context.repo }, 'API fallback failed after error');
      }
      // Log the actual error for debugging
      logger.error({ error: err, npub: context.npub, repo: context.repo, path: context.path }, '[Tree] Error listing files');
      // For optional paths (like "docs"), return empty array instead of 404
      // so components can gracefully handle missing directories.
      // (A second, unreachable copy of this check further down was removed.)
      const optionalPaths = ['docs'];
      if (context.path && optionalPaths.includes(context.path.toLowerCase())) {
        logger.debug({ npub: context.npub, repo: context.repo, path: context.path }, '[Tree] Optional path not found, returning empty array');
        return json([]);
      }
      // Check if it's a "not found" error for the repo itself
      if (err instanceof Error && (err.message.includes('Repository not found') || err.message.includes('not cloned'))) {
        throw handleNotFoundError(
          err.message,
          { operation: 'listFiles', npub: context.npub, repo: context.repo }
        );
      }
      // Otherwise, it's a server error
      throw handleApiError(
        err,
        { operation: 'listFiles', npub: context.npub, repo: context.repo },
        'Failed to list files'
      );
    }
  },
  { operation: 'listFiles', requireRepoExists: false, requireRepoAccess: false } // Tree listing should be publicly accessible for public repos
);

329
src/routes/api/repos/[npub]/[repo]/verification/+server.ts

@@ -0,0 +1,329 @@
/**
* RESTful Verification Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/verification # Get verification status
* POST /api/repos/{npub}/{repo}/verification # Save announcement to repo
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError } from '$lib/utils/error-handler.js';
import { fileManager } from '$lib/services/service-registry.js';
import { verifyRepositoryOwnership } from '$lib/services/nostr/repo-verification.js';
import type { NostrEvent } from '$lib/types/nostr.js';
import { nostrClient } from '$lib/services/service-registry.js';
import { existsSync } from 'fs';
import { join } from 'path';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { MaintainerService } from '$lib/services/nostr/maintainer-service.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { AnnouncementManager } from '$lib/services/git/announcement-manager.js';
import { fetchUserEmail, fetchUserName } from '$lib/utils/user-profile.js';
import simpleGit from 'simple-git';
import logger from '$lib/services/logger.js';
// Root directory for bare repositories, taken from GIT_REPO_ROOT.
// NOTE(review): unlike the tree endpoint, this value is used as-is without
// path.resolve() — confirm whether a relative GIT_REPO_ROOT is supported here.
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
  ? process.env.GIT_REPO_ROOT
  : '/repos';
// Module-level service instances shared by both handlers in this file
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
const announcementManager = new AnnouncementManager(repoRoot);
/**
 * GET: Get verification status
 *
 * Verifies repository ownership by comparing the latest Nostr announcement
 * against the announcement committed to nostr/repo-events.jsonl in the local
 * clone (or, when the repo is not cloned yet, against the announcement's
 * pubkey alone).
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext) => {
    // Fetch the repository announcement (case-insensitive) with caching
    const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
    const announcement = findRepoAnnouncement(allEvents, context.repo);
    if (!announcement) {
      return json({
        verified: false,
        error: 'Repository announcement not found',
        message: 'Could not find a NIP-34 repository announcement for this repository.'
      });
    }
    // Extract clone URLs from announcement ('clone' tags may list several URLs)
    const cloneUrls: string[] = [];
    for (const tag of announcement.tags) {
      if (tag[0] === 'clone') {
        for (let i = 1; i < tag.length; i++) {
          const url = tag[i];
          if (url && typeof url === 'string') {
            cloneUrls.push(url);
          }
        }
      }
    }
    // Verify ownership for each clone separately
    const cloneVerifications: Array<{ url: string; verified: boolean; ownerPubkey: string | null; error?: string }> = [];
    // First, verify the local GitRepublic clone (if it exists)
    let localVerified = false;
    let localOwner: string | null = null;
    let localError: string | undefined;
    const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
    const repoExists = existsSync(repoPath);
    if (repoExists) {
      // Repo is cloned - verify the announcement file matches
      try {
        localOwner = await fileManager.getCurrentOwnerFromRepo(context.npub, context.repo);
        if (localOwner) {
          try {
            const repoEventsFile = await fileManager.getFileContent(context.npub, context.repo, 'nostr/repo-events.jsonl', 'HEAD');
            const lines = repoEventsFile.content.trim().split('\n').filter(Boolean);
            // Pick the most recent 'announcement' entry from the JSONL papertrail
            let repoAnnouncement: NostrEvent | null = null;
            let latestTimestamp = 0;
            for (const line of lines) {
              try {
                const entry = JSON.parse(line);
                if (entry.type === 'announcement' && entry.event && entry.timestamp) {
                  if (entry.timestamp > latestTimestamp) {
                    latestTimestamp = entry.timestamp;
                    repoAnnouncement = entry.event;
                  }
                }
              } catch {
                continue; // Skip malformed JSONL lines
              }
            }
            if (repoAnnouncement) {
              const verification = verifyRepositoryOwnership(announcement, JSON.stringify(repoAnnouncement));
              localVerified = verification.valid;
              if (!verification.valid) {
                localError = verification.error;
              }
            } else {
              localVerified = false;
              localError = 'No announcement found in nostr/repo-events.jsonl';
            }
          } catch {
            localVerified = false;
            localError = 'Announcement file not found in repository';
          }
        } else {
          localVerified = false;
          localError = 'No announcement found in repository';
        }
      } catch (err) {
        localVerified = false;
        localError = err instanceof Error ? err.message : 'Failed to verify local clone';
      }
    } else {
      // Repo is not cloned yet - verify from Nostr announcement alone
      if (announcement.pubkey === context.repoOwnerPubkey) {
        localVerified = true;
        localOwner = context.repoOwnerPubkey;
        localError = undefined;
      } else {
        localVerified = false;
        localOwner = announcement.pubkey;
        localError = 'Announcement pubkey does not match repository owner';
      }
    }
    // Always record the local verification result. Previously this entry was
    // only added when a clone URL mentioned the npub/pubkey, which silently
    // dropped a successful local verification (making overall verification
    // fail) whenever no such URL existed in the announcement.
    const localUrl = cloneUrls.find(url => url.includes(context.npub) || url.includes(context.repoOwnerPubkey));
    cloneVerifications.push({
      url: localUrl ?? 'local',
      verified: localVerified,
      ownerPubkey: localOwner,
      error: localError
    });
    // Overall verification: at least one clone must be verified
    const overallVerified = cloneVerifications.some(cv => cv.verified);
    const verifiedClones = cloneVerifications.filter(cv => cv.verified);
    const currentOwner = localOwner || context.repoOwnerPubkey;
    if (overallVerified) {
      return json({
        verified: true,
        announcementId: announcement.id,
        ownerPubkey: currentOwner,
        verificationMethod: 'announcement-file',
        cloneVerifications: cloneVerifications.map(cv => ({
          url: cv.url,
          verified: cv.verified,
          ownerPubkey: cv.ownerPubkey,
          error: cv.error
        })),
        message: `Repository ownership verified successfully for ${verifiedClones.length} clone(s)`
      });
    } else {
      return json({
        verified: false,
        error: localError || 'Repository ownership verification failed',
        announcementId: announcement.id,
        verificationMethod: 'announcement-file',
        cloneVerifications: cloneVerifications.map(cv => ({
          url: cv.url,
          verified: cv.verified,
          ownerPubkey: cv.ownerPubkey,
          error: cv.error
        })),
        message: 'Repository ownership verification failed for all clones'
      });
    }
  },
  { operation: 'getVerification', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * POST: Save announcement to repo
 *
 * Commits the repository's Nostr announcement event into
 * nostr/repo-events.jsonl on the default branch, creating the in-repo
 * ownership papertrail that the GET verification handler checks against.
 * Requires authentication; only the repository owner or a maintainer may do
 * this, and the repository must already be cloned locally.
 */
export const POST: RequestHandler = createRepoPostHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const requestContext = extractRequestContext(event);
    const userPubkeyHex = requestContext.userPubkeyHex;
    if (!userPubkeyHex) {
      return error(401, 'Authentication required. Please provide userPubkey.');
    }
    // Check if user is a maintainer or the repository owner
    const isMaintainer = await maintainerService.isMaintainer(userPubkeyHex, context.repoOwnerPubkey, context.repo);
    const isOwner = userPubkeyHex === context.repoOwnerPubkey;
    if (!isMaintainer && !isOwner) {
      return error(403, 'Only repository owners and maintainers can save announcements.');
    }
    // Check if repository is cloned
    const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
    if (!existsSync(repoPath)) {
      return error(404, 'Repository is not cloned locally. Please clone the repository first.');
    }
    // Fetch the repository announcement
    const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
    const announcement = findRepoAnnouncement(allEvents, context.repo);
    if (!announcement) {
      return error(404, 'Repository announcement not found');
    }
    try {
      // Check if repository has any commits (empty repos need special
      // branch setup before committing)
      const git = simpleGit(repoPath);
      let hasCommits = false;
      let defaultBranch = process.env.DEFAULT_BRANCH || 'master';
      try {
        // `rev-list --count --all` returns 0 for an empty repository
        const commitCount = await git.raw(['rev-list', '--count', '--all']);
        hasCommits = parseInt(commitCount.trim(), 10) > 0;
      } catch {
        hasCommits = false;
      }
      if (hasCommits) {
        try {
          defaultBranch = await fileManager.getDefaultBranch(context.npub, context.repo);
        } catch {
          defaultBranch = process.env.DEFAULT_BRANCH || 'master';
        }
      }
      // Get worktree for the default branch
      logger.info({ npub: context.npub, repo: context.repo, branch: defaultBranch, hasCommits }, 'Getting worktree for announcement commit');
      const worktreePath = await fileManager.getWorktree(repoPath, defaultBranch, context.npub, context.repo);
      // Check if announcement already exists (we re-save regardless, to
      // refresh the stored copy)
      const hasAnnouncement = await announcementManager.hasAnnouncementInRepo(worktreePath, announcement.id);
      if (hasAnnouncement) {
        logger.debug({ npub: context.npub, repo: context.repo, eventId: announcement.id }, 'Announcement already exists, updating anyway');
      }
      // Save announcement to worktree
      const saved = await announcementManager.saveRepoEventToWorktree(worktreePath, announcement, 'announcement', false);
      if (!saved) {
        return error(500, 'Failed to save announcement to repository');
      }
      // Stage the file
      const workGit = simpleGit(worktreePath);
      await workGit.add('nostr/repo-events.jsonl');
      // Get author info from the user's Nostr profile; fall back to a
      // truncated-npub name/email when no profile data is available
      let authorName = await fetchUserName(userPubkeyHex, requestContext.userPubkey || '', DEFAULT_NOSTR_RELAYS);
      let authorEmail = await fetchUserEmail(userPubkeyHex, requestContext.userPubkey || '', DEFAULT_NOSTR_RELAYS);
      if (!authorName) {
        const { nip19 } = await import('nostr-tools');
        const npub = requestContext.userPubkey || nip19.npubEncode(userPubkeyHex);
        authorName = npub.substring(0, 20);
      }
      if (!authorEmail) {
        const { nip19 } = await import('nostr-tools');
        const npub = requestContext.userPubkey || nip19.npubEncode(userPubkeyHex);
        authorEmail = `${npub.substring(0, 20)}@gitrepublic.web`;
      }
      // Commit the announcement
      const commitMessage = `Verify repository ownership by committing repo announcement event\n\nEvent ID: ${announcement.id}`;
      // For empty repositories, ensure the branch is set up in the worktree
      if (!hasCommits) {
        try {
          const currentBranch = await workGit.revparse(['--abbrev-ref', 'HEAD']).catch(() => null);
          if (!currentBranch || currentBranch === 'HEAD') {
            // Unborn/detached HEAD: create the default branch as an orphan
            logger.debug({ npub: context.npub, repo: context.repo, branch: defaultBranch }, 'Creating orphan branch in worktree');
            await workGit.raw(['checkout', '--orphan', defaultBranch]);
          } else if (currentBranch !== defaultBranch) {
            logger.debug({ npub: context.npub, repo: context.repo, currentBranch, targetBranch: defaultBranch }, 'Switching to target branch in worktree');
            await workGit.checkout(defaultBranch);
          }
        } catch (branchErr) {
          // Best-effort: the commit below may still succeed
          logger.warn({ error: branchErr, npub: context.npub, repo: context.repo, branch: defaultBranch }, 'Branch setup in worktree failed, attempting commit anyway');
        }
      }
      logger.info({ npub: context.npub, repo: context.repo, branch: defaultBranch, hasCommits }, 'Committing announcement file');
      await workGit.commit(commitMessage, ['nostr/repo-events.jsonl'], {
        '--author': `${authorName} <${authorEmail}>`
      });
      // Verify commit was created
      const commitHash = await workGit.revparse(['HEAD']).catch(() => null);
      if (!commitHash) {
        throw new Error('Commit was created but HEAD is not pointing to a valid commit');
      }
      logger.info({ npub: context.npub, repo: context.repo, commitHash, branch: defaultBranch }, 'Announcement committed successfully');
      // Push to default branch (if there's a remote); failure is non-fatal
      try {
        await workGit.push('origin', defaultBranch);
      } catch (pushErr) {
        logger.debug({ error: pushErr, npub: context.npub, repo: context.repo }, 'Push failed (may not have remote)');
      }
      // Clean up worktree
      await fileManager.removeWorktree(repoPath, worktreePath);
      return json({
        success: true,
        message: 'Repository announcement committed successfully. Verification should update shortly.',
        announcementId: announcement.id
      });
    } catch (err) {
      logger.error({ error: err, npub: context.npub, repo: context.repo }, 'Failed to commit announcement for verification');
      return handleApiError(err, { operation: 'saveAnnouncement', npub: context.npub, repo: context.repo }, 'Failed to commit announcement');
    }
  },
  { operation: 'saveAnnouncement', requireRepoExists: true, requireRepoAccess: true }
);

328
src/routes/api/search/+server.ts

@@ -7,7 +7,7 @@ import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { MaintainerService } from '$lib/services/nostr/maintainer-service.js';
import { DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js';
import { DEFAULT_NOSTR_SEARCH_RELAYS, DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { KIND } from '$lib/types/nostr.js';
import type { NostrEvent, NostrFilter } from '$lib/types/nostr.js';
import { nip19 } from 'nostr-tools';
@@ -19,6 +19,11 @@ import { eventCache } from '$lib/services/nostr/event-cache.js';
import { decodeNostrAddress } from '$lib/services/nostr/nip19-utils.js';
import logger from '$lib/services/logger.js';
import { isParameterizedReplaceable } from '$lib/utils/nostr-event-utils.js';
import { readdir, stat } from 'fs/promises';
import { join } from 'path';
import { existsSync } from 'fs';
import { simpleGit } from 'simple-git';
import { fileManager } from '$lib/services/service-registry.js';
// Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
@@ -46,7 +51,9 @@ function getDeduplicationKey(event: NostrEvent): string {
export const GET: RequestHandler = async (event) => {
const query = event.url.searchParams.get('q');
const limit = parseInt(event.url.searchParams.get('limit') || '20', 10);
const type = event.url.searchParams.get('type') || 'repos'; // Default to repos search
const limit = parseInt(event.url.searchParams.get('limit') || (type === 'code' ? '100' : '20'), 10);
const repoFilter = event.url.searchParams.get('repo'); // For code search: filter by specific repo (npub/repo format)
// Extract user pubkey for privacy filtering
const requestContext = extractRequestContext(event);
@@ -60,6 +67,13 @@ export const GET: RequestHandler = async (event) => {
return handleValidationError('Query must be at least 2 characters', { operation: 'search', query });
}
// If type is 'code', delegate to code search logic
if (type === 'code') {
return handleCodeSearch(event, query, limit, repoFilter, requestContext);
}
// Otherwise, continue with repository search (type === 'repos' or default)
try {
// Collect all available relays - prioritize DEFAULT_NOSTR_SEARCH_RELAYS
const allRelays = new Set<string>();
@@ -634,3 +648,313 @@ function filterRepos(
return false;
});
}
/**
 * Handle code search (type=code)
 *
 * Greps locally cloned repositories for `query`. When `repoFilter`
 * ("npub/repo") is given only that repository is searched; otherwise every
 * repository under GIT_REPO_ROOT is scanned. Repositories the requesting
 * user may not view are skipped in both modes.
 */
async function handleCodeSearch(
  event: { url: URL; request: Request },
  query: string,
  limit: number,
  repoFilter: string | null,
  requestContext: ReturnType<typeof extractRequestContext>
) {
  const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
    ? process.env.GIT_REPO_ROOT
    : '/repos';
  interface CodeSearchResult {
    repo: string;
    npub: string;
    file: string;
    line: number;
    content: string;
    branch: string;
  }
  const results: CodeSearchResult[] = [];
  // One shared service for all access checks, instead of constructing a new
  // MaintainerService per repository inside the scan loop.
  const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
  // Decode an npub directory name to a hex pubkey; falls back to the raw
  // value when it is not a valid npub (assumed to already be hex).
  const toHexPubkey = (user: string): string => {
    try {
      const decoded = nip19.decode(user);
      return decoded.type === 'npub' ? (decoded.data as string) : user;
    } catch {
      return user;
    }
  };
  try {
    // If repo filter is specified, search only that repo
    if (repoFilter) {
      const [npub, repo] = repoFilter.split('/');
      if (npub && repo) {
        const repoPath = join(repoRoot, npub, `${repo}.git`);
        if (existsSync(repoPath)) {
          // Enforce the same privacy check as the all-repos scan below —
          // previously the filtered path skipped it, exposing private-repo
          // contents to any caller that named the repo explicitly.
          const canView = await maintainerService.canView(
            requestContext.userPubkeyHex || null,
            toHexPubkey(npub),
            repo
          );
          if (canView) {
            const repoResults = await searchInRepoForCode(npub, repo, query, limit);
            results.push(...repoResults);
          }
        }
      }
      return json(results);
    }
    // Search across all repositories.
    // First, get list of all repos from filesystem.
    if (!existsSync(repoRoot)) {
      return json([]);
    }
    const users = await readdir(repoRoot);
    for (const user of users) {
      const userPath = join(repoRoot, user);
      const userStat = await stat(userPath);
      if (!userStat.isDirectory()) {
        continue;
      }
      const repos = await readdir(userPath);
      for (const repo of repos) {
        if (!repo.endsWith('.git')) {
          continue;
        }
        const repoName = repo.replace(/\.git$/, '');
        const repoPath = join(userPath, repo);
        const repoStat = await stat(repoPath);
        if (!repoStat.isDirectory()) {
          continue;
        }
        // Check access for private repos
        try {
          const canView = await maintainerService.canView(
            requestContext.userPubkeyHex || null,
            toHexPubkey(user),
            repoName
          );
          if (!canView) {
            continue; // Skip private repos user can't access
          }
        } catch (accessErr) {
          logger.debug({ error: accessErr, user, repo: repoName }, 'Error checking access, skipping repo');
          continue;
        }
        // Search in this repo, capped to the remaining result budget
        try {
          const repoResults = await searchInRepoForCode(user, repoName, query, limit - results.length);
          results.push(...repoResults);
          if (results.length >= limit) {
            break;
          }
        } catch (searchErr) {
          logger.debug({ error: searchErr, user, repo: repoName }, 'Error searching repo, continuing');
          continue;
        }
      }
      if (results.length >= limit) {
        break;
      }
    }
    return json(results.slice(0, limit));
  } catch (err) {
    logger.error({ error: err, query }, 'Error performing code search');
    throw handleApiError(err, { operation: 'codeSearch' }, 'Failed to perform code search');
  }
}
async function searchInRepoForCode(
  npub: string,
  repo: string,
  query: string,
  limit: number
): Promise<Array<{ repo: string; npub: string; file: string; line: number; content: string; branch: string }>> {
  const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
    ? process.env.GIT_REPO_ROOT
    : '/repos';
  const repoPath = join(repoRoot, npub, `${repo}.git`);

  // Nothing to do for missing repos or an exhausted result budget.
  if (limit <= 0 || !existsSync(repoPath)) {
    return [];
  }

  const git = simpleGit(repoPath);
  try {
    // Determine the branch to report in results: current branch if set,
    // otherwise main/master/first-available, defaulting to 'main'.
    let branch = 'HEAD';
    try {
      const branches = await git.branchLocal();
      branch = branches.current || 'HEAD';
      if (!branch || branch === 'HEAD') {
        const allBranches = branches.all.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, ''));
        branch = allBranches.find(b => b === 'main') || allBranches.find(b => b === 'master') || allBranches[0] || 'main';
      }
    } catch {
      branch = 'main';
    }

    // Bare repositories have no checked-out files, so prefer a worktree.
    let worktreePath: string | null = null;
    try {
      // Resolve HEAD to a concrete branch name for the worktree.
      const actualBranch = branch === 'HEAD' ? 'main' : branch;
      worktreePath = await fileManager.getWorktree(repoPath, actualBranch, npub, repo);
    } catch (worktreeError) {
      logger.debug({ error: worktreeError, npub, repo, branch }, 'Could not create worktree, trying git grep with tree reference');
      // Fall through to the bare-repo tree-reference search below.
    }

    const searchQuery = query.trim();

    if (worktreePath && existsSync(worktreePath)) {
      // Search the checked-out worktree.
      try {
        const worktreeGit = simpleGit(worktreePath);
        const grepOutput = await worktreeGit.raw(['grep', '-n', '-I', '--break', '--heading', searchQuery]);
        return _parseGrepOutput(grepOutput, { repo, npub, branch, worktreePath }, limit);
      } catch (grepError: any) {
        // git grep exits with code 1 when there are no matches.
        if (grepError.message && grepError.message.includes('exit code 1')) {
          return [];
        }
        throw grepError;
      }
    }

    // Fallback for bare repos: git grep against a tree reference.
    try {
      let treeRef = branch;
      if (branch === 'HEAD') {
        try {
          const branchInfo = await git.branch(['-a']);
          treeRef = branchInfo.current || 'HEAD';
        } catch {
          treeRef = 'HEAD';
        }
      }
      const grepOutput = await git.raw(['grep', '-n', '-I', '--break', '--heading', searchQuery, treeRef]);
      return _parseGrepOutput(grepOutput, { repo, npub, branch }, limit);
    } catch (grepError: any) {
      // git grep exits with code 1 when there are no matches.
      if (grepError.message && grepError.message.includes('exit code 1')) {
        return [];
      }
      throw grepError;
    }
  } catch (err) {
    logger.debug({ error: err, npub, repo, query }, 'Error searching in repo');
    return [];
  }
}

/**
 * Parse `git grep -n -I --break --heading` output into match records.
 *
 * With --heading, git grep prints the file path on its own line (no colon),
 * followed by `lineNumber:content` lines for each match in that file.
 * This parsing was previously duplicated in both the worktree and
 * bare-repo branches of searchInRepoForCode.
 *
 * @param grepOutput - raw stdout from git grep
 * @param ctx - identifying metadata to attach to each match; when
 *   worktreePath is set, file paths are stripped of that prefix so they
 *   are relative to the repo root
 * @param limit - maximum number of matches to return
 * @returns parsed matches, at most `limit`
 */
function _parseGrepOutput(
  grepOutput: string,
  ctx: { repo: string; npub: string; branch: string; worktreePath?: string | null },
  limit: number
): Array<{ repo: string; npub: string; file: string; line: number; content: string; branch: string }> {
  const matches: Array<{ repo: string; npub: string; file: string; line: number; content: string; branch: string }> = [];
  if (!grepOutput || !grepOutput.trim()) {
    return matches;
  }
  let currentFile = '';
  for (const line of grepOutput.split('\n')) {
    if (!line.trim()) {
      continue;
    }
    // A line without a colon is a --heading filename header.
    if (!line.includes(':')) {
      currentFile = line.trim();
      continue;
    }
    // Parse "lineNumber:content".
    const colonIndex = line.indexOf(':');
    if (colonIndex > 0 && currentFile) {
      const lineNumber = parseInt(line.substring(0, colonIndex), 10);
      const content = line.substring(colonIndex + 1);
      if (!isNaN(lineNumber) && content) {
        // Make the file path relative to the repo root when a worktree was used.
        const file = ctx.worktreePath
          ? currentFile.replace(ctx.worktreePath + '/', '').replace(/^\.\//, '')
          : currentFile;
        matches.push({
          repo: ctx.repo,
          npub: ctx.npub,
          file,
          line: lineNumber,
          content: content.trim(),
          branch: ctx.branch
        });
        if (matches.length >= limit) {
          break;
        }
      }
    }
  }
  return matches;
}

Loading…
Cancel
Save