From 9743c0a2fceac3607a6e1f32343ca6e8d97e8c4a Mon Sep 17 00:00:00 2001 From: Silberengel Date: Sat, 21 Feb 2026 22:22:12 +0100 Subject: [PATCH] bug-fixes Nostr-Signature: 6d8832125b76095b2e7ed57b71e26a6c05d9b19a14dfa76724c71f392147fe95 573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc 6ddebfa995b5b3f469db5f3cdbd7d13fa2307d7988c2667479015d6bc2ff442be357ee97e51340a944eb34fed73522db3930016d343810927486bdbcabddae5c --- nostr/commit-signatures.jsonl | 1 + src/lib/services/git/repo-manager.ts | 148 +++++++++- src/lib/styles/repo.css | 146 +++++++++- src/lib/utils/nostr-utils.ts | 87 +++++- src/routes/api/git/[...path]/+server.ts | 15 +- .../repos/[npub]/[repo]/branches/+server.ts | 25 +- .../api/repos/[npub]/[repo]/clone/+server.ts | 73 +++-- .../repos/[npub]/[repo]/commits/+server.ts | 18 +- .../repos/[npub]/[repo]/download/+server.ts | 13 +- .../api/repos/[npub]/[repo]/file/+server.ts | 32 +-- .../api/repos/[npub]/[repo]/fork/+server.ts | 38 +-- .../api/repos/[npub]/[repo]/readme/+server.ts | 19 +- .../api/repos/[npub]/[repo]/tree/+server.ts | 18 +- .../repos/[npub]/[repo]/validate/+server.ts | 17 +- .../api/repos/[npub]/[repo]/verify/+server.ts | 18 +- src/routes/api/repos/local/+server.ts | 19 +- src/routes/api/transfers/pending/+server.ts | 39 ++- src/routes/repos/+page.svelte | 266 +++++++++++++++--- src/routes/repos/[npub]/[repo]/+page.svelte | 81 ++++-- src/routes/repos/[npub]/[repo]/+page.ts | 37 +-- 20 files changed, 840 insertions(+), 270 deletions(-) diff --git a/nostr/commit-signatures.jsonl b/nostr/commit-signatures.jsonl index bcdba17..253737b 100644 --- a/nostr/commit-signatures.jsonl +++ b/nostr/commit-signatures.jsonl @@ -45,3 +45,4 @@ {"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771688902,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","refactor"]],"content":"Signed commit: 
refactor","id":"62b813f817173c9e35eb05088240f7ec50ecab697c8c6d4a5c19d47664ef3837","sig":"ca9c70fc7bf8b1bb1726461bb843127d1bddc4de96652cfc7497698a3f5c4dc4a8c3f5a7a240710db77afabeee2a3b7d594f75f42a0a8b28aeeef50f66b506c9"} {"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771690183,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","get rid of tabs on repo page"]],"content":"Signed commit: get rid of tabs on repo page","id":"d34fb23385a23f479c683e76f5676356a11d63bcd0ecf71d25f1b85dbb0cfe57","sig":"1f6454f9961b9245d1e32f4a903ee9636201670491145d0185e95e7b7d33bf1027ac5b8e370070640e103740ab19e9915baa7755c6008fd32fe41e9cb86d33b8"} {"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771691277,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","fix docs"]],"content":"Signed commit: fix docs","id":"4671648712f19537cbf0fd00cf19e254eae4a1ac9c1274ea396e62dac193b88c","sig":"49a3e89e312ec4caebfeacdaade3e4cc6d027ab9c50d8e6aa1998f120a81d8d51235ae397df6e42b9efca4147497b8881731dda6d58fee7d28d2ac07cec295ec"} +{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771705699,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"59d0c409196dccb8109a29829002df69dbca43c5e95c1fdc1e7baa0b88ee5927","sig":"af8726a86e30c64b098ad13946d5bc84cb08d5ea8b75f08641c03fbdd8b9c91683e8091b206159dde2239ea8964cb3589bcb4ec2892541d2980f186a0fb09af9"} diff --git a/src/lib/services/git/repo-manager.ts b/src/lib/services/git/repo-manager.ts index 0e89f85..3090474 100644 --- a/src/lib/services/git/repo-manager.ts +++ b/src/lib/services/git/repo-manager.ts @@ -3,7 +3,7 @@ * Handles repo provisioning, syncing, and NIP-34 integration */ -import { existsSync, mkdirSync, statSync } from 'fs'; +import { existsSync, mkdirSync, statSync, accessSync, 
constants } from 'fs'; import { join } from 'path'; import { readdir, readFile } from 'fs/promises'; import { spawn } from 'child_process'; @@ -16,6 +16,14 @@ import { shouldUseTor, getTorProxy } from '../../utils/tor.js'; import { sanitizeError } from '../../utils/security.js'; import { isPrivateRepo as checkIsPrivateRepo } from '../../utils/repo-privacy.js'; import { extractCloneUrls } from '../../utils/nostr-utils.js'; +/** + * Check if a URL is a GRASP (Git Repository Access via Secure Protocol) URL + * GRASP URLs contain npub (Nostr public key) in the path: https://host/npub.../repo.git + */ +function isGraspUrl(url: string): boolean { + // GRASP URLs have npub (starts with npub1) in the path + return /\/npub1[a-z0-9]+/i.test(url); +} /** * Execute git command with custom environment variables safely @@ -293,6 +301,33 @@ Your commits will all be signed by your Nostr keys and saved to the event files * Supports GitHub tokens via GITHUB_TOKEN environment variable * Returns the original URL if no token is needed or available */ + /** + * Convert SSH URL to HTTPS URL if possible + * e.g., git@github.com:user/repo.git -> https://github.com/user/repo.git + */ + private convertSshToHttps(url: string): string | null { + // Check if it's an SSH URL (git@host:path or ssh://) + const sshMatch = url.match(/^git@([^:]+):(.+)$/); + if (sshMatch) { + const [, host, path] = sshMatch; + // Remove .git suffix if present, we'll add it back + const cleanPath = path.replace(/\.git$/, ''); + return `https://${host}/${cleanPath}.git`; + } + + // Check for ssh:// URLs + if (url.startsWith('ssh://')) { + const sshUrlMatch = url.match(/^ssh:\/\/([^/]+)\/(.+)$/); + if (sshUrlMatch) { + const [, host, path] = sshUrlMatch; + const cleanPath = path.replace(/\.git$/, ''); + return `https://${host}/${cleanPath}.git`; + } + } + + return null; + } + private injectAuthToken(url: string): string { try { const urlObj = new URL(url); @@ -633,7 +668,7 @@ Your commits will all be signed by your 
Nostr keys and saved to the event files npub: string, repoName: string, announcementEvent?: NostrEvent - ): Promise<{ success: boolean; needsAnnouncement?: boolean; announcement?: NostrEvent }> { + ): Promise<{ success: boolean; needsAnnouncement?: boolean; announcement?: NostrEvent; error?: string; cloneUrls?: string[]; remoteUrls?: string[] }> { const repoPath = join(this.repoRoot, npub, `${repoName}.git`); // If repo already exists, check if it has an announcement @@ -702,17 +737,54 @@ Your commits will all be signed by your Nostr keys and saved to the event files try { - // Filter out localhost URLs and our own domain (we want external sources) - const externalUrls = cloneUrls.filter(url => { + // Filter and convert URLs: + // 1. Skip SSH URLs (git@... or ssh://) - convert to HTTPS when possible + // 2. Filter out localhost and our own domain + // 3. Prioritize HTTPS non-GRASP URLs, then GRASP URLs + const httpsUrls: string[] = []; + const sshUrls: string[] = []; + + for (const url of cloneUrls) { const lowerUrl = url.toLowerCase(); - return !lowerUrl.includes('localhost') && - !lowerUrl.includes('127.0.0.1') && - !url.includes(this.domain); - }); + + // Skip localhost and our own domain + if (lowerUrl.includes('localhost') || + lowerUrl.includes('127.0.0.1') || + url.includes(this.domain)) { + continue; + } + + // Check if it's an SSH URL + if (url.startsWith('git@') || url.startsWith('ssh://')) { + sshUrls.push(url); + // Try to convert to HTTPS + const httpsUrl = this.convertSshToHttps(url); + if (httpsUrl) { + httpsUrls.push(httpsUrl); + } + } else { + // It's already HTTPS/HTTP + httpsUrls.push(url); + } + } + + // Separate HTTPS URLs into non-GRASP and GRASP + const nonGraspHttpsUrls = httpsUrls.filter(url => !isGraspUrl(url)); + const graspHttpsUrls = httpsUrls.filter(url => isGraspUrl(url)); + + // Prioritize: non-GRASP HTTPS, then GRASP HTTPS, then converted SSH->HTTPS, finally SSH (if no HTTPS available) + remoteUrls = [...nonGraspHttpsUrls, 
...graspHttpsUrls]; + + // If no HTTPS URLs, try SSH URLs (but log a warning) + if (remoteUrls.length === 0 && sshUrls.length > 0) { + logger.warn({ npub, repoName, sshUrls }, 'No HTTPS URLs available, attempting SSH URLs (may fail without SSH keys configured)'); + remoteUrls = sshUrls; + } // If no external URLs, try any URL that's not our domain - remoteUrls = externalUrls.length > 0 ? externalUrls : - cloneUrls.filter(url => !url.includes(this.domain)); + if (remoteUrls.length === 0) { + remoteUrls = cloneUrls.filter(url => !url.includes(this.domain)); + } // If still no remote URLs, but there are *any* clone URLs, try the first one // This handles cases where the only clone URL is our own domain, but the repo doesn't exist locally yet @@ -728,10 +800,52 @@ Your commits will all be signed by your Nostr keys and saved to the event files logger.debug({ npub, repoName, cloneUrls, remoteUrls, isPublic }, 'On-demand fetch details'); + // Check if repoRoot exists and is writable + if (!existsSync(this.repoRoot)) { + try { + mkdirSync(this.repoRoot, { recursive: true }); + logger.info({ repoRoot: this.repoRoot }, 'Created repos root directory'); + } catch (err) { + const error = err instanceof Error ? err : new Error(String(err)); + logger.error({ + repoRoot: this.repoRoot, + error: error.message + }, 'Failed to create repos root directory'); + throw new Error(`Cannot create repos root directory at ${this.repoRoot}. Please check permissions: ${error.message}`); + } + } else { + // Check if repoRoot is writable + try { + accessSync(this.repoRoot, constants.W_OK); + } catch (err) { + const error = err instanceof Error ? err : new Error(String(err)); + logger.error({ + repoRoot: this.repoRoot, + error: error.message + }, 'Repos root directory is not writable'); + throw new Error(`Repos root directory at ${this.repoRoot} is not writable. 
Please fix permissions (e.g., chmod 755 ${this.repoRoot} or chown to the correct user).`); + } + } + // Create directory structure const repoDir = join(this.repoRoot, npub); if (!existsSync(repoDir)) { - mkdirSync(repoDir, { recursive: true }); + try { + mkdirSync(repoDir, { recursive: true }); + } catch (err) { + const error = err instanceof Error ? err : new Error(String(err)); + if (error.message.includes('EACCES') || error.message.includes('permission denied')) { + logger.error({ + npub, + repoName, + repoDir, + repoRoot: this.repoRoot, + error: error.message + }, 'Permission denied when creating repository directory'); + throw new Error(`Permission denied: Cannot create repository directory at ${repoDir}. Please check that the server has write permissions to ${this.repoRoot}.`); + } + throw error; + } } // Try to clone from the first available remote URL @@ -816,15 +930,23 @@ Your commits will all be signed by your Nostr keys and saved to the event files return { success: true, announcement: announcementEvent }; } catch (error) { const sanitizedError = sanitizeError(error); + const errorMessage = error instanceof Error ? 
error.message : String(error); logger.error({ error: sanitizedError, npub, repoName, cloneUrls, isPublic, - remoteUrls + remoteUrls, + errorMessage }, 'Failed to fetch repository on-demand'); - return { success: false, needsAnnouncement: false }; + return { + success: false, + needsAnnouncement: false, + error: errorMessage, + cloneUrls, + remoteUrls + }; } } diff --git a/src/lib/styles/repo.css b/src/lib/styles/repo.css index 19b43c9..244662d 100644 --- a/src/lib/styles/repo.css +++ b/src/lib/styles/repo.css @@ -1512,6 +1512,21 @@ .readme-content.markdown { padding: 1.5rem; + line-height: 1.7; + font-size: 1rem; + color: var(--text-primary, #1a1a1a); +} + +.readme-content.markdown :global(*) { + line-height: inherit; +} + +.readme-content.markdown :global(*:first-child) { + margin-top: 0; +} + +.readme-content.markdown :global(*:last-child) { + margin-bottom: 0; } .readme-content.markdown :global(h1), @@ -1523,11 +1538,22 @@ margin-top: 1.5rem; margin-bottom: 0.75rem; color: var(--text-primary); + line-height: 1.4; +} + +.readme-content.markdown :global(h1:first-child), +.readme-content.markdown :global(h2:first-child), +.readme-content.markdown :global(h3:first-child), +.readme-content.markdown :global(h4:first-child), +.readme-content.markdown :global(h5:first-child), +.readme-content.markdown :global(h6:first-child) { + margin-top: 0; } .readme-content.markdown :global(p) { + margin-top: 0; margin-bottom: 1rem; - line-height: 1.6; + line-height: 1.7; } .readme-content.markdown :global(code) { @@ -1536,6 +1562,7 @@ border-radius: 3px; font-family: 'IBM Plex Mono', monospace; font-size: 0.9em; + line-height: 1.5; } .readme-content.markdown :global(pre) { @@ -1545,6 +1572,7 @@ overflow-x: auto; border: 1px solid var(--border-light); margin: 1rem 0; + line-height: 1.5; } .readme-content.markdown :global(pre code) { @@ -1552,6 +1580,122 @@ padding: 0; } +.readme-content.markdown :global(ul), +.readme-content.markdown :global(ol) { + margin: 1rem 0; + padding-left: 
2rem; + line-height: 1.7; +} + +.readme-content.markdown :global(li) { + margin: 0.5rem 0; + line-height: 1.7; +} + +.readme-content.markdown :global(blockquote) { + border-left: 4px solid var(--border-color, #e0e0e0); + padding-left: 1rem; + margin: 1rem 0; + color: var(--text-secondary, #666); + font-style: italic; +} + +.readme-content.markdown :global(a) { + color: var(--accent, #007bff); + text-decoration: none; +} + +.readme-content.markdown :global(a:hover) { + text-decoration: underline; +} + +.readme-content.markdown :global(img) { + max-width: 100%; + height: auto; + border-radius: 4px; + margin: 1rem 0; +} + +.readme-content.markdown :global(table) { + width: 100%; + border-collapse: collapse; + margin: 1rem 0; +} + +.readme-content.markdown :global(th), +.readme-content.markdown :global(td) { + border: 1px solid var(--border-color, #e0e0e0); + padding: 0.5rem; + text-align: left; +} + +.readme-content.markdown :global(th) { + background: var(--bg-secondary, #f5f5f5); + font-weight: 600; +} + +.readme-content.markdown :global(hr) { + border: none; + border-top: 1px solid var(--border-color, #e0e0e0); + margin: 2rem 0; +} + +.readme-content.markdown :global(strong), +.readme-content.markdown :global(b) { + font-weight: 600; + color: var(--text-primary, #1a1a1a); +} + +.readme-content.markdown :global(em), +.readme-content.markdown :global(i) { + font-style: italic; +} + +.readme-content.markdown :global(h1) { + font-size: 2rem; + font-weight: 700; + margin-top: 2rem; + margin-bottom: 1rem; + border-bottom: 2px solid var(--border-color, #e0e0e0); + padding-bottom: 0.5rem; +} + +.readme-content.markdown :global(h2) { + font-size: 1.5rem; + font-weight: 600; + margin-top: 1.5rem; + margin-bottom: 0.75rem; +} + +.readme-content.markdown :global(h3) { + font-size: 1.25rem; + font-weight: 600; + margin-top: 1.25rem; + margin-bottom: 0.5rem; +} + +.readme-content.markdown :global(h4) { + font-size: 1.1rem; + font-weight: 600; + margin-top: 1rem; + margin-bottom: 
0.5rem; +} + +.readme-content.markdown :global(h5) { + font-size: 1rem; + font-weight: 600; + margin-top: 0.75rem; + margin-bottom: 0.5rem; +} + +.readme-content.markdown :global(h6) { + font-size: 0.9rem; + font-weight: 600; + margin-top: 0.75rem; + margin-bottom: 0.5rem; + color: var(--text-secondary, #666); +} + .readme-content :global(.hljs) { background: var(--bg-secondary); padding: 1rem; diff --git a/src/lib/utils/nostr-utils.ts b/src/lib/utils/nostr-utils.ts index 71d2419..3253356 100644 --- a/src/lib/utils/nostr-utils.ts +++ b/src/lib/utils/nostr-utils.ts @@ -3,7 +3,8 @@ * Used across web-app, CLI, and API to ensure consistency */ -import type { NostrEvent } from '../types/nostr.js'; +import type { NostrEvent, NostrFilter } from '../types/nostr.js'; +import { KIND } from '../types/nostr.js'; /** * Extract clone URLs from a NIP-34 repo announcement event @@ -70,3 +71,87 @@ export function normalizeCloneUrl(url: string): string { return url; } + +/** + * Fetch repository announcements by author with caching (case-insensitive) + * This helper function provides consistent caching behavior across all endpoints + * + * @param nostrClient - The Nostr client to use for fetching + * @param authorPubkey - The author's pubkey (hex) + * @param eventCache - The event cache instance (optional, will import if not provided) + * @returns Promise resolving to all announcements by the author + */ +export async function fetchRepoAnnouncementsWithCache( + nostrClient: { fetchEvents: (filters: NostrFilter[]) => Promise }, + authorPubkey: string, + eventCache?: { get: (filters: NostrFilter[]) => NostrEvent[] | null; set: (filters: NostrFilter[], events: NostrEvent[]) => void } | null +): Promise { + const filters: NostrFilter[] = [ + { + kinds: [KIND.REPO_ANNOUNCEMENT], + authors: [authorPubkey], + limit: 100 // Fetch more to allow case-insensitive filtering + } + ]; + + // Lazy import eventCache if not provided (for server-side usage) + let cache = eventCache; + if (!cache) { + 
try { + const cacheModule = await import('../services/nostr/event-cache.js'); + cache = cacheModule.eventCache; + } catch { + // Cache not available, skip caching + cache = null; + } + } + + // Check cache first + if (cache) { + const cachedEvents = cache.get(filters); + if (cachedEvents && cachedEvents.length > 0) { + // Return cached events immediately, fetch fresh in background + nostrClient.fetchEvents(filters).then(freshEvents => { + // Merge fresh events with cached ones (deduplicate by event ID) + const eventMap = new Map(); + cachedEvents.forEach(e => eventMap.set(e.id, e)); + freshEvents.forEach(e => { + const existing = eventMap.get(e.id); + if (!existing || e.created_at > existing.created_at) { + eventMap.set(e.id, e); + } + }); + const mergedEvents = Array.from(eventMap.values()); + cache!.set(filters, mergedEvents); + }).catch(() => { + // Ignore background fetch errors + }); + + return cachedEvents; + } + } + + // No cache, fetch from relays + const freshEvents = await nostrClient.fetchEvents(filters); + // Cache the results + if (cache && freshEvents.length > 0) { + cache.set(filters, freshEvents); + } + return freshEvents; +} + +/** + * Find a repository announcement by repo name (case-insensitive) + * + * @param events - Array of announcement events + * @param repoName - The repository name to find + * @returns The matching announcement event or null + */ +export function findRepoAnnouncement(events: NostrEvent[], repoName: string): NostrEvent | null { + const repoLower = repoName.toLowerCase(); + const matching = events.filter(event => { + const dTag = event.tags.find((t: string[]) => t[0] === 'd')?.[1]; + return dTag && dTag.toLowerCase() === repoLower; + }); + return matching.length > 0 ? 
matching[0] : null; +} diff --git a/src/routes/api/git/[...path]/+server.ts b/src/routes/api/git/[...path]/+server.ts index d93e8cb..f355c4e 100644 --- a/src/routes/api/git/[...path]/+server.ts +++ b/src/routes/api/git/[...path]/+server.ts @@ -21,7 +21,8 @@ import { BranchProtectionService } from '$lib/services/nostr/branch-protection-s import logger from '$lib/services/logger.js'; import { auditLogger } from '$lib/services/security/audit-logger.js'; import { isValidBranchName, sanitizeError } from '$lib/utils/security.js'; -import { extractCloneUrls } from '$lib/utils/nostr-utils.js'; +import { extractCloneUrls, fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; // Resolve GIT_REPO_ROOT to absolute path (handles both relative and absolute paths) const repoRootEnv = process.env.GIT_REPO_ROOT || '/repos'; @@ -113,14 +114,10 @@ async function getRepoAnnouncement(npub: string, repoName: string): Promise 0 ? 
events[0] : null; } catch { diff --git a/src/routes/api/repos/[npub]/[repo]/branches/+server.ts b/src/routes/api/repos/[npub]/[repo]/branches/+server.ts index 6f24f7c..3997e0e 100644 --- a/src/routes/api/repos/[npub]/[repo]/branches/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/branches/+server.ts @@ -16,6 +16,7 @@ import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js'; import { DEFAULT_NOSTR_RELAYS, DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js'; import { NostrClient } from '$lib/services/nostr/nostr-client.js'; import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; import logger from '$lib/services/logger.js'; const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT @@ -29,30 +30,22 @@ export const GET: RequestHandler = createRepoGetHandler( // If repo doesn't exist, try to fetch it on-demand if (!existsSync(repoPath)) { try { - // Try cached client first (cache-first lookup) - const filters = [ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [context.repoOwnerPubkey], - '#d': [context.repo], - limit: 1 - } - ]; - - let events = await nostrClient.fetchEvents(filters); + // Fetch repository announcement (case-insensitive) with caching + let allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); + let announcement = findRepoAnnouncement(allEvents, context.repo); // If no events found in cache/default relays, try all relays (default + search) - // But first invalidate the cache entry so we don't get the same cached empty result - if (events.length === 0) { + if (!announcement) { const allRelays = [...new Set([...DEFAULT_NOSTR_RELAYS, ...DEFAULT_NOSTR_SEARCH_RELAYS])]; // Only create new client if we have additional relays to try if (allRelays.length > DEFAULT_NOSTR_RELAYS.length) { - // Invalidate the cache entry so we can try fresh with all relays - 
eventCache.invalidate(filters); const allRelaysClient = new NostrClient(allRelays); - events = await allRelaysClient.fetchEvents(filters); + allEvents = await fetchRepoAnnouncementsWithCache(allRelaysClient, context.repoOwnerPubkey, eventCache); + announcement = findRepoAnnouncement(allEvents, context.repo); } } + + const events = announcement ? [announcement] : []; if (events.length > 0) { // Try API-based fetching first (no cloning) diff --git a/src/routes/api/repos/[npub]/[repo]/clone/+server.ts b/src/routes/api/repos/[npub]/[repo]/clone/+server.ts index f417648..babc7e7 100644 --- a/src/routes/api/repos/[npub]/[repo]/clone/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/clone/+server.ts @@ -20,8 +20,13 @@ import { handleApiError, handleValidationError } from '$lib/utils/error-handler. import { verifyRelayWriteProofFromAuth, verifyRelayWriteProof } from '$lib/services/nostr/relay-write-proof.js'; import { verifyEvent } from 'nostr-tools'; import type { NostrEvent } from '$lib/types/nostr.js'; +import { resolve } from 'path'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; -const repoRoot = process.env.GIT_REPO_ROOT || '/repos'; +// Resolve GIT_REPO_ROOT to absolute path (handles both relative and absolute paths) +const repoRootEnv = process.env.GIT_REPO_ROOT || '/repos'; +const repoRoot = resolve(repoRootEnv); const repoManager = new RepoManager(repoRoot); const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS); @@ -166,13 +171,7 @@ export const POST: RequestHandler = async (event) => { let authorAnnouncements: NostrEvent[]; try { - authorAnnouncements = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [repoOwnerPubkey], - limit: 100 // Fetch more to ensure we find the repo even if author has many repos - } - ]); + authorAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, 
eventCache); logger.debug({ npub, @@ -194,14 +193,10 @@ export const POST: RequestHandler = async (event) => { ); } - // Filter case-insensitively to find the matching repo - const repoLower = repo.toLowerCase(); - const events = authorAnnouncements.filter(event => { - const dTag = event.tags.find(t => t[0] === 'd')?.[1]; - return dTag && dTag.toLowerCase() === repoLower; - }); + // Find the matching repo announcement (case-insensitive) + const announcementEvent = findRepoAnnouncement(authorAnnouncements, repo); - if (events.length === 0) { + if (!announcementEvent) { const dTags = authorAnnouncements .map(e => e.tags.find(t => t[0] === 'd')?.[1]) .filter(Boolean); @@ -224,8 +219,26 @@ export const POST: RequestHandler = async (event) => { { operation: 'cloneRepo', npub, repo } ); } - - const announcementEvent = events[0]; + + // Extract and log clone URLs for debugging + const cloneUrls: string[] = []; + for (const tag of announcementEvent.tags) { + if (tag[0] === 'clone') { + for (let i = 1; i < tag.length; i++) { + const url = tag[i]; + if (url && typeof url === 'string') { + cloneUrls.push(url); + } + } + } + } + + logger.debug({ + npub, + repo, + cloneUrlCount: cloneUrls.length, + cloneUrls: cloneUrls.slice(0, 5) // Log first 5 to avoid huge logs + }, 'Repository announcement clone URLs'); // Attempt to clone the repository const result = await repoManager.fetchRepoOnDemand(npub, repo, announcementEvent); @@ -237,10 +250,32 @@ export const POST: RequestHandler = async (event) => { { operation: 'cloneRepo', npub, repo } ); } + + // Build detailed error message + let errorMessage = 'Could not clone repository.'; + if (result.error) { + errorMessage += ` ${result.error}`; + } + if (result.cloneUrls && result.cloneUrls.length === 0) { + errorMessage += ' No clone URLs found in the repository announcement.'; + } else if (result.remoteUrls && result.remoteUrls.length === 0) { + errorMessage += ' No accessible remote clone URLs found.'; + } else if (result.cloneUrls 
&& result.cloneUrls.length > 0) { + errorMessage += ` Attempted to clone from: ${result.cloneUrls.join(', ')}`; + } + + logger.error({ + npub, + repo, + error: result.error, + cloneUrls: result.cloneUrls, + remoteUrls: result.remoteUrls + }, 'Failed to clone repository'); + throw handleApiError( - new Error('Failed to clone repository from remote URLs'), + new Error(result.error || 'Failed to clone repository from remote URLs'), { operation: 'cloneRepo', npub, repo }, - 'Could not clone repository. Please check that the repository has valid clone URLs and is accessible.' + errorMessage ); } diff --git a/src/routes/api/repos/[npub]/[repo]/commits/+server.ts b/src/routes/api/repos/[npub]/[repo]/commits/+server.ts index e16d22d..7b1ffec 100644 --- a/src/routes/api/repos/[npub]/[repo]/commits/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/commits/+server.ts @@ -13,6 +13,8 @@ import { join } from 'path'; import { existsSync } from 'fs'; import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js'; import logger from '$lib/services/logger.js'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT ? 
process.env.GIT_REPO_ROOT @@ -25,20 +27,14 @@ export const GET: RequestHandler = createRepoGetHandler( // If repo doesn't exist, try to fetch it on-demand if (!existsSync(repoPath)) { try { - // Fetch repository announcement from Nostr - const events = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [context.repoOwnerPubkey], - '#d': [context.repo], - limit: 1 - } - ]); + // Fetch repository announcement from Nostr (case-insensitive) with caching + const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); + const announcement = findRepoAnnouncement(allEvents, context.repo); - if (events.length > 0) { + if (announcement) { // Try API-based fetching first (no cloning) const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js'); - const apiData = await tryApiFetch(events[0], context.npub, context.repo); + const apiData = await tryApiFetch(announcement, context.npub, context.repo); if (apiData && apiData.commits) { // Return API data directly without cloning diff --git a/src/routes/api/repos/[npub]/[repo]/download/+server.ts b/src/routes/api/repos/[npub]/[repo]/download/+server.ts index 4bd7345..5a34afe 100644 --- a/src/routes/api/repos/[npub]/[repo]/download/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/download/+server.ts @@ -29,15 +29,10 @@ export const GET: RequestHandler = createRepoGetHandler( // If repo doesn't exist, try to fetch it on-demand if (!existsSync(repoPath)) { try { - // Fetch repository announcement from Nostr - const events = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [context.repoOwnerPubkey], - '#d': [context.repo], - limit: 1 - } - ]); + // Fetch repository announcement (case-insensitive) with caching + const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); + const announcement = findRepoAnnouncement(allEvents, context.repo); + const events = announcement ? 
[announcement] : []; if (events.length > 0) { // Download requires the actual repo files, so we can't use API fetching diff --git a/src/routes/api/repos/[npub]/[repo]/file/+server.ts b/src/routes/api/repos/[npub]/[repo]/file/+server.ts index 94fb1f7..b001854 100644 --- a/src/routes/api/repos/[npub]/[repo]/file/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/file/+server.ts @@ -21,6 +21,8 @@ import { existsSync } from 'fs'; import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js'; import { extractRequestContext } from '$lib/utils/api-context.js'; import { fetchUserEmail, fetchUserName } from '$lib/utils/user-profile.js'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT ? process.env.GIT_REPO_ROOT @@ -66,21 +68,15 @@ export const GET: RequestHandler = async (event) => { return error(400, 'Invalid npub format'); } - // Fetch repository announcement from Nostr - const events = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [repoOwnerPubkey], - '#d': [repo], - limit: 1 - } - ]); + // Fetch repository announcement (case-insensitive) with caching + const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache); + const announcement = findRepoAnnouncement(allEvents, repo); - if (events.length > 0) { + if (announcement) { // Try API-based fetching first (no cloning) try { const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js'); - const fileContent = await tryApiFetchFile(events[0], npub, repo, filePath, ref); + const fileContent = await tryApiFetchFile(announcement, npub, repo, filePath, ref); if (fileContent && fileContent.content) { return json(fileContent); @@ -357,17 +353,11 @@ export const POST: RequestHandler = async ({ params, url, request }: { params: { return error(400, 
'Invalid npub format'); } - // Fetch repository announcement from Nostr - const events = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [repoOwnerPubkey], - '#d': [repo], - limit: 1 - } - ]); + // Fetch repository announcement (case-insensitive) with caching + const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache); + const announcement = findRepoAnnouncement(allEvents, repo); - if (events.length > 0) { + if (announcement) { // Repository exists in Nostr but is not cloned locally // For file editing, we need a local clone return error(404, 'Repository is not cloned locally. To edit files, the repository must be cloned to the server first. Please use the "Clone to Server" button if you have unlimited access, or contact a server administrator.'); diff --git a/src/routes/api/repos/[npub]/[repo]/fork/+server.ts b/src/routes/api/repos/[npub]/[repo]/fork/+server.ts index 7c6b231..7aa7675 100644 --- a/src/routes/api/repos/[npub]/[repo]/fork/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/fork/+server.ts @@ -25,8 +25,12 @@ import { getCachedUserLevel } from '$lib/services/security/user-level-cache.js'; import { hasUnlimitedAccess } from '$lib/utils/user-access.js'; import logger from '$lib/services/logger.js'; import { handleApiError, handleValidationError, handleNotFoundError, handleAuthorizationError } from '$lib/utils/error-handler.js'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; -const repoRoot = process.env.GIT_REPO_ROOT || '/repos'; +// Resolve GIT_REPO_ROOT to absolute path (handles both relative and absolute paths) +const repoRootEnv = process.env.GIT_REPO_ROOT || '/repos'; +const repoRoot = resolve(repoRootEnv); const repoManager = new RepoManager(repoRoot); const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS); const resourceLimits = new 
ResourceLimits(repoRoot); @@ -150,22 +154,14 @@ export const POST: RequestHandler = async ({ params, request }) => { return error(404, 'Original repository not found'); } - // Get original repo announcement - const originalAnnouncements = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [originalOwnerPubkey], - '#d': [repo], - limit: 1 - } - ]); + // Get original repo announcement (case-insensitive) with caching + const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, originalOwnerPubkey, eventCache); + const originalAnnouncement = findRepoAnnouncement(allAnnouncements, repo); - if (originalAnnouncements.length === 0) { + if (!originalAnnouncement) { return error(404, 'Original repository announcement not found'); } - const originalAnnouncement = originalAnnouncements[0]; - // Check if fork already exists const forkRepoPath = join(repoRoot, userNpub, `${forkRepoName}.git`); // Security: Ensure resolved path is within repoRoot @@ -450,21 +446,15 @@ export const GET: RequestHandler = async ({ params }) => { return error(400, 'Invalid npub format'); } - // Get repo announcement - const announcements = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [ownerPubkey], - '#d': [repo], - limit: 1 - } - ]); + // Get repo announcement (case-insensitive) with caching + const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, ownerPubkey, eventCache); + const announcement = findRepoAnnouncement(allAnnouncements, repo); - if (announcements.length === 0) { + if (!announcement) { return error(404, 'Repository announcement not found'); } - const announcement = announcements[0]; + // announcement is already set above const isFork = announcement.tags.some(t => t[0] === 't' && t[1] === 'fork'); // Get original repo reference diff --git a/src/routes/api/repos/[npub]/[repo]/readme/+server.ts b/src/routes/api/repos/[npub]/[repo]/readme/+server.ts index 4d5590c..0de31f5 100644 --- 
a/src/routes/api/repos/[npub]/[repo]/readme/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/readme/+server.ts @@ -11,6 +11,8 @@ import { handleApiError } from '$lib/utils/error-handler.js'; import { KIND } from '$lib/types/nostr.js'; import { join } from 'path'; import { existsSync } from 'fs'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT ? process.env.GIT_REPO_ROOT @@ -34,20 +36,14 @@ export const GET: RequestHandler = createRepoGetHandler( // If repo doesn't exist, try to fetch it on-demand if (!existsSync(repoPath)) { try { - // Fetch repository announcement from Nostr - const events = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [context.repoOwnerPubkey], - '#d': [context.repo], - limit: 1 - } - ]); + // Fetch repository announcement (case-insensitive) with caching + const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); + const announcement = findRepoAnnouncement(allEvents, context.repo); - if (events.length > 0) { + if (announcement) { // Try API-based fetching first (no cloning) const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js'); - const apiData = await tryApiFetch(events[0], context.npub, context.repo); + const apiData = await tryApiFetch(announcement, context.npub, context.repo); if (apiData && apiData.files) { // Try to find README in API files diff --git a/src/routes/api/repos/[npub]/[repo]/tree/+server.ts b/src/routes/api/repos/[npub]/[repo]/tree/+server.ts index 3b85f2d..00f6d4c 100644 --- a/src/routes/api/repos/[npub]/[repo]/tree/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/tree/+server.ts @@ -13,6 +13,8 @@ import { join } from 'path'; import { existsSync } from 'fs';
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js'; import logger from '$lib/services/logger.js'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT ? process.env.GIT_REPO_ROOT @@ -25,20 +27,14 @@ export const GET: RequestHandler = createRepoGetHandler( // If repo doesn't exist, try to fetch it on-demand if (!existsSync(repoPath)) { try { - // Fetch repository announcement from Nostr - const events = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [context.repoOwnerPubkey], - '#d': [context.repo], - limit: 1 - } - ]); + // Fetch repository announcement from Nostr (case-insensitive) with caching + const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); + const announcement = findRepoAnnouncement(allEvents, context.repo); - if (events.length > 0) { + if (announcement) { // Try API-based fetching first (no cloning) const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js'); - const apiData = await tryApiFetch(events[0], context.npub, context.repo); + const apiData = await tryApiFetch(announcement, context.npub, context.repo); if (apiData && apiData.files) { // Return API data directly without cloning diff --git a/src/routes/api/repos/[npub]/[repo]/validate/+server.ts b/src/routes/api/repos/[npub]/[repo]/validate/+server.ts index 1ec0248..a5a0c88 100644 --- a/src/routes/api/repos/[npub]/[repo]/validate/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/validate/+server.ts @@ -12,6 +12,8 @@ import { KIND } from '$lib/types/nostr.js'; import { requireNpubHex } from '$lib/utils/npub-utils.js'; import type { NostrEvent } from '$lib/types/nostr.js'; import logger from '$lib/services/logger.js'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { 
fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; /** * GET - Validate repository announcement @@ -71,17 +73,12 @@ export const GET: RequestHandler = createRepoGetHandler( // Check announcement on relays try { - const events = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [repoOwnerPubkey], - '#d': [repo], - limit: 1 - } - ]); + // Fetch repository announcement (case-insensitive) with caching + const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache); + const announcement = findRepoAnnouncement(allEvents, repo); - if (events.length > 0) { - relayAnnouncement = events[0]; + if (announcement) { + relayAnnouncement = announcement; onRelays = true; } } catch (err) { diff --git a/src/routes/api/repos/[npub]/[repo]/verify/+server.ts b/src/routes/api/repos/[npub]/[repo]/verify/+server.ts index c833b58..6238978 100644 --- a/src/routes/api/repos/[npub]/[repo]/verify/+server.ts +++ b/src/routes/api/repos/[npub]/[repo]/verify/+server.ts @@ -16,6 +16,8 @@ import { decodeNpubToHex } from '$lib/utils/npub-utils.js'; import { createRepoGetHandler } from '$lib/utils/api-handlers.js'; import type { RepoRequestContext } from '$lib/utils/api-context.js'; import { handleApiError } from '$lib/utils/error-handler.js'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT ? 
process.env.GIT_REPO_ROOT @@ -26,17 +28,11 @@ export const GET: RequestHandler = createRepoGetHandler( // Check if repository exists - verification doesn't require the repo to be cloned locally // We can verify ownership from Nostr events alone - // Fetch the repository announcement - const events = await nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [context.repoOwnerPubkey], - '#d': [context.repo], - limit: 1 - } - ]); + // Fetch the repository announcement (case-insensitive) with caching + const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache); + const announcement = findRepoAnnouncement(allEvents, context.repo); - if (events.length === 0) { + if (!announcement) { return json({ verified: false, error: 'Repository announcement not found', @@ -44,8 +40,6 @@ export const GET: RequestHandler = createRepoGetHandler( }); } - const announcement = events[0]; - // Extract clone URLs from announcement const cloneUrls: string[] = []; for (const tag of announcement.tags) { diff --git a/src/routes/api/repos/local/+server.ts b/src/routes/api/repos/local/+server.ts index 34c8f62..255e8ca 100644 --- a/src/routes/api/repos/local/+server.ts +++ b/src/routes/api/repos/local/+server.ts @@ -18,6 +18,8 @@ import { extractRequestContext } from '$lib/utils/api-context.js'; import logger from '$lib/services/logger.js'; import type { NostrEvent } from '$lib/types/nostr.js'; import type { RequestEvent } from '@sveltejs/kit'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS); const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS); @@ -134,25 +136,16 @@ async function enrichLocalRepos( // Fetch announcements for each owner for (const [pubkey, repoNames] of ownerMap.entries()) { try { - const events = await 
nostrClient.fetchEvents([ - { - kinds: [KIND.REPO_ANNOUNCEMENT], - authors: [pubkey], - '#d': repoNames, - limit: repoNames.length - } - ]); + // Fetch all announcements by this author (case-insensitive matching) with caching + const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, pubkey, eventCache); - // Match announcements to repos + // Match announcements to repos (case-insensitive) for (const repo of repos) { try { const decoded = nip19.decode(repo.npub); if (decoded.type !== 'npub' || decoded.data !== pubkey) continue; - const announcement = events.find(e => { - const dTag = e.tags.find(t => t[0] === 'd')?.[1]; - return dTag === repo.repoName; - }); + const announcement = findRepoAnnouncement(allEvents, repo.repoName); if (announcement) { // Check if registered (has domain in clone URLs) diff --git a/src/routes/api/transfers/pending/+server.ts b/src/routes/api/transfers/pending/+server.ts index ad2e2f0..21aede7 100644 --- a/src/routes/api/transfers/pending/+server.ts +++ b/src/routes/api/transfers/pending/+server.ts @@ -11,6 +11,8 @@ import type { NostrEvent } from '$lib/types/nostr.js'; import { verifyEvent } from 'nostr-tools'; import { getUserRelays } from '$lib/services/nostr/user-relays.js'; import logger from '$lib/services/logger.js'; +import { eventCache } from '$lib/services/nostr/event-cache.js'; +import { findRepoAnnouncement } from '$lib/utils/nostr-utils.js'; export const GET: RequestHandler = async ({ request }) => { const userPubkeyHex = request.headers.get('X-User-Pubkey'); @@ -87,15 +89,46 @@ export const GET: RequestHandler = async ({ request }) => { // Check if transfer is already completed by checking for a newer repo announcement from the new owner // This is a simple check - if there's a newer announcement from the new owner for this repo, transfer is complete - const newerAnnouncements = await searchClient.fetchEvents([ + // Fetch announcements (case-insensitive) with caching + // Note: We use 'since' parameter, so we 
can't use the standard cache helper + const allNewerAnnouncements = await searchClient.fetchEvents([ { kinds: [KIND.REPO_ANNOUNCEMENT], authors: [userPubkeyHex], - '#d': [repoName], since: event.created_at, - limit: 1 + limit: 100 // Fetch more to allow case-insensitive filtering } ]); + + // Cache the results (without 'since' filter for better cache hit rate) + const cacheFilters = [ + { + kinds: [KIND.REPO_ANNOUNCEMENT], + authors: [userPubkeyHex], + limit: 100 + } + ]; + const cached = eventCache.get(cacheFilters); + if (cached) { + // Merge with cached events + const eventMap = new Map(); + cached.forEach(e => eventMap.set(e.id, e)); + allNewerAnnouncements.forEach(e => { + const existing = eventMap.get(e.id); + if (!existing || e.created_at > existing.created_at) { + eventMap.set(e.id, e); + } + }); + eventCache.set(cacheFilters, Array.from(eventMap.values())); + } else if (allNewerAnnouncements.length > 0) { + eventCache.set(cacheFilters, allNewerAnnouncements); + } + + // Filter case-insensitively to find the matching repo + const newerAnnouncements = allNewerAnnouncements.filter(announcement => { + const dTag = announcement.tags.find((t: string[]) => t[0] === 'd')?.[1]; + return dTag && dTag.toLowerCase() === repoName.toLowerCase(); + }); // If there's a newer announcement from the new owner, transfer is complete if (newerAnnouncements.length > 0) { diff --git a/src/routes/repos/+page.svelte b/src/routes/repos/+page.svelte index 92d4b29..fdbb217 100644 --- a/src/routes/repos/+page.svelte +++ b/src/routes/repos/+page.svelte @@ -754,25 +754,21 @@ {/if}
- {#if repoImage} - Repository - {/if}
-

{getRepoName(repo)}

+
+ {#if repoImage} + Repository + {/if} +

{getRepoName(repo)}

+
{#if getRepoDescription(repo)}

{getRepoDescription(repo)}

{/if}
- - View & Edit → + + View
-
- Clone URLs: - {#each getCloneUrls(repo) as url} - {url} - {/each} -
Created: {new Date(repo.created_at * 1000).toLocaleDateString()} {#if getForkCount(repo) > 0} @@ -813,11 +809,13 @@ {/if}
- {#if repoImage} - Repository - {/if}
-

{repo ? getRepoName(repo) : item.repoName}

+
+ {#if repoImage} + Repository + {/if} +

{repo ? getRepoName(repo) : item.repoName}

+
{#if repo && getRepoDescription(repo)}

{getRepoDescription(repo)}

{:else} @@ -825,37 +823,21 @@ {/if}
- - View & Edit → + + View - {#if userPubkey} - {#if canDelete} - - {:else if hasUnlimitedAccess($userStore.userLevel)} - - {/if} + {#if userPubkey && canDelete} + {/if}
- {#if repo} -
- Clone URLs: - {#each getCloneUrls(repo) as url} - {url} - {/each} -
- {/if}
Last modified: {new Date(item.lastModified).toLocaleDateString()} {#if repo} @@ -1011,4 +993,202 @@ color: var(--text-secondary, #666); font-size: 0.9rem; } + + .repos-list { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(280px, 1fr)); + gap: 1rem; + margin-top: 1rem; + } + + .repo-card { + background: var(--card-bg, #ffffff); + border: 1px solid var(--border-color, #e0e0e0); + border-radius: 0.5rem; + overflow: hidden; + transition: all 0.2s ease; + display: flex; + flex-direction: column; + } + + .repo-card:hover { + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1); + transform: translateY(-2px); + } + + .repo-card-banner { + width: 100%; + height: 120px; + overflow: hidden; + background: var(--bg-secondary, #f5f5f5); + } + + .repo-card-banner img { + width: 100%; + height: 100%; + object-fit: cover; + } + + .repo-card-content { + padding: 1rem; + flex: 1; + display: flex; + flex-direction: column; + } + + .repo-header { + display: flex; + align-items: flex-start; + gap: 0.75rem; + margin-bottom: 0.75rem; + } + + .repo-title-row { + display: flex; + align-items: center; + gap: 0.5rem; + } + + .repo-avatar { + width: 24px; + height: 24px; + border-radius: 50%; + object-fit: cover; + flex-shrink: 0; + border: 1px solid var(--border-color, #e0e0e0); + } + + .repo-header-text { + flex: 1; + min-width: 0; + } + + .repo-header-text h3 { + margin: 0 0 0.25rem 0; + font-size: 1rem; + font-weight: 600; + color: var(--text-primary, #1a1a1a); + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + } + + .repo-header-text .description { + margin: 0; + font-size: 0.875rem; + color: var(--text-secondary, #666); + overflow: hidden; + text-overflow: ellipsis; + display: -webkit-box; + -webkit-line-clamp: 2; + line-clamp: 2; + -webkit-box-orient: vertical; + line-height: 1.4; + } + + .view-button { + display: flex; + align-items: center; + justify-content: center; + width: 32px; + height: 32px; + padding: 0; + background: var(--accent, #007bff); + 
color: var(--accent-text, #ffffff); + text-decoration: none; + border-radius: 0.25rem; + transition: all 0.2s ease; + flex-shrink: 0; + } + + .view-button:hover { + background: var(--accent-hover, #0056b3); + transform: translateX(2px); + } + + .view-button img { + width: 18px; + height: 18px; + filter: brightness(0) invert(1); + } + + .repo-actions { + display: flex; + gap: 0.5rem; + align-items: center; + } + + .delete-button { + padding: 0.375rem 0.75rem; + background: var(--error, #dc3545); + color: var(--error-text, #ffffff); + border: none; + border-radius: 0.25rem; + font-size: 0.875rem; + font-weight: 500; + cursor: pointer; + transition: all 0.2s ease; + } + + .delete-button:hover:not(:disabled) { + background: var(--error-hover, #c82333); + } + + .delete-button:disabled { + opacity: 0.6; + cursor: not-allowed; + } + + .repo-meta { + display: flex; + flex-wrap: wrap; + gap: 0.75rem; + margin-top: auto; + padding-top: 0.75rem; + border-top: 1px solid var(--border-color, #e0e0e0); + font-size: 0.75rem; + color: var(--text-secondary, #666); + } + + .fork-count { + color: var(--text-secondary, #666); + } + + .repo-section { + margin: 2rem 0; + } + + .section-header { + display: flex; + align-items: center; + gap: 1rem; + margin-bottom: 1rem; + } + + .section-header h3 { + margin: 0; + font-size: 1.25rem; + color: var(--text-primary, #1a1a1a); + } + + .section-badge { + padding: 0.25rem 0.75rem; + background: var(--bg-secondary, #f5f5f5); + border: 1px solid var(--border-color, #e0e0e0); + border-radius: 1rem; + font-size: 0.875rem; + color: var(--text-secondary, #666); + } + + .section-description { + font-size: 0.875rem; + color: var(--text-secondary, #666); + margin-left: auto; + } + + @media (max-width: 768px) { + .repos-list { + grid-template-columns: 1fr; + } + } diff --git a/src/routes/repos/[npub]/[repo]/+page.svelte b/src/routes/repos/[npub]/[repo]/+page.svelte index f48f918..ebe854d 100644 --- a/src/routes/repos/[npub]/[repo]/+page.svelte +++ 
b/src/routes/repos/[npub]/[repo]/+page.svelte @@ -64,7 +64,7 @@ let userPubkey = $state(null); let userPubkeyHex = $state(null); let showCommitDialog = $state(false); - let activeTab = $state<'files' | 'history' | 'tags' | 'issues' | 'prs' | 'docs' | 'discussions'>('discussions'); + let activeTab = $state<'files' | 'history' | 'tags' | 'issues' | 'prs' | 'docs' | 'discussions' | 'patches'>('files'); let showRepoMenu = $state(false); // Tabs will be defined as derived after issues and prs are declared @@ -309,13 +309,15 @@ let selectedPR = $state(null); // Tabs menu - defined after issues and prs + // Order: Files, Issues, PRs, Patches, Discussion, History, Tags, Docs const tabs = $derived([ - { id: 'discussions', label: 'Discussions', icon: '/icons/message-circle.svg' }, { id: 'files', label: 'Files', icon: '/icons/file-text.svg' }, - { id: 'history', label: 'History', icon: '/icons/git-commit.svg' }, - { id: 'tags', label: 'Tags', icon: '/icons/tag.svg' }, { id: 'issues', label: 'Issues', icon: '/icons/alert-circle.svg', count: issues.length }, { id: 'prs', label: 'Pull Requests', icon: '/icons/git-pull-request.svg', count: prs.length }, + { id: 'patches', label: 'Patches', icon: '/icons/clipboard-list.svg' }, + { id: 'discussions', label: 'Discussions', icon: '/icons/message-circle.svg' }, + { id: 'history', label: 'History', icon: '/icons/git-commit.svg' }, + { id: 'tags', label: 'Tags', icon: '/icons/tag.svg' }, { id: 'docs', label: 'Docs', icon: '/icons/book.svg' } ]); @@ -721,27 +723,41 @@ // Render markdown if needed if (readmeIsMarkdown && readmeContent) { - const MarkdownIt = (await import('markdown-it')).default; - const hljsModule = await import('highlight.js'); - const hljs = hljsModule.default || hljsModule; - - const md = new MarkdownIt({ - highlight: function (str: string, lang: string): string { - if (lang && hljs.getLanguage(lang)) { - try { - return '
' +
-                           hljs.highlight(str, { language: lang }).value +
-                           '
'; - } catch (err) { - // Fallback to escaped HTML if highlighting fails - // This is expected for unsupported languages + try { + const MarkdownIt = (await import('markdown-it')).default; + const hljsModule = await import('highlight.js'); + const hljs = hljsModule.default || hljsModule; + + const md = new MarkdownIt({ + html: true, // Enable HTML tags in source + linkify: true, // Autoconvert URL-like text to links + typographer: true, // Enable some language-neutral replacement + quotes beautification + breaks: true, // Convert '\n' in paragraphs into
+ highlight: function (str: string, lang: string): string { + if (lang && hljs.getLanguage(lang)) { + try { + return '
' +
+                             hljs.highlight(str, { language: lang }).value +
+                             '
'; + } catch (err) { + // Fallback to escaped HTML if highlighting fails + // This is expected for unsupported languages + } } + return '
' + md.utils.escapeHtml(str) + '
'; } - return '
' + md.utils.escapeHtml(str) + '
'; - } - }); - - readmeHtml = md.render(readmeContent); + }); + + readmeHtml = md.render(readmeContent); + console.log('[README] Markdown rendered successfully, HTML length:', readmeHtml.length); + } catch (err) { + console.error('[README] Error rendering markdown:', err); + // Fallback: show as plain text if rendering fails + readmeHtml = ''; + } + } else { + // Clear HTML if not markdown + readmeHtml = ''; } } } @@ -3542,7 +3558,20 @@ $effect(() => { if (activeTab !== lastTab) { lastTab = activeTab; - if (activeTab === 'history') { + if (activeTab === 'files') { + // Files tab - ensure files are loaded and README is shown if available + if (files.length === 0 || currentPath !== '') { + loadFiles(''); + } else if (files.length > 0 && !currentFile) { + // Files already loaded, ensure README is shown + const readmeFile = findReadmeFile(files); + if (readmeFile) { + setTimeout(() => { + loadFile(readmeFile.path); + }, 100); + } + } + } else if (activeTab === 'history') { loadCommitHistory(); } else if (activeTab === 'tags') { loadTags(); @@ -3554,6 +3583,8 @@ loadDocumentation(); } else if (activeTab === 'discussions') { loadDiscussions(); + } else if (activeTab === 'patches') { + // Patches tab - patches are loaded on demand when creating/viewing } } }); @@ -4057,7 +4088,7 @@
{#if loadingReadme}
Loading README...
- {:else if readmeIsMarkdown && readmeHtml} + {:else if readmeIsMarkdown && readmeHtml && readmeHtml.trim()}
{@html readmeHtml}
diff --git a/src/routes/repos/[npub]/[repo]/+page.ts b/src/routes/repos/[npub]/[repo]/+page.ts index 0f68c6e..6696cc4 100644 --- a/src/routes/repos/[npub]/[repo]/+page.ts +++ b/src/routes/repos/[npub]/[repo]/+page.ts @@ -7,6 +7,7 @@ import { NostrClient } from '$lib/services/nostr/nostr-client.js'; import { MaintainerService } from '$lib/services/nostr/maintainer-service.js'; import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js'; import { KIND } from '$lib/types/nostr.js'; +import type { NostrEvent } from '$lib/types/nostr.js'; import { nip19 } from 'nostr-tools'; import { extractRequestContext } from '$lib/utils/api-context.js'; @@ -33,7 +34,7 @@ export const load: PageLoad = async ({ params, url, parent }) => { const repoOwnerPubkey = decoded.data as string; // Check if announcement was passed from search results via sessionStorage - let announcement: any = null; + let announcement: NostrEvent | null = null; if (typeof window !== 'undefined') { const repoKey = `${npub}/${repo}`; const storedAnnouncement = sessionStorage.getItem(`repo_announcement_${repoKey}`); @@ -81,7 +82,7 @@ export const load: PageLoad = async ({ params, url, parent }) => { // The page load function runs server-side but doesn't have access to client auth headers // So we'll mark it as private and let the frontend handle access denial const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS); - const isPrivate = announcement.tags.some(t => + const isPrivate = announcement.tags.some((t: string[]) => (t[0] === 'private' && t[1] === 'true') || (t[0] === 't' && t[1] === 'private') ); @@ -90,34 +91,34 @@ export const load: PageLoad = async ({ params, url, parent }) => { // The frontend will need to check access via API and show appropriate error // We still expose basic metadata (name) but the API will enforce access - const name = announcement.tags.find(t => t[0] === 'name')?.[1] || repo; - const description = announcement.tags.find(t => t[0] === 'description')?.[1] || ''; - const image 
= announcement.tags.find(t => t[0] === 'image')?.[1]; - const banner = announcement.tags.find(t => t[0] === 'banner')?.[1]; + const name = announcement.tags.find((t: string[]) => t[0] === 'name')?.[1] || repo; + const description = announcement.tags.find((t: string[]) => t[0] === 'description')?.[1] || ''; + const image = announcement.tags.find((t: string[]) => t[0] === 'image')?.[1]; + const banner = announcement.tags.find((t: string[]) => t[0] === 'banner')?.[1]; // Debug: log image and banner tags if found if (image) console.log('[Page Load] Found image tag:', image); if (banner) console.log('[Page Load] Found banner tag:', banner); if (!image && !banner) { console.log('[Page Load] No image or banner tags found. Available tags:', - announcement.tags.filter(t => t[0] === 'image' || t[0] === 'banner').map(t => t[0])); + announcement.tags.filter((t: string[]) => t[0] === 'image' || t[0] === 'banner').map((t: string[]) => t[0])); } const cloneUrls = announcement.tags - .filter(t => t[0] === 'clone') - .flatMap(t => t.slice(1)) - .filter(url => url && typeof url === 'string') as string[]; + .filter((t: string[]) => t[0] === 'clone') + .flatMap((t: string[]) => t.slice(1)) + .filter((url: string) => url && typeof url === 'string') as string[]; const maintainers = announcement.tags - .filter(t => t[0] === 'maintainers') - .flatMap(t => t.slice(1)) - .filter(m => m && typeof m === 'string') as string[]; + .filter((t: string[]) => t[0] === 'maintainers') + .flatMap((t: string[]) => t.slice(1)) + .filter((m: string) => m && typeof m === 'string') as string[]; // Owner is the author of the announcement event const ownerPubkey = announcement.pubkey; - const language = announcement.tags.find(t => t[0] === 'language')?.[1]; + const language = announcement.tags.find((t: string[]) => t[0] === 'language')?.[1]; const topics = announcement.tags - .filter(t => t[0] === 't' && t[1] !== 'private') - .map(t => t[1]) - .filter(t => t && typeof t === 'string') as string[]; - const 
website = announcement.tags.find(t => t[0] === 'website')?.[1]; + .filter((t: string[]) => t[0] === 't' && t[1] !== 'private') + .map((t: string[]) => t[1]) + .filter((t: string) => t && typeof t === 'string') as string[]; + const website = announcement.tags.find((t: string[]) => t[0] === 'website')?.[1]; // Get git domain for constructing URLs const layoutData = await parent();