diff --git a/.dockerignore b/.dockerignore
index 4eb0c30..e8332b1 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -19,3 +19,5 @@ build
repos
*.md
!README.md
+!docs/
+!docs/**/*.md
diff --git a/Dockerfile b/Dockerfile
index fd24900..6059d89 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -45,6 +45,8 @@ RUN npm ci --only=production
# Copy built application from builder
COPY --from=builder /app/build ./build
COPY --from=builder /app/package.json ./
+# Copy docs directory for documentation pages
+COPY --from=builder /app/docs ./docs
# Create directory for git repositories
RUN mkdir -p /repos && chmod 755 /repos
@@ -56,7 +58,8 @@ RUN mkdir -p /app/logs && chmod 755 /app/logs
# Using a dedicated user (not generic 'nodejs') is better security practice
RUN addgroup -g 1001 -S gitrepublic && \
adduser -S gitrepublic -u 1001 -G gitrepublic && \
- chown -R gitrepublic:gitrepublic /app /repos /app/logs
+ chown -R gitrepublic:gitrepublic /app /repos /app/logs && \
+ chown -R gitrepublic:gitrepublic /app/docs
# Switch to non-root user
USER gitrepublic
diff --git a/nostr/commit-signatures.jsonl b/nostr/commit-signatures.jsonl
index 1142601..bcdba17 100644
--- a/nostr/commit-signatures.jsonl
+++ b/nostr/commit-signatures.jsonl
@@ -44,3 +44,4 @@
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771682804,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","repo page refactor"]],"content":"Signed commit: repo page refactor","id":"9ad7610ff7aa61d62d3772d6ae7c0589cda8ff95cd7a60b81c84ba879e0f9d8a","sig":"8918f36d426d352a6787543daaa044cf51855632e2257f29cc18bb87db31d61c877b525113e21045d3bc135376e1c0574454e28bd409d3135bcb80079bc11947"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771688902,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","refactor"]],"content":"Signed commit: refactor","id":"62b813f817173c9e35eb05088240f7ec50ecab697c8c6d4a5c19d47664ef3837","sig":"ca9c70fc7bf8b1bb1726461bb843127d1bddc4de96652cfc7497698a3f5c4dc4a8c3f5a7a240710db77afabeee2a3b7d594f75f42a0a8b28aeeef50f66b506c9"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771690183,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","get rid of tabs on repo page"]],"content":"Signed commit: get rid of tabs on repo page","id":"d34fb23385a23f479c683e76f5676356a11d63bcd0ecf71d25f1b85dbb0cfe57","sig":"1f6454f9961b9245d1e32f4a903ee9636201670491145d0185e95e7b7d33bf1027ac5b8e370070640e103740ab19e9915baa7755c6008fd32fe41e9cb86d33b8"}
+{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771691277,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","fix docs"]],"content":"Signed commit: fix docs","id":"4671648712f19537cbf0fd00cf19e254eae4a1ac9c1274ea396e62dac193b88c","sig":"49a3e89e312ec4caebfeacdaade3e4cc6d027ab9c50d8e6aa1998f120a81d8d51235ae397df6e42b9efca4147497b8881731dda6d58fee7d28d2ac07cec295ec"}
diff --git a/src/lib/services/nostr/relay-write-proof.ts b/src/lib/services/nostr/relay-write-proof.ts
index b9f3a63..c20f87e 100644
--- a/src/lib/services/nostr/relay-write-proof.ts
+++ b/src/lib/services/nostr/relay-write-proof.ts
@@ -168,7 +168,7 @@ export async function verifyRelayWriteProofFromAuth(
authHeader: string | null,
userPubkey: string,
relays: string[] = DEFAULT_NOSTR_RELAYS
-): Promise<{ valid: boolean; error?: string; relay?: string }> {
+): Promise<{ valid: boolean; error?: string; relay?: string; relayDown?: boolean }> {
if (!authHeader || !authHeader.startsWith('Nostr ')) {
return {
valid: false,
diff --git a/src/routes/api/repos/[npub]/[repo]/clone/+server.ts b/src/routes/api/repos/[npub]/[repo]/clone/+server.ts
index f9df866..f417648 100644
--- a/src/routes/api/repos/[npub]/[repo]/clone/+server.ts
+++ b/src/routes/api/repos/[npub]/[repo]/clone/+server.ts
@@ -13,10 +13,13 @@ import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { KIND } from '$lib/types/nostr.js';
import { extractRequestContext } from '$lib/utils/api-context.js';
-import { getCachedUserLevel } from '$lib/services/security/user-level-cache.js';
+import { getCachedUserLevel, cacheUserLevel } from '$lib/services/security/user-level-cache.js';
import { hasUnlimitedAccess } from '$lib/utils/user-access.js';
import logger from '$lib/services/logger.js';
import { handleApiError, handleValidationError } from '$lib/utils/error-handler.js';
+import { verifyRelayWriteProofFromAuth, verifyRelayWriteProof } from '$lib/services/nostr/relay-write-proof.js';
+import { verifyEvent } from 'nostr-tools';
+import type { NostrEvent } from '$lib/types/nostr.js';
const repoRoot = process.env.GIT_REPO_ROOT || '/repos';
const repoManager = new RepoManager(repoRoot);
@@ -37,10 +40,109 @@ export const POST: RequestHandler = async (event) => {
throw error(401, 'Authentication required. Please log in to clone repositories.');
}
- // Check if user has unlimited access
- const userLevel = getCachedUserLevel(userPubkeyHex);
- if (!hasUnlimitedAccess(userLevel?.level)) {
- throw error(403, 'Only users with unlimited access can clone repositories to the server.');
+ // Check if user has unlimited access (check cache first)
+ let userLevel = getCachedUserLevel(userPubkeyHex);
+
+ logger.debug({
+ userPubkeyHex: userPubkeyHex.slice(0, 16) + '...',
+ cachedLevel: userLevel?.level || 'none',
+ hasUnlimitedAccess: userLevel ? hasUnlimitedAccess(userLevel.level) : false
+ }, 'Checking user access level for clone operation');
+
+ // If cache is empty, try to verify from proof event in body, NIP-98 auth header, or return helpful error
+ if (!userLevel || !hasUnlimitedAccess(userLevel.level)) {
+ let verification: { valid: boolean; error?: string; relay?: string; relayDown?: boolean } | null = null;
+
+ // Try to get proof event from request body first (if content-type is JSON)
+ const contentType = event.request.headers.get('content-type') || '';
+ if (contentType.includes('application/json')) {
+ try {
+        // NOTE: this CONSUMES the request body (a body can only be read once);
+        // nothing downstream may call event.request.text()/json() again after this
+ const bodyText = await event.request.text().catch(() => '');
+ if (bodyText) {
+ try {
+ const body = JSON.parse(bodyText);
+ if (body.proofEvent && typeof body.proofEvent === 'object') {
+ const proofEvent = body.proofEvent as NostrEvent;
+
+ // Validate proof event signature and pubkey
+ if (verifyEvent(proofEvent) && proofEvent.pubkey === userPubkeyHex) {
+ logger.debug({ userPubkeyHex: userPubkeyHex.slice(0, 16) + '...' }, 'Cache empty or expired, attempting to verify from proof event in request body');
+ verification = await verifyRelayWriteProof(proofEvent, userPubkeyHex, DEFAULT_NOSTR_RELAYS);
+ } else {
+ logger.warn({ userPubkeyHex: userPubkeyHex.slice(0, 16) + '...' }, 'Invalid proof event in request body');
+ }
+ }
+ } catch (parseErr) {
+ // Not valid JSON or missing proofEvent - continue to check auth header
+ logger.debug({ error: parseErr }, 'Request body is not valid JSON or missing proofEvent');
+ }
+ }
+ } catch (err) {
+ // Body reading failed - continue to check auth header
+ logger.debug({ error: err }, 'Failed to read request body, checking auth header');
+ }
+ }
+
+ // If no proof event in body, try NIP-98 auth header
+ if (!verification) {
+ const authHeader = event.request.headers.get('authorization') || event.request.headers.get('Authorization');
+
+ if (authHeader) {
+ logger.debug({ userPubkeyHex: userPubkeyHex.slice(0, 16) + '...' }, 'Cache empty or expired, attempting to verify from NIP-98 auth header');
+ verification = await verifyRelayWriteProofFromAuth(authHeader, userPubkeyHex, DEFAULT_NOSTR_RELAYS);
+ }
+ }
+
+ // Process verification result
+ if (verification) {
+ try {
+ if (verification.valid) {
+ // User has write access - cache unlimited level
+ cacheUserLevel(userPubkeyHex, 'unlimited');
+ logger.info({ userPubkeyHex: userPubkeyHex.slice(0, 16) + '...' }, 'Verified unlimited access from proof event');
+ userLevel = getCachedUserLevel(userPubkeyHex); // Get the cached value
+ } else {
+ // Check if relays are down
+ if (verification.relayDown) {
+            // Relays are down - re-check cache (NOTE(review): likely unchanged since the lookup above; confirm this re-read is needed)
+ userLevel = getCachedUserLevel(userPubkeyHex);
+ if (!userLevel || !hasUnlimitedAccess(userLevel.level)) {
+ logger.warn({ userPubkeyHex: userPubkeyHex.slice(0, 16) + '...', error: verification.error }, 'Relays down and no cached unlimited access');
+ throw error(503, 'Relays are temporarily unavailable and no cached access level found. Please verify your access level first by visiting your profile page.');
+ }
+ } else {
+ // Verification failed - user doesn't have write access
+ logger.warn({ userPubkeyHex: userPubkeyHex.slice(0, 16) + '...', error: verification.error }, 'User does not have unlimited access');
+ throw error(403, `Only users with unlimited access can clone repositories to the server. ${verification.error || 'Please verify you can write to at least one default Nostr relay.'}`);
+ }
+ }
+ } catch (err) {
+ // If it's already an error response, re-throw it
+ if (err && typeof err === 'object' && 'status' in err) {
+ throw err;
+ }
+ logger.error({ error: err, userPubkeyHex: userPubkeyHex.slice(0, 16) + '...' }, 'Error verifying user level');
+ // Fall through to check cache one more time
+ userLevel = getCachedUserLevel(userPubkeyHex);
+ }
+ } else {
+ // No proof event or auth header - check if we have any cached level
+ if (!userLevel) {
+ logger.warn({ userPubkeyHex: userPubkeyHex.slice(0, 16) + '...' }, 'No cached user level and no proof event or NIP-98 auth header');
+ throw error(403, 'Only users with unlimited access can clone repositories to the server. Please verify your access level first by visiting your profile page or ensuring you can write to at least one default Nostr relay.');
+ }
+ }
+ }
+
+ // Final check - user must have unlimited access
+ if (!userLevel || !hasUnlimitedAccess(userLevel.level)) {
+ logger.warn({
+ userPubkeyHex: userPubkeyHex.slice(0, 16) + '...',
+ cachedLevel: userLevel?.level || 'none'
+ }, 'User does not have unlimited access');
+ throw error(403, 'Only users with unlimited access can clone repositories to the server. Please verify you can write to at least one default Nostr relay.');
}
try {
@@ -57,19 +159,68 @@ export const POST: RequestHandler = async (event) => {
});
}
- // Fetch repository announcement
- const events = await nostrClient.fetchEvents([
- {
- kinds: [KIND.REPO_ANNOUNCEMENT],
- authors: [repoOwnerPubkey],
- '#d': [repo],
- limit: 1
- }
- ]);
+ // Fetch repository announcement (case-insensitive)
+ // Note: Nostr d-tag filters are case-sensitive, so we fetch all announcements by the author
+ // and filter case-insensitively in JavaScript
+ logger.debug({ npub, repo, repoOwnerPubkey: repoOwnerPubkey.slice(0, 16) + '...' }, 'Fetching repository announcement from Nostr (case-insensitive)');
+
+ let authorAnnouncements: NostrEvent[];
+ try {
+ authorAnnouncements = await nostrClient.fetchEvents([
+ {
+ kinds: [KIND.REPO_ANNOUNCEMENT],
+ authors: [repoOwnerPubkey],
+ limit: 100 // Fetch more to ensure we find the repo even if author has many repos
+ }
+ ]);
+
+ logger.debug({
+ npub,
+ repo,
+ authorAnnouncementCount: authorAnnouncements.length,
+ eventIds: authorAnnouncements.map(e => e.id)
+ }, 'Fetched repository announcements by author');
+ } catch (err) {
+ logger.error({
+ error: err,
+ npub,
+ repo,
+ repoOwnerPubkey: repoOwnerPubkey.slice(0, 16) + '...'
+ }, 'Error fetching repository announcement from Nostr');
+ throw handleApiError(
+ err instanceof Error ? err : new Error(String(err)),
+ { operation: 'cloneRepo', npub, repo },
+ 'Failed to fetch repository announcement from Nostr relays. Please check that the repository exists and the relays are accessible.'
+ );
+ }
+
+ // Filter case-insensitively to find the matching repo
+ const repoLower = repo.toLowerCase();
+ const events = authorAnnouncements.filter(event => {
+ const dTag = event.tags.find(t => t[0] === 'd')?.[1];
+ return dTag && dTag.toLowerCase() === repoLower;
+ });
if (events.length === 0) {
+ const dTags = authorAnnouncements
+ .map(e => e.tags.find(t => t[0] === 'd')?.[1])
+ .filter(Boolean);
+
+ logger.warn({
+ npub,
+ repo,
+ repoOwnerPubkey: repoOwnerPubkey.slice(0, 16) + '...',
+ authorAnnouncementCount: authorAnnouncements.length,
+ authorRepos: dTags,
+ searchedRepo: repo
+ }, 'Repository announcement not found in Nostr (case-insensitive search)');
+
+ const errorMessage = authorAnnouncements.length > 0
+ ? `Repository announcement not found in Nostr for ${npub}/${repo}. Found ${authorAnnouncements.length} other repository announcement(s) by this author. Please verify the repository name is correct.`
+ : `Repository announcement not found in Nostr for ${npub}/${repo}. Please verify that the repository exists and has been announced on Nostr relays.`;
+
throw handleValidationError(
- 'Repository announcement not found in Nostr',
+ errorMessage,
{ operation: 'cloneRepo', npub, repo }
);
}
diff --git a/src/routes/api/search/+server.ts b/src/routes/api/search/+server.ts
index 5245111..af7c2f6 100644
--- a/src/routes/api/search/+server.ts
+++ b/src/routes/api/search/+server.ts
@@ -72,7 +72,9 @@ export const GET: RequestHandler = async (event) => {
description: string;
owner: string;
npub: string;
+ repoId?: string; // The actual repo ID (d-tag) from the announcement
maintainers?: Array<{ pubkey: string; isOwner: boolean }>;
+ announcement?: any; // Full announcement event
}>;
} = {
repos: []
@@ -559,7 +561,9 @@ export const GET: RequestHandler = async (event) => {
description: description || '',
owner: event.pubkey,
npub,
- maintainers: allMaintainers
+ repoId, // Include the actual repo ID (d-tag) for proper matching
+ maintainers: allMaintainers,
+ announcement: event // Include the full announcement event
});
} catch {
// Skip if npub encoding fails
diff --git a/src/routes/docs/+page.server.ts b/src/routes/docs/+page.server.ts
index be8fc2e..c5672e7 100644
--- a/src/routes/docs/+page.server.ts
+++ b/src/routes/docs/+page.server.ts
@@ -10,41 +10,52 @@ import type { PageServerLoad } from './$types';
import logger from '$lib/services/logger.js';
export const load: PageServerLoad = async () => {
- try {
- let filePath: string = '';
- let lastError: Error | null = null;
+ const attemptedPaths: string[] = [];
+ let lastError: Error | null = null;
- // Try method 1: Use process.cwd() (works in most cases)
+ // List of paths to try
+ const pathsToTry = [
+ // Method 1: process.cwd() (works in most cases)
+ () => join(process.cwd(), 'docs', 'tutorial.md'),
+ // Method 2: process.cwd() from build directory
+ () => join(process.cwd(), '..', 'docs', 'tutorial.md'),
+ // Method 3: import.meta.url - go up from route file to project root
+ () => {
+ const __filename = fileURLToPath(import.meta.url);
+ return join(__filename, '..', '..', '..', '..', 'docs', 'tutorial.md');
+ },
+ // Method 4: import.meta.url - alternative path calculation
+ () => {
+ const __filename = fileURLToPath(import.meta.url);
+ return join(__filename, '..', '..', '..', '..', '..', 'docs', 'tutorial.md');
+ },
+ // Method 5: Check if running from build directory
+ () => join(process.cwd(), 'build', 'docs', 'tutorial.md'),
+ ];
+
+ for (const getPath of pathsToTry) {
try {
- filePath = join(process.cwd(), 'docs', 'tutorial.md');
+ const filePath = getPath();
+ attemptedPaths.push(filePath);
+
if (existsSync(filePath)) {
+ logger.info({ filePath }, 'Found documentation file');
const content = await readFile(filePath, 'utf-8');
return { content };
}
- throw new Error(`File not found at ${filePath}`);
} catch (err) {
lastError = err instanceof Error ? err : new Error(String(err));
-
- // Try method 2: Use import.meta.url to find project root
- try {
- // Get the directory of this file, then go up to project root
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = join(__filename, '..', '..', '..', '..');
- filePath = join(__dirname, 'docs', 'tutorial.md');
- if (existsSync(filePath)) {
- const content = await readFile(filePath, 'utf-8');
- return { content };
- }
- throw new Error(`File not found at ${filePath}`);
- } catch (err2) {
- lastError = err2 instanceof Error ? err2 : new Error(String(err2));
- const attemptedPath = filePath || 'unknown';
- logger.error({ error: lastError, attemptedPaths: [attemptedPath] }, 'Error loading documentation');
- return { content: null, error: 'Failed to load documentation' };
- }
+ // Continue to next path
}
- } catch (error) {
- logger.error({ error }, 'Error loading documentation');
- return { content: null, error: 'Failed to load documentation' };
}
+
+ // All paths failed
+ logger.error({
+ error: lastError,
+ attemptedPaths,
+ cwd: process.cwd(),
+ importMetaUrl: import.meta.url
+ }, 'Error loading documentation - all paths failed');
+
+ return { content: null, error: 'Failed to load documentation' };
};
diff --git a/src/routes/docs/nip-a3/+page.server.ts b/src/routes/docs/nip-a3/+page.server.ts
index 970bdbe..b07d423 100644
--- a/src/routes/docs/nip-a3/+page.server.ts
+++ b/src/routes/docs/nip-a3/+page.server.ts
@@ -10,41 +10,45 @@ import type { PageServerLoad } from './$types';
import logger from '$lib/services/logger.js';
export const load: PageServerLoad = async () => {
- try {
- let filePath: string = '';
- let lastError: Error | null = null;
+ const attemptedPaths: string[] = [];
+ let lastError: Error | null = null;
- // Try method 1: Use process.cwd() (works in most cases)
+ // List of paths to try
+ const pathsToTry = [
+ () => join(process.cwd(), 'docs', 'NIP-A3.md'),
+ () => join(process.cwd(), '..', 'docs', 'NIP-A3.md'),
+ () => {
+ const __filename = fileURLToPath(import.meta.url);
+ return join(__filename, '..', '..', '..', '..', 'docs', 'NIP-A3.md');
+ },
+ () => {
+ const __filename = fileURLToPath(import.meta.url);
+ return join(__filename, '..', '..', '..', '..', '..', 'docs', 'NIP-A3.md');
+ },
+ () => join(process.cwd(), 'build', 'docs', 'NIP-A3.md'),
+ ];
+
+ for (const getPath of pathsToTry) {
try {
- filePath = join(process.cwd(), 'docs', 'NIP-A3.md');
+ const filePath = getPath();
+ attemptedPaths.push(filePath);
+
if (existsSync(filePath)) {
+ logger.info({ filePath }, 'Found NIP-A3 documentation file');
const content = await readFile(filePath, 'utf-8');
return { content };
}
- throw new Error(`File not found at ${filePath}`);
} catch (err) {
lastError = err instanceof Error ? err : new Error(String(err));
-
- // Try method 2: Use import.meta.url to find project root
- try {
- // Get the directory of this file, then go up to project root
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = join(__filename, '..', '..', '..', '..');
- filePath = join(__dirname, 'docs', 'NIP-A3.md');
- if (existsSync(filePath)) {
- const content = await readFile(filePath, 'utf-8');
- return { content };
- }
- throw new Error(`File not found at ${filePath}`);
- } catch (err2) {
- lastError = err2 instanceof Error ? err2 : new Error(String(err2));
- const attemptedPath = filePath || 'unknown';
- logger.error({ error: lastError, attemptedPaths: [attemptedPath] }, 'Error loading NIP-A3 documentation');
- return { content: null, error: 'Failed to load NIP-A3 documentation' };
- }
}
- } catch (error) {
- logger.error({ error }, 'Error loading NIP-A3 documentation');
- return { content: null, error: 'Failed to load NIP-A3 documentation' };
}
+
+ logger.error({
+ error: lastError,
+ attemptedPaths,
+ cwd: process.cwd(),
+ importMetaUrl: import.meta.url
+ }, 'Error loading NIP-A3 documentation - all paths failed');
+
+ return { content: null, error: 'Failed to load NIP-A3 documentation' };
};
diff --git a/src/routes/docs/nip34/+page.server.ts b/src/routes/docs/nip34/+page.server.ts
index 9ab0167..264d6e1 100644
--- a/src/routes/docs/nip34/+page.server.ts
+++ b/src/routes/docs/nip34/+page.server.ts
@@ -10,41 +10,45 @@ import type { PageServerLoad } from './$types';
import logger from '$lib/services/logger.js';
export const load: PageServerLoad = async () => {
- try {
- let filePath: string = '';
- let lastError: Error | null = null;
+ const attemptedPaths: string[] = [];
+ let lastError: Error | null = null;
- // Try method 1: Use process.cwd() (works in most cases)
+ // List of paths to try
+ const pathsToTry = [
+ () => join(process.cwd(), 'docs', '34.md'),
+ () => join(process.cwd(), '..', 'docs', '34.md'),
+ () => {
+ const __filename = fileURLToPath(import.meta.url);
+ return join(__filename, '..', '..', '..', '..', 'docs', '34.md');
+ },
+ () => {
+ const __filename = fileURLToPath(import.meta.url);
+ return join(__filename, '..', '..', '..', '..', '..', 'docs', '34.md');
+ },
+ () => join(process.cwd(), 'build', 'docs', '34.md'),
+ ];
+
+ for (const getPath of pathsToTry) {
try {
- filePath = join(process.cwd(), 'docs', '34.md');
+ const filePath = getPath();
+ attemptedPaths.push(filePath);
+
if (existsSync(filePath)) {
+ logger.info({ filePath }, 'Found NIP-34 documentation file');
const content = await readFile(filePath, 'utf-8');
return { content };
}
- throw new Error(`File not found at ${filePath}`);
} catch (err) {
lastError = err instanceof Error ? err : new Error(String(err));
-
- // Try method 2: Use import.meta.url to find project root
- try {
- // Get the directory of this file, then go up to project root
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = join(__filename, '..', '..', '..', '..');
- filePath = join(__dirname, 'docs', '34.md');
- if (existsSync(filePath)) {
- const content = await readFile(filePath, 'utf-8');
- return { content };
- }
- throw new Error(`File not found at ${filePath}`);
- } catch (err2) {
- lastError = err2 instanceof Error ? err2 : new Error(String(err2));
- const attemptedPath = filePath || 'unknown';
- logger.error({ error: lastError, attemptedPaths: [attemptedPath] }, 'Error loading NIP-34 documentation');
- return { content: null, error: 'Failed to load NIP-34 documentation' };
- }
}
- } catch (error) {
- logger.error({ error }, 'Error loading NIP-34 documentation');
- return { content: null, error: 'Failed to load NIP-34 documentation' };
}
+
+ logger.error({
+ error: lastError,
+ attemptedPaths,
+ cwd: process.cwd(),
+ importMetaUrl: import.meta.url
+ }, 'Error loading NIP-34 documentation - all paths failed');
+
+ return { content: null, error: 'Failed to load NIP-34 documentation' };
};
diff --git a/src/routes/docs/nip34/spec/+page.server.ts b/src/routes/docs/nip34/spec/+page.server.ts
index 603905b..270da54 100644
--- a/src/routes/docs/nip34/spec/+page.server.ts
+++ b/src/routes/docs/nip34/spec/+page.server.ts
@@ -10,41 +10,45 @@ import type { PageServerLoad } from './$types';
import logger from '$lib/services/logger.js';
export const load: PageServerLoad = async () => {
- try {
- let filePath: string = '';
- let lastError: Error | null = null;
+ const attemptedPaths: string[] = [];
+ let lastError: Error | null = null;
- // Try method 1: Use process.cwd() (works in most cases)
+ // List of paths to try
+ const pathsToTry = [
+ () => join(process.cwd(), 'docs', '34.md'),
+ () => join(process.cwd(), '..', 'docs', '34.md'),
+ () => {
+ const __filename = fileURLToPath(import.meta.url);
+ return join(__filename, '..', '..', '..', '..', '..', 'docs', '34.md');
+ },
+ () => {
+ const __filename = fileURLToPath(import.meta.url);
+ return join(__filename, '..', '..', '..', '..', '..', '..', 'docs', '34.md');
+ },
+ () => join(process.cwd(), 'build', 'docs', '34.md'),
+ ];
+
+ for (const getPath of pathsToTry) {
try {
- filePath = join(process.cwd(), 'docs', '34.md');
+ const filePath = getPath();
+ attemptedPaths.push(filePath);
+
if (existsSync(filePath)) {
+ logger.info({ filePath }, 'Found NIP-34 specification file');
const content = await readFile(filePath, 'utf-8');
return { content };
}
- throw new Error(`File not found at ${filePath}`);
} catch (err) {
lastError = err instanceof Error ? err : new Error(String(err));
-
- // Try method 2: Use import.meta.url to find project root
- try {
- // Get the directory of this file, then go up to project root
- const __filename = fileURLToPath(import.meta.url);
- const __dirname = join(__filename, '..', '..', '..', '..', '..');
- filePath = join(__dirname, 'docs', '34.md');
- if (existsSync(filePath)) {
- const content = await readFile(filePath, 'utf-8');
- return { content };
- }
- throw new Error(`File not found at ${filePath}`);
- } catch (err2) {
- lastError = err2 instanceof Error ? err2 : new Error(String(err2));
- const attemptedPath = filePath || 'unknown';
- logger.error({ error: lastError, attemptedPaths: [attemptedPath] }, 'Error loading NIP-34 specification');
- return { content: null, error: 'Failed to load NIP-34 specification' };
- }
}
- } catch (error) {
- logger.error({ error }, 'Error loading NIP-34 specification');
- return { content: null, error: 'Failed to load NIP-34 specification' };
}
+
+ logger.error({
+ error: lastError,
+ attemptedPaths,
+ cwd: process.cwd(),
+ importMetaUrl: import.meta.url
+ }, 'Error loading NIP-34 specification - all paths failed');
+
+ return { content: null, error: 'Failed to load NIP-34 specification' };
};
diff --git a/src/routes/repos/[npub]/[repo]/+page.ts b/src/routes/repos/[npub]/[repo]/+page.ts
index 0e3bd5f..0f68c6e 100644
--- a/src/routes/repos/[npub]/[repo]/+page.ts
+++ b/src/routes/repos/[npub]/[repo]/+page.ts
@@ -32,25 +32,50 @@ export const load: PageLoad = async ({ params, url, parent }) => {
const repoOwnerPubkey = decoded.data as string;
- // Fetch repository announcement
- const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS);
- const events = await nostrClient.fetchEvents([
- {
- kinds: [KIND.REPO_ANNOUNCEMENT],
- authors: [repoOwnerPubkey],
- '#d': [repo],
- limit: 1
+ // Check if announcement was passed from search results via sessionStorage
+ let announcement: any = null;
+ if (typeof window !== 'undefined') {
+ const repoKey = `${npub}/${repo}`;
+ const storedAnnouncement = sessionStorage.getItem(`repo_announcement_${repoKey}`);
+ if (storedAnnouncement) {
+ try {
+ announcement = JSON.parse(storedAnnouncement);
+ // Clean up after using it
+ sessionStorage.removeItem(`repo_announcement_${repoKey}`);
+ } catch {
+ // Invalid JSON, continue to fetch
+ }
}
- ]);
-
- if (events.length === 0) {
- return {
- title: `${repo} - Repository Not Found`,
- description: 'Repository announcement not found'
- };
}
- const announcement = events[0];
+ // If not found in sessionStorage, fetch from Nostr (case-insensitive)
+ if (!announcement) {
+ const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS);
+ // Fetch all announcements by this author and filter case-insensitively
+ const allEvents = await nostrClient.fetchEvents([
+ {
+ kinds: [KIND.REPO_ANNOUNCEMENT],
+ authors: [repoOwnerPubkey],
+ limit: 100
+ }
+ ]);
+
+ // Filter case-insensitively to find the matching repo
+ const repoLower = repo.toLowerCase();
+ const matchingEvents = allEvents.filter(event => {
+ const dTag = event.tags.find(t => t[0] === 'd')?.[1];
+ return dTag && dTag.toLowerCase() === repoLower;
+ });
+
+ if (matchingEvents.length === 0) {
+ return {
+ title: `${repo} - Repository Not Found`,
+ description: 'Repository announcement not found'
+ };
+ }
+
+ announcement = matchingEvents[0];
+ }
// Check privacy - for private repos, we'll let the API endpoints handle access control
// The page load function runs server-side but doesn't have access to client auth headers
diff --git a/src/routes/search/+page.svelte b/src/routes/search/+page.svelte
index 9b1ee75..8ff4074 100644
--- a/src/routes/search/+page.svelte
+++ b/src/routes/search/+page.svelte
@@ -35,13 +35,15 @@
}
});
let results = $state<{
- repos: Array<{
+ repos: Array<{
id: string;
name: string;
description: string;
owner: string;
npub: string;
+ repoId?: string; // The actual repo ID (d-tag) from the announcement
maintainers?: Array<{ pubkey: string; isOwner: boolean }>;
+ announcement?: any; // Full announcement event
}>;
total: number;
} | null>(null);
@@ -218,11 +220,27 @@
class="repo-item"
role="button"
tabindex="0"
- onclick={() => goto(`/repos/${repo.npub}/${repo.name.toLowerCase().replace(/\s+/g, '-')}`)}
+ onclick={() => {
+ // Store announcement event in sessionStorage for the repo page to use
+ // Use the actual repoId (d-tag) if available, otherwise use the name
+ const repoPath = repo.repoId || repo.name.toLowerCase().replace(/\s+/g, '-');
+ if (repo.announcement) {
+ const repoKey = `${repo.npub}/${repoPath}`;
+ sessionStorage.setItem(`repo_announcement_${repoKey}`, JSON.stringify(repo.announcement));
+ }
+ goto(`/repos/${repo.npub}/${repoPath}`);
+ }}
onkeydown={(e) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault();
- goto(`/repos/${repo.npub}/${repo.name.toLowerCase().replace(/\s+/g, '-')}`);
+ // Store announcement event in sessionStorage for the repo page to use
+ // Use the actual repoId (d-tag) if available, otherwise use the name
+ const repoPath = repo.repoId || repo.name.toLowerCase().replace(/\s+/g, '-');
+ if (repo.announcement) {
+ const repoKey = `${repo.npub}/${repoPath}`;
+ sessionStorage.setItem(`repo_announcement_${repoKey}`, JSON.stringify(repo.announcement));
+ }
+ goto(`/repos/${repo.npub}/${repoPath}`);
}
}}
style="cursor: pointer;">
diff --git a/src/routes/users/[npub]/+page.svelte b/src/routes/users/[npub]/+page.svelte
index ecc799c..3a9c20e 100644
--- a/src/routes/users/[npub]/+page.svelte
+++ b/src/routes/users/[npub]/+page.svelte
@@ -14,6 +14,7 @@
import { fetchUserProfile, extractProfileData } from '$lib/utils/user-profile.js';
import { combineRelays } from '$lib/config.js';
import { KIND, isEphemeralKind, isReplaceableKind } from '$lib/types/nostr.js';
+ import { hasUnlimitedAccess } from '$lib/utils/user-access.js';
const npub = ($page.params as { npub?: string }).npub || '';
@@ -1658,6 +1659,24 @@ i *
+ {#if isOwnProfile && typeof window !== 'undefined'}
+ {@const userLevel = $userStore.userLevel}
+