Browse Source

bug-fixes and fallback relay

Nostr-Signature: 1d85d0c5e1451c90bca5d59e08043f29adeaad4db4ac5495c8e9a4247775780f 573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc a1960b76c78db9f64dad20378d26f500ffc09f1f6d137314db548470202712222a1d391f682146ba281fd23355c574fcbb260310db61b3458bba3dec0c724a18
main
Silberengel 2 weeks ago
parent
commit
02d4fa85dd
  1. 1
      nostr/commit-signatures.jsonl
  2. 14
      src/lib/config.ts
  3. 197
      src/lib/services/nostr/nostr-client.ts
  4. 22
      src/routes/+layout.svelte
  5. 46
      src/routes/docs/[slug]/+page.svelte
  6. 23
      src/routes/repos/[npub]/[repo]/services/commit-operations.ts

1
nostr/commit-signatures.jsonl

@@ -117,3 +117,4 @@
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772261455,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","fix zombie spawning on polling\nmake announcement commits non-blocking on repo provision"]],"content":"Signed commit: fix zombie spawning on polling\nmake announcement commits non-blocking on repo provision","id":"b0da119e7477b46f5d82be831693a92e117f25379476488f19351e2bac8f88b8","sig":"b8ca18e8215a9f5b3fc877ce113936c582353d44f8d03cdccd9f9ee70fb3e6fdd64db7cc6a3ca15339fb21b9ca87ea8471a38b587721a594a189d97cc2964ad9"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772264490,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","polling update"]],"content":"Signed commit: polling update","id":"42c1a2a63a4568c65d82d78701451b3b4363bdf9c8c57e804535b5f3f0d7b6fc","sig":"8e5f32ecb79da876ac41eba04c3b1541b21d039ae50d1b9fefa630d35f31c97dd29af64e4b695742fa7d4eaec17db8f4a066b4db99ce628aed596971975d4a87"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772267611,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","refactor API"]],"content":"Signed commit: refactor API","id":"934f8809638cea0bc7b8158fca959bc60880e0cae9ab8ff653687313adcd2f57","sig":"c9d8e5b821ae8182f8d39599c50fd0a4db6040ead1d8d83730a608a1d94d5078770a6ccbfc525a98691e98fabd9f9d24f0298680fb564c6b76c2f34bed9889b5"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772269280,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","api refactor part 2"]],"content":"Signed commit: api refactor part 2","id":"ece894a60057bba46ebd4ac0dca2aca55ffce05e44671fe07b29516809fc86f6","sig":"176706a271659834e441ea5eab4bb1480667dad4468fe8315803284f4a183debf595523dd33d0d3cabe0c35013f4a72b9169b5f10afefaf8a82a721d8b0f3b08"}

14
src/lib/config.ts

@@ -25,6 +25,20 @@ export const DEFAULT_NOSTR_RELAYS =
'wss://nostr.land', 'wss://nostr.land',
]; ];
/**
 * Fallback Nostr relays used when the primary relays fail.
 * Override with the NOSTR_FALLBACK_RELAYS env var (comma-separated list);
 * otherwise a small built-in set of public relays is used.
 */
export const FALLBACK_NOSTR_RELAYS: string[] = (() => {
  // Guard for non-Node environments where `process` is not defined (browser bundles).
  const raw = typeof process !== 'undefined' ? process.env?.NOSTR_FALLBACK_RELAYS : undefined;
  if (raw) {
    return raw
      .split(',')
      .map((relay) => relay.trim())
      .filter((relay) => relay.length > 0);
  }
  return [
    'wss://orly-relay.imwald.eu',
    'wss://nostr.sovbit.host',
    'wss://nostr21.com',
  ];
})();
/** /**
* Nostr relays to use for searching for repositories, profiles, or other events * Nostr relays to use for searching for repositories, profiles, or other events
* Can be overridden by NOSTR_SEARCH_RELAYS env var (comma-separated list) * Can be overridden by NOSTR_SEARCH_RELAYS env var (comma-separated list)

197
src/lib/services/nostr/nostr-client.ts

@@ -9,6 +9,7 @@ import { isNIP07Available, getPublicKeyWithNIP07, signEventWithNIP07 } from './n
import { SimplePool, type Filter } from 'nostr-tools'; import { SimplePool, type Filter } from 'nostr-tools';
import { KIND } from '../../types/nostr.js'; import { KIND } from '../../types/nostr.js';
import { isParameterizedReplaceable } from '../../utils/nostr-event-utils.js'; import { isParameterizedReplaceable } from '../../utils/nostr-event-utils.js';
import { FALLBACK_NOSTR_RELAYS } from '../../config.js';
// Replaceable event kinds (only latest per pubkey matters) // Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
@@ -236,36 +237,140 @@ export class NostrClient {
return this.fetchAndMergeFromRelays(filters, []); return this.fetchAndMergeFromRelays(filters, []);
} }
/**
 * Sanitize a NostrFilter into a well-formed nostr-tools Filter before sending
 * it to relays: drops non-string / non-hex authors and ids, non-numeric kinds,
 * empty tag values, and malformed since/until/limit/search fields.
 *
 * BUG FIX: if the caller supplied a constraint (authors/ids/kinds/tag filter)
 * but NO entry survived validation, the previous code omitted the key entirely,
 * silently widening the query from "match these specific values" to "match
 * everything". Per NIP-01 semantics an empty list matches nothing, so we now
 * keep the (possibly empty) array whenever the caller provided the field —
 * an all-invalid constraint yields no events instead of unbounded results.
 */
private sanitizeFilter(filter: NostrFilter): Filter {
  const sanitized: Filter = {};

  // 64-char hex string check used for both authors (pubkeys) and ids.
  const isHex64 = (v: unknown): v is string =>
    typeof v === 'string' && v.length === 64 && /^[0-9a-f]{64}$/i.test(v);

  // Authors: keep only valid hex pubkeys; preserve the constraint even if empty.
  if (filter.authors) {
    sanitized.authors = filter.authors.filter(isHex64);
  }

  // Ids: keep only valid hex event ids; preserve the constraint even if empty.
  if (filter.ids) {
    sanitized.ids = filter.ids.filter(isHex64);
  }

  // Kinds: must be numbers; preserve the constraint even if empty.
  if (filter.kinds) {
    sanitized.kinds = filter.kinds.filter((kind): kind is number => typeof kind === 'number');
  }

  // Tag filters: must be arrays of non-empty strings; preserve even if empty.
  const tagFields = ['#e', '#p', '#d', '#a', '#E', '#K', '#P', '#A', '#I'] as const;
  for (const tagField of tagFields) {
    const value = filter[tagField];
    if (value) {
      sanitized[tagField] = value.filter((v): v is string => typeof v === 'string' && v.length > 0);
    }
  }

  // Scalar fields: copy only when present and of the expected type.
  if (filter.since !== undefined && typeof filter.since === 'number') {
    sanitized.since = filter.since;
  }
  if (filter.until !== undefined && typeof filter.until === 'number') {
    sanitized.until = filter.until;
  }
  if (filter.limit !== undefined && typeof filter.limit === 'number' && filter.limit > 0) {
    sanitized.limit = filter.limit;
  }
  if (filter.search && typeof filter.search === 'string') {
    sanitized.search = filter.search;
  }

  return sanitized;
}
/** /**
* Fetch events from relays and merge with existing events * Fetch events from relays and merge with existing events
* Never deletes valid events, only appends/integrates new ones * Never deletes valid events, only appends/integrates new ones
* Automatically falls back to fallback relays if primary relays fail
*/ */
private async fetchAndMergeFromRelays(filters: NostrFilter[], existingEvents: NostrEvent[]): Promise<NostrEvent[]> { private async fetchAndMergeFromRelays(filters: NostrFilter[], existingEvents: NostrEvent[]): Promise<NostrEvent[]> {
const events: NostrEvent[] = []; const events: NostrEvent[] = [];
// Sanitize all filters before sending to relays
const sanitizedFilters = filters.map(f => this.sanitizeFilter(f));
// Use nostr-tools SimplePool to fetch from all relays in parallel // Use nostr-tools SimplePool to fetch from all relays in parallel
// SimplePool handles connection management, retries, and error handling automatically // SimplePool handles connection management, retries, and error handling automatically
try { try {
// querySync takes a single filter, so we query each filter and combine results // querySync takes a single filter, so we query each filter and combine results
// Wrap each query individually to catch errors from individual relays // Wrap each query individually to catch errors from individual relays
const queryPromises = filters.map(filter => const queryPromises = sanitizedFilters.map(filter =>
this.pool.querySync(this.relays, filter as Filter, { maxWait: 8000 }) this.pool.querySync(this.relays, filter, { maxWait: 8000 })
.catch(err => { .catch(err => {
// Log individual relay errors but don't fail the entire request // Log individual relay errors but don't fail the entire request
logger.debug({ error: err, filter }, 'Individual relay query failed'); logger.debug({ error: err, filter, relays: this.relays }, 'Primary relay query failed, trying fallback');
return []; // Return empty array for failed queries return []; // Return empty array for failed queries
}) })
); );
const results = await Promise.allSettled(queryPromises); const results = await Promise.allSettled(queryPromises);
let hasResults = false;
for (const result of results) { for (const result of results) {
if (result.status === 'fulfilled') { if (result.status === 'fulfilled' && result.value.length > 0) {
events.push(...result.value); events.push(...result.value);
} else { hasResults = true;
} else if (result.status === 'rejected') {
// Log rejected promises (shouldn't happen since we catch above, but just in case) // Log rejected promises (shouldn't happen since we catch above, but just in case)
logger.debug({ error: result.reason }, 'Query promise rejected'); logger.debug({ error: result.reason }, 'Query promise rejected');
} }
} }
// If no results from primary relays and we have fallback relays, try them
if (!hasResults && events.length === 0 && FALLBACK_NOSTR_RELAYS.length > 0) {
logger.debug({ primaryRelays: this.relays, fallbackRelays: FALLBACK_NOSTR_RELAYS }, 'No results from primary relays, trying fallback relays');
try {
const fallbackPromises = sanitizedFilters.map(filter =>
this.pool.querySync(FALLBACK_NOSTR_RELAYS, filter, { maxWait: 8000 })
.catch(err => {
logger.debug({ error: err, filter }, 'Fallback relay query failed');
return [];
})
);
const fallbackResults = await Promise.allSettled(fallbackPromises);
for (const result of fallbackResults) {
if (result.status === 'fulfilled') {
events.push(...result.value);
}
}
if (events.length > 0) {
logger.info({ fallbackRelays: FALLBACK_NOSTR_RELAYS, eventCount: events.length }, 'Successfully fetched events from fallback relays');
}
} catch (fallbackErr) {
logger.debug({ error: fallbackErr }, 'Fallback relay query failed completely');
}
}
} catch (err) { } catch (err) {
logger.debug({ error: err, filters }, 'Pool querySync failed'); logger.debug({ error: err, filters }, 'Pool querySync failed');
// Continue with empty events - will use cached events // Continue with empty events - will use cached events
@@ -509,10 +614,90 @@ export class NostrClient {
} }
}); });
} else { } else {
// If publish failed or timed out, mark all as failed // If publish failed or timed out to primary relays, try fallback relays
if (FALLBACK_NOSTR_RELAYS.length > 0) {
logger.debug({ primaryRelays: targetRelays, fallbackRelays: FALLBACK_NOSTR_RELAYS, eventId: event.id }, 'Primary relay publish failed, trying fallback relays');
try {
const fallbackPublishPromise = new Promise<string[]>((resolve, reject) => {
const timeout = setTimeout(() => {
reject(new Error('Fallback publish timeout after 30 seconds'));
}, 30000);
try {
const fallbackPublishPromises = this.pool.publish(FALLBACK_NOSTR_RELAYS, event);
Promise.all(fallbackPublishPromises)
.then((results) => {
clearTimeout(timeout);
resolve(results);
})
.catch((error: unknown) => {
clearTimeout(timeout);
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes('restricted') ||
errorMessage.includes('Pay on') ||
errorMessage.includes('payment required') ||
errorMessage.includes('rate limit')) {
logger.debug({ error: errorMessage, eventId: event.id }, 'Fallback relay restriction encountered');
resolve([]);
} else {
reject(error);
}
});
} catch (syncError) {
clearTimeout(timeout);
reject(syncError);
}
});
const fallbackPublishedRelays: string[] = await Promise.race([
fallbackPublishPromise,
new Promise<string[]>((_, reject) =>
setTimeout(() => reject(new Error('Fallback publish timeout')), 30000)
)
]).catch((error: unknown): string[] => {
logger.debug({ error: error instanceof Error ? error.message : String(error), eventId: event.id }, 'Error publishing to fallback relays');
return [];
});
if (fallbackPublishedRelays && fallbackPublishedRelays.length > 0) {
success.push(...fallbackPublishedRelays);
logger.info({ fallbackRelays: FALLBACK_NOSTR_RELAYS, publishedCount: fallbackPublishedRelays.length, eventId: event.id }, 'Successfully published to fallback relays');
// Mark primary relays as failed
targetRelays.forEach(relay => {
failed.push({ relay, error: 'Primary relay failed, used fallback' });
});
// Mark fallback relays not in success as failed
FALLBACK_NOSTR_RELAYS.forEach(relay => {
if (!fallbackPublishedRelays.includes(relay)) {
failed.push({ relay, error: 'Fallback relay did not accept event' });
}
});
} else {
// Both primary and fallback failed
targetRelays.forEach(relay => { targetRelays.forEach(relay => {
failed.push({ relay, error: 'Publish failed or timed out' }); failed.push({ relay, error: 'Publish failed or timed out' });
}); });
FALLBACK_NOSTR_RELAYS.forEach(relay => {
failed.push({ relay, error: 'Fallback relay publish failed or timed out' });
});
}
} catch (fallbackError) {
logger.debug({ error: fallbackError, eventId: event.id }, 'Fallback relay publish failed completely');
// Mark all relays as failed
targetRelays.forEach(relay => {
failed.push({ relay, error: 'Publish failed or timed out' });
});
FALLBACK_NOSTR_RELAYS.forEach(relay => {
failed.push({ relay, error: 'Fallback relay publish failed' });
});
}
} else {
// No fallback relays available, mark all primary relays as failed
targetRelays.forEach(relay => {
failed.push({ relay, error: 'Publish failed or timed out' });
});
}
} }
} catch (error) { } catch (error) {
// Catch any synchronous errors // Catch any synchronous errors

22
src/routes/+layout.svelte

@@ -2,7 +2,7 @@
import '../app.css'; import '../app.css';
import { onMount, onDestroy, setContext } from 'svelte'; import { onMount, onDestroy, setContext } from 'svelte';
import { page } from '$app/stores'; import { page } from '$app/stores';
import { goto } from '$app/navigation'; import { goto, beforeNavigate } from '$app/navigation';
import Footer from '$lib/components/Footer.svelte'; import Footer from '$lib/components/Footer.svelte';
import NavBar from '$lib/components/NavBar.svelte'; import NavBar from '$lib/components/NavBar.svelte';
import TransferNotification from '$lib/components/TransferNotification.svelte'; import TransferNotification from '$lib/components/TransferNotification.svelte';
@@ -447,6 +447,26 @@
} }
}); });
// Intercept navigation to .md files and redirect to the /docs/ route
beforeNavigate((navigation) => {
  if (!navigation.to || typeof window === 'undefined') return;
  // navigation.to normally carries a `url` property; the instanceof branch is
  // defensive in case a bare URL is passed.
  // NOTE(review): SvelteKit's NavigationTarget always has `url: URL` — confirm
  // whether the instanceof branch is reachable and can be simplified.
  const toUrl = navigation.to instanceof URL ? navigation.to : new URL(navigation.to.url, 'http://localhost');
  const path = toUrl.pathname;
  // Only rewrite paths that end with .md and are not already under /docs/
  if (path.endsWith('.md') && !path.startsWith('/docs/')) {
    // Strip the .md extension and any leading slash to derive the doc slug
    const filename = path.replace(/\.md$/, '').replace(/^\//, '');
    // Security: only allow a conservative slug charset (alphanumeric, hyphen, underscore)
    if (/^[a-zA-Z0-9_-]+$/.test(filename)) {
      navigation.cancel();
      // BUG FIX: the redirect previously interpolated a garbled placeholder
      // (`/docs/$(unknown)`) instead of the computed slug, so every .md
      // navigation landed on a nonexistent route. Route to the real doc.
      goto(`/docs/${filename}`, { replaceState: true });
    }
  }
});
</script> </script>
{#if !isSplashPage} {#if !isSplashPage}

46
src/routes/docs/[slug]/+page.svelte

@@ -45,9 +45,29 @@
}); });
// Convert relative markdown links to docs routes // Convert relative markdown links to docs routes
rendered = rendered.replace(/<a href="\.\/([^"]+\.md)"/g, (match, file) => { // Handle various link formats:
const slug = file.replace('.md', ''); // - ./file.md -> /docs/file
// - file.md -> /docs/file
// - /file.md -> /docs/file (though this shouldn't happen in markdown)
rendered = rendered.replace(/<a href="([^"]*\.md)"/g, (match, file) => {
// Remove leading ./ or / if present
const cleanFile = file.replace(/^\.\//, '').replace(/^\//, '');
const slug = cleanFile.replace(/\.md$/, '');
// Only process if it's a relative link (not already starting with /docs or http)
if (!slug.startsWith('docs/') && !slug.startsWith('http')) {
return `<a href="/docs/${slug}"`; return `<a href="/docs/${slug}"`;
}
return match; // Return original if already processed or external
});
// Also handle links with anchors: ./file.md#section -> /docs/file#section
rendered = rendered.replace(/<a href="([^"]*\.md)(#[^"]*)"/g, (match, file, anchor) => {
const cleanFile = file.replace(/^\.\//, '').replace(/^\//, '');
const slug = cleanFile.replace(/\.md$/, '');
if (!slug.startsWith('docs/') && !slug.startsWith('http')) {
return `<a href="/docs/${slug}${anchor}"`;
}
return match;
}); });
content = rendered; content = rendered;
@@ -66,15 +86,31 @@
if (markdownContent) { if (markdownContent) {
markdownContent.addEventListener('click', (e) => { markdownContent.addEventListener('click', (e) => {
const target = e.target as HTMLElement; const target = e.target as HTMLElement;
if (target.tagName === 'A' && target.getAttribute('href')?.startsWith('#')) { if (target.tagName === 'A') {
const id = target.getAttribute('href')?.substring(1); const href = target.getAttribute('href');
if (id) { if (!href) return;
// Handle anchor links
if (href.startsWith('#')) {
const id = href.substring(1);
const element = document.getElementById(id); const element = document.getElementById(id);
if (element) { if (element) {
e.preventDefault(); e.preventDefault();
element.scrollIntoView({ behavior: 'smooth', block: 'start' }); element.scrollIntoView({ behavior: 'smooth', block: 'start' });
window.history.pushState(null, '', `#${id}`); window.history.pushState(null, '', `#${id}`);
} }
return;
}
// Handle .md file links that weren't converted properly
if (href.endsWith('.md') && !href.startsWith('/docs/') && !href.startsWith('http')) {
e.preventDefault();
// Remove leading ./ or / if present, then remove .md extension
const cleanHref = href.replace(/^\.\//, '').replace(/^\//, '');
const slug = cleanHref.replace(/\.md$/, '');
// Navigate to docs route
window.location.href = `/docs/${slug}`;
return;
} }
} }
}); });

23
src/routes/repos/[npub]/[repo]/services/commit-operations.ts

@@ -25,16 +25,33 @@ export async function loadCommitHistory(
const url = `/api/repos/${state.npub}/${state.repo}/commits?branch=${encodeURIComponent(branch)}&limit=50`; const url = `/api/repos/${state.npub}/${state.repo}/commits?branch=${encodeURIComponent(branch)}&limit=50`;
console.log('[loadCommitHistory] Fetching commits:', { url, branch, currentBranch: state.git.currentBranch, defaultBranch: state.git.defaultBranch }); console.log('[loadCommitHistory] Fetching commits:', { url, branch, currentBranch: state.git.currentBranch, defaultBranch: state.git.defaultBranch });
const data = await apiRequest<Array<{ const response = await apiRequest<Array<{
hash?: string; hash?: string;
sha?: string; sha?: string;
message?: string; message?: string;
author?: string; author?: string;
date?: string; date?: string;
files?: string[]; files?: string[];
}>>(url); }> | { commitCount?: number; data?: Array<any> }>(url);
console.log('[loadCommitHistory] Received data:', { commitCount: data?.length || 0, data }); // Handle both array and object response formats
// API should return array, but handle object wrappers like { data: [] } or { commits: [] }
let data: Array<any>;
if (Array.isArray(response)) {
data = response;
} else if (response && typeof response === 'object') {
// Try common wrapper formats
data = (response as any).data || (response as any).commits || [];
} else {
data = [];
}
console.log('[loadCommitHistory] Received response:', {
responseType: Array.isArray(response) ? 'array' : typeof response,
responseKeys: typeof response === 'object' && response !== null ? Object.keys(response) : [],
commitCount: data?.length || 0,
data
});
// Normalize commits: API-based commits use 'sha', local commits use 'hash' // Normalize commits: API-based commits use 'sha', local commits use 'hash'
state.git.commits = data.map((commit: any) => ({ state.git.commits = data.map((commit: any) => ({

Loading…
Cancel
Save