Browse Source

fix websocket problems

handle replaceable events correctly
fix css for docs

Nostr-Signature: 88c007de2bd48c32c879b9950f0908270b009c6341a97b1c0164982648beb3d9 573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc c9250a23d38671a5b1c0d3389e003931222385ca9591b9b332585c8c639e2af2a7b2e8cac9c1ca5bd47df19b330622b1a1874e586f112fa84a4a7aa4347c7456
main
Silberengel 3 weeks ago
parent
commit
53441778df
  1. 1
      nostr/commit-signatures.jsonl
  2. 22
      src/lib/components/SettingsModal.svelte
  3. 66
      src/lib/services/git/file-manager.ts
  4. 481
      src/lib/services/nostr/nostr-client.ts
  5. 221
      src/lib/services/nostr/persistent-event-cache.ts
  6. 4
      src/lib/services/settings-store.ts
  7. 13
      src/routes/api/repos/[npub]/[repo]/branches/+server.ts
  8. 71
      src/routes/docs/+page.svelte
  9. 71
      src/routes/docs/nip34/+page.svelte
  10. 71
      src/routes/docs/nip34/spec/+page.svelte
  11. 31
      src/routes/repos/[npub]/[repo]/+page.svelte

1
nostr/commit-signatures.jsonl

@@ -21,3 +21,4 @@
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771604372,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"9a1ba983e0b0db8cff3675a078a376df5c9ad351c3988ea893f3e8084a65a1e6","sig":"724a326cbd6a33f1ff6a2c37b242c7571e35149281609e9eb1c6a197422a13834d9ac2f5d0719026bc66126bd0022df49adf50aa08af93dd95076f407b0f0456"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771607520,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"2040e0adbed520ee9a21c6a1c7df48fae27021c1d3474b584388cd5ddafc6a49","sig":"893b4881e3876c0f556e3be991e9c6e99c9f5933bc9755e4075c1d0bfea95750b2318f3d3409d689c7e9a862cf053db0e7d3083ee28cf48ffbe794583c3ad783"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771612082,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","harmonize user.name and user.email\nadd settings menu\nautosave OFF 10 min"]],"content":"Signed commit: harmonize user.name and user.email\nadd settings menu\nautosave OFF 10 min","id":"80834df600e5ad22f44fc26880333d28054895b7b5fde984921fab008a27ce6d","sig":"41991d089d26e3f90094dcebd1dee7504c59cadd0ea2f4dfe8693106d9000a528157fb905aec9001e0b8f3ef9e8590557f3df6961106859775d9416b546a44c0"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771612354,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","remove theme button from bar"]],"content":"Signed commit: remove theme button from bar","id":"fc758a0681c072108b196911bbeee6d49df1efe635d5d78427b7874be4d6e657","sig":"6c0e991e960a29c623c936ab2a31478a85907780eda692c035762deabc740ca0a76df113f5ce853a6d839b023e2b483ce2d7686c40b91c4cea5f32945799a31f"}

22
src/lib/components/SettingsModal.svelte

@@ -16,6 +16,7 @@
let userName = $state('');
let userEmail = $state('');
let theme = $state<'gitrepublic-light' | 'gitrepublic-dark' | 'gitrepublic-black'>('gitrepublic-dark');
let defaultBranch = $state('master');
let loading = $state(true);
let saving = $state(false);
let loadingPresets = $state(false);
@@ -37,6 +38,7 @@
userName = settings.userName;
userEmail = settings.userEmail;
theme = settings.theme;
defaultBranch = settings.defaultBranch;
} catch (err) {
console.error('Failed to load settings:', err);
} finally {
@@ -91,7 +93,8 @@
autoSave,
userName: userName.trim() || '', // Empty string means use preset
userEmail: userEmail.trim() || '', // Empty string means use preset
theme
theme,
defaultBranch: defaultBranch.trim() || 'master'
});
// Apply theme immediately
@@ -230,6 +233,23 @@
</p>
</div>
<!-- Default Branch -->
<div class="setting-group">
<label class="setting-label" for="default-branch">
<span class="label-text">Default Branch Name</span>
</label>
<input
type="text"
id="default-branch"
bind:value={defaultBranch}
placeholder="master"
class="setting-input"
/>
<p class="setting-description">
Default branch name to use when creating new repositories. This will be used as the base branch when creating the first branch in a new repo.
</p>
</div>
<!-- Theme Selector -->
<div class="setting-group">
<div class="setting-label">

66
src/lib/services/git/file-manager.ts

@@ -1068,19 +1068,65 @@ export class FileManager {
}
try {
// Use git worktree instead of cloning (much more efficient)
const workDir = await this.getWorktree(repoPath, fromBranch, npub, repoName);
const workGit: SimpleGit = simpleGit(workDir);
const git: SimpleGit = simpleGit(repoPath);
// Check if repo has any branches
let hasBranches = false;
try {
const branches = await git.branch(['-a']);
const branchList = branches.all
.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, ''))
.filter(b => !b.includes('HEAD') && !b.startsWith('*'));
hasBranches = branchList.length > 0;
} catch {
// If branch listing fails, assume no branches exist
hasBranches = false;
}
// If no branches exist, create an orphan branch (branch with no parent)
if (!hasBranches) {
// Create worktree for the new branch directly (orphan branch)
const worktreeRoot = join(this.repoRoot, npub, `${repoName}.worktrees`);
const worktreePath = resolve(join(worktreeRoot, branchName));
const { mkdir, rm } = await import('fs/promises');
if (!existsSync(worktreeRoot)) {
await mkdir(worktreeRoot, { recursive: true });
}
// Remove existing worktree if it exists
if (existsSync(worktreePath)) {
try {
await git.raw(['worktree', 'remove', worktreePath, '--force']);
} catch {
await rm(worktreePath, { recursive: true, force: true });
}
}
// Create worktree with orphan branch
await git.raw(['worktree', 'add', worktreePath, '--orphan', branchName]);
// Set the default branch to the new branch in the bare repo
await git.raw(['symbolic-ref', 'HEAD', `refs/heads/${branchName}`]);
// Clean up worktree
await this.removeWorktree(repoPath, worktreePath);
} else {
// Repo has branches - use normal branch creation
// Use git worktree instead of cloning (much more efficient)
const workDir = await this.getWorktree(repoPath, fromBranch, npub, repoName);
const workGit: SimpleGit = simpleGit(workDir);
// Create and checkout new branch
await workGit.checkout(['-b', branchName]);
// Create and checkout new branch
await workGit.checkout(['-b', branchName]);
// Note: No push needed - worktrees of bare repos share the same object database,
// so the branch is already in the bare repository. We don't push to remote origin
// to avoid requiring remote authentication and to keep changes local-only.
// Note: No push needed - worktrees of bare repos share the same object database,
// so the branch is already in the bare repository. We don't push to remote origin
// to avoid requiring remote authentication and to keep changes local-only.
// Clean up worktree
await this.removeWorktree(repoPath, workDir);
// Clean up worktree
await this.removeWorktree(repoPath, workDir);
}
} catch (error) {
logger.error({ error, repoPath, branchName, npub }, 'Error creating branch');
throw new Error(`Failed to create branch: ${error instanceof Error ? error.message : String(error)}`);

481
src/lib/services/nostr/nostr-client.ts

@@ -12,6 +12,32 @@ import { KIND } from '../../types/nostr.js';
// Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
/**
 * Check whether an event is an addressable ("parameterized replaceable")
 * event as defined by NIP-01 (formerly NIP-33).
 *
 * Addressable events have 30000 <= kind < 40000 and are deduplicated by
 * (kind, pubkey, d-tag). A missing or empty `d` tag is treated as the empty
 * string per the spec, so the kind range alone decides membership here.
 *
 * Note: kinds 10000-19999 are *plain* replaceable events (keyed by
 * kind:pubkey only) and must NOT be matched by this predicate — the previous
 * implementation used that range by mistake.
 */
function isParameterizedReplaceable(event: NostrEvent): boolean {
  return event.kind >= 30000 && event.kind < 40000;
}
/**
 * Get the deduplication key for an event, following NIP-01 replacement
 * semantics:
 * - Replaceable events (kind 0, 3, and the 10000-19999 range, which
 *   includes 10002 relay lists): `kind:pubkey` — only the latest per
 *   author matters.
 * - Addressable ("parameterized replaceable") events
 *   (30000 <= kind < 40000): `kind:pubkey:d` — a missing `d` tag is
 *   treated as the empty string, per spec.
 * - All other events are immutable and keyed by their id.
 */
function getDeduplicationKey(event: NostrEvent): string {
  // Explicit list (0, 3, 10002) plus the whole NIP-01 replaceable range.
  if (REPLACEABLE_KINDS.includes(event.kind) ||
      (event.kind >= 10000 && event.kind < 20000)) {
    return `${event.kind}:${event.pubkey}`;
  }
  // Addressable events: NIP-01 specifies 30000 <= kind < 40000 (the old
  // 10000-19999 check was wrong — that range is plain replaceable).
  if (event.kind >= 30000 && event.kind < 40000) {
    const dTag = event.tags.find(t => t[0] === 'd')?.[1] ?? '';
    return `${event.kind}:${event.pubkey}:${dTag}`;
  }
  return event.id;
}
// Lazy load persistent cache (only in browser)
let persistentEventCache: typeof import('./persistent-event-cache.js').persistentEventCache | null = null;
async function getPersistentCache() {
@@ -146,13 +172,196 @@ async function createWebSocketWithTor(url: string): Promise<WebSocket> {
}
}
/**
 * One pooled WebSocket connection to a single relay URL
 * (the pool map is keyed by relay, so at most one entry per relay).
 */
interface RelayConnection {
  // The underlying socket (may still be CONNECTING when pooled).
  ws: WebSocket;
  // Timestamp (ms) of the last checkout/release; drives idle cleanup.
  lastUsed: number;
  // Number of in-flight requests currently sharing this socket.
  pendingRequests: number;
  // Reconnect attempts for this connection — not visibly updated in this
  // chunk; TODO confirm it is used elsewhere.
  reconnectAttempts: number;
  messageHandlers: Map<string, (message: any) => void>; // subscription ID -> handler
  // Monotonic counter used to mint unique subscription IDs on this socket.
  nextSubscriptionId: number;
}
export class NostrClient {
private relays: string[] = [];
private authenticatedRelays: Set<string> = new Set();
private processingDeletions: boolean = false; // Guard to prevent recursive deletion processing
private connectionPool: Map<string, RelayConnection> = new Map();
private readonly CONNECTION_TIMEOUT = 30000; // Close idle connections after 30 seconds
private readonly MAX_RECONNECT_ATTEMPTS = 3;
private readonly RECONNECT_DELAY = 2000; // 2 seconds between reconnect attempts
private connectionAttempts: Map<string, { count: number; lastAttempt: number }> = new Map();
private readonly MAX_CONCURRENT_CONNECTIONS = 3; // Max concurrent connections per relay
private readonly CONNECTION_BACKOFF_BASE = 1000; // Base backoff in ms
/**
 * @param relays Relay URLs this client will query.
 */
constructor(relays: string[]) {
  this.relays = relays;
  // Browser only: sweep idle pooled connections every 10 s. The interval
  // handle is not retained, so it can never be cleared — assumes this
  // client is a page-lifetime singleton (TODO confirm).
  if (typeof window !== 'undefined') {
    setInterval(() => this.cleanupIdleConnections(), 10000); // Check every 10 seconds
  }
}
/**
 * Periodically reap pooled relay connections.
 *
 * - CLOSED or CLOSING sockets can never serve another request, so their
 *   pool entries are dropped immediately — even with pendingRequests > 0,
 *   since a dead socket will never complete them. (The old code skipped
 *   CLOSING sockets entirely and kept CLOSED ones while requests were
 *   pending, leaking pool entries.)
 * - OPEN or still-CONNECTING sockets that have been idle (no pending
 *   requests) longer than CONNECTION_TIMEOUT are closed and dropped; this
 *   also reaps sockets stuck forever in CONNECTING.
 */
private cleanupIdleConnections(): void {
  const now = Date.now();
  for (const [relay, conn] of this.connectionPool.entries()) {
    const state = conn.ws.readyState;
    // Dead or dying sockets: remove the stale pool entry right away.
    if (state === WebSocket.CLOSED || state === WebSocket.CLOSING) {
      this.connectionPool.delete(relay);
      continue;
    }
    // Live (or still-connecting) sockets: reap only when idle past timeout.
    if (conn.pendingRequests === 0 && now - conn.lastUsed > this.CONNECTION_TIMEOUT) {
      try {
        // close() is also legal on a CONNECTING socket (aborts the handshake).
        conn.ws.close();
      } catch {
        // Ignore close errors — the entry is discarded regardless.
      }
      this.connectionPool.delete(relay);
    }
  }
}
/**
 * Get a pooled WebSocket connection to `relay`, or create a new one.
 *
 * Returns null (never throws) when the attempt is throttled by exponential
 * backoff, the concurrent-connection cap is hit, or the socket cannot be
 * created. A caller that receives a socket holds one "pending request" and
 * must later call releaseConnection(relay) to release it.
 */
private async getConnection(relay: string): Promise<WebSocket | null> {
  const existing = this.connectionPool.get(relay);
  // Fast path: reuse an existing OPEN connection and mark it busy.
  if (existing && existing.ws.readyState === WebSocket.OPEN) {
    existing.lastUsed = Date.now();
    existing.pendingRequests++;
    return existing.ws;
  }
  // Check connection attempt throttling
  const attemptInfo = this.connectionAttempts.get(relay) || { count: 0, lastAttempt: 0 };
  const now = Date.now();
  const timeSinceLastAttempt = now - attemptInfo.lastAttempt;
  // Exponential backoff after failures: base * 2^(count-1), exponent capped at 5.
  if (attemptInfo.count > 0) {
    const backoffTime = this.CONNECTION_BACKOFF_BASE * Math.pow(2, Math.min(attemptInfo.count - 1, 5));
    if (timeSinceLastAttempt < backoffTime) {
      logger.debug({ relay, backoffTime, timeSinceLastAttempt }, 'Throttling connection attempt');
      return null; // Don't attempt connection yet
    }
  }
  // NOTE(review): this counts OPEN/CONNECTING sockets across the WHOLE pool
  // (all relays), yet MAX_CONCURRENT_CONNECTIONS is documented as a per-relay
  // cap — and the pool is keyed by relay, so there is at most one entry per
  // relay anyway. Confirm the intended scope of this limit.
  const openConnections = Array.from(this.connectionPool.values())
    .filter(c => c.ws === existing?.ws || (c.ws.readyState === WebSocket.OPEN || c.ws.readyState === WebSocket.CONNECTING))
    .length;
  if (openConnections >= this.MAX_CONCURRENT_CONNECTIONS) {
    logger.debug({ relay, openConnections }, 'Too many concurrent connections, skipping');
    return null;
  }
  // Drop the stale (non-OPEN) pooled entry and close its socket if needed.
  if (existing) {
    this.connectionPool.delete(relay);
    try {
      if (existing.ws.readyState !== WebSocket.CLOSED) {
        existing.ws.close();
      }
    } catch {
      // Ignore errors
    }
  }
  // Record this attempt before trying, so a failure feeds the backoff above.
  this.connectionAttempts.set(relay, { count: attemptInfo.count + 1, lastAttempt: now });
  // Create new connection
  try {
    const ws = await createWebSocketWithTor(relay);
    const conn: RelayConnection = {
      ws,
      lastUsed: Date.now(),
      pendingRequests: 1, // the caller of getConnection() holds one request
      reconnectAttempts: 0,
      messageHandlers: new Map(),
      nextSubscriptionId: 1
    };
    // Shared router: one onmessage dispatches to per-subscription handlers.
    ws.onmessage = (event: MessageEvent) => {
      try {
        const message = JSON.parse(event.data);
        // Route to appropriate handler based on message type
        if (message[0] === 'EVENT' && message[1]) {
          // message[1] is the subscription ID
          const handler = conn.messageHandlers.get(message[1]);
          if (handler) {
            handler(message);
          }
        } else if (message[0] === 'EOSE' && message[1]) {
          // message[1] is the subscription ID
          const handler = conn.messageHandlers.get(message[1]);
          if (handler) {
            handler(message);
          }
        } else if (message[0] === 'AUTH') {
          // AUTH challenges carry no subscription ID — broadcast to all handlers
          for (const handler of conn.messageHandlers.values()) {
            handler(message);
          }
        } else if (message[0] === 'OK' && message[1] === 'auth') {
          // NOTE(review): per NIP-20/NIP-42 the second element of an OK is an
          // event id, not the literal 'auth' — confirm this branch ever fires.
          for (const handler of conn.messageHandlers.values()) {
            handler(message);
          }
        }
      } catch (error) {
        // Ignore parse errors
      }
    };
    // Evict this socket from the pool when it closes or errors
    // (guard on identity so a newer replacement entry is not evicted).
    ws.onclose = () => {
      // Remove from pool when closed
      const poolConn = this.connectionPool.get(relay);
      if (poolConn && poolConn.ws === ws) {
        this.connectionPool.delete(relay);
      }
    };
    ws.onerror = () => {
      // Remove from pool on error
      const poolConn = this.connectionPool.get(relay);
      if (poolConn && poolConn.ws === ws) {
        this.connectionPool.delete(relay);
      }
    };
    this.connectionPool.set(relay, conn);
    // NOTE(review): onopen is assigned AFTER awaiting createWebSocketWithTor;
    // if that helper resolves with an already-open socket, 'open' has fired
    // and the attempt counter is never reset — confirm when the promise settles.
    ws.onopen = () => {
      this.connectionAttempts.set(relay, { count: 0, lastAttempt: Date.now() });
    };
    return ws;
  } catch (error) {
    logger.debug({ error, relay }, 'Failed to create WebSocket connection');
    return null;
  }
}
/**
* Release a connection (decrement pending requests counter)
*/
private releaseConnection(relay: string): void {
const conn = this.connectionPool.get(relay);
if (conn) {
conn.pendingRequests = Math.max(0, conn.pendingRequests - 1);
conn.lastUsed = Date.now();
}
}
/**
@@ -281,43 +490,53 @@ export class NostrClient {
}
}
// Merge with existing events - never delete valid events
// Merge with existing events - handle replaceable and parameterized replaceable events
// Map: deduplication key -> latest event
const eventMap = new Map<string, NostrEvent>();
const eventsToDelete = new Set<string>(); // Event IDs to delete from cache
// Add existing events first
// Add existing events first, indexed by deduplication key
for (const event of existingEvents) {
eventMap.set(event.id, event);
const key = getDeduplicationKey(event);
const existing = eventMap.get(key);
// Keep the newest if there are duplicates
if (!existing || event.created_at > existing.created_at) {
if (existing) {
eventsToDelete.add(existing.id); // Mark older event for deletion
}
eventMap.set(key, event);
} else {
eventsToDelete.add(event.id); // This one is older
}
}
// Add/update with new events from relays
// For replaceable events (kind 0, 3, 10002), use latest per pubkey
const replaceableEvents = new Map<string, NostrEvent>(); // pubkey -> latest event
for (const event of events) {
if (REPLACEABLE_KINDS.includes(event.kind)) {
// Replaceable event - only keep latest per pubkey
const existing = replaceableEvents.get(event.pubkey);
if (!existing || event.created_at > existing.created_at) {
replaceableEvents.set(event.pubkey, event);
const key = getDeduplicationKey(event);
const existing = eventMap.get(key);
if (!existing || event.created_at > existing.created_at) {
// New event is newer (or first occurrence)
if (existing) {
eventsToDelete.add(existing.id); // Mark older event for deletion
}
eventMap.set(key, event);
} else {
// Regular event - add if newer or doesn't exist
const existing = eventMap.get(event.id);
if (!existing || event.created_at > existing.created_at) {
eventMap.set(event.id, event);
}
// Existing event is newer, mark this one for deletion
eventsToDelete.add(event.id);
}
}
// Add replaceable events to the map (replacing older versions)
for (const [pubkey, event] of replaceableEvents.entries()) {
// Remove any existing replaceable events for this pubkey
for (const [id, existingEvent] of eventMap.entries()) {
if (existingEvent.pubkey === pubkey && REPLACEABLE_KINDS.includes(existingEvent.kind)) {
eventMap.delete(id);
// Remove events that should be deleted
for (const eventId of eventsToDelete) {
eventMap.delete(eventId); // Remove by ID if it was keyed by ID
// Also remove from map if it's keyed by deduplication key
for (const [key, event] of eventMap.entries()) {
if (event.id === eventId) {
eventMap.delete(key);
break;
}
}
eventMap.set(event.id, event);
}
const finalEvents = Array.from(eventMap.values());
@@ -328,6 +547,15 @@
// Get persistent cache once (if available)
const persistentCache = await getPersistentCache();
// Delete older events from cache if we have newer ones
if (persistentCache && eventsToDelete.size > 0) {
for (const eventId of eventsToDelete) {
persistentCache.deleteEvent(eventId).catch((err: unknown) => {
logger.debug({ error: err, eventId }, 'Failed to delete old event from cache');
});
}
}
// Cache in persistent cache (has built-in in-memory layer)
// For kind 0 (profile) events, also cache individually by pubkey
const profileEvents = finalEvents.filter(e => e.kind === 0);
@@ -444,6 +672,7 @@
let timeoutId: ReturnType<typeof setTimeout> | null = null;
let connectionTimeoutId: ReturnType<typeof setTimeout> | null = null;
let authHandled = false;
let isNewConnection = false;
const cleanup = () => {
if (timeoutId) {
@@ -454,12 +683,17 @@
clearTimeout(connectionTimeoutId);
connectionTimeoutId = null;
}
if (ws && (ws.readyState === WebSocket.OPEN || ws.readyState === WebSocket.CONNECTING)) {
// Only close if it's a new connection we created (not from pool)
// Pool connections are managed separately
if (isNewConnection && ws && (ws.readyState === WebSocket.OPEN || ws.readyState === WebSocket.CONNECTING)) {
try {
ws.close();
} catch {
// Ignore errors during cleanup
}
} else {
// Release connection back to pool
self.releaseConnection(relay);
}
};
@@ -473,93 +707,168 @@
let authPromise: Promise<boolean> | null = null;
// Create WebSocket connection (with Tor support if needed)
createWebSocketWithTor(relay).then(websocket => {
// Get connection from pool or create new one
this.getConnection(relay).then(websocket => {
if (!websocket) {
resolveOnce([]);
return;
}
ws = websocket;
isNewConnection = false; // From pool
setupWebSocketHandlers();
}).catch(error => {
// Connection failed immediately
resolveOnce([]);
// Connection failed, try creating new one
createWebSocketWithTor(relay).then(websocket => {
ws = websocket;
isNewConnection = true; // New connection
setupWebSocketHandlers();
}).catch(err => {
// Connection failed immediately
resolveOnce([]);
});
});
function setupWebSocketHandlers() {
if (!ws) return;
const conn = self.connectionPool.get(relay);
if (!conn) {
resolveOnce([]);
return;
}
// Get unique subscription ID for this request
const subscriptionId = `sub${conn.nextSubscriptionId++}`;
// Connection timeout - if we can't connect within 3 seconds, give up
connectionTimeoutId = setTimeout(() => {
if (!resolved && ws && ws.readyState !== WebSocket.OPEN) {
conn.messageHandlers.delete(subscriptionId);
resolveOnce([]);
}
}, 3000);
ws.onopen = () => {
if (connectionTimeoutId) {
clearTimeout(connectionTimeoutId);
connectionTimeoutId = null;
}
// Connection opened, wait for AUTH challenge or proceed
// If no AUTH challenge comes within 1 second, send REQ
setTimeout(() => {
if (!authHandled && ws && ws.readyState === WebSocket.OPEN) {
try {
ws.send(JSON.stringify(['REQ', 'sub', ...filters]));
} catch {
// Connection might have closed
resolveOnce(events);
// Set up message handler for this subscription
const messageHandler = async (message: any) => {
try {
// Handle AUTH challenge
if (message[0] === 'AUTH' && message[1] && !authHandled) {
authHandled = true;
authPromise = self.handleAuthChallenge(ws!, relay, message[1]);
const authenticated = await authPromise;
// After authentication, send the REQ
if (ws && ws.readyState === WebSocket.OPEN) {
try {
ws.send(JSON.stringify(['REQ', subscriptionId, ...filters]));
} catch {
conn.messageHandlers.delete(subscriptionId);
resolveOnce(events);
}
}
return;
}
// Handle AUTH OK response
if (message[0] === 'OK' && message[1] === 'auth' && ws) {
// AUTH completed, send REQ if not already sent
if (ws.readyState === WebSocket.OPEN && !authHandled) {
setTimeout(() => {
if (ws && ws.readyState === WebSocket.OPEN) {
try {
ws.send(JSON.stringify(['REQ', subscriptionId, ...filters]));
} catch {
conn.messageHandlers.delete(subscriptionId);
resolveOnce(events);
}
}
}, 100);
}
return;
}
// Wait for auth to complete before processing other messages
if (authPromise) {
await authPromise;
}
// Only process messages for this subscription
if (message[1] === subscriptionId) {
if (message[0] === 'EVENT') {
events.push(message[2]);
} else if (message[0] === 'EOSE') {
conn.messageHandlers.delete(subscriptionId);
resolveOnce(events);
}
}
} catch (error) {
// Ignore parse errors, continue receiving events
}
}, 1000);
};
ws.onmessage = async (event: MessageEvent) => {
try {
const message = JSON.parse(event.data);
// Handle AUTH challenge
if (message[0] === 'AUTH' && message[1] && !authHandled) {
authHandled = true;
authPromise = self.handleAuthChallenge(ws!, relay, message[1]);
const authenticated = await authPromise;
// After authentication, send the REQ
if (ws && ws.readyState === WebSocket.OPEN) {
};
conn.messageHandlers.set(subscriptionId, messageHandler);
// If connection is already open, send REQ immediately
if (ws.readyState === WebSocket.OPEN) {
// Wait a bit for AUTH challenge if needed
setTimeout(() => {
if (!authHandled && ws && ws.readyState === WebSocket.OPEN) {
try {
ws.send(JSON.stringify(['REQ', 'sub', ...filters]));
ws.send(JSON.stringify(['REQ', subscriptionId, ...filters]));
} catch {
conn.messageHandlers.delete(subscriptionId);
resolveOnce(events);
}
}
return;
}
// Wait for auth to complete before processing other messages
if (authPromise) {
await authPromise;
}
if (message[0] === 'EVENT') {
events.push(message[2]);
} else if (message[0] === 'EOSE') {
resolveOnce(events);
}
} catch (error) {
// Ignore parse errors, continue receiving events
}
};
ws.onerror = () => {
// Silently handle connection errors - some relays may be down
// Don't log or reject, just resolve with empty results
if (!resolved) {
resolveOnce([]);
}, 1000);
} else {
// Wait for connection to open
ws.onopen = () => {
if (connectionTimeoutId) {
clearTimeout(connectionTimeoutId);
connectionTimeoutId = null;
}
// Connection opened, wait for AUTH challenge or proceed
// If no AUTH challenge comes within 1 second, send REQ
setTimeout(() => {
if (!authHandled && ws && ws.readyState === WebSocket.OPEN) {
try {
ws.send(JSON.stringify(['REQ', subscriptionId, ...filters]));
} catch {
conn.messageHandlers.delete(subscriptionId);
resolveOnce(events);
}
}
}, 1000);
};
}
};
// Error and close handlers are set on the connection itself
// But we need to clean up our handler
if (ws) {
const wsRef = ws; // Capture for closure
const originalOnError = ws.onerror;
ws.onerror = () => {
conn.messageHandlers.delete(subscriptionId);
if (originalOnError) {
originalOnError.call(wsRef, new Event('error'));
}
if (!resolved) {
resolveOnce([]);
}
};
ws.onclose = () => {
// If we haven't resolved yet, resolve with what we have
if (!resolved) {
resolveOnce(events);
const originalOnClose = ws.onclose;
ws.onclose = () => {
conn.messageHandlers.delete(subscriptionId);
if (originalOnClose) {
originalOnClose.call(wsRef, new CloseEvent('close'));
}
// If we haven't resolved yet, resolve with what we have
if (!resolved) {
resolveOnce(events);
}
};
}
};
// Overall timeout - resolve with what we have after 8 seconds
timeoutId = setTimeout(() => {

221
src/lib/services/nostr/persistent-event-cache.ts

@@ -24,6 +24,32 @@ const STORE_PROFILES = 'profiles'; // Optimized storage for kind 0 events
// Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
/**
 * Check whether an event is an addressable ("parameterized replaceable")
 * event as defined by NIP-01 (formerly NIP-33).
 *
 * Addressable events have 30000 <= kind < 40000 and are deduplicated by
 * (kind, pubkey, d-tag). A missing or empty `d` tag is treated as the empty
 * string per the spec, so the kind range alone decides membership here.
 *
 * Note: kinds 10000-19999 are *plain* replaceable events (keyed by
 * kind:pubkey only) and must NOT be matched by this predicate — the previous
 * implementation used that range by mistake.
 */
function isParameterizedReplaceable(event: NostrEvent): boolean {
  return event.kind >= 30000 && event.kind < 40000;
}
/**
 * Get the deduplication key for an event, following NIP-01 replacement
 * semantics:
 * - Replaceable events (kind 0, 3, and the 10000-19999 range, which
 *   includes 10002 relay lists): `kind:pubkey` — only the latest per
 *   author matters.
 * - Addressable ("parameterized replaceable") events
 *   (30000 <= kind < 40000): `kind:pubkey:d` — a missing `d` tag is
 *   treated as the empty string, per spec.
 * - All other events are immutable and keyed by their id.
 */
function getDeduplicationKey(event: NostrEvent): string {
  // Explicit list (0, 3, 10002) plus the whole NIP-01 replaceable range.
  if (REPLACEABLE_KINDS.includes(event.kind) ||
      (event.kind >= 10000 && event.kind < 20000)) {
    return `${event.kind}:${event.pubkey}`;
  }
  // Addressable events: NIP-01 specifies 30000 <= kind < 40000 (the old
  // 10000-19999 check was wrong — that range is plain replaceable).
  if (event.kind >= 30000 && event.kind < 40000) {
    const dTag = event.tags.find(t => t[0] === 'd')?.[1] ?? '';
    return `${event.kind}:${event.pubkey}:${dTag}`;
  }
  return event.id;
}
interface CachedEvent {
event: NostrEvent;
cachedAt: number;
@@ -279,22 +305,18 @@ export class PersistentEventCache {
}
}
// For replaceable events, ensure we only return the latest per pubkey
const replaceableEvents = new Map<string, NostrEvent>();
const regularEvents: NostrEvent[] = [];
// For replaceable and parameterized replaceable events, ensure we only return the latest per deduplication key
const deduplicatedEvents = new Map<string, NostrEvent>(); // deduplication key -> latest event
for (const event of events) {
if (REPLACEABLE_KINDS.includes(event.kind)) {
const existing = replaceableEvents.get(event.pubkey);
if (!existing || event.created_at > existing.created_at) {
replaceableEvents.set(event.pubkey, event);
}
} else {
regularEvents.push(event);
const key = getDeduplicationKey(event);
const existing = deduplicatedEvents.get(key);
if (!existing || event.created_at > existing.created_at) {
deduplicatedEvents.set(key, event);
}
}
const result = [...Array.from(replaceableEvents.values()), ...regularEvents];
const result = Array.from(deduplicatedEvents.values());
// Sort by created_at descending
result.sort((a, b) => b.created_at - a.created_at);
@@ -501,25 +523,81 @@
const profileStore = transaction.objectStore(STORE_PROFILES);
const filterStore = transaction.objectStore(STORE_FILTERS);
const newEventIds: string[] = [];
let newEventIds: string[] = [];
const eventsToDelete = new Set<string>();
// Process all events in the transaction
// Group events by deduplication key to find the newest per key
const eventsByKey = new Map<string, NostrEvent>();
for (const event of events) {
// For replaceable events, check if we have a newer version for this pubkey
if (REPLACEABLE_KINDS.includes(event.kind)) {
// Check if we already have a newer replaceable event for this pubkey
// Use the same transaction instead of calling getProfile (which creates a new transaction)
const existingProfile = await new Promise<CachedEvent | undefined>((resolve) => {
const req = profileStore.get(event.pubkey);
req.onsuccess = () => resolve(req.result);
req.onerror = () => resolve(undefined);
});
const key = getDeduplicationKey(event);
const existing = eventsByKey.get(key);
if (!existing || event.created_at > existing.created_at) {
if (existing) {
eventsToDelete.add(existing.id); // Mark older version for deletion
}
eventsByKey.set(key, event);
} else {
eventsToDelete.add(event.id); // This one is older
}
}
// Check existing events in cache for same deduplication keys and mark older ones for deletion
for (const eventId of existingEventIds) {
const existingEventRequest = eventStore.get(eventId);
const existingCached = await new Promise<CachedEvent | undefined>((resolve) => {
existingEventRequest.onsuccess = () => resolve(existingEventRequest.result);
existingEventRequest.onerror = () => resolve(undefined);
});
if (existingCached) {
const existingEvent = existingCached.event;
const key = getDeduplicationKey(existingEvent);
const newEvent = eventsByKey.get(key);
if (existingProfile && existingProfile.event.kind === event.kind && existingProfile.event.created_at >= event.created_at) {
// Existing event is newer or same, skip
if (existingEventIds.has(existingProfile.event.id)) {
newEventIds.push(existingProfile.event.id);
// If we have a newer event with the same key, mark the old one for deletion
if (newEvent && newEvent.id !== existingEvent.id && newEvent.created_at > existingEvent.created_at) {
eventsToDelete.add(existingEvent.id);
}
}
}
// Process all events in the transaction (only the newest per deduplication key)
for (const event of Array.from(eventsByKey.values())) {
const key = getDeduplicationKey(event);
// For replaceable events (kind 0, 3, 10002), check profile store (only kind 0 uses it, but check all)
if (REPLACEABLE_KINDS.includes(event.kind)) {
// For kind 0, check profile store
if (event.kind === 0) {
const existingProfile = await new Promise<CachedEvent | undefined>((resolve) => {
const req = profileStore.get(event.pubkey);
req.onsuccess = () => resolve(req.result);
req.onerror = () => resolve(undefined);
});
if (existingProfile && existingProfile.event.kind === event.kind && existingProfile.event.created_at >= event.created_at) {
// Existing event is newer or same, skip
if (existingEventIds.has(existingProfile.event.id)) {
newEventIds.push(existingProfile.event.id);
}
// Mark this one for deletion if it's different
if (existingProfile.event.id !== event.id) {
eventsToDelete.add(event.id);
}
continue;
}
} else {
// For kind 3 and 10002, check if we already have a newer one in events store
// We already checked above, so just continue if it's already in existingEventIds
if (existingEventIds.has(event.id)) {
newEventIds.push(event.id);
continue;
}
}
} else if (isParameterizedReplaceable(event)) {
// For parameterized replaceable events, check if we already have this event
if (existingEventIds.has(event.id)) {
newEventIds.push(event.id);
continue;
}
} else {
@@ -604,8 +682,42 @@
}
}
// Merge with existing event IDs (don't delete valid events)
const mergedEventIds = Array.from(new Set([...existingEntry?.eventIds || [], ...newEventIds]));
// Delete older events that have been superseded
for (const eventId of eventsToDelete) {
try {
await new Promise<void>((resolve, reject) => {
const req = eventStore.delete(eventId);
req.onsuccess = () => resolve();
req.onerror = () => reject(req.error);
});
// Remove from existing event IDs if present
existingEventIds.delete(eventId);
newEventIds = newEventIds.filter(id => id !== eventId);
// Also remove from profile store if it's a kind 0 event
const deleteProfileRequest = profileStore.openCursor();
await new Promise<void>((resolve) => {
deleteProfileRequest.onsuccess = (event) => {
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result;
if (cursor) {
const cached = cursor.value as CachedEvent;
if (cached.event.id === eventId) {
cursor.delete();
}
cursor.continue();
} else {
resolve();
}
};
deleteProfileRequest.onerror = () => resolve();
});
} catch (error) {
logger.debug({ error, eventId }, 'Failed to delete old event from cache');
}
}
// Merge with existing event IDs (excluding deleted ones)
const mergedEventIds = Array.from(new Set([...existingEntry?.eventIds.filter(id => !eventsToDelete.has(id)) || [], ...newEventIds]));
// Update filter cache entry (using same transaction)
const filterEntry: FilterCacheEntry = {
@ -1262,6 +1374,59 @@ export class PersistentEventCache { @@ -1262,6 +1374,59 @@ export class PersistentEventCache {
throw error;
}
}
/**
 * Delete a single event from the cache by event ID.
 *
 * Removes the event from the IndexedDB events store, strips its ID from
 * every filter-cache entry that references it, and evicts it from the
 * in-memory cache. Deletion is best-effort: any failure is logged at
 * debug level and swallowed so cache maintenance never breaks the caller.
 *
 * @param eventId - Hex ID of the Nostr event to remove
 */
async deleteEvent(eventId: string): Promise<void> {
	await this.init();
	if (!this.db) {
		return;
	}

	try {
		const transaction = this.db.transaction([STORE_EVENTS, STORE_FILTERS], 'readwrite');
		const eventStore = transaction.objectStore(STORE_EVENTS);
		const filterStore = transaction.objectStore(STORE_FILTERS);

		// Delete from the events store
		await new Promise<void>((resolve, reject) => {
			const req = eventStore.delete(eventId);
			req.onsuccess = () => resolve();
			req.onerror = () => reject(req.error);
		});

		// Remove the ID from every filter entry that still references it,
		// so cached filter results don't point at a missing event
		const filterCursor = filterStore.openCursor();
		await new Promise<void>((resolve, reject) => {
			filterCursor.onsuccess = (evt) => {
				const cursor = (evt.target as IDBRequest<IDBCursorWithValue>).result;
				if (cursor) {
					const filterEntry = cursor.value;
					if (filterEntry.eventIds && filterEntry.eventIds.includes(eventId)) {
						filterEntry.eventIds = filterEntry.eventIds.filter((id: string) => id !== eventId);
						cursor.update(filterEntry);
					}
					cursor.continue();
				} else {
					resolve();
				}
			};
			filterCursor.onerror = () => reject(filterCursor.error);
		});

		// Evict from the in-memory cache as well. The map key is not needed,
		// so iterate values() (entries() left an unused binding).
		for (const cacheEntry of this.memoryCache.values()) {
			const index = cacheEntry.events.findIndex(e => e.id === eventId);
			if (index !== -1) {
				cacheEntry.events.splice(index, 1);
			}
		}
	} catch (error) {
		logger.debug({ error, eventId }, 'Error deleting event from cache');
	}
}
}
// Singleton instance

4
src/lib/services/settings-store.ts

@ -14,13 +14,15 @@ interface Settings { @@ -14,13 +14,15 @@ interface Settings {
userName: string;
userEmail: string;
theme: 'gitrepublic-light' | 'gitrepublic-dark' | 'gitrepublic-black';
defaultBranch: string;
}
const DEFAULT_SETTINGS: Settings = {
autoSave: false,
userName: '',
userEmail: '',
theme: 'gitrepublic-dark'
theme: 'gitrepublic-dark',
defaultBranch: 'master'
};
export class SettingsStore {

13
src/routes/api/repos/[npub]/[repo]/branches/+server.ts

@ -140,7 +140,18 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -140,7 +140,18 @@ export const POST: RequestHandler = createRepoPostHandler(
}
// Get default branch if fromBranch not provided
const sourceBranch = fromBranch || await fileManager.getDefaultBranch(context.npub, context.repo);
// If repo has no branches, use 'master' as default
let sourceBranch = fromBranch;
if (!sourceBranch) {
try {
sourceBranch = await fileManager.getDefaultBranch(context.npub, context.repo);
} catch (err) {
// If getDefaultBranch fails (e.g., no branches exist), use 'master' as default
logger.debug({ error: err, npub: context.npub, repo: context.repo }, 'No default branch found, using master');
sourceBranch = 'master';
}
}
await fileManager.createBranch(context.npub, context.repo, branchName, sourceBranch);
return json({ success: true, message: 'Branch created successfully' });
},

71
src/routes/docs/+page.svelte

@ -30,7 +30,56 @@ @@ -30,7 +30,56 @@
}
});
content = md.render(docContent);
let rendered = md.render(docContent);

// Add id attributes to rendered headings so in-page anchor links (#slug)
// can target them. The backreference \1 ensures the closing tag level
// matches the opening tag, so a lazy match can never span across headings
// of different levels.
rendered = rendered.replace(/<h([1-6])>(.*?)<\/h\1>/g, (_match, level, text) => {
	// Build the slug from plain text only (strip any inline HTML)
	const textContent = text.replace(/<[^>]*>/g, '').trim();
	const slug = textContent
		.toLowerCase()
		.replace(/[^\w\s-]/g, '') // drop punctuation/special characters
		.replace(/\s+/g, '-')     // spaces -> hyphens
		.replace(/-+/g, '-')      // collapse hyphen runs
		.replace(/^-|-$/g, '');   // trim leading/trailing hyphens
	return `<h${level} id="${slug}">${text}</h${level}>`;
});
content = rendered;

// Wire up anchor navigation once the rendered HTML is in the DOM
// (setTimeout lets Svelte flush the {@html} update first).
setTimeout(() => {
	// Scroll to the heading named by the initial URL hash, if any
	if (window.location.hash) {
		const element = document.getElementById(window.location.hash.substring(1));
		if (element) {
			element.scrollIntoView({ behavior: 'smooth', block: 'start' });
		}
	}

	// Intercept clicks on in-page links. Use closest('a') so clicks that
	// land on an element nested inside the anchor (e.g. <code>) are
	// handled too — checking target.tagName === 'A' alone misses those.
	const markdownContent = document.querySelector('.markdown-content');
	if (markdownContent) {
		markdownContent.addEventListener('click', (e) => {
			const anchor = (e.target as HTMLElement).closest('a');
			const href = anchor?.getAttribute('href');
			if (href?.startsWith('#')) {
				const element = document.getElementById(href.substring(1));
				if (element) {
					e.preventDefault();
					element.scrollIntoView({ behavior: 'smooth', block: 'start' });
					// Record the hash in the URL without triggering a jump
					window.history.pushState(null, '', href);
				}
			}
		});
	}
}, 100);
} else {
error = $page.data.error || 'Failed to load documentation';
}
@ -81,6 +130,7 @@ @@ -81,6 +130,7 @@
border-bottom: 2px solid var(--border-color);
padding-bottom: 0.5rem;
color: var(--text-primary);
scroll-margin-top: 1rem;
}
:global(.markdown-content h2) {
@ -88,6 +138,7 @@ @@ -88,6 +138,7 @@
margin-top: 1.5rem;
margin-bottom: 0.75rem;
color: var(--text-primary);
scroll-margin-top: 1rem;
}
:global(.markdown-content h3) {
@ -95,6 +146,24 @@ @@ -95,6 +146,24 @@
margin-top: 1.25rem;
margin-bottom: 0.5rem;
color: var(--text-primary);
scroll-margin-top: 1rem;
}
:global(.markdown-content h4) {
scroll-margin-top: 1rem;
}
:global(.markdown-content h5) {
scroll-margin-top: 1rem;
}
:global(.markdown-content h6) {
scroll-margin-top: 1rem;
}
/* Smooth scrolling when the markdown container itself scrolls; note this
   does not affect page-level anchor jumps, which get their smoothness from
   scrollIntoView({ behavior: 'smooth' }) in the script */
:global(.markdown-content) {
scroll-behavior: smooth;
}
:global(.markdown-content code) {

71
src/routes/docs/nip34/+page.svelte

@ -30,7 +30,56 @@ @@ -30,7 +30,56 @@
}
});
content = md.render(docContent);
let rendered = md.render(docContent);

// Add id attributes to rendered headings so in-page anchor links (#slug)
// can target them. The backreference \1 ensures the closing tag level
// matches the opening tag, so a lazy match can never span across headings
// of different levels.
rendered = rendered.replace(/<h([1-6])>(.*?)<\/h\1>/g, (_match, level, text) => {
	// Build the slug from plain text only (strip any inline HTML)
	const textContent = text.replace(/<[^>]*>/g, '').trim();
	const slug = textContent
		.toLowerCase()
		.replace(/[^\w\s-]/g, '') // drop punctuation/special characters
		.replace(/\s+/g, '-')     // spaces -> hyphens
		.replace(/-+/g, '-')      // collapse hyphen runs
		.replace(/^-|-$/g, '');   // trim leading/trailing hyphens
	return `<h${level} id="${slug}">${text}</h${level}>`;
});
content = rendered;

// Wire up anchor navigation once the rendered HTML is in the DOM
// (setTimeout lets Svelte flush the {@html} update first).
setTimeout(() => {
	// Scroll to the heading named by the initial URL hash, if any
	if (window.location.hash) {
		const element = document.getElementById(window.location.hash.substring(1));
		if (element) {
			element.scrollIntoView({ behavior: 'smooth', block: 'start' });
		}
	}

	// Intercept clicks on in-page links. Use closest('a') so clicks that
	// land on an element nested inside the anchor (e.g. <code>) are
	// handled too — checking target.tagName === 'A' alone misses those.
	const markdownContent = document.querySelector('.markdown-content');
	if (markdownContent) {
		markdownContent.addEventListener('click', (e) => {
			const anchor = (e.target as HTMLElement).closest('a');
			const href = anchor?.getAttribute('href');
			if (href?.startsWith('#')) {
				const element = document.getElementById(href.substring(1));
				if (element) {
					e.preventDefault();
					element.scrollIntoView({ behavior: 'smooth', block: 'start' });
					// Record the hash in the URL without triggering a jump
					window.history.pushState(null, '', href);
				}
			}
		});
	}
}, 100);
} else {
error = $page.data.error || 'Failed to load NIP-34 documentation';
}
@ -87,6 +136,7 @@ @@ -87,6 +136,7 @@
border-bottom: 2px solid var(--border-color);
padding-bottom: 0.5rem;
color: var(--text-primary);
scroll-margin-top: 1rem;
}
:global(.markdown-content h2) {
@ -94,6 +144,7 @@ @@ -94,6 +144,7 @@
margin-top: 1.5rem;
margin-bottom: 0.75rem;
color: var(--text-primary);
scroll-margin-top: 1rem;
}
:global(.markdown-content h3) {
@ -101,6 +152,24 @@ @@ -101,6 +152,24 @@
margin-top: 1.25rem;
margin-bottom: 0.5rem;
color: var(--text-primary);
scroll-margin-top: 1rem;
}
:global(.markdown-content h4) {
scroll-margin-top: 1rem;
}
:global(.markdown-content h5) {
scroll-margin-top: 1rem;
}
:global(.markdown-content h6) {
scroll-margin-top: 1rem;
}
/* Smooth scrolling when the markdown container itself scrolls; note this
   does not affect page-level anchor jumps, which get their smoothness from
   scrollIntoView({ behavior: 'smooth' }) in the script */
:global(.markdown-content) {
scroll-behavior: smooth;
}
:global(.markdown-content code) {

71
src/routes/docs/nip34/spec/+page.svelte

@ -30,7 +30,56 @@ @@ -30,7 +30,56 @@
}
});
content = md.render(docContent);
let rendered = md.render(docContent);

// Add id attributes to rendered headings so in-page anchor links (#slug)
// can target them. The backreference \1 ensures the closing tag level
// matches the opening tag, so a lazy match can never span across headings
// of different levels.
rendered = rendered.replace(/<h([1-6])>(.*?)<\/h\1>/g, (_match, level, text) => {
	// Build the slug from plain text only (strip any inline HTML)
	const textContent = text.replace(/<[^>]*>/g, '').trim();
	const slug = textContent
		.toLowerCase()
		.replace(/[^\w\s-]/g, '') // drop punctuation/special characters
		.replace(/\s+/g, '-')     // spaces -> hyphens
		.replace(/-+/g, '-')      // collapse hyphen runs
		.replace(/^-|-$/g, '');   // trim leading/trailing hyphens
	return `<h${level} id="${slug}">${text}</h${level}>`;
});
content = rendered;

// Wire up anchor navigation once the rendered HTML is in the DOM
// (setTimeout lets Svelte flush the {@html} update first).
setTimeout(() => {
	// Scroll to the heading named by the initial URL hash, if any
	if (window.location.hash) {
		const element = document.getElementById(window.location.hash.substring(1));
		if (element) {
			element.scrollIntoView({ behavior: 'smooth', block: 'start' });
		}
	}

	// Intercept clicks on in-page links. Use closest('a') so clicks that
	// land on an element nested inside the anchor (e.g. <code>) are
	// handled too — checking target.tagName === 'A' alone misses those.
	const markdownContent = document.querySelector('.markdown-content');
	if (markdownContent) {
		markdownContent.addEventListener('click', (e) => {
			const anchor = (e.target as HTMLElement).closest('a');
			const href = anchor?.getAttribute('href');
			if (href?.startsWith('#')) {
				const element = document.getElementById(href.substring(1));
				if (element) {
					e.preventDefault();
					element.scrollIntoView({ behavior: 'smooth', block: 'start' });
					// Record the hash in the URL without triggering a jump
					window.history.pushState(null, '', href);
				}
			}
		});
	}
}, 100);
} else {
error = $page.data.error || 'Failed to load NIP-34 specification';
}
@ -87,6 +136,7 @@ @@ -87,6 +136,7 @@
border-bottom: 2px solid var(--border-color);
padding-bottom: 0.5rem;
color: var(--text-primary);
scroll-margin-top: 1rem;
}
:global(.markdown-content h2) {
@ -94,6 +144,7 @@ @@ -94,6 +144,7 @@
margin-top: 1.5rem;
margin-bottom: 0.75rem;
color: var(--text-primary);
scroll-margin-top: 1rem;
}
:global(.markdown-content h3) {
@ -101,6 +152,24 @@ @@ -101,6 +152,24 @@
margin-top: 1.25rem;
margin-bottom: 0.5rem;
color: var(--text-primary);
scroll-margin-top: 1rem;
}
:global(.markdown-content h4) {
scroll-margin-top: 1rem;
}
:global(.markdown-content h5) {
scroll-margin-top: 1rem;
}
:global(.markdown-content h6) {
scroll-margin-top: 1rem;
}
/* Smooth scrolling when the markdown container itself scrolls; note this
   does not affect page-level anchor jumps, which get their smoothness from
   scrollIntoView({ behavior: 'smooth' }) in the script */
:global(.markdown-content) {
scroll-behavior: smooth;
}
:global(.markdown-content code) {

31
src/routes/repos/[npub]/[repo]/+page.svelte

@ -193,6 +193,7 @@ @@ -193,6 +193,7 @@
let showCreateBranchDialog = $state(false);
let newBranchName = $state('');
let newBranchFrom = $state<string | null>(null);
let defaultBranchName = $state('master'); // Default branch from settings
// Commit history
let commits = $state<Array<{ hash: string; message: string; author: string; date: string; files: string[] }>>([]);
@ -2709,6 +2710,13 @@ @@ -2709,6 +2710,13 @@
error = null;
try {
// If no branches exist, use default branch from settings
let fromBranch = newBranchFrom || currentBranch;
if (!fromBranch && branches.length === 0) {
const settings = await settingsStore.getSettings();
fromBranch = settings.defaultBranch || 'master';
}
const response = await fetch(`/api/repos/${npub}/${repo}/branches`, {
method: 'POST',
headers: {
@ -2717,7 +2725,7 @@ @@ -2717,7 +2725,7 @@
},
body: JSON.stringify({
branchName: newBranchName,
fromBranch: newBranchFrom || currentBranch
fromBranch: fromBranch || 'master' // Final fallback
})
});
@ -3237,8 +3245,15 @@ @@ -3237,8 +3245,15 @@
{/if}
{#if isMaintainer}
<button
onclick={() => {
onclick={async () => {
if (!userPubkey || !isMaintainer || needsClone) return;
// Load default branch from settings
try {
const settings = await settingsStore.getSettings();
defaultBranchName = settings.defaultBranch || 'master';
} catch {
defaultBranchName = 'master';
}
showCreateBranchDialog = true;
showRepoMenu = false;
}}
@ -4285,10 +4300,14 @@ @@ -4285,10 +4300,14 @@
<label>
From Branch:
<select bind:value={newBranchFrom}>
{#each branches as branch}
{@const branchName = typeof branch === 'string' ? branch : (branch as { name: string }).name}
<option value={branchName}>{branchName}</option>
{/each}
{#if branches.length === 0}
<option value={null}>No branches - will create initial branch ({defaultBranchName})</option>
{:else}
{#each branches as branch}
{@const branchName = typeof branch === 'string' ? branch : (branch as { name: string }).name}
<option value={branchName}>{branchName}</option>
{/each}
{/if}
</select>
</label>
<div class="modal-actions">

Loading…
Cancel
Save