@@ -1028,11 +953,27 @@
}
.loading-state,
- .empty-state {
+ .empty-state,
+ .error-state {
padding: 2rem;
text-align: center;
}
+ .error-message {
+ font-weight: 600;
+ color: var(--fog-accent, #64748b);
+ margin-bottom: 0.5rem;
+ }
+
+ :global(.dark) .error-message {
+ color: var(--fog-dark-accent, #94a3b8);
+ }
+
+ .error-hint {
+ font-size: 0.875rem;
+ margin-top: 0.5rem;
+ }
+
.feed-posts {
display: flex;
flex-direction: column;
diff --git a/src/lib/modules/threads/ThreadList.svelte b/src/lib/modules/threads/ThreadList.svelte
index 74e7a07..917f2e6 100644
--- a/src/lib/modules/threads/ThreadList.svelte
+++ b/src/lib/modules/threads/ThreadList.svelte
@@ -29,12 +29,42 @@
return sorted;
});
- $effect(() => {
+ // Track if we're currently loading to prevent loops
+ let isLoading = $state(false);
+ let prevSortBy = $state<'newest' | 'active' | 'upvoted' | null>(null);
+ let prevShowOlder = $state<boolean | null>(null); // TODO(review): confirm showOlder is boolean — generic was lost in extraction
+ let prevSelectedTopic = $state<string | null>(null); // TODO(review): confirm selectedTopic is string — generic was lost in extraction
+
+ // Initial load on mount
+ onMount(() => {
+ prevSortBy = sortBy;
+ prevShowOlder = showOlder;
+ prevSelectedTopic = selectedTopic;
loadAllData();
});
+ // Only reload when sortBy, showOlder, or selectedTopic changes (after initial values are set)
+ $effect(() => {
+ // Skip if we haven't set initial values yet (onMount hasn't run)
+ if (prevSortBy === null) return;
+
+ // Check if any filter parameter actually changed
+ if (sortBy !== prevSortBy || showOlder !== prevShowOlder || selectedTopic !== prevSelectedTopic) {
+ prevSortBy = sortBy;
+ prevShowOlder = showOlder;
+ prevSelectedTopic = selectedTopic;
+
+ // Only reload if not already loading
+ if (!isLoading) {
+ loadAllData();
+ }
+ }
+ });
+
async function loadAllData() {
+ if (isLoading) return; // Prevent concurrent loads
loading = true;
+ isLoading = true;
try {
const config = nostrClient.getConfig();
const since = showOlder
@@ -48,88 +78,54 @@
const reactionRelays = relayManager.getProfileReadRelays();
const zapRelays = relayManager.getZapReceiptReadRelays();
- // Step 1: Load from cache first (immediate display)
- const cachedThreads = await nostrClient.fetchEvents(
- [{ kinds: [KIND.DISCUSSION_THREAD], since, limit: 50 }],
- threadRelays,
- {
- useCache: true,
- cacheResults: false, // Don't cache again, we already have it
- timeout: 100 // Quick timeout for cache-only fetch
- }
- );
-
- // Build threads map from cache immediately
- const newThreadsMap = new Map();
- for (const event of cachedThreads) {
- newThreadsMap.set(event.id, event);
- }
- threadsMap = newThreadsMap;
- loading = false; // Show cached data immediately
-
- // Step 2: Fetch from relays in background and update incrementally
- // Use a Set to track which events we've already processed to avoid loops
- const processedEventIds = new Set(Array.from(newThreadsMap.keys()));
-
- nostrClient.fetchEvents(
+ // Query relays first with 3-second timeout, then fill from cache if needed
+ const relayThreads = await nostrClient.fetchEvents(
[{ kinds: [KIND.DISCUSSION_THREAD], since, limit: 50 }],
threadRelays,
{
- useCache: false, // Force query relays
- cacheResults: true,
+ relayFirst: true, // Query relays first with timeout
+ useCache: true, // Fill from cache if relay query returns nothing
+ cacheResults: true, // Cache the results
+ timeout: 3000, // 3-second timeout
onUpdate: async (updatedEvents) => {
- // Only add new events that aren't already in the map
+ // Update incrementally as events arrive
+ const newThreadsMap = new Map(threadsMap);
let hasNewEvents = false;
for (const event of updatedEvents) {
- if (!processedEventIds.has(event.id)) {
+ const existing = newThreadsMap.get(event.id);
+ if (!existing) {
newThreadsMap.set(event.id, event);
- processedEventIds.add(event.id);
hasNewEvents = true;
- } else {
+ } else if (event.created_at > existing.created_at) {
// Update existing event if this one is newer
- const existing = newThreadsMap.get(event.id);
- if (existing && event.created_at > existing.created_at) {
- newThreadsMap.set(event.id, event);
- }
+ newThreadsMap.set(event.id, event);
+ hasNewEvents = true;
}
}
if (hasNewEvents) {
- threadsMap = new Map(newThreadsMap); // Trigger reactivity
- }
- }
- }
- ).then((relayThreads) => {
- // Final update after relay fetch completes
- let hasNewEvents = false;
- for (const event of relayThreads) {
- if (!processedEventIds.has(event.id)) {
- newThreadsMap.set(event.id, event);
- processedEventIds.add(event.id);
- hasNewEvents = true;
- } else {
- // Update existing event if this one is newer
- const existing = newThreadsMap.get(event.id);
- if (existing && event.created_at > existing.created_at) {
- newThreadsMap.set(event.id, event);
+ threadsMap = newThreadsMap; // Trigger reactivity
}
}
}
- if (hasNewEvents) {
- threadsMap = new Map(newThreadsMap); // Trigger reactivity
- }
- }).catch((error) => {
- console.debug('Background relay fetch error (non-critical):', error);
- });
+ );
+
+ // Build threads map from results
+ const newThreadsMap = new Map();
+ for (const event of relayThreads) {
+ newThreadsMap.set(event.id, event);
+ }
+ threadsMap = newThreadsMap;
+ loading = false; // Show data immediately
// Get all thread IDs (use current threadsMap, not newThreadsMap, since it may have been updated)
const threadIds = Array.from(threadsMap.keys());
if (threadIds.length > 0) {
- // Fetch all comments in parallel
+ // Fetch all comments in parallel (relay-first for first-time users)
const allComments = await nostrClient.fetchEvents(
[{ kinds: [KIND.COMMENT], '#E': threadIds, '#K': ['11'] }],
commentRelays,
- { useCache: true }
+ { relayFirst: true, useCache: true, cacheResults: true, timeout: 3000 }
);
// Fetch all reactions in parallel
@@ -140,7 +136,7 @@
// Function to process and group reactions (called initially and on updates)
const processReactionUpdates = async () => {
const allReactions = Array.from(allReactionsMap.values());
- console.log('[ThreadList] Processing reaction updates, total reactions:', allReactions.length);
+ // Processing reaction updates
if (allReactions.length === 0) return;
@@ -148,7 +144,7 @@
const deletionEvents = await nostrClient.fetchEvents(
[{ kinds: [KIND.EVENT_DELETION], authors: Array.from(new Set(allReactions.map(r => r.pubkey))) }],
reactionRelays,
- { useCache: true }
+ { relayFirst: true, useCache: true, cacheResults: true, timeout: 3000 }
);
// Build deleted reaction IDs map
@@ -187,35 +183,10 @@
}
reactionsMap = updatedReactionsMap;
- console.log('[ThreadList] Updated reactions map:', {
- threadCounts: Array.from(updatedReactionsMap.entries()).map(([threadId, reactions]) => ({
- threadId: threadId.substring(0, 16) + '...',
- count: reactions.length,
- upvotes: reactions.filter(r => {
- const content = r.content.trim();
- return content === '+' || content === '⬆️' || content === '↑';
- }).length,
- reactionEvents: reactions.map(r => ({
- id: r.id.substring(0, 16) + '...',
- pubkey: r.pubkey.substring(0, 16) + '...',
- content: r.content,
- fullEvent: r
- }))
- }))
- });
+ // Updated reactions map
};
const handleReactionUpdate = async (updated: NostrEvent[]) => {
- console.log('[ThreadList] Received reaction update:', {
- count: updated.length,
- events: updated.map(r => ({
- id: r.id.substring(0, 16) + '...',
- pubkey: r.pubkey.substring(0, 16) + '...',
- content: r.content,
- tags: r.tags.filter(t => t[0] === 'e' || t[0] === 'E'),
- fullEvent: r
- }))
- });
for (const r of updated) {
allReactionsMap.set(r.id, r);
}
@@ -227,7 +198,10 @@
[{ kinds: [KIND.REACTION], '#e': threadIds }],
reactionRelays,
{
+ relayFirst: true,
useCache: true,
+ cacheResults: true,
+ timeout: 3000,
onUpdate: handleReactionUpdate
}
);
@@ -239,7 +213,10 @@
[{ kinds: [KIND.REACTION], '#E': threadIds }],
reactionRelays,
{
+ relayFirst: true,
useCache: true,
+ cacheResults: true,
+ timeout: 3000,
onUpdate: handleReactionUpdate
}
);
@@ -247,12 +224,7 @@
console.log('[ThreadList] Upper case #E filter rejected by relay (this is normal):', error);
}
- console.log('[ThreadList] Reactions fetched:', {
- withLowerE: reactionsWithLowerE.length,
- withUpperE: reactionsWithUpperE.length,
- lowerE_events: reactionsWithLowerE,
- upperE_events: reactionsWithUpperE
- });
+ // Reactions fetched
// Combine and deduplicate by reaction ID
for (const r of reactionsWithLowerE) {
@@ -263,22 +235,12 @@
}
const allReactions = Array.from(allReactionsMap.values());
- console.log('[ThreadList] All reactions (deduplicated):', {
- total: allReactions.length,
- events: allReactions.map(r => ({
- id: r.id.substring(0, 16) + '...',
- pubkey: r.pubkey.substring(0, 16) + '...',
- content: r.content,
- tags: r.tags.filter(t => t[0] === 'e' || t[0] === 'E'),
- created_at: new Date(r.created_at * 1000).toISOString()
- }))
- });
- // Fetch all zap receipts in parallel
+ // Fetch all zap receipts in parallel (relay-first for first-time users)
const allZapReceipts = await nostrClient.fetchEvents(
[{ kinds: [KIND.ZAP_RECEIPT], '#e': threadIds }],
zapRelays,
- { useCache: true }
+ { relayFirst: true, useCache: true, cacheResults: true, timeout: 3000 }
);
// Build maps
diff --git a/src/lib/services/nostr/config.ts b/src/lib/services/nostr/config.ts
index 7681fcc..b9c3c28 100644
--- a/src/lib/services/nostr/config.ts
+++ b/src/lib/services/nostr/config.ts
@@ -7,7 +7,6 @@ const DEFAULT_RELAYS = [
'wss://theforest.nostr1.com',
'wss://nostr21.com',
'wss://nostr.land',
- 'wss://nostr.sovbit.host',
'wss://orly-relay.imwald.eu',
'wss://nostr.wine'
];
diff --git a/src/lib/services/nostr/memory-manager.ts b/src/lib/services/nostr/memory-manager.ts
new file mode 100644
index 0000000..ad7f702
--- /dev/null
+++ b/src/lib/services/nostr/memory-manager.ts
@@ -0,0 +1,166 @@
+/**
+ * Memory management for Nostr client
+ * Tracks in-memory event usage and provides cleanup mechanisms
+ */
+
+import type { NostrEvent } from '../../types/nostr.js';
+
+interface EventReference {
+ id: string;
+ size: number;
+ timestamp: number;
+}
+
+class MemoryManager {
+ private eventReferences: Map<string, EventReference> = new Map();
+ private totalSize: number = 0;
+ private warningThresholds = [50 * 1024 * 1024, 100 * 1024 * 1024, 200 * 1024 * 1024]; // 50MB, 100MB, 200MB
+ private lastWarningLevel: number = -1;
+
+ /**
+ * Estimate size of an event in bytes
+ */
+ private estimateEventSize(event: NostrEvent): number {
+ // Rough estimate: JSON string length + overhead
+ return JSON.stringify(event).length + 100; // 100 bytes overhead for Map entry
+ }
+
+ /**
+ * Track an event in memory
+ */
+ trackEvent(event: NostrEvent): void {
+ const id = event.id;
+ const size = this.estimateEventSize(event);
+
+ // If already tracked, update size if different
+ const existing = this.eventReferences.get(id);
+ if (existing) {
+ this.totalSize -= existing.size;
+ }
+
+ this.eventReferences.set(id, {
+ id,
+ size,
+ timestamp: Date.now()
+ });
+
+ this.totalSize += size;
+ this.checkThresholds();
+ }
+
+ /**
+ * Untrack an event (when it's removed from memory)
+ */
+ untrackEvent(eventId: string): void {
+ const ref = this.eventReferences.get(eventId);
+ if (ref) {
+ this.totalSize -= ref.size;
+ this.eventReferences.delete(eventId);
+ }
+ }
+
+ /**
+ * Untrack multiple events
+ */
+ untrackEvents(eventIds: string[]): void {
+ for (const id of eventIds) {
+ this.untrackEvent(id);
+ }
+ }
+
+ /**
+ * Check memory thresholds and warn if exceeded
+ */
+ private checkThresholds(): void {
+ const currentLevel = this.warningThresholds.findIndex(threshold => this.totalSize >= threshold);
+
+ if (currentLevel > this.lastWarningLevel) {
+ const thresholdMB = this.warningThresholds[currentLevel] / (1024 * 1024);
+ const currentMB = (this.totalSize / (1024 * 1024)).toFixed(2);
+ console.warn(
+ `[memory-manager] Memory usage exceeded ${thresholdMB}MB threshold. ` +
+ `Current usage: ${currentMB}MB (${this.eventReferences.size} events)`
+ );
+ this.lastWarningLevel = currentLevel;
+
+ // If we've exceeded the highest threshold, suggest cleanup
+ if (currentLevel === this.warningThresholds.length - 1) {
+ console.warn(
+ `[memory-manager] Consider cleaning up old events. ` +
+ `Use cleanupOldEvents() to free memory.`
+ );
+ }
+ }
+ }
+
+ /**
+ * Clean up oldest events until we're under the target size
+ */
+ cleanupOldEvents(targetSize: number): string[] {
+ if (this.totalSize <= targetSize) {
+ return [];
+ }
+
+ const sorted = Array.from(this.eventReferences.values())
+ .sort((a, b) => a.timestamp - b.timestamp); // Oldest first
+
+ const removed: string[] = [];
+ let freedSize = 0;
+ const targetFreed = this.totalSize - targetSize;
+
+ for (const ref of sorted) {
+ if (freedSize >= targetFreed) {
+ break;
+ }
+
+ this.totalSize -= ref.size;
+ this.eventReferences.delete(ref.id);
+ removed.push(ref.id);
+ freedSize += ref.size;
+ }
+
+ const freedMB = (freedSize / (1024 * 1024)).toFixed(2);
+ console.debug(
+ `[memory-manager] Cleaned up ${removed.length} old events, freed ${freedMB}MB`
+ );
+
+ return removed;
+ }
+
+ /**
+ * Get current memory usage statistics
+ */
+ getStats(): {
+ totalSize: number;
+ totalSizeMB: number;
+ eventCount: number;
+ averageEventSize: number;
+ } {
+ return {
+ totalSize: this.totalSize,
+ totalSizeMB: this.totalSize / (1024 * 1024),
+ eventCount: this.eventReferences.size,
+ averageEventSize: this.eventReferences.size > 0
+ ? this.totalSize / this.eventReferences.size
+ : 0
+ };
+ }
+
+ /**
+ * Clear all tracking
+ */
+ clear(): void {
+ this.eventReferences.clear();
+ this.totalSize = 0;
+ this.lastWarningLevel = -1;
+ }
+
+ /**
+ * Get list of tracked event IDs (for debugging)
+ */
+ getTrackedEventIds(): string[] {
+ return Array.from(this.eventReferences.keys());
+ }
+}
+
+export const memoryManager = new MemoryManager();
diff --git a/src/lib/services/nostr/nostr-client.ts b/src/lib/services/nostr/nostr-client.ts
index 5bfc5d5..da93c67 100644
--- a/src/lib/services/nostr/nostr-client.ts
+++ b/src/lib/services/nostr/nostr-client.ts
@@ -11,6 +11,7 @@ import { getDB } from '../cache/indexeddb-store.js';
import { filterEvents, shouldHideEvent } from '../event-filter.js';
import { sessionManager } from '../auth/session-manager.js';
import { KIND } from '../../types/kind-lookup.js';
+import { memoryManager } from './memory-manager.js';
export interface PublishOptions {
relays?: string[];
@@ -22,15 +23,20 @@ interface FetchOptions {
cacheResults?: boolean;
onUpdate?: (events: NostrEvent[]) => void;
timeout?: number;
+ relayFirst?: boolean; // If true, query relays first with timeout, then fill from cache
}
class NostrClient {
private initialized = false;
private relays: Map = new Map();
- private subscriptions: Map = new Map();
+ private subscriptions: Map = new Map();
private nextSubId = 1;
private activeFetches: Map> = new Map();
+ // Subscription TTL cleanup
+ private subscriptionCleanupInterval: ReturnType<typeof setInterval> | null = null;
+ private readonly SUBSCRIPTION_TTL = 30000; // 30 seconds
+
// Rate limiting and throttling
private requestQueue: Array<() => void> = [];
private processingQueue = false;
@@ -41,9 +47,6 @@ class NostrClient {
private readonly MAX_CONCURRENT_TOTAL = 3; // Max 3 total concurrent requests
private totalActiveRequests = 0;
- // Track background refresh operations to prevent duplicates
- private backgroundRefreshes: Set = new Set();
-
// Failed relay tracking with exponential backoff
private failedRelays: Map = new Map();
private readonly INITIAL_RETRY_DELAY = 5000; // 5 seconds
@@ -53,6 +56,10 @@ class NostrClient {
// Track authenticated relays to avoid re-authenticating
private authenticatedRelays: Set = new Set();
+
+ // Cache NIP-11 metadata to avoid repeated HTTP requests
+ private nip11MetadataCache: Map<string, { requiresAuth: boolean; cachedAt: number }> = new Map();
+ private readonly NIP11_CACHE_TTL = 300000; // 5 minutes
async initialize(): Promise {
if (this.initialized) return;
@@ -72,9 +79,47 @@ class NostrClient {
});
await Promise.allSettled(connectionPromises);
+
+ // Start subscription cleanup interval
+ this.startSubscriptionCleanup();
+
this.initialized = true;
}
+ /**
+ * Start periodic cleanup of inactive subscriptions
+ */
+ private startSubscriptionCleanup(): void {
+ if (this.subscriptionCleanupInterval) return;
+
+ this.subscriptionCleanupInterval = setInterval(() => {
+ const now = Date.now();
+ const toRemove: string[] = [];
+
+ for (const [key, { lastActivity }] of this.subscriptions.entries()) {
+ if (now - lastActivity > this.SUBSCRIPTION_TTL) {
+ toRemove.push(key);
+ }
+ }
+
+ for (const key of toRemove) {
+ const sub = this.subscriptions.get(key);
+ if (sub) {
+ try {
+ sub.sub.close();
+ } catch (error) {
+ // Ignore errors closing subscriptions
+ }
+ this.subscriptions.delete(key);
+ }
+ }
+
+ if (toRemove.length > 0) {
+ console.debug(`[nostr-client] Cleaned up ${toRemove.length} inactive subscriptions`);
+ }
+ }, 10000); // Check every 10 seconds
+ }
+
/**
* Authenticate with a relay using NIP-42 AUTH
* Only call this when the relay has sent an AUTH challenge or an operation failed with auth-required
@@ -309,10 +354,68 @@ class NostrClient {
});
}
+ /**
+ * Get events by kind with pagination
+ */
+ private async getEventsByKindPaginated(kind: number, limit: number, maxEvents: number, pageSize: number): Promise<NostrEvent[]> { // NOTE(review): pageSize is currently unused
+ try {
+ const db = await getDB();
+ const tx = db.transaction('events', 'readonly');
+ const index = tx.store.index('kind');
+ const events: NostrEvent[] = [];
+ let count = 0;
+ const targetLimit = Math.min(limit, maxEvents);
+
+ // Use cursor to paginate through events
+ let cursor = await index.openCursor(IDBKeyRange.only(kind), 'prev');
+ while (cursor && count < targetLimit) {
+ events.push(cursor.value as NostrEvent);
+ count++;
+ cursor = await cursor.continue();
+ }
+
+ await tx.done;
+ return events;
+ } catch (error) {
+ console.debug('Error getting events by kind from cache:', error);
+ return [];
+ }
+ }
+
+ /**
+ * Get events by pubkey with pagination
+ */
+ private async getEventsByPubkeyPaginated(pubkey: string, limit: number, maxEvents: number, pageSize: number): Promise<NostrEvent[]> { // NOTE(review): pageSize is currently unused
+ try {
+ const db = await getDB();
+ const tx = db.transaction('events', 'readonly');
+ const index = tx.store.index('pubkey');
+ const events: NostrEvent[] = [];
+ let count = 0;
+ const targetLimit = Math.min(limit, maxEvents);
+
+ // Use cursor to paginate through events
+ let cursor = await index.openCursor(IDBKeyRange.only(pubkey), 'prev');
+ while (cursor && count < targetLimit) {
+ events.push(cursor.value as NostrEvent);
+ count++;
+ cursor = await cursor.continue();
+ }
+
+ await tx.done;
+ return events;
+ } catch (error) {
+ console.debug('Error getting events by pubkey from cache:', error);
+ return [];
+ }
+ }
+
private async getCachedEvents(filters: Filter[]): Promise {
try {
const results: NostrEvent[] = [];
const seen = new Set();
+ const MAX_EVENTS = 1000; // Maximum events to load from cache
+ const PAGE_SIZE = 100; // Load in pages of 100
for (const filter of filters) {
try {
@@ -330,44 +433,47 @@ class NostrClient {
}
candidateEvents = idEvents;
} else if (filter.kinds && filter.kinds.length > 0) {
- // Query by kind(s) if specified
- // If single kind, use index for efficiency
+ // Query by kind(s) if specified - use pagination
if (filter.kinds.length === 1) {
- candidateEvents = await getEventsByKind(filter.kinds[0], (filter.limit || 100) * 3);
+ // Single kind - use paginated query
+ candidateEvents = await this.getEventsByKindPaginated(filter.kinds[0], filter.limit || 100, MAX_EVENTS, PAGE_SIZE);
} else {
- // Multiple kinds - query each and combine
+ // Multiple kinds - query each with pagination and combine
const allEvents: NostrEvent[] = [];
+ const perKindLimit = Math.ceil((filter.limit || 100) / filter.kinds.length);
for (const kind of filter.kinds) {
- const kindEvents = await getEventsByKind(kind, (filter.limit || 100) * 3);
+ const kindEvents = await this.getEventsByKindPaginated(kind, perKindLimit, MAX_EVENTS, PAGE_SIZE);
allEvents.push(...kindEvents);
+ if (allEvents.length >= MAX_EVENTS) break; // Stop if we've hit max
}
candidateEvents = allEvents;
}
} else if (filter.authors && filter.authors.length > 0) {
- // Query by author(s) if no kinds specified
+ // Query by author(s) if no kinds specified - use pagination
if (filter.authors.length === 1) {
- candidateEvents = await getEventsByPubkey(filter.authors[0], (filter.limit || 100) * 3);
+ candidateEvents = await this.getEventsByPubkeyPaginated(filter.authors[0], filter.limit || 100, MAX_EVENTS, PAGE_SIZE);
} else {
- // Multiple authors - query each and combine
+ // Multiple authors - query each with pagination and combine
const allEvents: NostrEvent[] = [];
+ const perAuthorLimit = Math.ceil((filter.limit || 100) / filter.authors.length);
for (const author of filter.authors) {
- const authorEvents = await getEventsByPubkey(author, (filter.limit || 100) * 3);
+ const authorEvents = await this.getEventsByPubkeyPaginated(author, perAuthorLimit, MAX_EVENTS, PAGE_SIZE);
allEvents.push(...authorEvents);
+ if (allEvents.length >= MAX_EVENTS) break; // Stop if we've hit max
}
candidateEvents = allEvents;
}
} else {
- // No specific kind or author - get recent events by created_at
- // This is a fallback for broad queries
+ // No specific kind or author - get recent events by created_at with pagination
try {
const db = await getDB();
const tx = db.transaction('events', 'readonly');
const index = tx.store.index('created_at');
const events: NostrEvent[] = [];
let count = 0;
- const limit = (filter.limit || 100) * 3;
+ const limit = Math.min(filter.limit || 100, MAX_EVENTS);
- // Iterate in reverse (newest first)
+ // Iterate in reverse (newest first) with pagination
let cursor = await index.openCursor(null, 'prev');
while (cursor && count < limit) {
events.push(cursor.value as NostrEvent);
@@ -458,7 +564,7 @@ class NostrClient {
// Sort by created_at descending and apply limit
const sorted = results.sort((a, b) => b.created_at - a.created_at);
const limit = filters[0]?.limit || 100;
- const limited = sorted.slice(0, limit);
+ const limited = sorted.slice(0, Math.min(limit, MAX_EVENTS));
const filtered = filterEvents(limited);
// Only log cache queries at debug level to reduce console noise
@@ -722,7 +828,7 @@ class NostrClient {
}
});
- client.subscriptions.set(`${url}_${subId}`, { relay, sub });
+ client.subscriptions.set(`${url}_${subId}`, { relay, sub, lastActivity: Date.now() });
};
startSub();
@@ -759,7 +865,8 @@ class NostrClient {
relayUrl: string,
filters: Filter[],
events: Map,
- timeout: number
+ timeout: number,
+ onUpdate?: (events: NostrEvent[]) => void
): Promise {
return new Promise((resolve) => {
const makeRequest = () => {
@@ -779,7 +886,7 @@ class NostrClient {
this.totalActiveRequests++;
// Make the request
- this.makeRelayRequest(relayUrl, filters, events, timeout)
+ this.makeRelayRequest(relayUrl, filters, events, timeout, onUpdate)
.finally(() => {
const current = this.activeRequestsPerRelay.get(relayUrl) || 0;
if (current > 0) {
@@ -809,7 +916,8 @@ class NostrClient {
relayUrl: string,
filters: Filter[],
events: Map,
- timeout: number
+ timeout: number,
+ onUpdate?: (events: NostrEvent[]) => void
): Promise {
const relay = this.relays.get(relayUrl);
if (!relay || !this.checkAndCleanupRelay(relayUrl)) {
@@ -820,39 +928,171 @@ class NostrClient {
let resolved = false;
let timeoutId: ReturnType | null = null;
let hasAuthed = this.authenticatedRelays.has(relayUrl);
+ let authInProgress = false;
+ let relayRequiresAuth = false;
+ let seenEventIds = new Set<string>(); // Track seen event IDs for deduplication
+
+ // Create a promise that resolves when the subscription finishes
+ let finishResolve: (() => void) | null = null;
+ const finishPromise = new Promise((resolve) => {
+ finishResolve = resolve;
+ });
const finish = () => {
if (resolved) return;
resolved = true;
if (timeoutId) clearTimeout(timeoutId);
this.unsubscribe(subId);
+ if (finishResolve) {
+ finishResolve();
+ }
};
+ // Simple timeout - single timeout per fetch
+ timeoutId = setTimeout(() => {
+ if (!resolved) {
+ console.debug(`[nostr-client] Timeout reached for ${relayUrl}`);
+ finish();
+ }
+ }, timeout);
+
+ // Check if relay requires auth from NIP-11 metadata (async, but we'll check it)
+ const checkRelayAuthRequirement = async () => {
+ if (!relayRequiresAuth && !hasAuthed) {
+ // Check cache first
+ const cached = this.nip11MetadataCache.get(relayUrl);
+ if (cached && (Date.now() - cached.cachedAt) < this.NIP11_CACHE_TTL) {
+ relayRequiresAuth = cached.requiresAuth;
+ return;
+ }
+
+ try {
+ const httpUrl = relayUrl.replace(/^wss?:\/\//, (match) => {
+ return match === 'wss://' ? 'https://' : 'http://';
+ });
+ const nip11Url = httpUrl; // NIP-11: relay info document is served at the relay's own HTTP URL with Accept: application/nostr+json (/.well-known/nostr.json is NIP-05, not NIP-11)
+ const response = await fetch(nip11Url, {
+ method: 'GET',
+ headers: { 'Accept': 'application/nostr+json' },
+ signal: AbortSignal.timeout(2000) // 2 second timeout for metadata fetch
+ });
+
+ if (response.ok) {
+ const metadata = await response.json();
+ const requiresAuth = !!metadata?.limitation?.auth_required;
+ relayRequiresAuth = requiresAuth;
+ // Cache the result
+ this.nip11MetadataCache.set(relayUrl, { requiresAuth, cachedAt: Date.now() });
+ }
+ } catch (error) {
+ // Metadata fetch failed - that's okay, we'll proceed normally
+ // Cache negative result to avoid repeated failed requests
+ this.nip11MetadataCache.set(relayUrl, { requiresAuth: false, cachedAt: Date.now() });
+ }
+ }
+ };
+
+ // Check auth requirement in background (don't wait for it)
+ checkRelayAuthRequirement();
+
const startSub = () => {
try {
// Check relay status before subscribing
if (!this.checkAndCleanupRelay(relayUrl)) {
- console.debug(`[nostr-client] Relay ${relayUrl} is closed, skipping subscription`);
finish();
return;
}
const client = this;
+ // Re-check authentication state right before subscribing (it might have changed)
+ hasAuthed = client.authenticatedRelays.has(relayUrl);
const sub = relay.subscribe(filters, {
onevent: (event: NostrEvent) => {
try {
- if (!client.relays.has(relayUrl)) return;
+ if (!client.relays.has(relayUrl) || resolved) return;
if (shouldHideEvent(event)) return;
if (client.shouldFilterZapReceipt(event)) return;
+
+ // Deduplicate events
+ if (seenEventIds.has(event.id)) return;
+ seenEventIds.add(event.id);
+
+ // Update subscription activity timestamp
+ const subEntry = client.subscriptions.get(`${relayUrl}_${subId}`);
+ if (subEntry) {
+ subEntry.lastActivity = Date.now();
+ }
+
+ // Add to events Map for return value
events.set(event.id, event);
+
+ // Track in memory manager (for monitoring)
+ memoryManager.trackEvent(event);
+
+ // Cache the event
client.addToCache(event);
+
+ // Check memory usage and cleanup if needed (soft limits)
+ const stats = memoryManager.getStats();
+ if (stats.totalSizeMB > 200) {
+ // If over 200MB, cleanup oldest events to get back to 100MB
+ const cleanedIds = memoryManager.cleanupOldEvents(100 * 1024 * 1024);
+ // Note: We don't remove from events Map here as those are needed for return value
+ // The cleanup is just for tracking/monitoring purposes
+ }
+
+ // Stream event directly to onUpdate callback immediately
+ if (onUpdate && !resolved) {
+ try {
+ const filtered = filterEvents([event]);
+ const zapFiltered = filtered.filter(e => !client.shouldFilterZapReceipt(e));
+ if (zapFiltered.length > 0) {
+ onUpdate(zapFiltered);
+ }
+ } catch (error) {
+ // Ignore errors from onUpdate callback
+ }
+ }
} catch (error) {
// Silently handle errors - connection may be closed
}
},
oneose: () => {
try {
- if (!resolved) finish();
+ if (resolved) return;
+
+ // If we have no events and haven't authenticated, try to authenticate if relay requires it
+ if (events.size === 0 && !hasAuthed && relayRequiresAuth) {
+ const session = sessionManager.getSession();
+ if (session && !authInProgress) {
+ authInProgress = true;
+ // Relay requires auth, attempting authentication
+ client.authenticateRelay(relay, relayUrl)
+ .then((authSuccess) => {
+ authInProgress = false;
+ if (authSuccess) {
+ hasAuthed = true;
+ // Successfully authenticated, retrying subscription
+ if (!resolved) {
+ startSub(); // Retry subscription after authentication
+ }
+ } else {
+ console.debug(`[nostr-client] Authentication failed for ${relayUrl} after 0-event EOSE`);
+ finish();
+ }
+ })
+ .catch((error) => {
+ authInProgress = false;
+ console.debug(`[nostr-client] Authentication error for ${relayUrl} after 0-event EOSE:`, error);
+ finish();
+ });
+ return; // Don't finish yet, wait for auth
+ }
+ }
+
+ // Simple finish on EOSE - no complex timeout chains
+ // EOSE received - subscription complete
+ finish();
} catch (error) {
// Silently handle errors - connection may be closed
}
@@ -862,33 +1102,39 @@ class NostrClient {
if (reason.startsWith('auth-required') && !hasAuthed && !resolved) {
const session = sessionManager.getSession();
if (session) {
+ authInProgress = true;
+ // Relay requires authentication, authenticating
client.authenticateRelay(relay, relayUrl)
.then((authSuccess) => {
+ authInProgress = false;
if (authSuccess) {
hasAuthed = true;
- // Retry subscription after authentication
+ // Successfully authenticated, retrying subscription
if (!resolved) {
startSub();
}
} else {
+ console.debug(`[nostr-client] Authentication failed for ${relayUrl}`);
finish();
}
})
- .catch(() => {
+ .catch((error) => {
+ authInProgress = false;
+ console.debug(`[nostr-client] Authentication error for ${relayUrl}:`, error);
finish();
});
} else {
+ console.debug(`[nostr-client] Relay ${relayUrl} requires authentication but user is not logged in`);
finish();
}
+ } else if (!resolved) {
+ // If subscription closed for other reasons, finish
+ finish();
}
}
});
- client.subscriptions.set(`${relayUrl}_${subId}`, { relay, sub });
-
- timeoutId = setTimeout(() => {
- if (!resolved) finish();
- }, timeout);
+ client.subscriptions.set(`${relayUrl}_${subId}`, { relay, sub, lastActivity: Date.now() });
} catch (error) {
// Handle SendingOnClosedConnection and other errors
const errorMessage = error instanceof Error ? error.message : String(error);
@@ -902,6 +1148,9 @@ class NostrClient {
};
startSub();
+
+ // Wait for the subscription to finish before returning
+ await finishPromise;
}
private processQueue(): void {
@@ -921,62 +1170,128 @@ class NostrClient {
relays: string[],
options: FetchOptions = {}
): Promise {
- const { useCache = true, cacheResults = true, onUpdate, timeout = 10000 } = options;
+ const { useCache = true, cacheResults = true, onUpdate, timeout = 10000, relayFirst = false } = options;
// Create a key for this fetch to prevent duplicates
- const fetchKey = JSON.stringify({ filters, relays: relays.sort() });
-
+ const fetchKey = JSON.stringify({
+ filters,
+ relays: relays.sort()
+ });
+
const activeFetch = this.activeFetches.get(fetchKey);
if (activeFetch) {
return activeFetch;
}
- // Query cache first
- if (useCache) {
- try {
- const cachedEvents = await this.getCachedEvents(filters);
- if (cachedEvents.length > 0) {
- console.debug(`[nostr-client] Returning ${cachedEvents.length} cached events for filter:`, filters);
- // Return cached immediately, fetch fresh in background with delay
- // Don't pass onUpdate to background fetch to avoid interfering with cached results
- if (cacheResults) {
- // Prevent duplicate background refreshes for the same filter
- if (!this.backgroundRefreshes.has(fetchKey)) {
- this.backgroundRefreshes.add(fetchKey);
- // Use a longer delay for background refresh to avoid interfering with initial load
- setTimeout(() => {
- // Only update cache, don't call onUpdate for background refresh
- // This ensures cached events persist and are not cleared by background refresh
- const bgPromise = this.fetchFromRelays(filters, relays, { cacheResults: true, onUpdate: undefined, timeout });
- bgPromise.finally(() => {
- // Remove from background refreshes set after a delay to allow re-refresh if needed
- setTimeout(() => {
- this.backgroundRefreshes.delete(fetchKey);
- }, 60000); // Allow re-refresh after 60 seconds
- }).catch((error) => {
- // Log but don't throw - background refresh failures shouldn't affect cached results
- console.debug('[nostr-client] Background refresh failed:', error);
- this.backgroundRefreshes.delete(fetchKey);
- });
- }, 5000); // 5 second delay for background refresh to avoid interfering
+ // Always use relay-first mode: query relays first with timeout, then fill from cache if needed
+ {
+ // Fetching events from relays
+ const relayTimeout = timeout || 10000; // Default 10s timeout
+ const fetchPromise = (async () => {
+ // For single relay queries, ensure connection is established and authenticated first
+ if (relays.length === 1) {
+ const relayUrl = relays[0];
+ if (!this.relays.has(relayUrl)) {
+ // Try to connect first
+ await this.addRelay(relayUrl);
+ // Wait for connection to establish (up to 2 seconds)
+ let attempts = 0;
+ const maxAttempts = 4; // 2 seconds max
+ while (attempts < maxAttempts && !this.relays.has(relayUrl)) {
+ await new Promise(resolve => setTimeout(resolve, 500));
+ attempts++;
+ }
+ // If still not connected, try one more time
+ if (!this.relays.has(relayUrl)) {
+ await this.addRelay(relayUrl);
+ await new Promise(resolve => setTimeout(resolve, 500));
+ }
+ }
+
+ // Check if relay requires authentication from NIP-11 metadata
+ // If it does, authenticate proactively before starting the subscription
+ const relay = this.relays.get(relayUrl);
+ if (relay && !this.authenticatedRelays.has(relayUrl)) {
+ try {
+ const httpUrl = relayUrl.replace(/^wss?:\/\//, (match) => {
+ return match === 'wss://' ? 'https://' : 'http://';
+ });
+ const nip11Url = httpUrl; // NIP-11: the info document is served at the relay's own HTTP(S) URL with Accept: application/nostr+json (/.well-known/nostr.json is NIP-05, not NIP-11)
+ const response = await fetch(nip11Url, {
+ method: 'GET',
+ headers: { 'Accept': 'application/nostr+json' },
+ signal: AbortSignal.timeout(3000) // 3 second timeout for metadata fetch
+ });
+
+ if (response.ok) {
+ const metadata = await response.json();
+ if (metadata?.limitation?.auth_required) {
+ console.debug(`[nostr-client] Relay ${relayUrl} requires authentication (from NIP-11), authenticating before subscription...`);
+ const session = sessionManager.getSession();
+ if (session) {
+ // Try to authenticate proactively - wait for it to complete before starting subscription
+ try {
+ const authSuccess = await this.authenticateRelay(relay, relayUrl);
+ if (authSuccess) {
+ console.debug(`[nostr-client] Successfully authenticated with ${relayUrl} proactively, starting subscription`);
+ } else {
+ console.debug(`[nostr-client] Proactive auth not available for ${relayUrl}, will authenticate on challenge`);
+ }
+ } catch (error) {
+ // Auth might fail if relay doesn't send challenge - that's okay
+ console.debug(`[nostr-client] Proactive auth attempt for ${relayUrl} failed (will try on challenge):`, error);
+ }
+ } else {
+ console.debug(`[nostr-client] Relay ${relayUrl} requires authentication but user is not logged in`);
+ }
+ }
+ }
+ } catch (error) {
+ // Metadata fetch failed - that's okay, we'll proceed normally
+ console.debug(`[nostr-client] Could not fetch NIP-11 metadata for ${relayUrl}:`, error);
}
}
- return cachedEvents;
}
- // No cached events - this is expected and normal, so don't log it
- } catch (error) {
- console.error('[nostr-client] Error querying cache:', error);
- // Continue to fetch from relays
- }
+
+ // Query relays first with timeout
+ // Respect cacheResults option - don't cache if explicitly disabled
+ const relayEvents = await this.fetchFromRelays(filters, relays, {
+ cacheResults: cacheResults,
+ onUpdate,
+ timeout: relayTimeout
+ });
+
+ // If we got results from relays, return them immediately
+ if (relayEvents.length > 0) {
+ // Got events from relays
+ return relayEvents;
+ }
+
+ // If no results from relays, try to fill from cache (only if useCache is true)
+ // IMPORTANT: In single-relay mode, useCache should be false to avoid showing events from other relays
+ if (useCache) {
+ try {
+ const cachedEvents = await this.getCachedEvents(filters);
+ if (cachedEvents.length > 0) {
+ console.debug(`[nostr-client] Relay query returned 0 events, using ${cachedEvents.length} cached events`);
+ return cachedEvents;
+ }
+ } catch (error) {
+ console.error('[nostr-client] Error querying cache:', error);
+ }
+ } else {
+ console.debug(`[nostr-client] No events from relays, useCache=false, returning empty array`);
+ }
+
+ return relayEvents; // Return empty array if both failed
+ })();
+
+ this.activeFetches.set(fetchKey, fetchPromise);
+ fetchPromise.finally(() => {
+ this.activeFetches.delete(fetchKey);
+ });
+ return fetchPromise;
}
-
- // Fetch from relays
- const fetchPromise = this.fetchFromRelays(filters, relays, { cacheResults, onUpdate, timeout });
- this.activeFetches.set(fetchKey, fetchPromise);
- fetchPromise.finally(() => {
- this.activeFetches.delete(fetchKey);
- });
- return fetchPromise;
}
private async fetchFromRelays(
@@ -1016,9 +1331,26 @@ class NostrClient {
await Promise.allSettled(
relaysToConnect.map(url => this.addRelay(url))
);
- // For single relay, wait a bit longer for connection to establish
+ // For single relay, wait for connection to actually establish
if (relays.length === 1 && relaysToConnect.length > 0) {
- await new Promise(resolve => setTimeout(resolve, 1000));
+ const relayUrl = relaysToConnect[0];
+ let attempts = 0;
+ const maxAttempts = 6; // Wait up to 3 seconds (6 * 500ms) for connection
+ while (attempts < maxAttempts) {
+ const relay = this.relays.get(relayUrl);
+ if (relay) {
+ try {
+ const ws = (relay as any).ws;
+ if (ws && ws.readyState === WebSocket.OPEN) {
+ break; // Connection is open, proceed
+ }
+ } catch {
+ // Ignore errors checking WebSocket
+ }
+ }
+ await new Promise(resolve => setTimeout(resolve, 500));
+ attempts++;
+ }
}
}
@@ -1064,7 +1396,7 @@ class NostrClient {
const events: Map<string, NostrEvent> = new Map();
for (const relayUrl of connectedRelays) {
- await this.throttledRelayRequest(relayUrl, filters, events, timeout);
+ await this.throttledRelayRequest(relayUrl, filters, events, timeout, options.onUpdate);
// Small delay between relays
await new Promise(resolve => setTimeout(resolve, 100));
}
@@ -1082,14 +1414,14 @@ class NostrClient {
// Only call onUpdate if we got new events AND onUpdate is provided
// This prevents clearing the UI when background fetch returns fewer results
if (options.onUpdate && filtered.length > 0) {
- console.log(`[nostr-client] Fetch returned ${filtered.length} events, calling onUpdate`);
+ // Calling onUpdate with fetched events
options.onUpdate(filtered);
} else if (options.onUpdate && filtered.length === 0) {
- console.debug(`[nostr-client] Fetch returned 0 events, skipping onUpdate to preserve cached results`);
+ // No new events, skipping onUpdate
} else if (!options.onUpdate) {
// Only log background refreshes that return events, not empty results
if (filtered.length > 0) {
- console.debug(`[nostr-client] Fetch returned ${filtered.length} events (background refresh, no onUpdate)`);
+ // Background refresh completed
}
}
@@ -1122,16 +1454,35 @@ class NostrClient {
}
close(): void {
+ // Stop subscription cleanup interval
+ if (this.subscriptionCleanupInterval) {
+ clearInterval(this.subscriptionCleanupInterval);
+ this.subscriptionCleanupInterval = null;
+ }
+
+ // Close all subscriptions
for (const { sub } of this.subscriptions.values()) {
- sub.close();
+ try {
+ sub.close();
+ } catch (error) {
+ // Ignore errors
+ }
}
this.subscriptions.clear();
+ // Close all relay connections
for (const relay of this.relays.values()) {
- relay.close();
+ try {
+ relay.close();
+ } catch (error) {
+ // Ignore errors
+ }
}
this.relays.clear();
-
+
+ // Clear memory tracking
+ memoryManager.clear();
+
this.initialized = false;
}
}