You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 
 

1971 lines
80 KiB

/**
* Nostr client - optimized for low bandwidth and efficiency
* Features: request throttling, batching, rate limiting, efficient caching
*/
import { Relay, type Filter, matchFilter, type EventTemplate, type VerifiedEvent } from 'nostr-tools';
import { config } from './config.js';
import type { NostrEvent } from '../../types/nostr.js';
import { cacheEvent, cacheEvents, getEvent, getEventsByKind, getEventsByPubkey } from '../cache/event-cache.js';
import { getDB } from '../cache/indexeddb-store.js';
import { filterEvents, shouldHideEvent } from '../event-filter.js';
import { sessionManager } from '../auth/session-manager.js';
import { KIND } from '../../types/kind-lookup.js';
import { memoryManager } from './memory-manager.js';
/** Options controlling how an event is published to relays. */
export interface PublishOptions {
  /** Explicit relay URLs to publish to; defaults to all currently connected relays. */
  relays?: string[];
  /** Skip relay validation before publishing. (not referenced in the visible code — TODO confirm semantics at call sites) */
  skipRelayValidation?: boolean;
}
/** Options controlling cache strategy, callbacks and timing for event fetches. */
interface FetchOptions {
  useCache?: boolean | 'cache-first' | 'relay-first'; // Cache strategy: true/'cache-first' = check cache first (default), 'relay-first' = query relays first then cache fallback, false = no cache
  cacheResults?: boolean; // Whether fetched events should be written back to the cache (presumably — used later in file; confirm)
  onUpdate?: (events: NostrEvent[]) => void; // Incremental callback invoked as events arrive
  onUpdateWithRelay?: (events: Array<{ event: NostrEvent; relay: string }>) => void; // Callback with relay information for each event
  timeout?: number; // Per-fetch timeout in milliseconds
  priority?: 'high' | 'medium' | 'low'; // Priority level: high for critical UI (comments), low for background (reactions, profiles)
  caller?: string; // Optional caller identifier for logging (e.g., "topics/[name]/+page.svelte")
}
/**
 * Low-bandwidth Nostr relay client.
 * Throttles and rate-limits relay requests, tracks failing relays with
 * exponential backoff, caches NIP-11 metadata and empty results, and
 * persists received events to the local event cache.
 */
class NostrClient {
  // True once initialize() has connected default relays and started timers.
  private initialized = false;
  // Active relay connections keyed by relay URL.
  private relays: Map<string, Relay> = new Map();
  // Live subscriptions keyed by `${relayUrl}_${subId}`; lastActivity drives TTL cleanup.
  private subscriptions: Map<string, { relay: Relay; sub: any; lastActivity: number }> = new Map();
  // Monotonic counter used to build unique subscription ids.
  private nextSubId = 1;
  // In-flight fetch promises — presumably keyed by a fetch signature to dedupe concurrent identical fetches (used later in file; confirm).
  private activeFetches: Map<string, Promise<NostrEvent[]>> = new Map();
  // Subscription TTL cleanup
  private subscriptionCleanupInterval: ReturnType<typeof setInterval> | null = null;
  private readonly SUBSCRIPTION_TTL = 30000; // 30 seconds
  // Rate limiting and throttling
  private requestQueue: Array<() => void> = [];
  private processingQueue = false;
  private lastRequestTime: Map<string, number> = new Map(); // relay -> timestamp
  // Count of currently in-flight requests per relay URL.
  private activeRequestsPerRelay: Map<string, number> = new Map();
  private readonly MIN_REQUEST_INTERVAL = 200; // 200ms between requests to same relay
  private readonly MAX_CONCURRENT_PER_RELAY = 1; // Only 1 concurrent request per relay
  private readonly MAX_CONCURRENT_TOTAL = 3; // Max 3 total concurrent requests
  // Global in-flight request count (bounded by MAX_CONCURRENT_TOTAL).
  private totalActiveRequests = 0;
  // Failed relay tracking with exponential backoff
  private failedRelays: Map<string, { lastFailure: number; retryAfter: number; failureCount: number }> = new Map();
  private readonly INITIAL_RETRY_DELAY = 5000; // 5 seconds
  private readonly MAX_RETRY_DELAY = 300000; // 5 minutes
  private readonly MAX_FAILURE_COUNT = 10; // After 10 failures, wait max delay
  private readonly PERMANENT_FAILURE_THRESHOLD = 20; // After 20 failures, skip relay for this session
  // Track authenticated relays to avoid re-authenticating
  private authenticatedRelays: Set<string> = new Set();
  // Cache NIP-11 metadata to avoid repeated HTTP requests
  private nip11MetadataCache: Map<string, { requiresAuth: boolean; cachedAt: number }> = new Map();
  private readonly NIP11_CACHE_TTL = 60 * 60 * 24 * 1000; // 24 hours (optimized for slow connections)
  // Track fetch patterns to identify repeated fetches
  private fetchPatterns: Map<string, { count: number; lastFetch: number; totalEvents: number }> = new Map();
  // Cache empty results to prevent repeated fetches of non-existent data
  // Also track pending fetches to prevent concurrent duplicate fetches
  private emptyResultCache: Map<string, { cachedAt: number; pending?: boolean }> = new Map();
  private readonly EMPTY_RESULT_CACHE_TTL = 60 * 60 * 1000; // 1 hour - cache empty results longer to prevent repeated fetches (optimized for slow connections)
  private readonly PENDING_FETCH_TTL = 5000; // 5 seconds - how long to wait for a pending fetch
/**
 * Check whether a relay requires NIP-42 authentication, based on its NIP-11
 * relay information document (cached for NIP11_CACHE_TTL).
 * @param relayUrl ws:// or wss:// relay URL.
 * @returns true/false when the requirement is known, null when the metadata
 *          could not be fetched (network error, timeout, non-OK response).
 */
private async checkRelayRequiresAuth(relayUrl: string): Promise<boolean | null> {
  // Serve from cache when fresh.
  const cached = this.nip11MetadataCache.get(relayUrl);
  if (cached && (Date.now() - cached.cachedAt) < this.NIP11_CACHE_TTL) {
    return cached.requiresAuth;
  }
  try {
    // NIP-11: the relay information document is served at the relay's own
    // HTTP(S) URL (ws -> http, wss -> https) when requested with
    // Accept: application/nostr+json. The previous code requested
    // /.well-known/nostr.json, which is the NIP-05 endpoint and would
    // never return relay metadata.
    const httpUrl = relayUrl.replace(/^wss?:\/\//, (match) => {
      return match === 'wss://' ? 'https://' : 'http://';
    });
    const response = await fetch(httpUrl, {
      method: 'GET',
      headers: { 'Accept': 'application/nostr+json' },
      signal: AbortSignal.timeout(2000) // keep slow relays from blocking callers
    });
    if (response.ok) {
      const metadata = await response.json();
      const requiresAuth = metadata?.limitation?.auth_required === true;
      // Cache the result for subsequent checks.
      this.nip11MetadataCache.set(relayUrl, {
        requiresAuth,
        cachedAt: Date.now()
      });
      return requiresAuth;
    }
  } catch {
    // Metadata fetch failed (network/timeout/parse) — requirement unknown.
  }
  return null; // Unknown
}
/**
 * Connect to the configured default relays (in parallel, each bounded to
 * 10 seconds) and start the background cleanup timers. Idempotent.
 */
async initialize(): Promise<void> {
  if (this.initialized) return;
  // Attempt every default relay concurrently; individual failures are
  // swallowed — relays are retried lazily when needed.
  await Promise.allSettled(
    config.defaultRelays.map((url) =>
      Promise.race([
        this.addRelay(url),
        new Promise((_, reject) =>
          setTimeout(() => reject(new Error('Connection timeout')), 10000)
        )
      ]).catch(() => {
        // Silently ignore — we'll retry later if needed.
      })
    )
  );
  // Background maintenance: idle-subscription reaping and memory pressure relief.
  this.startSubscriptionCleanup();
  this.startMemoryCleanup();
  this.initialized = true;
}
/**
 * Start periodic memory cleanup to prevent gradual memory growth.
 * Every 30 seconds, when tracked events exceed 50 MB, asks the memory
 * manager to evict old events down to a 25 MB target.
 * NOTE(review): the interval handle is not stored, so this sweep runs for
 * the lifetime of the page — confirm that is intended.
 */
private startMemoryCleanup(): void {
  setInterval(() => {
    const stats = memoryManager.getStats();
    if (stats.totalSizeMB > 50) {
      // The returned list of evicted ids is not needed here.
      memoryManager.cleanupOldEvents(25 * 1024 * 1024);
    }
  }, 30000); // Every 30 seconds
}
/**
 * Start the periodic sweep that closes subscriptions which have been idle
 * longer than SUBSCRIPTION_TTL. Idempotent: a second call is a no-op.
 */
private startSubscriptionCleanup(): void {
  if (this.subscriptionCleanupInterval) return;
  this.subscriptionCleanupInterval = setInterval(() => {
    const now = Date.now();
    // Collect expired keys first, then delete, to avoid mutating the map
    // while iterating it.
    const expired: string[] = [];
    for (const [key, entry] of this.subscriptions.entries()) {
      if (now - entry.lastActivity > this.SUBSCRIPTION_TTL) {
        expired.push(key);
      }
    }
    for (const key of expired) {
      const entry = this.subscriptions.get(key);
      if (!entry) continue;
      try {
        entry.sub.close();
      } catch {
        // Closing an already-dead subscription may throw; ignore.
      }
      this.subscriptions.delete(key);
    }
  }, 10000); // Sweep every 10 seconds
}
/**
 * Authenticate with a relay using NIP-42 AUTH.
 * Only call this when the relay has sent an AUTH challenge or an operation
 * failed with 'auth-required' — nostr-tools supplies the challenge to the
 * signing callback.
 * @returns true when authenticated (or already authenticated), false when
 *          there is no session, the relay cannot auth, or signing fails.
 */
private async authenticateRelay(relay: Relay, url: string): Promise<boolean> {
  // Already authenticated with this relay in this session.
  if (this.authenticatedRelays.has(url)) {
    return true;
  }
  // A signed AUTH event requires a logged-in session with a signer.
  const session = sessionManager.getSession();
  if (!session) {
    return false;
  }
  try {
    // Some relay objects may not expose auth(); treat that as "cannot auth".
    if (typeof (relay as any).auth !== 'function') {
      return false;
    }
    await relay.auth(async (authEvent: EventTemplate): Promise<VerifiedEvent> => {
      // Build the unsigned event from the challenge template; the session
      // signer produces id/sig. Never log or expose signing material here.
      const eventToSign: Omit<NostrEvent, 'sig' | 'id'> = {
        pubkey: session.pubkey,
        created_at: authEvent.created_at,
        kind: authEvent.kind,
        tags: authEvent.tags || [],
        content: authEvent.content || ''
      };
      const signedEvent = await session.signer(eventToSign);
      // nostr-tools verifies the signature on the returned event.
      return signedEvent as VerifiedEvent;
    });
    this.authenticatedRelays.add(url);
    return true;
  } catch {
    // 'no challenge was received' is expected from relays that don't require
    // auth; any other failure is also non-fatal. We never log error details
    // here to avoid leaking passwords / nsec / ncryptsec values.
    return false;
  }
}
/**
 * Connect to a relay and register it in the active relay map.
 * Never throws: failures are recorded in `failedRelays` with exponential
 * backoff, and relays past PERMANENT_FAILURE_THRESHOLD are skipped for the
 * rest of the session. Callers detect success via `this.relays.has(url)`.
 */
async addRelay(url: string): Promise<void> {
  if (this.relays.has(url)) return;
  // Check if this relay has failed too many times - skip permanently for this session
  const failureInfo = this.failedRelays.get(url);
  if (failureInfo && failureInfo.failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
    // Silently skip - don't throw or log, just return
    return;
  }
  // Check if this relay has failed recently and we should wait (backoff window)
  if (failureInfo) {
    const timeSinceFailure = Date.now() - failureInfo.lastFailure;
    if (timeSinceFailure < failureInfo.retryAfter) {
      // Still in backoff period, silently skip
      return;
    }
  }
  try {
    // Use connection timeout similar to jumble's approach (5 seconds)
    const relay = await Promise.race([
      Relay.connect(url),
      new Promise<never>((_, reject) =>
        setTimeout(() => reject(new Error('Connection timeout')), 5000)
      )
    ]);
    this.relays.set(url, relay);
    // Add connection close handler to automatically clean up closed relays
    try {
      // `ws` is a nostr-tools internal — may be absent in other versions; TODO confirm
      const ws = (relay as any).ws;
      if (ws) {
        ws.addEventListener('close', () => {
          // Only log if relay wasn't already marked as permanently failed
          const failureInfo = this.failedRelays.get(url);
          if (!failureInfo || failureInfo.failureCount < this.PERMANENT_FAILURE_THRESHOLD) {
            // Don't log - connection closes are normal
          }
          this.relays.delete(url);
          this.authenticatedRelays.delete(url);
        });
      }
    } catch (error) {
      // Ignore errors accessing WebSocket
    }
    // Don't proactively authenticate - only authenticate when:
    // 1. Relay sends an AUTH challenge (handled by nostr-tools automatically)
    // 2. An operation fails with 'auth-required' error (handled in publish/subscribe methods)
    // Clear failure tracking on successful connection
    this.failedRelays.delete(url);
    // Don't log successful connections - too verbose
  } catch (error) {
    // Track the failure but don't throw - allow graceful degradation like jumble
    const existingFailure = this.failedRelays.get(url) || { lastFailure: 0, retryAfter: this.INITIAL_RETRY_DELAY, failureCount: 0 };
    const failureCount = existingFailure.failureCount + 1;
    // For connection refused errors, use longer backoff to avoid spam
    const errorMessage = error instanceof Error ? error.message : String(error);
    const isConnectionRefused = errorMessage.includes('CONNECTION_REFUSED') ||
      errorMessage.includes('connection refused') ||
      errorMessage.includes('Connection refused');
    // Use longer initial delay for connection refused errors
    const initialDelay = isConnectionRefused ? this.INITIAL_RETRY_DELAY * 2 : this.INITIAL_RETRY_DELAY;
    const retryAfter = Math.min(
      initialDelay * Math.pow(2, Math.min(failureCount - 1, 6)), // Exponential backoff, max 2^6 = 64x
      this.MAX_RETRY_DELAY
    );
    this.failedRelays.set(url, {
      lastFailure: Date.now(),
      retryAfter,
      failureCount
    });
    // Only log warnings for persistent failures to reduce console noise
    // Connection failures are expected and normal, so we don't log every attempt
    if (failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
      // Only log once when threshold is reached
      if (failureCount === this.PERMANENT_FAILURE_THRESHOLD) {
        // Relay temporarily disabled due to failures
      }
    } else if (failureCount === 10) {
      // Log once at 10 failures as a warning
      // Relay approaching permanent failure threshold
    }
    // Don't throw - allow graceful degradation like jumble does
    // The caller can check if relay was added by checking this.relays.has(url)
  }
}
async removeRelay(url: string): Promise<void> {
const relay = this.relays.get(url);
if (relay) {
try {
relay.close();
} catch (error) {
// Ignore
}
this.relays.delete(url);
// Remove from authenticated relays when disconnecting
this.authenticatedRelays.delete(url);
}
}
/**
 * Verify a relay connection is still usable; prune dead entries from the
 * relay and auth maps.
 * @returns true when the relay exists and appears open/connecting.
 */
private checkAndCleanupRelay(relayUrl: string): boolean {
  const relay = this.relays.get(relayUrl);
  if (!relay) return false;
  // Shared removal path for any detected-dead connection.
  const drop = (): false => {
    this.relays.delete(relayUrl);
    this.authenticatedRelays.delete(relayUrl);
    return false;
  };
  // nostr-tools internal status: 0 CONNECTING, 1 OPEN, 2 CLOSING, 3 CLOSED.
  const status = (relay as any).status;
  if (status === 2 || status === 3) {
    return drop();
  }
  try {
    // Double-check the raw WebSocket when it is exposed.
    const ws = (relay as any).ws;
    if (ws && (ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING)) {
      return drop();
    }
  } catch {
    // Cannot inspect the socket — assume the connection is still valid.
  }
  return true;
}
/**
 * Get a relay instance by URL, connecting on demand.
 * Returns null when the connection cannot be established (graceful
 * degradation, matching jumble's behavior).
 */
async getRelay(url: string): Promise<Relay | null> {
  if (this.relays.has(url)) {
    return this.relays.get(url) ?? null;
  }
  // addRelay never throws; on failure it simply leaves the map untouched.
  await this.addRelay(url);
  return this.relays.get(url) ?? null;
}
private addToCache(event: NostrEvent): void {
cacheEvent(event).catch(() => {
// Silently fail
});
}
/**
 * Load cached events of a single kind, newest first, capped by both `limit`
 * and `maxEvents`.
 * Note: despite the name, this reads all rows for the kind via getAll() and
 * sorts/limits in memory; `pageSize` is currently unused (kept for interface
 * stability).
 */
private async getEventsByKindPaginated(kind: number, limit: number, maxEvents: number, pageSize: number): Promise<NostrEvent[]> {
  try {
    const db = await getDB();
    const tx = db.transaction('events', 'readonly');
    // getAll() keeps the transaction active and avoids cursor iteration issues.
    const matches = await tx.store.index('kind').getAll(kind);
    await tx.done;
    matches.sort((a, b) => b.created_at - a.created_at);
    return matches.slice(0, Math.min(limit, maxEvents));
  } catch {
    // Cache failures are non-critical; behave like a cache miss.
    return [];
  }
}
/**
 * Load cached events authored by a single pubkey, newest first, capped by
 * both `limit` and `maxEvents`.
 * Note: despite the name, this reads all rows for the pubkey via getAll()
 * and sorts/limits in memory; `pageSize` is currently unused (kept for
 * interface stability).
 */
private async getEventsByPubkeyPaginated(pubkey: string, limit: number, maxEvents: number, pageSize: number): Promise<NostrEvent[]> {
  try {
    const db = await getDB();
    const tx = db.transaction('events', 'readonly');
    // getAll() keeps the transaction active and avoids cursor iteration issues.
    const matches = await tx.store.index('pubkey').getAll(pubkey);
    await tx.done;
    matches.sort((a, b) => b.created_at - a.created_at);
    return matches.slice(0, Math.min(limit, maxEvents));
  } catch {
    // Cache failures are non-critical; behave like a cache miss.
    return [];
  }
}
// Tag-filter table: [filter key, lowercase tag name, uppercase tag name].
// Per the original logic, each indexed tag filter accepts both tag cases
// (e.g. '#e' matches events tagged 'e' OR 'E').
private static readonly TAG_FILTER_KEYS: ReadonlyArray<readonly [string, string, string]> = [
  ['#e', 'e', 'E'], ['#E', 'e', 'E'],
  ['#p', 'p', 'P'], ['#P', 'p', 'P'],
  ['#a', 'a', 'A'], ['#A', 'a', 'A'],
  ['#d', 'd', 'D'],
  ['#q', 'q', 'Q'], ['#Q', 'q', 'Q']
];

/**
 * True when `event` satisfies every tag filter present on `filter`.
 * An event with no matching-name tags fails a present tag filter.
 */
private eventPassesTagFilters(filter: Filter, event: NostrEvent): boolean {
  for (const [key, lower, upper] of NostrClient.TAG_FILTER_KEYS) {
    const wanted = (filter as Record<string, unknown>)[key] as string[] | undefined;
    if (!wanted || wanted.length === 0) continue;
    const values = event.tags
      .filter(t => (t[0] === lower || t[0] === upper) && t[1])
      .map(t => t[1]);
    if (values.length === 0 || !wanted.some(v => values.includes(v))) {
      return false;
    }
  }
  return true;
}

/**
 * Collect candidate events from IndexedDB for one filter, using the most
 * selective available index: explicit ids (≤10) > kinds > authors >
 * created_at scan (newest first).
 */
private async collectCacheCandidates(filter: Filter, maxEvents: number, pageSize: number): Promise<NostrEvent[]> {
  // Small explicit id lists are cheapest: direct primary-key lookups.
  if (filter.ids && filter.ids.length > 0 && filter.ids.length <= 10) {
    const found: NostrEvent[] = [];
    for (const id of filter.ids) {
      const event = await getEvent(id);
      if (event) found.push(event);
    }
    return found;
  }
  if (filter.kinds && filter.kinds.length > 0) {
    if (filter.kinds.length === 1) {
      return this.getEventsByKindPaginated(filter.kinds[0], filter.limit || 100, maxEvents, pageSize);
    }
    // Multiple kinds: split the limit evenly, stop once the ceiling is hit.
    const combined: NostrEvent[] = [];
    const perKindLimit = Math.ceil((filter.limit || 100) / filter.kinds.length);
    for (const kind of filter.kinds) {
      combined.push(...await this.getEventsByKindPaginated(kind, perKindLimit, maxEvents, pageSize));
      if (combined.length >= maxEvents) break;
    }
    return combined;
  }
  if (filter.authors && filter.authors.length > 0) {
    if (filter.authors.length === 1) {
      return this.getEventsByPubkeyPaginated(filter.authors[0], filter.limit || 100, maxEvents, pageSize);
    }
    // Multiple authors: split the limit evenly, stop once the ceiling is hit.
    const combined: NostrEvent[] = [];
    const perAuthorLimit = Math.ceil((filter.limit || 100) / filter.authors.length);
    for (const author of filter.authors) {
      combined.push(...await this.getEventsByPubkeyPaginated(author, perAuthorLimit, maxEvents, pageSize));
      if (combined.length >= maxEvents) break;
    }
    return combined;
  }
  // No id/kind/author hint: walk the created_at index newest-first.
  try {
    const db = await getDB();
    const tx = db.transaction('events', 'readonly');
    const index = tx.store.index('created_at');
    const events: NostrEvent[] = [];
    const limit = Math.min(filter.limit || 100, maxEvents);
    let cursor = await index.openCursor(null, 'prev');
    while (cursor && events.length < limit) {
      events.push(cursor.value as NostrEvent);
      cursor = await cursor.continue();
    }
    await tx.done;
    return events;
  } catch {
    // IndexedDB query failed (non-critical).
    return [];
  }
}

/**
 * Query the local IndexedDB event cache with Nostr filters.
 * Candidates are fetched per filter via the best index, then re-checked
 * against since/until, ids, authors, kinds and tag filters, with
 * matchFilter() as the authoritative final validation. Results are
 * deduplicated, sorted newest-first, limited by the first filter's limit
 * (capped at 1000), and passed through filterEvents() for moderation.
 * Returns [] on any unrecoverable cache error (cache misses are non-fatal).
 */
private async getCachedEvents(filters: Filter[]): Promise<NostrEvent[]> {
  const MAX_EVENTS = 1000; // hard ceiling on events pulled from cache
  const PAGE_SIZE = 100;
  try {
    const results: NostrEvent[] = [];
    const seen = new Set<string>();
    for (const filter of filters) {
      try {
        const candidates = await this.collectCacheCandidates(filter, MAX_EVENTS, PAGE_SIZE);
        // When candidates were resolved directly by id, skip re-checking ids.
        const queriedByIds = !!(filter.ids && filter.ids.length > 0 && filter.ids.length <= 10);
        for (const event of candidates) {
          if (seen.has(event.id)) continue;
          if (filter.since && event.created_at < filter.since) continue;
          if (filter.until && event.created_at > filter.until) continue;
          if (!queriedByIds && filter.ids && filter.ids.length > 0 && !filter.ids.includes(event.id)) continue;
          if (filter.authors && filter.authors.length > 0 && !filter.authors.includes(event.pubkey)) continue;
          if (filter.kinds && filter.kinds.length > 0 && !filter.kinds.includes(event.kind)) continue;
          if (!this.eventPassesTagFilters(filter, event)) continue;
          // Use matchFilter for final validation.
          if (matchFilter(filter, event)) {
            results.push(event);
            seen.add(event.id);
          }
        }
      } catch {
        // A bad filter or cache read must not abort the remaining filters.
      }
    }
    // Sort by created_at descending and apply the first filter's limit.
    results.sort((a, b) => b.created_at - a.created_at);
    const limit = filters[0]?.limit || 100;
    return filterEvents(results.slice(0, Math.min(limit, MAX_EVENTS)));
  } catch {
    // Cache error (non-critical).
    return [];
  }
}
async publish(event: NostrEvent, options: PublishOptions = {}): Promise<{
success: string[];
failed: Array<{ relay: string; error: string }>;
}> {
const relays = options.relays || Array.from(this.relays.keys());
const results = {
success: [] as string[],
failed: [] as Array<{ relay: string; error: string }>
};
this.addToCache(event);
for (const url of relays) {
const relay = this.relays.get(url);
if (!relay) {
// addRelay doesn't throw on failure, it just doesn't add the relay (graceful degradation like jumble)
await this.addRelay(url);
const newRelay = this.relays.get(url);
if (newRelay && this.checkAndCleanupRelay(url)) {
try {
await newRelay.publish(event);
results.success.push(url);
} catch (error) {
// Check if error is auth-required
if (error instanceof Error && error.message.startsWith('auth-required')) {
// Try to authenticate and retry
const authSuccess = await this.authenticateRelay(newRelay, url);
if (authSuccess) {
try {
await newRelay.publish(event);
results.success.push(url);
} catch (retryError) {
results.failed.push({
relay: url,
error: retryError instanceof Error ? retryError.message : 'Unknown error'
});
}
} else {
results.failed.push({
relay: url,
error: 'Authentication required but failed'
});
}
} else {
// Check if it's a closed connection error
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
// Relay closed during publish
this.relays.delete(url);
this.authenticatedRelays.delete(url);
}
results.failed.push({
relay: url,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
} else {
// Relay connection failed (addRelay didn't add it)
results.failed.push({
relay: url,
error: 'Failed to connect'
});
}
} else {
// Check relay status before publishing
if (!this.checkAndCleanupRelay(url)) {
results.failed.push({
relay: url,
error: 'Relay connection closed'
});
continue;
}
try {
await relay.publish(event);
results.success.push(url);
} catch (error) {
// Check if error is auth-required
if (error instanceof Error && error.message.startsWith('auth-required')) {
// Try to authenticate and retry
const authSuccess = await this.authenticateRelay(relay, url);
if (authSuccess) {
try {
await relay.publish(event);
results.success.push(url);
} catch (retryError) {
results.failed.push({
relay: url,
error: retryError instanceof Error ? retryError.message : 'Unknown error'
});
}
} else {
results.failed.push({
relay: url,
error: 'Authentication required but failed'
});
}
} else {
// Check if it's a closed connection error
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
console.debug(`[nostr-client] Relay ${url} connection closed during publish, removing from active relays`);
this.relays.delete(url);
this.authenticatedRelays.delete(url);
}
results.failed.push({
relay: url,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
}
}
return results;
}
/**
 * Subscribe to filters across multiple relays.
 * Relays that are not yet connected are connected in the background
 * (fire-and-forget), so their events may start arriving after this method
 * returns. Relays in backoff or past the permanent-failure threshold are
 * skipped silently.
 * @returns an opaque subscription id to pass to unsubscribe().
 */
subscribe(
  filters: Filter[],
  relays: string[],
  onEvent: (event: NostrEvent, relay: string) => void,
  onEose?: (relay: string) => void
): string {
  const subId = `sub_${this.nextSubId++}_${Date.now()}`;
  for (const url of relays) {
    if (!this.relays.has(url)) {
      // Check if relay should be skipped before attempting connection
      const failureInfo = this.failedRelays.get(url);
      if (failureInfo && failureInfo.failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
        // Skip permanently failed relays silently
        continue;
      }
      // Check if relay failed recently and is still in backoff period
      if (failureInfo) {
        const timeSinceFailure = Date.now() - failureInfo.lastFailure;
        if (timeSinceFailure < failureInfo.retryAfter) {
          // Still in backoff period, skip this attempt
          continue;
        }
      }
      // addRelay doesn't throw on failure, it just doesn't add the relay (graceful degradation like jumble)
      // Deliberately not awaited: the subscription attaches once connected.
      this.addRelay(url).then(() => {
        const newRelay = this.relays.get(url);
        if (newRelay) {
          this.setupSubscription(newRelay, url, subId, filters, onEvent, onEose);
        }
      }).catch(() => {
        // Ignore errors - addRelay handles failures gracefully
      });
      continue;
    }
    const relay = this.relays.get(url);
    if (!relay) continue;
    // Check relay status before setting up subscription
    if (!this.checkAndCleanupRelay(url)) {
      // Relay is closed, skip silently
      continue;
    }
    try {
      this.setupSubscription(relay, url, subId, filters, onEvent, onEose);
    } catch (error) {
      // Handle errors - setupSubscription already handles closed connection errors
      const errorMessage = error instanceof Error ? error.message : String(error);
      if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
        // Relay connection closed mid-setup: prune it from the active maps
        this.relays.delete(url);
        this.authenticatedRelays.delete(url);
      }
    }
  }
  return subId;
}
/**
 * Attach one subscription to a single connected relay.
 * Every received event is written to the local cache before being forwarded
 * to `onEvent`. When the relay closes the subscription with 'auth-required',
 * authenticates via NIP-42 once and restarts the subscription.
 * The subscription is registered under `${url}_${subId}` for TTL cleanup.
 */
private setupSubscription(
  relay: Relay,
  url: string,
  subId: string,
  filters: Filter[],
  onEvent: (event: NostrEvent, relay: string) => void,
  onEose?: (relay: string) => void
): void {
  if (!this.relays.has(url)) return;
  // Check relay status before setting up subscription
  if (!this.checkAndCleanupRelay(url)) {
    // Relay closed, skipping subscription
    return;
  }
  try {
    // Captured so the nested callbacks keep a stable reference to the client.
    const client = this;
    let hasAuthed = this.authenticatedRelays.has(url);
    const startSub = () => {
      // Check relay status again before subscribing
      if (!client.checkAndCleanupRelay(url)) {
        // Relay closed before subscription
        return;
      }
      const sub = relay.subscribe(filters, {
        onevent: (event: NostrEvent) => {
          try {
            // Ignore events from relays we have since dropped.
            if (!client.relays.has(url)) return;
            client.addToCache(event);
            onEvent(event, url);
          } catch (err) {
            // Silently handle errors - connection may be closed
          }
        },
        oneose: () => {
          try {
            if (!client.relays.has(url)) return;
            onEose?.(url);
          } catch (err) {
            // Silently handle errors - connection may be closed
          }
        },
        onclose: (reason: string) => {
          // Handle auth-required on subscription close: authenticate once,
          // then restart the subscription via startSub().
          if (reason.startsWith('auth-required') && !hasAuthed) {
            const session = sessionManager.getSession();
            if (session) {
              client.authenticateRelay(relay, url)
                .then((authSuccess) => {
                  if (authSuccess) {
                    hasAuthed = true;
                    // Retry subscription after authentication
                    startSub();
                  }
                })
                .catch(() => {
                  // Authentication failed, give up
                });
            }
          }
        }
      });
      client.subscriptions.set(`${url}_${subId}`, { relay, sub, lastActivity: Date.now() });
    };
    startSub();
  } catch (error) {
    // Handle SendingOnClosedConnection and other errors
    const errorMessage = error instanceof Error ? error.message : String(error);
    if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
      // Relay closed during subscription setup — prune it
      this.relays.delete(url);
      this.authenticatedRelays.delete(url);
    }
  }
}
/** Close and forget every per-relay subscription created under `subId`. */
unsubscribe(subId: string): void {
  // Keys have the shape `${relayUrl}_${subId}`; match on the suffix.
  for (const [key, entry] of Array.from(this.subscriptions.entries())) {
    if (!key.endsWith(`_${subId}`)) continue;
    try {
      // The connection may already be gone; a failing close() is fine.
      if (entry.sub && typeof entry.sub.close === 'function') {
        entry.sub.close();
      }
    } catch {
      // Connection may already be closed, which is fine.
    }
    this.subscriptions.delete(key);
  }
}
/**
 * Send a relay request through the client-side rate limiter.
 * Enforces a minimum interval between requests to the same relay
 * (MIN_REQUEST_INTERVAL), a per-relay concurrency cap and a global
 * concurrency cap; 'high' priority requests get looser limits and shorter
 * waits. When limits are exceeded, retries via setTimeout until a slot
 * opens. Resolves after the underlying request completes (or times out).
 */
private async throttledRelayRequest(
  relayUrl: string,
  filters: Filter[],
  events: Map<string, NostrEvent>,
  timeout: number,
  onUpdate?: (events: NostrEvent[]) => void,
  onUpdateWithRelay?: (events: Array<{ event: NostrEvent; relay: string }>) => void,
  priority: 'high' | 'medium' | 'low' = 'medium'
): Promise<void> {
  return new Promise((resolve) => {
    const makeRequest = () => {
      const now = Date.now();
      const lastRequest = this.lastRequestTime.get(relayUrl) || 0;
      const timeSinceLastRequest = now - lastRequest;
      const activeForRelay = this.activeRequestsPerRelay.get(relayUrl) || 0;
      // High priority requests get higher concurrent limits and can bypass some throttling
      const maxConcurrentForPriority = priority === 'high' ? 2 : this.MAX_CONCURRENT_PER_RELAY;
      const maxTotalForPriority = priority === 'high' ? 5 : this.MAX_CONCURRENT_TOTAL;
      const minIntervalForPriority = priority === 'high' ? 50 : this.MIN_REQUEST_INTERVAL;
      // Check if we can make the request now
      // High priority requests can bypass some throttling
      if (timeSinceLastRequest >= minIntervalForPriority &&
        activeForRelay < maxConcurrentForPriority &&
        this.totalActiveRequests < maxTotalForPriority) {
        // Claim a slot before starting so concurrent callers see the updated counts.
        this.lastRequestTime.set(relayUrl, now);
        this.activeRequestsPerRelay.set(relayUrl, activeForRelay + 1);
        this.totalActiveRequests++;
        // Make the request
        this.makeRelayRequest(relayUrl, filters, events, timeout, onUpdate, onUpdateWithRelay)
          .finally(() => {
            // Release the per-relay and global slots (guarded against underflow).
            const current = this.activeRequestsPerRelay.get(relayUrl) || 0;
            if (current > 0) {
              this.activeRequestsPerRelay.set(relayUrl, current - 1);
            }
            if (this.totalActiveRequests > 0) {
              this.totalActiveRequests--;
            }
            resolve();
            this.processQueue(); // Process next in queue
          });
      } else {
        // Wait and retry
        // High priority requests wait less
        const waitTime = priority === 'high'
          ? Math.max(minIntervalForPriority - timeSinceLastRequest, 10)
          : Math.max(this.MIN_REQUEST_INTERVAL - timeSinceLastRequest, 100);
        setTimeout(makeRequest, waitTime);
      }
    };
    makeRequest();
  });
}
/**
 * Run one subscription against a single relay, streaming matching events
 * into the shared `events` accumulator (deduplicated by event id) until
 * EOSE, timeout, or the relay closes the subscription.
 *
 * NIP-42 authentication is handled transparently: if the relay closes with
 * an "auth-required" reason — or its NIP-11 metadata says auth is required
 * and EOSE arrives with zero events — we authenticate and retry the
 * subscription once per successful auth.
 *
 * @param relayUrl          URL of a relay expected to be in `this.relays`
 * @param filters           Nostr filters for the subscription
 * @param events            Shared accumulator map (eventId -> event); also
 *                          trimmed here to bound per-request memory use
 * @param timeout           Hard cap in ms before the request is abandoned
 * @param onUpdate          Optional streaming callback (filtered events)
 * @param onUpdateWithRelay Optional streaming callback that also reports
 *                          which relay delivered each event
 */
private async makeRelayRequest(
  relayUrl: string,
  filters: Filter[],
  events: Map<string, NostrEvent>,
  timeout: number,
  onUpdate?: (events: NostrEvent[]) => void,
  onUpdateWithRelay?: (events: Array<{ event: NostrEvent; relay: string }>) => void
): Promise<void> {
  const relay = this.relays.get(relayUrl);
  if (!relay || !this.checkAndCleanupRelay(relayUrl)) {
    return;
  }
  const subId = `sub_${this.nextSubId++}_${Date.now()}`;
  let resolved = false;
  let timeoutId: ReturnType<typeof setTimeout> | null = null;
  let hasAuthed = this.authenticatedRelays.has(relayUrl);
  let authInProgress = false;
  let relayRequiresAuth = false;
  const seenEventIds = new Set<string>(); // Track seen event IDs for deduplication
  // Promise that resolves exactly once, when the subscription finishes
  let finishResolve: (() => void) | null = null;
  const finishPromise = new Promise<void>((resolve) => {
    finishResolve = resolve;
  });
  // Idempotent teardown: clears the timeout, closes the subscription, and
  // releases the awaiting caller.
  const finish = () => {
    if (resolved) return;
    resolved = true;
    if (timeoutId) clearTimeout(timeoutId);
    this.unsubscribe(subId);
    if (finishResolve) {
      finishResolve();
    }
  };
  // Simple timeout - single timeout per fetch
  timeoutId = setTimeout(() => {
    if (!resolved) {
      // Request timeout
      finish();
    }
  }, timeout);
  // Determine (in the background) whether this relay advertises
  // NIP-11 `limitation.auth_required`, so the EOSE handler knows whether an
  // empty result is worth an auth+retry.
  const checkRelayAuthRequirement = async () => {
    if (!relayRequiresAuth && !hasAuthed) {
      // Check cache first
      const cached = this.nip11MetadataCache.get(relayUrl);
      if (cached && (Date.now() - cached.cachedAt) < this.NIP11_CACHE_TTL) {
        relayRequiresAuth = cached.requiresAuth;
        return;
      }
      try {
        const httpUrl = relayUrl.replace(/^wss?:\/\//, (match) => {
          return match === 'wss://' ? 'https://' : 'http://';
        });
        // NIP-11: the relay information document is served at the relay's
        // own HTTP(S) URL with Accept: application/nostr+json.
        // (/.well-known/nostr.json is NIP-05 identifier verification, not
        // relay metadata — querying it here always missed.)
        const nip11Url = httpUrl;
        const response = await fetch(nip11Url, {
          method: 'GET',
          headers: { 'Accept': 'application/nostr+json' },
          signal: AbortSignal.timeout(2000) // 2 second timeout for metadata fetch
        });
        if (response.ok) {
          const metadata = await response.json();
          const requiresAuth = !!metadata?.limitation?.auth_required;
          relayRequiresAuth = requiresAuth;
          // Cache the result
          this.nip11MetadataCache.set(relayUrl, { requiresAuth, cachedAt: Date.now() });
        }
      } catch (error) {
        // Metadata fetch failed - that's okay, we'll proceed normally.
        // Cache negative result to avoid repeated failed requests.
        this.nip11MetadataCache.set(relayUrl, { requiresAuth: false, cachedAt: Date.now() });
      }
    }
  };
  // Check auth requirement in background (don't wait for it)
  checkRelayAuthRequirement();
  // Starts (or, after a successful auth, restarts) the subscription.
  const startSub = () => {
    try {
      // Check relay status before subscribing
      if (!this.checkAndCleanupRelay(relayUrl)) {
        finish();
        return;
      }
      const client = this;
      // Re-check authentication state right before subscribing (it might have changed)
      hasAuthed = client.authenticatedRelays.has(relayUrl);
      const sub = relay.subscribe(filters, {
        onevent: (event: NostrEvent) => {
          try {
            if (!client.relays.has(relayUrl) || resolved) return;
            if (shouldHideEvent(event)) return;
            // Deduplicate events
            if (seenEventIds.has(event.id)) return;
            seenEventIds.add(event.id);
            // Update subscription activity timestamp (used by TTL cleanup)
            const subEntry = client.subscriptions.get(`${relayUrl}_${subId}`);
            if (subEntry) {
              subEntry.lastActivity = Date.now();
            }
            // Add to events Map for return value
            events.set(event.id, event);
            // Track in memory manager (for monitoring)
            memoryManager.trackEvent(event);
            // Cache the event
            client.addToCache(event);
            // Check memory usage and cleanup if needed (aggressive limits)
            const stats = memoryManager.getStats();
            if (stats.totalSizeMB > 50) {
              // If over 50MB, cleanup oldest events to get back to 25MB
              const cleanedIds = memoryManager.cleanupOldEvents(25 * 1024 * 1024);
              // Actually remove cleaned events from the Map to free memory
              if (cleanedIds.length > 0) {
                for (const id of cleanedIds) {
                  events.delete(id);
                }
                // Memory cleanup completed
              }
            }
            // Also limit Map size per-request to prevent unbounded growth
            if (events.size > 500) {
              const sorted = Array.from(events.entries())
                .sort((a, b) => a[1].created_at - b[1].created_at); // Oldest first
              const toRemove = sorted.slice(0, events.size - 500);
              for (const [id] of toRemove) {
                events.delete(id);
                memoryManager.untrackEvent(id);
              }
            }
            // Stream event directly to onUpdate callback immediately
            if ((onUpdate || onUpdateWithRelay) && !resolved) {
              try {
                const filtered = filterEvents([event]);
                if (filtered.length > 0) {
                  // Call onUpdate if provided (backward compatible)
                  if (onUpdate) {
                    onUpdate(filtered);
                  }
                  // Call onUpdateWithRelay if provided (includes relay info)
                  if (onUpdateWithRelay) {
                    onUpdateWithRelay(filtered.map(e => ({ event: e, relay: relayUrl })));
                  }
                }
              } catch (error) {
                // Ignore errors from onUpdate callback
              }
            }
          } catch (error) {
            // Silently handle errors - connection may be closed
          }
        },
        oneose: () => {
          try {
            if (resolved) return;
            // Empty result from a relay known (via NIP-11) to require auth:
            // authenticate and retry once before giving up.
            if (events.size === 0 && !hasAuthed && relayRequiresAuth) {
              const session = sessionManager.getSession();
              if (session && !authInProgress) {
                authInProgress = true;
                // Relay requires auth, attempting authentication
                client.authenticateRelay(relay, relayUrl)
                  .then((authSuccess) => {
                    authInProgress = false;
                    if (authSuccess) {
                      hasAuthed = true;
                      // Successfully authenticated, retrying subscription
                      if (!resolved) {
                        startSub(); // Retry subscription after authentication
                      }
                    } else {
                      // Authentication failed (expected for some relays)
                      finish();
                    }
                  })
                  .catch((error) => {
                    authInProgress = false;
                    // Authentication error (non-critical)
                    finish();
                  });
                return; // Don't finish yet, wait for auth
              }
            }
            // Simple finish on EOSE - no complex timeout chains
            finish();
          } catch (error) {
            // Silently handle errors - connection may be closed
          }
        },
        onclose: (reason: string) => {
          // Handle NIP-42 auth-required on subscription close
          if (reason.startsWith('auth-required') && !hasAuthed && !resolved) {
            const session = sessionManager.getSession();
            if (session) {
              authInProgress = true;
              // Relay requires authentication, authenticating
              client.authenticateRelay(relay, relayUrl)
                .then((authSuccess) => {
                  authInProgress = false;
                  if (authSuccess) {
                    hasAuthed = true;
                    // Successfully authenticated, retrying subscription
                    if (!resolved) {
                      startSub();
                    }
                  } else {
                    // Authentication failed (non-critical)
                    finish();
                  }
                })
                .catch((error) => {
                  authInProgress = false;
                  console.debug(`[nostr-client] Authentication error for ${relayUrl}:`, error);
                  finish();
                });
            } else {
              console.debug(`[nostr-client] Relay ${relayUrl} requires authentication but user is not logged in`);
              finish();
            }
          } else if (!resolved) {
            // If subscription closed for other reasons, finish
            finish();
          }
        }
      });
      client.subscriptions.set(`${relayUrl}_${subId}`, { relay, sub, lastActivity: Date.now() });
    } catch (error) {
      // Handle SendingOnClosedConnection and other errors
      const errorMessage = error instanceof Error ? error.message : String(error);
      if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
        console.debug(`[nostr-client] Relay ${relayUrl} connection closed, removing from active relays`);
        this.relays.delete(relayUrl);
        this.authenticatedRelays.delete(relayUrl);
      }
      finish();
    }
  };
  startSub();
  // Wait for the subscription to finish before returning
  await finishPromise;
}
/**
 * Drain exactly one queued request. Subsequent draining is driven by each
 * completed request's finally() handler calling processQueue() again, so
 * the queue empties one task at a time.
 */
private processQueue(): void {
  if (this.processingQueue) return;
  const task = this.requestQueue.shift();
  if (task === undefined) return;
  // Re-entrancy guard while the task runs synchronously.
  this.processingQueue = true;
  task();
  this.processingQueue = false;
}
/**
 * Best-effort identification of the calling module, for log attribution.
 * Parses the current stack trace and returns a short identifier such as
 * "routes/topics/[name]/+page.svelte" or "FeedPage.svelte"; returns
 * "unknown" when no recognizable file path is found.
 *
 * NOTE(review): stack-trace formats are engine-specific, so the frame-skip
 * count and path patterns below are heuristics — they may need adjusting
 * for other browsers/bundlers.
 */
private getCallerInfo(): string {
  try {
    const stack = new Error().stack;
    if (!stack) return 'unknown';
    // Skip the first 3 lines: Error, getCallerInfo, fetchEvents
    const lines = stack.split('\n').slice(3);
    for (const line of lines) {
      // Look for file paths in the stack trace.
      // Match various patterns:
      // - at functionName (file:///path/to/file.svelte:123:45)
      // - at functionName (/path/to/file.ts:123:45)
      // - @file:///path/to/file.svelte:123:45
      // - file:///path/to/file.svelte:123:45
      let filePath: string | null = null;
      // Patterns are tried in order from most to least specific; the first
      // match wins.
      const patterns = [
        /(?:\(|@)(file:\/\/\/[^:)]+\.(?:svelte|ts|js))/i, // file:///path/to/file.svelte
        /(?:\(|@)(\/[^:)]+\.(?:svelte|ts|js))/i, // /path/to/file.svelte
        /(?:\(|@)([^:)]+src\/[^:)]+\.(?:svelte|ts|js))/i, // ...src/path/to/file.svelte
        /([^:)]+src\/[^:)]+\.(?:svelte|ts|js))/i, // ...src/path/to/file.svelte (no parens)
      ];
      for (const pattern of patterns) {
        const match = line.match(pattern);
        if (match) {
          filePath = match[1];
          break;
        }
      }
      if (filePath) {
        // Normalize file:/// URLs to plain absolute paths
        if (filePath.startsWith('file:///')) {
          filePath = filePath.replace(/^file:\/\/\//, '/');
        }
        // Keep only the path segments after 'src' — that part is stable
        // across machines and meaningful to developers.
        const parts = filePath.split('/');
        const srcIndex = parts.indexOf('src');
        if (srcIndex !== -1) {
          // Get everything after 'src'
          const relevantParts = parts.slice(srcIndex + 1);
          // Prefer showing route/page names
          if (relevantParts[0] === 'routes') {
            // Show the route path up to and including the +page/+layout file
            const pageIndex = relevantParts.findIndex(p => p.startsWith('+page') || p.startsWith('+layout'));
            if (pageIndex !== -1) {
              return relevantParts.slice(0, pageIndex + 1).join('/');
            }
            return relevantParts.slice(0, 3).join('/'); // routes/topics/[name]/+page.svelte
          }
          // For lib files, show component/service name
          if (relevantParts[0] === 'lib') {
            const fileName = relevantParts[relevantParts.length - 1];
            // Show the path relative to components/ or modules/ when present
            if (relevantParts.includes('components') || relevantParts.includes('modules')) {
              const componentIndex = relevantParts.findIndex(p => p === 'components' || p === 'modules');
              if (componentIndex !== -1) {
                return relevantParts.slice(componentIndex + 1).join('/');
              }
            }
            return fileName;
          }
          return relevantParts.join('/');
        }
      }
    }
  } catch (e) {
    // Ignore errors in stack trace parsing
  }
  return 'unknown';
}
/**
 * Summarize a Nostr filter as a short human-readable string for log lines,
 * e.g. "kinds:1,7 authors:3 limit:50". Returns "empty filter" when the
 * filter has no recognized constraints.
 */
private describeFilter(filter: Filter): string {
  const segments: string[] = [];
  const { kinds, ids, authors, limit } = filter;
  if (kinds?.length) {
    segments.push(`kind${kinds.length > 1 ? 's' : ''}:${kinds.join(',')}`);
  }
  // For ids/authors only the count is shown — the values are long hashes.
  if (ids?.length) {
    segments.push(`id${ids.length > 1 ? 's' : ''}:${ids.length}`);
  }
  if (authors?.length) {
    segments.push(`author${authors.length > 1 ? 's' : ''}:${authors.length}`);
  }
  const eTags = filter['#e'];
  if (eTags && eTags.length > 0) {
    segments.push(`#e:${eTags.length}`);
  }
  const pTags = filter['#p'];
  if (pTags && pTags.length > 0) {
    segments.push(`#p:${pTags.length}`);
  }
  if (limit) {
    segments.push(`limit:${limit}`);
  }
  return segments.length > 0 ? segments.join(' ') : 'empty filter';
}
/**
 * Fetch events matching `filters` from `relays`, with layered protections
 * against redundant work:
 *  - identical in-flight fetches are deduplicated (`activeFetches`);
 *  - empty results are cached for a short TTL so non-existent data isn't
 *    re-queried repeatedly (`emptyResultCache`, with a `pending` flag to
 *    serialize concurrent identical fetches);
 *  - cache strategy is controlled by `options.useCache`:
 *    'cache-first' (default) returns cached events immediately and refreshes
 *    from relays in the background; 'relay-first' queries relays and falls
 *    back to the cache; `false` skips the cache entirely.
 *
 * @param filters Nostr filters to query
 * @param relays  Relay URLs to query (order-insensitive for deduplication)
 * @param options See FetchOptions; `onUpdate`/`onUpdateWithRelay` stream
 *                incremental results as they arrive
 * @returns The (filtered) events found, possibly served from cache
 */
async fetchEvents(
  filters: Filter[],
  relays: string[],
  options: FetchOptions = {}
): Promise<NostrEvent[]> {
  // NOTE: `priority` is intentionally not destructured here — it is only
  // ever forwarded as options.priority (fetchFromRelays applies its own
  // 'medium' default).
  const { cacheResults = true, onUpdate, timeout = 10000, caller: providedCaller } = options;
  const caller = providedCaller || this.getCallerInfo();
  // Normalize useCache (boolean | string) to a single string strategy
  let useCacheValue: 'cache-first' | 'relay-first' | false;
  const useCacheOption = options.useCache ?? true;
  if (useCacheOption === true) {
    useCacheValue = 'cache-first';
  } else if (useCacheOption === false) {
    useCacheValue = false;
  } else {
    useCacheValue = useCacheOption; // Already a string
  }
  const shouldUseCache = useCacheValue !== false;
  const isCacheFirst = useCacheValue === 'cache-first';
  // Key for in-flight deduplication (includes relay set)
  const fetchKey = JSON.stringify({
    filters,
    relays: relays.sort()
  });
  // Human-readable descriptions for logging
  const filterDesc = filters.length === 1
    ? this.describeFilter(filters[0])
    : filters.map(f => this.describeFilter(f)).join(' | ');
  const relayDesc = relays.length <= 3
    ? relays.join(', ')
    : `${relays.length} relays (${relays.slice(0, 2).join(', ')}, ...)`;
  const activeFetch = this.activeFetches.get(fetchKey);
  if (activeFetch) {
    console.debug(`[nostr-client] Deduplicating fetch [${filterDesc}] from [${relayDesc}] - already in progress`);
    return activeFetch;
  }
  // Empty-result cache key is based on filters only (not relays), so
  // different relay sets can share the negative-result cache.
  const filterKey = JSON.stringify(filters.map(f => ({
    kinds: f.kinds,
    ids: f.ids,
    authors: f.authors,
    '#e': f['#e'],
    '#p': f['#p'],
    limit: f.limit
  })));
  const emptyCacheKey = filterKey;
  // Check and set pending flag atomically to prevent race conditions
  const emptyCacheEntry = this.emptyResultCache.get(emptyCacheKey);
  if (emptyCacheEntry) {
    const age = Date.now() - emptyCacheEntry.cachedAt;
    if (emptyCacheEntry.pending && age < this.PENDING_FETCH_TTL) {
      // Another fetch for this is in progress, wait for it
      console.log(`[nostr-client] Waiting for pending fetch [${filterDesc}] from [${relayDesc}] - another fetch in progress`);
      // Poll up to 4 times (2 seconds total) for the pending fetch to finish
      for (let i = 0; i < 4; i++) {
        await new Promise(resolve => setTimeout(resolve, 500));
        const updatedEntry = this.emptyResultCache.get(emptyCacheKey);
        if (updatedEntry && !updatedEntry.pending) {
          if ((Date.now() - updatedEntry.cachedAt) < this.EMPTY_RESULT_CACHE_TTL) {
            const finalAge = Math.round((Date.now() - updatedEntry.cachedAt) / 1000);
            console.debug(`[nostr-client] Skipping fetch [${filterDesc}] from [${relayDesc}] - empty result cached ${finalAge}s ago (waited for pending) [caller: ${caller}]`);
            return [];
          }
          break; // No longer pending, but result expired or had data
        }
        if (!updatedEntry || !updatedEntry.pending) {
          break; // No longer pending
        }
      }
      // If still pending after waiting, proceed (might be a slow fetch)
    } else if (!emptyCacheEntry.pending && age < this.EMPTY_RESULT_CACHE_TTL) {
      const ageSeconds = Math.round(age / 1000);
      // Use debug level to reduce console noise - this is expected behavior
      console.debug(`[nostr-client] Skipping fetch [${filterDesc}] from [${relayDesc}] - empty result cached ${ageSeconds}s ago [caller: ${caller}]`);
      return Promise.resolve([]);
    }
  }
  // Atomically check and set pending flag - if another fetch just set it, wait
  const existingEntry = this.emptyResultCache.get(emptyCacheKey);
  if (existingEntry?.pending && (Date.now() - existingEntry.cachedAt) < 1000) {
    // Very recent pending entry, wait a bit
    await new Promise(resolve => setTimeout(resolve, 200));
    const recheck = this.emptyResultCache.get(emptyCacheKey);
    if (recheck && !recheck.pending && (Date.now() - recheck.cachedAt) < this.EMPTY_RESULT_CACHE_TTL) {
      const finalAge = Math.round((Date.now() - recheck.cachedAt) / 1000);
      console.debug(`[nostr-client] Skipping fetch [${filterDesc}] from [${relayDesc}] - empty result cached ${finalAge}s ago (waited for concurrent) [caller: ${caller}]`);
      return [];
    }
  }
  // Mark this fetch as pending
  this.emptyResultCache.set(emptyCacheKey, { cachedAt: Date.now(), pending: true });
  // Create the main fetch promise
  const relayTimeout = timeout || 10000; // Default 10s timeout
  const fetchPromise = (async () => {
    // Cache-first: return cached data immediately for fast page loads, then
    // enhance with fresh relay data in the background.
    if (isCacheFirst) {
      try {
        const cachedEvents = await this.getCachedEvents(filters);
        if (cachedEvents.length > 0) {
          console.log(`[nostr-client] Fetch complete: ${cachedEvents.length} from cache (instant), fetching from relays in background [${filterDesc}] from [${relayDesc}]`);
          // Notify about cached events; "cache" is used as the relay id
          if (options.onUpdateWithRelay) {
            try {
              options.onUpdateWithRelay(cachedEvents.map(event => ({ event, relay: 'cache' })));
            } catch (error) {
              console.debug('[nostr-client] Error in onUpdateWithRelay callback for cached events:', error);
            }
          }
          // Also call regular onUpdate callback for backward compatibility
          if (onUpdate) {
            try {
              onUpdate(cachedEvents);
            } catch (error) {
              console.debug('[nostr-client] Error in onUpdate callback for cached events:', error);
            }
          }
          // Background relay refresh — intentionally not awaited
          this.fetchFromRelays(filters, relays, {
            cacheResults: cacheResults,
            onUpdate: (freshEvents) => {
              // Merge fresh events with cached events via onUpdate callback
              if (onUpdate) {
                onUpdate(freshEvents);
              }
            },
            onUpdateWithRelay: options.onUpdateWithRelay,
            timeout: relayTimeout,
            priority: options.priority
          }).then((freshEvents) => {
            if (freshEvents.length > 0) {
              console.log(`[nostr-client] Background fetch complete: ${freshEvents.length} fresh events from relays [${filterDesc}] from [${relayDesc}]`);
            }
          }).catch(() => {
            // Silently fail - background fetch is optional
          });
          // Clear pending flag since we got results
          const currentEntry = this.emptyResultCache.get(emptyCacheKey);
          if (currentEntry?.pending) {
            this.emptyResultCache.delete(emptyCacheKey);
          }
          return cachedEvents;
        }
      } catch (error) {
        console.debug('[nostr-client] Error querying cache first, falling back to relays:', error);
        // Continue to relay fetch below
      }
    }
    // Relay-first mode: query relays first with timeout, then fill from cache.
    // For single-relay queries, establish the connection (and authenticate
    // proactively if NIP-11 says auth is required) before subscribing.
    if (relays.length === 1) {
      const relayUrl = relays[0];
      if (!this.relays.has(relayUrl)) {
        // Try to connect first
        await this.addRelay(relayUrl);
        // Wait for connection to establish (up to 2 seconds)
        let attempts = 0;
        const maxAttempts = 4; // 2 seconds max
        while (attempts < maxAttempts && !this.relays.has(relayUrl)) {
          await new Promise(resolve => setTimeout(resolve, 500));
          attempts++;
        }
        // If still not connected, try one more time
        if (!this.relays.has(relayUrl)) {
          await this.addRelay(relayUrl);
          await new Promise(resolve => setTimeout(resolve, 500));
        }
      }
      const relay = this.relays.get(relayUrl);
      if (relay && !this.authenticatedRelays.has(relayUrl)) {
        try {
          const httpUrl = relayUrl.replace(/^wss?:\/\//, (match) => {
            return match === 'wss://' ? 'https://' : 'http://';
          });
          // NIP-11: relay information document lives at the relay's own
          // HTTP(S) URL with Accept: application/nostr+json
          // (/.well-known/nostr.json is NIP-05, not relay metadata).
          const nip11Url = httpUrl;
          const response = await fetch(nip11Url, {
            method: 'GET',
            headers: { 'Accept': 'application/nostr+json' },
            signal: AbortSignal.timeout(3000) // 3 second timeout for metadata fetch
          });
          if (response.ok) {
            const metadata = await response.json();
            const requiresAuth = metadata?.limitation?.auth_required === true;
            // Cache the metadata
            this.nip11MetadataCache.set(relayUrl, {
              requiresAuth,
              cachedAt: Date.now()
            });
            if (requiresAuth) {
              console.debug(`[nostr-client] Relay ${relayUrl} requires authentication (from NIP-11), authenticating before subscription...`);
              const session = sessionManager.getSession();
              if (session) {
                // Authenticate proactively; wait before starting the subscription
                try {
                  const authSuccess = await this.authenticateRelay(relay, relayUrl);
                  if (authSuccess) {
                    console.debug(`[nostr-client] Successfully authenticated with ${relayUrl} proactively, starting subscription`);
                  } else {
                    console.debug(`[nostr-client] Proactive auth not available for ${relayUrl}, will authenticate on challenge`);
                  }
                } catch (error) {
                  // Auth might fail if relay doesn't send challenge - that's okay
                  console.debug(`[nostr-client] Proactive auth attempt for ${relayUrl} failed (will try on challenge):`, error);
                }
              } else {
                // Expected when not logged in — these relays are skipped later
              }
            }
          }
        } catch (error) {
          // Metadata fetch failed - that's okay, we'll proceed normally
          console.debug(`[nostr-client] Could not fetch NIP-11 metadata for ${relayUrl}:`, error);
        }
      }
    }
    // Re-check empty result cache (another concurrent fetch may have completed)
    const emptyCacheEntry2 = this.emptyResultCache.get(emptyCacheKey);
    if (emptyCacheEntry2 && !emptyCacheEntry2.pending && (Date.now() - emptyCacheEntry2.cachedAt) < this.EMPTY_RESULT_CACHE_TTL) {
      const age = Math.round((Date.now() - emptyCacheEntry2.cachedAt) / 1000);
      console.log(`[nostr-client] Skipping fetch [${filterDesc}] from [${relayDesc}] - empty result cached ${age}s ago (checked during fetch) [caller: ${caller}]`);
      // Clear pending flag if it was set
      this.emptyResultCache.delete(emptyCacheKey);
      return [];
    }
    // Query relays with timeout; respect the cacheResults option
    const relayEvents = await this.fetchFromRelays(filters, relays, {
      cacheResults: cacheResults,
      onUpdate,
      onUpdateWithRelay: options.onUpdateWithRelay,
      timeout: relayTimeout,
      priority: options.priority
    });
    // Track results for composite log
    let finalEvents = relayEvents;
    let cacheEnhancementCount = 0;
    let usedCacheFallback = false;
    // If relays returned events, hand them back immediately and enhance with
    // the cache delta (cache-only events) asynchronously.
    if (relayEvents.length > 0) {
      const cacheEnhancementPromise = (async () => {
        if (shouldUseCache && onUpdate) {
          try {
            const cachedEvents = await this.getCachedEvents(filters);
            if (cachedEvents.length > 0) {
              // Find events in cache that aren't in relay results (delta)
              const relayEventIds = new Set(relayEvents.map(e => e.id));
              const cacheDelta = cachedEvents.filter(e => !relayEventIds.has(e.id));
              if (cacheDelta.length > 0) {
                cacheEnhancementCount = cacheDelta.length;
                // Enhance results with cache delta via onUpdate callback
                onUpdate(cacheDelta);
              }
            }
          } catch (error) {
            // Silently fail - cache enhancement is optional
          }
        }
        return cacheEnhancementCount;
      })();
      // Log the composite summary once enhancement completes
      cacheEnhancementPromise.then((enhancementCount) => {
        // Track fetch patterns
        const patternKey = `${filterDesc}`;
        const pattern = this.fetchPatterns.get(patternKey) || { count: 0, lastFetch: 0, totalEvents: 0 };
        pattern.count++;
        pattern.lastFetch = Date.now();
        pattern.totalEvents += relayEvents.length + enhancementCount;
        this.fetchPatterns.set(patternKey, pattern);
        const summary = enhancementCount > 0
          ? `[nostr-client] Fetch complete: ${relayEvents.length} from relays, ${enhancementCount} from cache (enhanced) [${filterDesc}] from [${relayDesc}]`
          : `[nostr-client] Fetch complete: ${relayEvents.length} events from relays [${filterDesc}] from [${relayDesc}]`;
        console.log(summary);
      });
      return relayEvents;
    }
    // No relay results: fall back to cache (only if cache is enabled).
    // In single-relay mode, useCache should be false so events from other
    // relays aren't shown.
    if (shouldUseCache) {
      try {
        const cachedEvents = await this.getCachedEvents(filters);
        if (cachedEvents.length > 0) {
          usedCacheFallback = true;
          finalEvents = cachedEvents;
          console.debug(`[nostr-client] Using ${cachedEvents.length} cached events (relays returned 0)`);
        } else {
          console.debug(`[nostr-client] No cached events available, returning empty result`);
        }
      } catch (error) {
        console.error('[nostr-client] Error querying cache:', error);
      }
    }
    // Track fetch patterns for analysis
    const patternKey = `${filterDesc}`;
    const pattern = this.fetchPatterns.get(patternKey) || { count: 0, lastFetch: 0, totalEvents: 0 };
    pattern.count++;
    pattern.lastFetch = Date.now();
    pattern.totalEvents += finalEvents.length;
    this.fetchPatterns.set(patternKey, pattern);
    // Cache empty results to prevent repeated fetches of non-existent data
    if (finalEvents.length === 0 && !usedCacheFallback) {
      const wasAlreadyCached = this.emptyResultCache.has(emptyCacheKey) && !this.emptyResultCache.get(emptyCacheKey)?.pending;
      this.emptyResultCache.set(emptyCacheKey, { cachedAt: Date.now(), pending: false });
      // Only log if this is a new cache entry (not updating an existing one)
      if (!wasAlreadyCached) {
        console.log(`[nostr-client] Cached empty result for [${filterDesc}] - will skip similar fetches for 30s`);
      }
    } else {
      // Clear pending flag if we got results
      const currentEntry = this.emptyResultCache.get(emptyCacheKey);
      if (currentEntry?.pending) {
        this.emptyResultCache.delete(emptyCacheKey);
      }
    }
    // Log composite summary
    if (usedCacheFallback) {
      console.log(`[nostr-client] Fetch complete: 0 from relays, ${finalEvents.length} from cache (fallback) [${filterDesc}] from [${relayDesc}]`);
    } else if (finalEvents.length === 0) {
      // Only log 0-event fetches if they're repeated many times (likely a problem)
      if (pattern.count > 5 && pattern.totalEvents === 0) {
        console.warn(`[nostr-client] Repeated empty fetch (${pattern.count}x): [${filterDesc}] from [${relayDesc}] - consider caching or skipping`);
      } else {
        console.log(`[nostr-client] Fetch complete: 0 events (relays returned 0, no cache available) [${filterDesc}] from [${relayDesc}]`);
      }
    }
    return finalEvents;
  })();
  this.activeFetches.set(fetchKey, fetchPromise);
  fetchPromise.finally(() => {
    this.activeFetches.delete(fetchKey);
  });
  return fetchPromise;
}
/**
 * Query relays sequentially (throttled) and return the merged, filtered
 * event list. Relays in failure backoff, permanently failed relays, and —
 * when the user is not logged in — auth-required relays are skipped.
 * Results are optionally persisted to the event cache.
 *
 * @param filters Nostr filters to query
 * @param relays  Candidate relay URLs
 * @param options cacheResults: persist events to cache; onUpdate /
 *                onUpdateWithRelay: streaming callbacks; timeout: per-relay
 *                ms budget; priority: 'high' shortens inter-relay delays
 */
private async fetchFromRelays(
  filters: Filter[],
  relays: string[],
  options: { cacheResults: boolean; onUpdate?: (events: NostrEvent[]) => void; onUpdateWithRelay?: (events: Array<{ event: NostrEvent; relay: string }>) => void; timeout: number; priority?: 'high' | 'medium' | 'low' }
): Promise<NostrEvent[]> {
  const timeout = options.timeout || config.relayTimeout;
  // Check if user is logged in
  const session = sessionManager.getSession();
  const isLoggedIn = !!session;
  // Filter out relays that have failed recently or permanently.
  // Also filter out auth-required relays if user is not logged in.
  const now = Date.now();
  const availableRelays = await Promise.all(
    relays.map(async (url) => {
      if (this.relays.has(url)) return { url, available: true }; // Already connected
      const failureInfo = this.failedRelays.get(url);
      if (failureInfo) {
        // Skip permanently failed relays
        if (failureInfo.failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
          return { url, available: false };
        }
        // Skip relays that failed recently (still in backoff period)
        const timeSinceFailure = now - failureInfo.lastFailure;
        if (timeSinceFailure < failureInfo.retryAfter) {
          return { url, available: false };
        }
      }
      // If not logged in, check if relay requires auth and skip it
      if (!isLoggedIn) {
        const requiresAuth = await this.checkRelayRequiresAuth(url);
        if (requiresAuth === true) {
          return { url, available: false }; // Skip auth-required relay
        }
      }
      return { url, available: true };
    })
  );
  const filteredRelays = availableRelays
    .filter(r => r.available)
    .map(r => r.url);
  // Try to connect to relays that aren't already connected.
  // Failures are handled gracefully - addRelay doesn't throw, it just
  // doesn't add failed relays.
  const relaysToConnect = filteredRelays.filter(url => !this.relays.has(url));
  if (relaysToConnect.length > 0) {
    if (filteredRelays.length === 1) {
      console.log(`[nostr-client] Attempting to connect to relay ${relaysToConnect[0]}...`);
    }
    await Promise.allSettled(
      relaysToConnect.map(url => this.addRelay(url))
    );
    // For single relay, wait for the WebSocket to actually open
    if (filteredRelays.length === 1 && relaysToConnect.length > 0) {
      const relayUrl = relaysToConnect[0];
      let attempts = 0;
      const maxAttempts = 6; // Wait up to 3 seconds (6 * 500ms) for connection
      while (attempts < maxAttempts) {
        const relay = this.relays.get(relayUrl);
        if (relay) {
          try {
            const ws = (relay as any).ws;
            if (ws && ws.readyState === WebSocket.OPEN) {
              break; // Connection is open, proceed
            }
          } catch {
            // Ignore errors checking WebSocket
          }
        }
        await new Promise(resolve => setTimeout(resolve, 500));
        attempts++;
      }
    }
  }
  // Get list of actually connected relays
  const connectedRelays = filteredRelays.filter(url => this.relays.has(url));
  if (connectedRelays.length === 0) {
    // Log at warn level for single relay queries (more important to know about failures)
    const logLevel = filteredRelays.length === 1 ? 'warn' : 'debug';
    const skippedCount = relays.length - filteredRelays.length;
    const skipReason = !isLoggedIn && skippedCount > 0 ? ` (${skippedCount} skipped: auth required)` : '';
    const message = `[nostr-client] No connected relays available for fetch (${relays.length} requested, ${filteredRelays.length} available${skipReason}, all failed or unavailable)`;
    if (logLevel === 'warn') {
      console.warn(message);
      // For single relay, also log which relays were attempted and failure info
      if (filteredRelays.length === 1) {
        const failureInfo = this.failedRelays.get(relays[0]);
        if (failureInfo) {
          console.warn(`[nostr-client] Relay ${relays[0]} failure info:`, failureInfo);
        }
        if (relaysToConnect.length > 0) {
          console.warn(`[nostr-client] Attempted to connect to: ${relaysToConnect.join(', ')}`);
        }
        console.warn(`[nostr-client] Available relays (after filtering): ${filteredRelays.join(', ')}`);
        console.warn(`[nostr-client] Currently connected relays: ${Array.from(this.relays.keys()).join(', ')}`);
      }
    } else {
      console.debug(message);
    }
    return [];
  }
  // Log connection status (the single-relay success log appears exactly once;
  // the original emitted it twice back-to-back)
  if (relays.length === 1 && connectedRelays.length === 1) {
    console.log(`[nostr-client] Successfully connected to relay ${relays[0]}, fetching events...`);
  } else if (connectedRelays.length < filteredRelays.length * 0.5 || filteredRelays.length < relays.length) {
    const skippedCount = relays.length - filteredRelays.length;
    const skipReason = !isLoggedIn && skippedCount > 0 ? ` (${skippedCount} skipped: auth required)` : '';
    console.debug(`[nostr-client] Fetching from ${connectedRelays.length} connected relay(s) out of ${filteredRelays.length} available${skipReason} (${relays.length} requested)`);
  }
  // Process relays sequentially with throttling to avoid overload.
  // High priority requests get processed faster.
  const events: Map<string, NostrEvent> = new Map();
  const priority = options.priority || 'medium';
  const delayBetweenRelays = priority === 'high' ? 10 : 100;
  // Limit events Map size to prevent memory bloat (keep only most recent 1000 events)
  const MAX_EVENTS_IN_MAP = 1000;
  for (const relayUrl of connectedRelays) {
    await this.throttledRelayRequest(relayUrl, filters, events, timeout, options.onUpdate, options.onUpdateWithRelay, priority);
    // Limit Map size - remove oldest if over limit
    if (events.size > MAX_EVENTS_IN_MAP) {
      const sorted = Array.from(events.entries())
        .sort((a, b) => a[1].created_at - b[1].created_at); // Oldest first
      const toRemove = sorted.slice(0, events.size - MAX_EVENTS_IN_MAP);
      for (const [id] of toRemove) {
        events.delete(id);
        memoryManager.untrackEvent(id);
      }
    }
    // Small delay between relays (shorter for high priority)
    await new Promise(resolve => setTimeout(resolve, delayBetweenRelays));
  }
  const eventArray = Array.from(events.values());
  const filtered = filterEvents(eventArray);
  // Clear events Map after processing to free memory
  events.clear();
  if (options.cacheResults && filtered.length > 0) {
    cacheEvents(filtered).catch(() => {
      // Silently fail
    });
  }
  // Only call onUpdate if we got new events AND onUpdate is provided.
  // This prevents clearing the UI when background fetch returns fewer results.
  if (options.onUpdate && filtered.length > 0) {
    options.onUpdate(filtered);
  }
  return filtered;
}
/**
 * Resolve a single event by id: serve it from the local event cache when
 * available, otherwise query the given relays directly (cache disabled so
 * a stale negative cache entry cannot mask the event).
 */
async getEventById(id: string, relays: string[]): Promise<NostrEvent | null> {
  try {
    const cached = await getEvent(id);
    if (cached) return cached;
  } catch (error) {
    // Cache lookup failed — fall through to a relay fetch.
  }
  const results = await this.fetchEvents([{ ids: [id] }], relays, { useCache: false });
  return results.length > 0 ? results[0] : null;
}
/**
 * Cache-only query: returns locally cached events matching `filters`
 * without touching the network.
 */
async getByFilters(filters: Filter[]): Promise<NostrEvent[]> {
  const cached = await this.getCachedEvents(filters);
  return cached;
}
// Expose the shared client configuration object (returned by reference, not copied).
getConfig() {
  return config;
}
/** URLs of all currently connected relays (a fresh array each call). */
getConnectedRelays(): string[] {
  return [...this.relays.keys()];
}
/**
 * Shut the client down: stop the subscription-TTL sweep, close every open
 * subscription and relay connection (errors during teardown are ignored),
 * drop memory-manager bookkeeping, and allow later re-initialization.
 */
close(): void {
  // Cancel the periodic subscription cleanup timer.
  if (this.subscriptionCleanupInterval) {
    clearInterval(this.subscriptionCleanupInterval);
    this.subscriptionCleanupInterval = null;
  }
  // Tear down all open subscriptions; sockets may already be gone.
  this.subscriptions.forEach(({ sub }) => {
    try {
      sub.close();
    } catch (error) {
      // Ignore errors
    }
  });
  this.subscriptions.clear();
  // Disconnect every relay.
  this.relays.forEach((relay) => {
    try {
      relay.close();
    } catch (error) {
      // Ignore errors
    }
  });
  this.relays.clear();
  // Drop event tracking and mark the client as uninitialized.
  memoryManager.clear();
  this.initialized = false;
}
}
// Shared singleton instance — import this rather than constructing NostrClient directly.
export const nostrClient = new NostrClient();