/**
* Nostr client - optimized for low bandwidth and efficiency
* Features: request throttling, batching, rate limiting, efficient caching
*/
import { Relay, type Filter, matchFilter, type EventTemplate, type VerifiedEvent } from 'nostr-tools';
import { config } from './config.js';
import type { NostrEvent } from '../../types/nostr.js';
import { cacheEvent, cacheEvents, getEvent, getEventsByKind, getEventsByPubkey } from '../cache/event-cache.js';
import { getDB } from '../cache/indexeddb-store.js';
import { filterEvents, shouldHideEvent } from '../event-filter.js';
import { sessionManager } from '../auth/session-manager.js';
import { KIND } from '../../types/kind-lookup.js';
/** Options controlling where an event is published. */
export interface PublishOptions {
// Explicit relay URLs to publish to; defaults to all currently connected relays.
relays?: string[];
// NOTE(review): declared but never read in this file — confirm external callers rely on it.
skipRelayValidation?: boolean;
}
/** Options controlling cache usage and timing for fetchEvents(). */
interface FetchOptions {
// Read from the local cache before hitting relays (default true).
useCache?: boolean;
// Write relay results back into the cache (default true).
cacheResults?: boolean;
// Invoked with fresh relay results; deliberately not called for background refreshes.
onUpdate?: (events: NostrEvent[]) => void;
// Per-relay fetch timeout in milliseconds (default 10000).
timeout?: number;
}
/**
 * Bandwidth-conscious Nostr relay client.
 * Manages relay connections with exponential-backoff failure tracking,
 * NIP-42 authentication on demand, throttled fetches, subscriptions,
 * and a local event cache (IndexedDB via event-cache/indexeddb-store).
 */
class NostrClient {
private initialized = false;
// url -> live Relay connection
private relays: Map<string, Relay> = new Map();
// `${relayUrl}_${subId}` -> active subscription handle
private subscriptions: Map<string, { relay: Relay; sub: any }> = new Map();
private nextSubId = 1;
// De-duplicates identical in-flight fetchEvents() calls (keyed by filters+relays JSON)
private activeFetches: Map<string, Promise<NostrEvent[]>> = new Map();
// Rate limiting and throttling
// NOTE(review): requestQueue is never pushed to anywhere in this file, so
// processQueue() appears to be a no-op — confirm before removing.
private requestQueue: Array<() => void> = [];
private processingQueue = false;
private lastRequestTime: Map<string, number> = new Map(); // relay -> timestamp
private activeRequestsPerRelay: Map<string, number> = new Map();
private readonly MIN_REQUEST_INTERVAL = 200; // 200ms between requests to same relay
private readonly MAX_CONCURRENT_PER_RELAY = 1; // Only 1 concurrent request per relay
private readonly MAX_CONCURRENT_TOTAL = 3; // Max 3 total concurrent requests
private totalActiveRequests = 0;
// Track background refresh operations to prevent duplicates
private backgroundRefreshes: Set<string> = new Set();
// Failed relay tracking with exponential backoff
private failedRelays: Map<string, { lastFailure: number; retryAfter: number; failureCount: number }> = new Map();
private readonly INITIAL_RETRY_DELAY = 5000; // 5 seconds
private readonly MAX_RETRY_DELAY = 300000; // 5 minutes
// NOTE(review): MAX_FAILURE_COUNT is never read in this file — confirm before removing.
private readonly MAX_FAILURE_COUNT = 10; // After 10 failures, wait max delay
private readonly PERMANENT_FAILURE_THRESHOLD = 20; // After 20 failures, skip relay for this session
// Track authenticated relays to avoid re-authenticating
private authenticatedRelays: Set<string> = new Set();
/**
 * Connect to the configured default relays (runs at most once).
 * Each attempt is capped at 10 seconds; failures are swallowed so the
 * client still initializes when some relays are unreachable — they will
 * be retried later on demand.
 */
async initialize(): Promise<void> {
  if (this.initialized) return;
  const attempts = config.defaultRelays.map((url) => {
    const deadline = new Promise<never>((_, reject) =>
      setTimeout(() => reject(new Error('Connection timeout')), 10000)
    );
    // Silently ignore failures — unreachable relays are retried later if needed.
    return Promise.race([this.addRelay(url), deadline]).catch(() => {});
  });
  await Promise.allSettled(attempts);
  this.initialized = true;
}
/**
* Authenticate with a relay using NIP-42 AUTH.
* Only call this when the relay has sent an AUTH challenge or an operation
* failed with auth-required — calling without a pending challenge makes
* nostr-tools reject with "no challenge was received" (swallowed below).
* On success the url is recorded in authenticatedRelays so repeat calls
* short-circuit. Returns false when no user session exists, the relay has
* no auth() method, or the AUTH flow fails.
*/
private async authenticateRelay(relay: Relay, url: string): Promise<boolean> {
// Check if we're already authenticated
if (this.authenticatedRelays.has(url)) {
return true;
}
// Check if user is logged in
const session = sessionManager.getSession();
if (!session) {
console.debug(`[nostr-client] Cannot authenticate with ${url}: user not logged in`);
return false;
}
try {
// Check if relay supports AUTH by checking for auth method
// nostr-tools Relay.auth() will handle the AUTH flow
// Only call this when relay has sent a challenge (handled by nostr-tools)
if (typeof (relay as any).auth === 'function') {
await relay.auth(async (authEvent: EventTemplate): Promise<VerifiedEvent> => {
// Build the unsigned event from the template nostr-tools hands us,
// filling in the session pubkey and defaulting optional fields.
const eventToSign: Omit<NostrEvent, 'sig' | 'id'> = {
pubkey: session.pubkey,
created_at: authEvent.created_at,
kind: authEvent.kind,
tags: authEvent.tags || [],
content: authEvent.content || ''
};
// Sign the auth event using the session signer
const signedEvent = await session.signer(eventToSign);
// Return as VerifiedEvent (nostr-tools will verify the signature)
return signedEvent as VerifiedEvent;
});
this.authenticatedRelays.add(url);
console.debug(`[nostr-client] Successfully authenticated with relay: ${url}`);
return true;
}
} catch (error) {
// Only log if it's not the "no challenge" error (which is expected for relays that don't require auth)
const errorMessage = error instanceof Error ? error.message : String(error);
if (!errorMessage.includes('no challenge was received')) {
console.debug(`[nostr-client] Failed to authenticate with relay ${url}:`, errorMessage);
}
return false;
}
// Relay has no auth() method — cannot authenticate.
return false;
}
/**
 * Connect to a relay and register it in `this.relays`.
 *
 * Never rejects. Failures — connection errors, relays still inside their
 * backoff window, or relays past the permanent-failure threshold — simply
 * leave the relay out of `this.relays`; callers detect this via
 * `this.relays.has(url)`. This matches the contract the rest of the file
 * documents ("addRelay doesn't throw on failure"). Previously this method
 * threw for backoff/permanently-failed relays, which produced unhandled
 * rejections at call sites like subscribe()'s `addRelay(url).then(...)`
 * and could fail an entire publish() for one bad relay.
 */
async addRelay(url: string): Promise<void> {
  if (this.relays.has(url)) return;
  // Skip relays that have exceeded the permanent failure threshold for this session.
  const failureInfo = this.failedRelays.get(url);
  if (failureInfo && failureInfo.failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
    console.debug(`[nostr-client] Relay ${url} has failed ${failureInfo.failureCount} times, skipping for this session`);
    return;
  }
  // Skip relays still inside their exponential-backoff window.
  if (failureInfo) {
    const timeSinceFailure = Date.now() - failureInfo.lastFailure;
    if (timeSinceFailure < failureInfo.retryAfter) {
      const waitTime = failureInfo.retryAfter - timeSinceFailure;
      console.debug(`[nostr-client] Relay ${url} failed recently, waiting ${Math.round(waitTime / 1000)}s before retry`);
      return;
    }
  }
  try {
    // Connection timeout of 5 seconds (similar to jumble's approach).
    const relay = await Promise.race([
      Relay.connect(url),
      new Promise<never>((_, reject) =>
        setTimeout(() => reject(new Error('Connection timeout')), 5000)
      )
    ]);
    this.relays.set(url, relay);
    // Auto-remove the relay when its underlying WebSocket closes.
    try {
      const ws = (relay as any).ws;
      if (ws) {
        ws.addEventListener('close', () => {
          console.debug(`[nostr-client] Relay ${url} connection closed, removing from active relays`);
          this.relays.delete(url);
          this.authenticatedRelays.delete(url);
        });
      }
    } catch (error) {
      // Ignore errors accessing WebSocket
    }
    // Don't proactively authenticate - only authenticate when:
    // 1. Relay sends an AUTH challenge (handled by nostr-tools automatically)
    // 2. An operation fails with 'auth-required' error (handled in publish/subscribe methods)
    // Clear failure tracking on successful connection
    this.failedRelays.delete(url);
    // Log successful connection at debug level to reduce console noise
    console.debug(`[nostr-client] Successfully connected to relay: ${url}`);
  } catch (error) {
    // Record the failure with exponential backoff; do not rethrow.
    const existingFailure = this.failedRelays.get(url) || { lastFailure: 0, retryAfter: this.INITIAL_RETRY_DELAY, failureCount: 0 };
    const failureCount = existingFailure.failureCount + 1;
    const errorMessage = error instanceof Error ? error.message : String(error);
    const isConnectionRefused = errorMessage.includes('CONNECTION_REFUSED') ||
      errorMessage.includes('connection refused') ||
      errorMessage.includes('Connection refused');
    // Connection-refused errors start with a longer delay to avoid reconnect spam.
    const initialDelay = isConnectionRefused ? this.INITIAL_RETRY_DELAY * 2 : this.INITIAL_RETRY_DELAY;
    const retryAfter = Math.min(
      initialDelay * Math.pow(2, Math.min(failureCount - 1, 6)), // Exponential backoff, max 2^6 = 64x
      this.MAX_RETRY_DELAY
    );
    this.failedRelays.set(url, {
      lastFailure: Date.now(),
      retryAfter,
      failureCount
    });
    // Debug-level only: connection failures are expected and noisy.
    if (failureCount > 3) {
      console.debug(`[nostr-client] Relay ${url} connection failed (failure #${failureCount}), will retry after ${Math.round(retryAfter / 1000)}s`);
    }
    // Warn if approaching permanent failure threshold
    if (failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
      console.warn(`[nostr-client] Relay ${url} has failed ${failureCount} times, will be skipped for this session`);
    }
  }
}
async removeRelay(url: string): Promise<void> {
const relay = this.relays.get(url);
if (relay) {
try {
relay.close();
} catch (error) {
// Ignore
}
this.relays.delete(url);
// Remove from authenticated relays when disconnecting
this.authenticatedRelays.delete(url);
}
}
/**
 * Return true when the connection for `relayUrl` still looks usable.
 * If the relay (or its WebSocket) is closing/closed, evict it from the
 * connection and auth maps and return false.
 */
private checkAndCleanupRelay(relayUrl: string): boolean {
  const relay = this.relays.get(relayUrl);
  if (!relay) return false;
  const evict = (why: string): false => {
    console.debug(`[nostr-client] Relay ${relayUrl} ${why}, removing from active relays`);
    this.relays.delete(relayUrl);
    this.authenticatedRelays.delete(relayUrl);
    return false;
  };
  // nostr-tools relay status: 0 = CONNECTING, 1 = OPEN, 2 = CLOSING, 3 = CLOSED
  const status = (relay as any).status;
  if (status === 2 || status === 3) {
    return evict(`is closed (status: ${status})`);
  }
  // Fall back to inspecting the raw WebSocket, if reachable.
  try {
    const ws = (relay as any).ws;
    if (ws && (ws.readyState === WebSocket.CLOSED || ws.readyState === WebSocket.CLOSING)) {
      return evict('WebSocket is closed');
    }
  } catch {
    // Can't inspect the socket — assume the connection is still valid.
  }
  return true;
}
/**
 * Get a relay instance by URL, connecting on demand.
 * Returns null when the connection cannot be established (graceful
 * degradation like jumble) — addRelay records the failure internally.
 */
async getRelay(url: string): Promise<Relay | null> {
  if (!this.relays.has(url)) {
    // addRelay doesn't throw on failure, it just doesn't add the relay.
    await this.addRelay(url);
    if (!this.relays.has(url)) {
      console.debug(`[nostr-client] Failed to connect to relay ${url}, skipping gracefully`);
      return null;
    }
  }
  const relay = this.relays.get(url);
  return relay ?? null;
}
/**
 * True when a zap receipt (kind 9735) should be dropped: missing or
 * non-numeric 'amount' tag, or an amount below config.zapThreshold.
 * Events of any other kind are never filtered here.
 */
private shouldFilterZapReceipt(event: NostrEvent): boolean {
  if (event.kind !== KIND.ZAP_RECEIPT) return false;
  const amountTag = event.tags.find((t) => t[0] === 'amount');
  if (!amountTag || !amountTag[1]) return true;
  const amount = parseInt(amountTag[1], 10);
  // Number.isNaN avoids the coercion pitfalls of the global isNaN.
  return Number.isNaN(amount) || amount < config.zapThreshold;
}
/**
 * Persist an event to the local cache (fire-and-forget), unless it is a
 * below-threshold zap receipt.
 */
private addToCache(event: NostrEvent): void {
  if (this.shouldFilterZapReceipt(event)) return;
  // Cache writes are best-effort; errors are intentionally ignored.
  void cacheEvent(event).catch(() => {});
}
/**
 * Gather candidate events from the cache for a single filter, using the most
 * selective available lookup: explicit ids first, then kinds, then authors,
 * falling back to a reverse created_at scan for broad queries. Candidates are
 * over-fetched (3x the limit) because they are re-checked against the full
 * filter afterwards.
 */
private async collectCacheCandidates(filter: Filter): Promise<NostrEvent[]> {
  // Small id lists are fetched directly — cheaper than scanning a kind index.
  if (filter.ids && filter.ids.length > 0 && filter.ids.length <= 10) {
    const idEvents: NostrEvent[] = [];
    for (const id of filter.ids) {
      const event = await getEvent(id);
      if (event) idEvents.push(event);
    }
    return idEvents;
  }
  const overfetch = (filter.limit || 100) * 3;
  if (filter.kinds && filter.kinds.length > 0) {
    // Query each kind via the kind index and combine.
    const byKind: NostrEvent[] = [];
    for (const kind of filter.kinds) {
      byKind.push(...await getEventsByKind(kind, overfetch));
    }
    return byKind;
  }
  if (filter.authors && filter.authors.length > 0) {
    // No kinds specified — query each author via the pubkey index.
    const byAuthor: NostrEvent[] = [];
    for (const author of filter.authors) {
      byAuthor.push(...await getEventsByPubkey(author, overfetch));
    }
    return byAuthor;
  }
  // No usable index — walk the created_at index newest-first as a fallback.
  try {
    const db = await getDB();
    const tx = db.transaction('events', 'readonly');
    const index = tx.store.index('created_at');
    const events: NostrEvent[] = [];
    let cursor = await index.openCursor(null, 'prev');
    while (cursor && events.length < overfetch) {
      events.push(cursor.value as NostrEvent);
      cursor = await cursor.continue();
    }
    await tx.done;
    return events;
  } catch (dbError) {
    console.error('[nostr-client] Error querying IndexedDB for recent events:', dbError);
    return [];
  }
}
/**
 * Query the local cache for events matching any of `filters`.
 * Results are deduped by id, validated with matchFilter, sorted newest-first,
 * truncated to the first filter's limit, and passed through filterEvents().
 * Returns [] on any unexpected error.
 */
private async getCachedEvents(filters: Filter[]): Promise<NostrEvent[]> {
  // Tag matcher shared by all '#x' filters. The Nostr spec allows both
  // lowercase and uppercase tag names, so e.g. a '#e' filter also inspects
  // 'E' tags. Relays that don't support tag filters need to be corrected by
  // the relay operator.
  const matchesTag = (event: NostrEvent, names: readonly string[], wanted?: string[]): boolean => {
    if (!wanted || wanted.length === 0) return true;
    const values = event.tags.filter(t => names.includes(t[0]) && t[1]).map(t => t[1]);
    return values.length > 0 && wanted.some(v => values.includes(v));
  };
  try {
    const results: NostrEvent[] = [];
    const seen = new Set<string>();
    for (const filter of filters) {
      try {
        const candidateEvents = await this.collectCacheCandidates(filter);
        const queriedByIds = filter.ids && filter.ids.length > 0 && filter.ids.length <= 10;
        for (const event of candidateEvents) {
          if (seen.has(event.id)) continue;
          // since/until window
          if (filter.since && event.created_at < filter.since) continue;
          if (filter.until && event.created_at > filter.until) continue;
          // ids filter (skip when candidates were already fetched by id)
          if (!queriedByIds && filter.ids && filter.ids.length > 0 && !filter.ids.includes(event.id)) continue;
          // authors / kinds filters (cheap no-ops when already used for the query)
          if (filter.authors && filter.authors.length > 0 && !filter.authors.includes(event.pubkey)) continue;
          if (filter.kinds && filter.kinds.length > 0 && !filter.kinds.includes(event.kind)) continue;
          // Tag filters (#e/#E, #p/#P, #a/#A, #d, #q/#Q) — both cases accepted.
          if (!matchesTag(event, ['e', 'E'], filter['#e'])) continue;
          if (!matchesTag(event, ['e', 'E'], filter['#E'])) continue;
          if (!matchesTag(event, ['p', 'P'], filter['#p'])) continue;
          if (!matchesTag(event, ['p', 'P'], filter['#P'])) continue;
          if (!matchesTag(event, ['a', 'A'], filter['#a'])) continue;
          if (!matchesTag(event, ['a', 'A'], filter['#A'])) continue;
          if (!matchesTag(event, ['d', 'D'], filter['#d'])) continue;
          if (!matchesTag(event, ['q', 'Q'], filter['#q'])) continue;
          if (!matchesTag(event, ['q', 'Q'], filter['#Q'])) continue;
          // Use matchFilter for final strict validation.
          if (matchFilter(filter, event)) {
            results.push(event);
            seen.add(event.id);
          }
        }
      } catch (error) {
        // One bad filter shouldn't abort the remaining filters.
        console.error('[nostr-client] Error querying cache for filter:', error, filter);
      }
    }
    // Sort by created_at descending and apply the (first filter's) limit.
    const sorted = results.sort((a, b) => b.created_at - a.created_at);
    const limit = filters[0]?.limit || 100;
    const limited = sorted.slice(0, limit);
    const filtered = filterEvents(limited);
    // Debug-log only "interesting" cache hits to keep the console quiet.
    if (filtered.length > 5 || (filtered.length > 0 && limited.length > filtered.length * 2)) {
      console.debug(`[nostr-client] Cache query: ${limited.length} events before filter, ${filtered.length} after filter`);
    }
    return filtered;
  } catch (error) {
    console.error('[nostr-client] Error getting cached events:', error);
    return [];
  }
}
async publish(event: NostrEvent, options: PublishOptions = {}): Promise<{
success: string[];
failed: Array<{ relay: string; error: string }>;
}> {
const relays = options.relays || Array.from(this.relays.keys());
const results = {
success: [] as string[],
failed: [] as Array<{ relay: string; error: string }>
};
this.addToCache(event);
for (const url of relays) {
const relay = this.relays.get(url);
if (!relay) {
// addRelay doesn't throw on failure, it just doesn't add the relay (graceful degradation like jumble)
await this.addRelay(url);
const newRelay = this.relays.get(url);
if (newRelay && this.checkAndCleanupRelay(url)) {
try {
await newRelay.publish(event);
results.success.push(url);
} catch (error) {
// Check if error is auth-required
if (error instanceof Error && error.message.startsWith('auth-required')) {
// Try to authenticate and retry
const authSuccess = await this.authenticateRelay(newRelay, url);
if (authSuccess) {
try {
await newRelay.publish(event);
results.success.push(url);
} catch (retryError) {
results.failed.push({
relay: url,
error: retryError instanceof Error ? retryError.message : 'Unknown error'
});
}
} else {
results.failed.push({
relay: url,
error: 'Authentication required but failed'
});
}
} else {
// Check if it's a closed connection error
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
console.debug(`[nostr-client] Relay ${url} connection closed during publish, removing from active relays`);
this.relays.delete(url);
this.authenticatedRelays.delete(url);
}
results.failed.push({
relay: url,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
} else {
// Relay connection failed (addRelay didn't add it)
results.failed.push({
relay: url,
error: 'Failed to connect'
});
}
} else {
// Check relay status before publishing
if (!this.checkAndCleanupRelay(url)) {
results.failed.push({
relay: url,
error: 'Relay connection closed'
});
continue;
}
try {
await relay.publish(event);
results.success.push(url);
} catch (error) {
// Check if error is auth-required
if (error instanceof Error && error.message.startsWith('auth-required')) {
// Try to authenticate and retry
const authSuccess = await this.authenticateRelay(relay, url);
if (authSuccess) {
try {
await relay.publish(event);
results.success.push(url);
} catch (retryError) {
results.failed.push({
relay: url,
error: retryError instanceof Error ? retryError.message : 'Unknown error'
});
}
} else {
results.failed.push({
relay: url,
error: 'Authentication required but failed'
});
}
} else {
// Check if it's a closed connection error
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
console.debug(`[nostr-client] Relay ${url} connection closed during publish, removing from active relays`);
this.relays.delete(url);
this.authenticatedRelays.delete(url);
}
results.failed.push({
relay: url,
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
}
}
return results;
}
/**
 * Open a subscription with `filters` on each of `relays`.
 * Relays that aren't connected yet are connected asynchronously and join the
 * subscription when ready; permanently failed relays are skipped entirely.
 * @returns an id usable with unsubscribe()
 */
subscribe(
  filters: Filter[],
  relays: string[],
  onEvent: (event: NostrEvent, relay: string) => void,
  onEose?: (relay: string) => void
): string {
  const subId = `sub_${this.nextSubId++}_${Date.now()}`;
  for (const url of relays) {
    if (!this.relays.has(url)) {
      // Skip relays that have permanently failed this session.
      const failureInfo = this.failedRelays.get(url);
      if (failureInfo && failureInfo.failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
        console.debug(`[nostr-client] Skipping permanently failed relay ${url} for subscription`);
        continue;
      }
      // Connect in the background and attach the subscription once ready.
      // The .catch prevents an unhandled promise rejection if addRelay
      // rejects (e.g. for a relay still in its backoff window).
      this.addRelay(url)
        .then(() => {
          const newRelay = this.relays.get(url);
          if (newRelay) {
            this.setupSubscription(newRelay, url, subId, filters, onEvent, onEose);
          }
        })
        .catch(() => {
          // Connection failed — this relay simply doesn't join the subscription.
        });
      continue;
    }
    const relay = this.relays.get(url);
    if (!relay) continue;
    // Verify the connection is still alive before subscribing.
    if (!this.checkAndCleanupRelay(url)) {
      console.debug(`[nostr-client] Relay ${url} is closed, skipping subscription`);
      continue;
    }
    try {
      this.setupSubscription(relay, url, subId, filters, onEvent, onEose);
    } catch (error) {
      // setupSubscription handles most failures; evict dead connections here.
      const errorMessage = error instanceof Error ? error.message : String(error);
      if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
        console.debug(`[nostr-client] Relay ${url} connection closed, removing from active relays`);
        this.relays.delete(url);
        this.authenticatedRelays.delete(url);
      }
    }
  }
  return subId;
}
/**
* Attach one per-relay subscription for `subId` on an already-connected relay.
* Events are cached and forwarded to `onEvent`; if the relay closes the
* subscription with an 'auth-required' reason, a single NIP-42 authenticate
* then re-subscribe cycle is attempted. The handle is stored in
* `subscriptions` under `${url}_${subId}` for later unsubscribe().
*/
private setupSubscription(
relay: Relay,
url: string,
subId: string,
filters: Filter[],
onEvent: (event: NostrEvent, relay: string) => void,
onEose?: (relay: string) => void
): void {
if (!this.relays.has(url)) return;
// Check relay status before setting up subscription
if (!this.checkAndCleanupRelay(url)) {
console.debug(`[nostr-client] Relay ${url} is closed, skipping subscription setup`);
return;
}
try {
// Captured as `client` so the nostr-tools callbacks below don't rebind `this`.
const client = this;
// Guards against repeated auth attempts when onclose fires more than once.
let hasAuthed = this.authenticatedRelays.has(url);
const startSub = () => {
// Check relay status again before subscribing
if (!client.checkAndCleanupRelay(url)) {
console.debug(`[nostr-client] Relay ${url} closed before subscription, aborting`);
return;
}
const sub = relay.subscribe(filters, {
onevent: (event: NostrEvent) => {
try {
// Drop events from relays we've since evicted, and sub-threshold zaps.
if (!client.relays.has(url)) return;
if (client.shouldFilterZapReceipt(event)) return;
client.addToCache(event);
onEvent(event, url);
} catch (err) {
// Silently handle errors - connection may be closed
}
},
oneose: () => {
try {
if (!client.relays.has(url)) return;
onEose?.(url);
} catch (err) {
// Silently handle errors - connection may be closed
}
},
onclose: (reason: string) => {
// Handle auth-required on subscription close
if (reason.startsWith('auth-required') && !hasAuthed) {
const session = sessionManager.getSession();
if (session) {
client.authenticateRelay(relay, url)
.then((authSuccess) => {
if (authSuccess) {
hasAuthed = true;
// Retry subscription after authentication
startSub();
}
})
.catch(() => {
// Authentication failed, give up
});
}
}
}
});
// NOTE: a re-subscribe after auth overwrites this map entry, so only the
// latest sub handle for `${url}_${subId}` is retained.
client.subscriptions.set(`${url}_${subId}`, { relay, sub });
};
startSub();
} catch (error) {
// Handle SendingOnClosedConnection and other errors
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
console.debug(`[nostr-client] Relay ${url} connection closed during subscription setup, removing from active relays`);
this.relays.delete(url);
this.authenticatedRelays.delete(url);
}
}
}
unsubscribe(subId: string): void {
for (const [key, { sub }] of this.subscriptions.entries()) {
if (key.endsWith(`_${subId}`)) {
try {
// Try to close subscription - may fail if connection already closed
if (sub && typeof sub.close === 'function') {
sub.close();
}
} catch (error) {
// Silently handle errors when closing subscriptions
// Connection may already be closed, which is fine
}
this.subscriptions.delete(key);
}
}
}
/**
* Issue a single relay fetch, respecting the client-wide throttling limits:
* at least MIN_REQUEST_INTERVAL ms between requests to the same relay,
* MAX_CONCURRENT_PER_RELAY in-flight requests per relay, and
* MAX_CONCURRENT_TOTAL in-flight requests overall. When a limit is hit the
* attempt is re-scheduled with setTimeout rather than queued.
* Resolves when the underlying request completes; never rejects.
* Matching events are accumulated into the shared `events` map by id.
*/
private async throttledRelayRequest(
relayUrl: string,
filters: Filter[],
events: Map<string, NostrEvent>,
timeout: number
): Promise<void> {
return new Promise((resolve) => {
const makeRequest = () => {
const now = Date.now();
const lastRequest = this.lastRequestTime.get(relayUrl) || 0;
const timeSinceLastRequest = now - lastRequest;
const activeForRelay = this.activeRequestsPerRelay.get(relayUrl) || 0;
// Check if we can make the request now
if (timeSinceLastRequest >= this.MIN_REQUEST_INTERVAL &&
activeForRelay < this.MAX_CONCURRENT_PER_RELAY &&
this.totalActiveRequests < this.MAX_CONCURRENT_TOTAL) {
// Update tracking
this.lastRequestTime.set(relayUrl, now);
this.activeRequestsPerRelay.set(relayUrl, activeForRelay + 1);
this.totalActiveRequests++;
// Make the request
this.makeRelayRequest(relayUrl, filters, events, timeout)
.finally(() => {
// Release both counters, clamped at zero as a safety net.
const current = this.activeRequestsPerRelay.get(relayUrl) || 0;
if (current > 0) {
this.activeRequestsPerRelay.set(relayUrl, current - 1);
}
if (this.totalActiveRequests > 0) {
this.totalActiveRequests--;
}
resolve();
this.processQueue(); // Process next in queue
});
} else {
// Wait and retry (at least 100ms so concurrency-limited retries back off too).
const waitTime = Math.max(
this.MIN_REQUEST_INTERVAL - timeSinceLastRequest,
100
);
setTimeout(makeRequest, waitTime);
}
};
makeRequest();
});
}
/**
* Perform one one-shot fetch against a single relay: subscribe with `filters`,
* collect matching events into `events` (keyed by id), and finish on EOSE or
* after `timeout` ms, whichever comes first. If the relay closes the
* subscription with 'auth-required', one NIP-42 authenticate + re-subscribe
* cycle is attempted. `finish()` is idempotent via the `resolved` flag and
* always tears down the temporary subscription.
*/
private async makeRelayRequest(
relayUrl: string,
filters: Filter[],
events: Map<string, NostrEvent>,
timeout: number
): Promise<void> {
const relay = this.relays.get(relayUrl);
if (!relay || !this.checkAndCleanupRelay(relayUrl)) {
return;
}
const subId = `sub_${this.nextSubId++}_${Date.now()}`;
let resolved = false;
let timeoutId: ReturnType<typeof setTimeout> | null = null;
let hasAuthed = this.authenticatedRelays.has(relayUrl);
// Idempotent teardown: clears the timeout and closes the subscription once.
const finish = () => {
if (resolved) return;
resolved = true;
if (timeoutId) clearTimeout(timeoutId);
this.unsubscribe(subId);
};
const startSub = () => {
try {
// Check relay status before subscribing
if (!this.checkAndCleanupRelay(relayUrl)) {
console.debug(`[nostr-client] Relay ${relayUrl} is closed, skipping subscription`);
finish();
return;
}
// Captured so the nostr-tools callbacks below don't rebind `this`.
const client = this;
const sub = relay.subscribe(filters, {
onevent: (event: NostrEvent) => {
try {
// Drop events from evicted relays, hidden events, and sub-threshold zaps.
if (!client.relays.has(relayUrl)) return;
if (shouldHideEvent(event)) return;
if (client.shouldFilterZapReceipt(event)) return;
events.set(event.id, event);
client.addToCache(event);
} catch (error) {
// Silently handle errors - connection may be closed
}
},
oneose: () => {
try {
if (!resolved) finish();
} catch (error) {
// Silently handle errors - connection may be closed
}
},
onclose: (reason: string) => {
// Handle auth-required on subscription close
if (reason.startsWith('auth-required') && !hasAuthed && !resolved) {
const session = sessionManager.getSession();
if (session) {
client.authenticateRelay(relay, relayUrl)
.then((authSuccess) => {
if (authSuccess) {
hasAuthed = true;
// Retry subscription after authentication
if (!resolved) {
startSub();
}
} else {
finish();
}
})
.catch(() => {
finish();
});
} else {
finish();
}
}
}
});
client.subscriptions.set(`${relayUrl}_${subId}`, { relay, sub });
// Hard deadline: resolve even if the relay never sends EOSE.
timeoutId = setTimeout(() => {
if (!resolved) finish();
}, timeout);
} catch (error) {
// Handle SendingOnClosedConnection and other errors
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes('SendingOnClosedConnection') || errorMessage.includes('closed connection')) {
console.debug(`[nostr-client] Relay ${relayUrl} connection closed, removing from active relays`);
this.relays.delete(relayUrl);
this.authenticatedRelays.delete(relayUrl);
}
finish();
}
};
startSub();
}
/**
 * Run the next queued request callback, if any.
 * The try/finally guarantees `processingQueue` is reset even if the callback
 * throws; previously a throwing callback left the flag stuck at true,
 * permanently blocking the queue.
 * NOTE(review): nothing in this file ever pushes onto requestQueue, so this
 * currently appears to be a no-op — confirm before removing the queue.
 */
private processQueue(): void {
  if (this.processingQueue || this.requestQueue.length === 0) return;
  this.processingQueue = true;
  try {
    const next = this.requestQueue.shift();
    if (next) {
      next();
    }
  } finally {
    this.processingQueue = false;
  }
}
/**
 * Fetch events matching `filters`, preferring the local cache.
 * Cache hits are returned immediately and a delayed background refresh is
 * scheduled (cache-only — it never calls onUpdate, so a sparse refresh can't
 * clobber results already shown). Cache misses fall through to a relay fetch,
 * de-duplicated against identical concurrent calls via `activeFetches`.
 */
async fetchEvents(
  filters: Filter[],
  relays: string[],
  options: FetchOptions = {}
): Promise<NostrEvent[]> {
  const { useCache = true, cacheResults = true, onUpdate, timeout = 10000 } = options;
  // Key identifying this fetch. Copy `relays` before sorting — sort() works
  // in place and must not mutate the caller's array.
  const fetchKey = JSON.stringify({ filters, relays: [...relays].sort() });
  const activeFetch = this.activeFetches.get(fetchKey);
  if (activeFetch) {
    return activeFetch;
  }
  // Query cache first
  if (useCache) {
    try {
      const cachedEvents = await this.getCachedEvents(filters);
      if (cachedEvents.length > 0) {
        console.debug(`[nostr-client] Returning ${cachedEvents.length} cached events for filter:`, filters);
        if (cacheResults && !this.backgroundRefreshes.has(fetchKey)) {
          // Prevent duplicate background refreshes for the same filter.
          this.backgroundRefreshes.add(fetchKey);
          // Delay the refresh so it doesn't compete with the initial load.
          setTimeout(() => {
            this.fetchFromRelays(filters, relays, { cacheResults: true, onUpdate: undefined, timeout })
              .catch((error) => {
                // Background refresh failures must not surface to callers;
                // allow an immediate retry for this key.
                console.debug('[nostr-client] Background refresh failed:', error);
                this.backgroundRefreshes.delete(fetchKey);
              })
              .finally(() => {
                // Allow another refresh for this key after 60 seconds.
                setTimeout(() => {
                  this.backgroundRefreshes.delete(fetchKey);
                }, 60000);
              });
          }, 5000); // 5 second delay for background refresh to avoid interfering
        }
        return cachedEvents;
      }
      // No cached events — this is expected and normal.
      console.debug(`[nostr-client] No cached events found for filter:`, filters);
    } catch (error) {
      console.error('[nostr-client] Error querying cache:', error);
      // Continue to fetch from relays
    }
  }
  // Fetch from relays, sharing the promise with identical concurrent calls.
  const fetchPromise = this.fetchFromRelays(filters, relays, { cacheResults, onUpdate, timeout });
  this.activeFetches.set(fetchKey, fetchPromise);
  fetchPromise.finally(() => {
    this.activeFetches.delete(fetchKey);
  });
  return fetchPromise;
}
/**
* Fetch events for `filters` directly from relays, sequentially and
* throttled. Relays in backoff or past the permanent-failure threshold are
* skipped; remaining unconnected relays are connected best-effort. Results
* are deduped by id, passed through filterEvents(), optionally cached, and
* reported via onUpdate only when non-empty (so a sparse refresh never
* clears previously shown results).
*/
private async fetchFromRelays(
filters: Filter[],
relays: string[],
options: { cacheResults: boolean; onUpdate?: (events: NostrEvent[]) => void; timeout: number }
): Promise<NostrEvent[]> {
const timeout = options.timeout || config.relayTimeout;
// Filter out relays that have failed recently or permanently
const now = Date.now();
const availableRelays = relays.filter(url => {
if (this.relays.has(url)) return true; // Already connected
const failureInfo = this.failedRelays.get(url);
if (failureInfo) {
// Skip permanently failed relays
if (failureInfo.failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
return false; // Skip this relay, it has failed too many times
}
// Skip relays that failed recently (still in backoff period)
const timeSinceFailure = now - failureInfo.lastFailure;
if (timeSinceFailure < failureInfo.retryAfter) {
return false; // Skip this relay, it failed recently
}
}
return true; // Can try to connect
});
// Try to connect to relays that aren't already connected
// Like jumble, we gracefully handle failures - addRelay doesn't throw, it just doesn't add failed relays
const relaysToConnect = availableRelays.filter(url => !this.relays.has(url));
if (relaysToConnect.length > 0) {
await Promise.allSettled(
relaysToConnect.map(url => this.addRelay(url))
);
}
// Get list of actually connected relays
const connectedRelays = availableRelays.filter(url => this.relays.has(url));
if (connectedRelays.length === 0) {
// Only log at debug level to reduce console noise - cache will be used instead
console.debug(`[nostr-client] No connected relays available for fetch (${relays.length} requested, all failed or unavailable), will use cache if available`);
return [];
}
// Only log if we're missing a significant number of relays
if (connectedRelays.length < relays.length * 0.5) {
console.debug(`[nostr-client] Fetching from ${connectedRelays.length} connected relay(s) out of ${relays.length} requested`);
}
// Process relays sequentially with throttling to avoid overload
const events: Map<string, NostrEvent> = new Map();
for (const relayUrl of connectedRelays) {
await this.throttledRelayRequest(relayUrl, filters, events, timeout);
// Small delay between relays
await new Promise(resolve => setTimeout(resolve, 100));
}
const eventArray = Array.from(events.values());
const filtered = filterEvents(eventArray);
// Zap receipts below the threshold are kept out of the cache writes.
const zapFiltered = filtered.filter(event => !this.shouldFilterZapReceipt(event));
if (options.cacheResults && zapFiltered.length > 0) {
cacheEvents(zapFiltered).catch(() => {
// Silently fail
});
}
// Only call onUpdate if we got new events AND onUpdate is provided
// This prevents clearing the UI when background fetch returns fewer results
if (options.onUpdate && filtered.length > 0) {
console.log(`[nostr-client] Fetch returned ${filtered.length} events, calling onUpdate`);
options.onUpdate(filtered);
} else if (options.onUpdate && filtered.length === 0) {
console.debug(`[nostr-client] Fetch returned 0 events, skipping onUpdate to preserve cached results`);
} else if (!options.onUpdate) {
// Only log background refreshes that return events, not empty results
if (filtered.length > 0) {
console.debug(`[nostr-client] Fetch returned ${filtered.length} events (background refresh, no onUpdate)`);
}
}
return filtered;
}
/**
 * Load a single event by id, checking the local cache before asking relays.
 * Returns null when the event cannot be found anywhere.
 */
async getEventById(id: string, relays: string[]): Promise<NostrEvent | null> {
  try {
    const cached = await getEvent(id);
    if (cached) return cached;
  } catch {
    // Cache unavailable — fall back to the relays.
  }
  const fetched = await this.fetchEvents([{ ids: [id] }], relays, { useCache: false });
  return fetched[0] ?? null;
}
/** Query the local cache only — no relay traffic is generated. */
async getByFilters(filters: Filter[]): Promise<NostrEvent[]> {
return this.getCachedEvents(filters);
}
/** Expose the module-level client configuration. */
getConfig() {
return config;
}
/** URLs of relays currently held in the connection map. */
getConnectedRelays(): string[] {
return Array.from(this.relays.keys());
}
/**
 * Shut down the client: close all subscriptions and relay connections and
 * reset state so initialize() can run again.
 * Individual close() failures (already-dead connections) are swallowed so
 * one bad connection can't abort the rest of the cleanup — consistent with
 * unsubscribe() and removeRelay(). Auth state is cleared too, so relays
 * re-authenticate after a reconnect.
 */
close(): void {
  for (const { sub } of this.subscriptions.values()) {
    try {
      sub.close();
    } catch {
      // Connection already closed.
    }
  }
  this.subscriptions.clear();
  for (const relay of this.relays.values()) {
    try {
      relay.close();
    } catch {
      // Connection already closed.
    }
  }
  this.relays.clear();
  this.authenticatedRelays.clear();
  this.initialized = false;
}
}
// Shared singleton instance used throughout the app.
export const nostrClient = new NostrClient();