@@ -19,11 +19,10 @@ export interface PublishOptions {
}
interface FetchOptions {
useCache? : boolean ;
useCache? : boolean | 'cache-first' | 'relay-first' ; // Cache strategy: true/'cache-first' = check cache first (default), 'relay-first' = query relays first then cache fallback, false = no cache
cacheResults? : boolean ;
onUpdate ? : ( events : NostrEvent [ ] ) = > void ;
timeout? : number ;
relayFirst? : boolean ; // If true, query relays first with timeout, then fill from cache
priority ? : 'high' | 'medium' | 'low' ; // Priority level: high for critical UI (comments), low for background (reactions, profiles)
caller? : string ; // Optional caller identifier for logging (e.g., "topics/[name]/+page.svelte")
}
@@ -1361,8 +1360,25 @@ class NostrClient {
relays : string [ ] ,
options : FetchOptions = { }
) : Promise < NostrEvent [ ] > {
const { useCache = true , cacheResults = true , onUpdate , timeout = 10000 , relayFirst = false , priority = 'medium' , caller : providedCaller } = options ;
const { cacheResults = true , onUpdate , timeout = 10000 , priority = 'medium' , caller : providedCaller } = options ;
const caller = providedCaller || this . getCallerInfo ( ) ;
// Normalize useCache to a string strategy
let useCacheValue : 'cache-first' | 'relay-first' | false ;
const useCacheOption = options . useCache ? ? true ;
// Normalize boolean to string for consistency
if ( useCacheOption === true ) {
useCacheValue = 'cache-first' ;
} else if ( useCacheOption === false ) {
useCacheValue = false ;
} else {
useCacheValue = useCacheOption ; // Already a string
}
// Determine if we should use cache at all
const shouldUseCache = useCacheValue !== false ;
const isCacheFirst = useCacheValue === 'cache-first' ;
// Create a key for this fetch to prevent duplicates
const fetchKey = JSON . stringify ( {
@@ -1447,13 +1463,56 @@ class NostrClient {
// Mark this fetch as pending
this . emptyResultCache . set ( emptyCacheKey , { cachedAt : Date.now ( ) , pending : true } ) ;
// Always use relay-first mode: query relays first with timeout, then fill from cache if needed
{
// Fetching events from relays
const relayTimeout = timeout || 10000 ; // Default 10s timeout
const fetchPromise = ( async ( ) = > {
// For single relay queries, ensure connection is established and authenticated first
if ( relays . length === 1 ) {
// Create the main fetch promise
const relayTimeout = timeout || 10000 ; // Default 10s timeout
const fetchPromise = ( async ( ) = > {
// Check cache first if strategy is 'cache-first' (default behavior)
// This provides instant page loads from cache, then enhances with fresh relay data
if ( isCacheFirst ) {
try {
const cachedEvents = await this . getCachedEvents ( filters ) ;
if ( cachedEvents . length > 0 ) {
// Return cached data immediately for fast page loads
console . log ( ` [nostr-client] Fetch complete: ${ cachedEvents . length } from cache (instant), fetching from relays in background [ ${ filterDesc } ] from [ ${ relayDesc } ] ` ) ;
// Fetch from relays in background to enhance/update results
// Don't await this - let it run in background
this . fetchFromRelays ( filters , relays , {
cacheResults : cacheResults ,
onUpdate : ( freshEvents ) = > {
// Merge fresh events with cached events via onUpdate callback
if ( onUpdate ) {
onUpdate ( freshEvents ) ;
}
} ,
timeout : relayTimeout ,
priority : options.priority
} ) . then ( ( freshEvents ) = > {
// Log when background fetch completes
if ( freshEvents . length > 0 ) {
console . log ( ` [nostr-client] Background fetch complete: ${ freshEvents . length } fresh events from relays [ ${ filterDesc } ] from [ ${ relayDesc } ] ` ) ;
}
} ) . catch ( ( ) = > {
// Silently fail - background fetch is optional
} ) ;
// Clear pending flag since we got results
const currentEntry = this . emptyResultCache . get ( emptyCacheKey ) ;
if ( currentEntry ? . pending ) {
this . emptyResultCache . delete ( emptyCacheKey ) ;
}
return cachedEvents ;
}
} catch ( error ) {
console . debug ( '[nostr-client] Error querying cache first, falling back to relays:' , error ) ;
// Continue to relay fetch below
}
}
// Relay-first mode: query relays first with timeout, then fill from cache if needed
// For single relay queries, ensure connection is established and authenticated first
if ( relays . length === 1 ) {
const relayUrl = relays [ 0 ] ;
if ( ! this . relays . has ( relayUrl ) ) {
// Try to connect first
@@ -1553,7 +1612,7 @@ class NostrClient {
if ( relayEvents . length > 0 ) {
// Start cache enhancement in background, but wait for it before logging
const cacheEnhancementPromise = ( async ( ) = > {
if ( useCache && onUpdate ) {
if ( shouldUseCache && onUpdate ) {
try {
const cachedEvents = await this . getCachedEvents ( filters ) ;
if ( cachedEvents . length > 0 ) {
@@ -1593,9 +1652,9 @@ class NostrClient {
return relayEvents ;
}
// If no results from relays, try to fill from cache (only if useCache is true )
// If no results from relays, try to fill from cache (only if cache is enabled )
// IMPORTANT: In single-relay mode, useCache should be false to avoid showing events from other relays
if ( useCache ) {
if ( shouldUseCache ) {
try {
const cachedEvents = await this . getCachedEvents ( filters ) ;
if ( cachedEvents . length > 0 ) {
@@ -1647,14 +1706,13 @@ class NostrClient {
}
return finalEvents ;
} ) ( ) ;
this . activeFetches . set ( fetchKey , fetchPromise ) ;
fetchPromise . finally ( ( ) = > {
this . activeFetches . delete ( fetchKey ) ;
} ) ;
return fetchPromise ;
}
} ) ( ) ;
this . activeFetches . set ( fetchKey , fetchPromise ) ;
fetchPromise . finally ( ( ) = > {
this . activeFetches . delete ( fetchKey ) ;
} ) ;
return fetchPromise ;
}
private async fetchFromRelays (