Browse Source

make client more efficient

imwald
Silberengel 1 month ago
parent
commit
b14d9cd3c5
  1. 12
      src/PageManager.tsx
  2. 4
      src/components/Note/index.tsx
  3. 2
      src/components/NoteCard/MainNoteCard.tsx
  4. 2
      src/components/NotificationList/NotificationItem/Notification.tsx
  5. 2
      src/components/RelayInfo/RelayReviewCard.tsx
  6. 28
      src/hooks/useFetchEvent.tsx
  7. 43
      src/hooks/useFetchProfile.tsx
  8. 4
      src/pages/secondary/NotePage/index.tsx
  9. 430
      src/services/client-replaceable-events.service.ts
  10. 46
      src/services/navigation-event-store.ts

12
src/PageManager.tsx

@@ -18,6 +18,9 @@ import RssFeedSettingsPage from '@/pages/secondary/RssFeedSettingsPage'
import NoteDrawer from '@/components/NoteDrawer'
import SecondaryProfilePage from '@/pages/secondary/ProfilePage'
import storage from '@/services/local-storage.service'
import client from '@/services/client.service'
import { navigationEventStore } from '@/services/navigation-event-store'
import type { Event } from 'nostr-tools'
import { Sheet, SheetContent } from '@/components/ui/sheet'
import FollowingListPage from '@/pages/secondary/FollowingListPage'
import MuteListPage from '@/pages/secondary/MuteListPage'
@@ -222,10 +225,17 @@ export function useSmartNoteNavigation() {
const { isSmallScreen } = useScreenSize()
const { current: currentPrimaryPage } = usePrimaryPage()
const navigateToNote = (url: string) => {
const navigateToNote = (url: string, event?: Event) => {
// Extract noteId from URL (handles both /notes/{id} and /{context}/notes/{id})
const { noteId } = parseNoteUrl(url)
// If event is provided, store it in navigation event store to avoid re-fetching
if (event) {
navigationEventStore.setEvent(event)
// Also add to cache for future use
client.addEventToCache(event)
}
// Build contextual URL based on current page
const contextualUrl = buildNoteUrl(noteId, currentPrimaryPage)

4
src/components/Note/index.tsx

@ -262,7 +262,7 @@ export default function Note({ @@ -262,7 +262,7 @@ export default function Note({
}
e.stopPropagation()
client.addEventToCache(event)
navigateToNote(toNote(event))
navigateToNote(toNote(event), event)
}}
>
<div className="flex justify-between items-start gap-2">
@ -294,7 +294,7 @@ export default function Note({ @@ -294,7 +294,7 @@ export default function Note({
onClick={(e) => {
e.stopPropagation()
client.addEventToCache(event)
navigateToNote(toNote(event))
navigateToNote(toNote(event), event)
}}
title="View in Discussions"
>

2
src/components/NoteCard/MainNoteCard.tsx

@ -44,7 +44,7 @@ export default function MainNoteCard({ @@ -44,7 +44,7 @@ export default function MainNoteCard({
e.stopPropagation()
client.addEventToCache(event)
const noteUrl = toNote(originalNoteId ?? event)
navigateToNote(noteUrl)
navigateToNote(noteUrl, event)
}}
>
<div className={`clickable ${embedded ? 'p-2 sm:p-3 border rounded-lg' : 'py-3'}`} style={embedded ? { position: 'relative', isolation: 'isolate', overflow: 'visible' } : undefined}>

2
src/components/NotificationList/NotificationItem/Notification.tsx

@ -54,7 +54,7 @@ export default function Notification({ @@ -54,7 +54,7 @@ export default function Notification({
if (targetEvent) {
client.addEventToCache(targetEvent)
navigateToNote(toNote(targetEvent.id))
navigateToNote(toNote(targetEvent.id), targetEvent)
} else if (pubkey) {
push(toProfile(pubkey))
}

2
src/components/RelayInfo/RelayReviewCard.tsx

@ -33,7 +33,7 @@ export default function RelayReviewCard({ @@ -33,7 +33,7 @@ export default function RelayReviewCard({
return
}
client.addEventToCache(event)
navigateToNote(toNote(event))
navigateToNote(toNote(event), event)
}}
>
<div className="flex justify-between items-start gap-2">

28
src/hooks/useFetchEvent.tsx

@ -1,15 +1,16 @@ @@ -1,15 +1,16 @@
import { useDeletedEvent } from '@/providers/DeletedEventProvider'
import { useReply } from '@/providers/ReplyProvider'
import { eventService } from '@/services/client.service'
import { navigationEventStore } from '@/services/navigation-event-store'
import { Event } from 'nostr-tools'
import { useEffect, useState } from 'react'
export function useFetchEvent(eventId?: string) {
export function useFetchEvent(eventId?: string, initialEvent?: Event) {
const { isEventDeleted } = useDeletedEvent()
const { addReplies } = useReply()
const [error, setError] = useState<Error | null>(null)
const [event, setEvent] = useState<Event | undefined>(undefined)
const [isFetching, setIsFetching] = useState(true)
const [event, setEvent] = useState<Event | undefined>(initialEvent)
const [isFetching, setIsFetching] = useState(!initialEvent)
useEffect(() => {
if (!eventId) {
@ -18,6 +19,25 @@ export function useFetchEvent(eventId?: string) { @@ -18,6 +19,25 @@ export function useFetchEvent(eventId?: string) {
return
}
// If we have an initial event that matches the eventId, use it and skip fetching
if (initialEvent && (initialEvent.id === eventId || eventId.includes(initialEvent.id))) {
if (!isEventDeleted(initialEvent)) {
setEvent(initialEvent)
addReplies([initialEvent])
setIsFetching(false)
}
return
}
// Check navigation event store first (events passed through navigation)
const navigationEvent = navigationEventStore.getEvent(eventId)
if (navigationEvent && !isEventDeleted(navigationEvent)) {
setEvent(navigationEvent)
addReplies([navigationEvent])
setIsFetching(false)
return
}
setIsFetching(true)
const fetchEvent = async () => {
@ -36,7 +56,7 @@ export function useFetchEvent(eventId?: string) { @@ -36,7 +56,7 @@ export function useFetchEvent(eventId?: string) {
}
fetchEvent()
}, [eventId, isEventDeleted, addReplies])
}, [eventId, initialEvent, isEventDeleted, addReplies])
useEffect(() => {
if (event && isEventDeleted(event)) {

43
src/hooks/useFetchProfile.tsx

@ -117,8 +117,7 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -117,8 +117,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
// Extract pubkey early to check if id has changed
const extractedPubkey = id ? userIdToPubkey(id) : null
// EARLY EXIT: If we're already processing this exact pubkey, skip immediately
// This prevents the effect from doing any work if it's already running
// CRITICAL: Early exit if already processing this exact pubkey - prevents infinite loops
if (extractedPubkey && processingPubkeyRef.current === extractedPubkey) {
logger.info('[useFetchProfile] EARLY EXIT: Already processing this pubkey', {
extractedPubkey,
@ -127,14 +126,18 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -127,14 +126,18 @@ export function useFetchProfile(id?: string, skipCache = false) {
return
}
// Guard against infinite loops: limit effect runs per pubkey
// CRITICAL: Guard against infinite loops - limit effect runs per pubkey (reduced from 10 to 3)
if (extractedPubkey) {
const runCount = effectRunCountRef.current.get(extractedPubkey) || 0
if (runCount > 10) {
if (runCount >= 3) {
logger.warn('[useFetchProfile] Too many effect runs for this pubkey, preventing infinite loop', {
extractedPubkey,
runCount
})
// Clear the run count after a delay to allow retries later
setTimeout(() => {
effectRunCountRef.current.delete(extractedPubkey)
}, 30000) // Clear after 30 seconds
return
}
effectRunCountRef.current.set(extractedPubkey, runCount + 1)
@ -220,17 +223,17 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -220,17 +223,17 @@ export function useFetchProfile(id?: string, skipCache = false) {
return
}
// Also check if we already have a profile for this pubkey before starting a new fetch
// CRITICAL: Check if we already have a profile for this pubkey before starting a new fetch
// This prevents re-fetching when profile state already exists
if (profile && profile.pubkey === extractedPubkey) {
logger.info('[useFetchProfile] Already have profile for this pubkey, skipping fetch', {
extractedPubkey
})
// Still update the ref to prevent re-processing
// Mark as processing to prevent re-fetch, but don't update state unnecessarily
processingPubkeyRef.current = extractedPubkey
setIsFetching(false)
if (pubkey !== extractedPubkey) {
setPubkey(extractedPubkey)
}
// Clear run count since we have the profile
effectRunCountRef.current.delete(extractedPubkey)
return
}
@ -239,8 +242,8 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -239,8 +242,8 @@ export function useFetchProfile(id?: string, skipCache = false) {
// This prevents the effect from running again for the same pubkey
processingPubkeyRef.current = extractedPubkey
// Only set pubkey state if it's different to avoid unnecessary re-renders
// Do this AFTER setting the ref to prevent loops
// CRITICAL: Only update pubkey state if it's actually different
// Avoid state updates that could trigger re-renders and loops
if (pubkey !== extractedPubkey) {
setPubkey(extractedPubkey)
}
@ -290,9 +293,10 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -290,9 +293,10 @@ export function useFetchProfile(id?: string, skipCache = false) {
setError(null) // Clear any previous errors
// If no profile was found, periodically re-check (profiles might load asynchronously)
// Check every 2 seconds for up to 30 seconds (15 checks)
// REDUCED: Check every 5 seconds for up to 20 seconds (4 checks) to prevent too many intervals
// This reduces memory usage when many profiles are being fetched (e.g., trending page)
let checkCount = 0
const maxChecks = 15
const maxChecks = 4 // Reduced from 15 to prevent browser crashes
checkIntervalRef.current = setInterval(async () => {
if (cancelled.current || checkCount >= maxChecks) {
@ -312,7 +316,7 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -312,7 +316,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
checkIntervalRef.current = null
}
}
}, 2000) // Check every 2 seconds
}, 5000) // Increased from 2 seconds to 5 seconds to reduce load
} catch (err) {
logger.error('[useFetchProfile] run() error', {
pubkey: extractedPubkey,
@ -353,12 +357,13 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -353,12 +357,13 @@ export function useFetchProfile(id?: string, skipCache = false) {
}, [id, skipCache]) // checkProfile is memoized and stable, no need to include it
useEffect(() => {
// Only use currentAccountProfile if it matches the pubkey we're looking for
// CRITICAL: Only use currentAccountProfile if it matches the pubkey we're looking for
// Use pubkey from the profile object to avoid reference equality issues
// Only update if we don't have a profile yet AND we're not currently processing
if (currentAccountProfile?.pubkey && pubkey && pubkey === currentAccountProfile.pubkey) {
// Only update if we don't have a profile yet (avoid unnecessary updates)
// Using a ref to track if we've already set it to prevent loops
if (!profile) {
// Also check that we're processing this pubkey to prevent race conditions
if (!profile && processingPubkeyRef.current === pubkey) {
setProfile(currentAccountProfile)
setIsFetching(false)
// Clear interval if we got the profile from current account
@ -366,9 +371,11 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -366,9 +371,11 @@ export function useFetchProfile(id?: string, skipCache = false) {
clearInterval(checkIntervalRef.current)
checkIntervalRef.current = null
}
// Clear run count since we have the profile
effectRunCountRef.current.delete(pubkey)
}
}
}, [currentAccountProfile?.pubkey, pubkey]) // Removed profile?.pubkey to prevent loops
}, [currentAccountProfile?.pubkey, pubkey, profile]) // Include profile to prevent unnecessary updates
return { isFetching, error, profile }
}

4
src/pages/secondary/NotePage/index.tsx

@ -90,9 +90,9 @@ function stripMarkdown(content: string): string { @@ -90,9 +90,9 @@ function stripMarkdown(content: string): string {
return text.trim()
}
const NotePage = forwardRef(({ id, index, hideTitlebar = false }: { id?: string; index?: number; hideTitlebar?: boolean }, ref) => {
const NotePage = forwardRef(({ id, index, hideTitlebar = false, initialEvent }: { id?: string; index?: number; hideTitlebar?: boolean; initialEvent?: Event }, ref) => {
const { t } = useTranslation()
const { event, isFetching } = useFetchEvent(id)
const { event, isFetching } = useFetchEvent(id, initialEvent)
const [externalEvent, setExternalEvent] = useState<Event | undefined>(undefined)
const finalEvent = event || externalEvent

430
src/services/client-replaceable-events.service.ts

@ -1,4 +1,4 @@ @@ -1,4 +1,4 @@
import { ExtendedKind, FAST_READ_RELAY_URLS } from '@/constants'
import { ExtendedKind, FAST_READ_RELAY_URLS, PROFILE_FETCH_RELAY_URLS } from '@/constants'
import { kinds, nip19 } from 'nostr-tools'
import type { Event as NEvent, Filter } from 'nostr-tools'
import DataLoader from 'dataloader'
@ -60,36 +60,6 @@ export class ReplaceableEventService { @@ -60,36 +60,6 @@ export class ReplaceableEventService {
)
}
/**
* Extract relay hints from event tags (e, a, q tags - 3rd position)
*/
private extractRelayHintsFromEvent(event: NEvent | undefined): string[] {
if (!event) return []
const hints = new Set<string>()
// Extract from e, a, q tags (relay hint is in position 2, index 2)
const tagTypesWithRelayHints = ['e', 'a', 'q']
for (const tag of event.tags) {
if (tagTypesWithRelayHints.includes(tag[0]) && tag.length > 2 && typeof tag[2] === 'string') {
const hint = tag[2]
if (hint.startsWith('wss://') || hint.startsWith('ws://')) {
hints.add(hint)
}
}
}
// Also check for dedicated "relays" tag
const relaysTag = event.tags.find(tag => tag[0] === 'relays')
if (relaysTag && relaysTag.length > 1) {
relaysTag.slice(1).forEach(url => {
if (typeof url === 'string' && (url.startsWith('wss://') || url.startsWith('ws://'))) {
hints.add(url)
}
})
}
return Array.from(hints)
}
/**
* Build comprehensive relay list: author's outboxes + user's inboxes + relay hints + defaults
@ -119,7 +89,7 @@ export class ReplaceableEventService { @@ -119,7 +89,7 @@ export class ReplaceableEventService {
/**
* Fetch replaceable event (profile, relay list, etc.)
* Always checks in-memory cache FIRST (instant), then IndexedDB, then fetches from relays
* Uses DataLoader to batch IndexedDB checks and network fetches
* ALWAYS uses: author's outboxes + user's inboxes + relay hints + defaults
* For profiles/metadata: includes user's own relays (read/write/local) + PROFILE_FETCH_RELAY_URLS
*
@ -143,73 +113,37 @@ export class ReplaceableEventService { @@ -143,73 +113,37 @@ export class ReplaceableEventService {
containingEventRelays: containingEventRelays.length
})
// 1. Check IndexedDB (async but faster than network)
try {
logger.info('[ReplaceableEventService] Checking IndexedDB', {
pubkey,
kind
})
const indexedDbCached = await indexedDb.getReplaceableEvent(pubkey, kind, d)
logger.info('[ReplaceableEventService] IndexedDB query completed', {
pubkey,
kind,
found: !!indexedDbCached
})
if (indexedDbCached) {
logger.info('[ReplaceableEventService] Found in IndexedDB', {
// If we have containing event relays and this is a profile, we need to use a custom relay list
// Otherwise, use DataLoader (which batches IndexedDB checks and network fetches)
let event: NEvent | undefined
if (containingEventRelays.length > 0 && kind === kinds.Metadata && !d) {
// For profiles with containing event relays (author's relay list), check IndexedDB first, then query directly
logger.info('[ReplaceableEventService] Checking IndexedDB for profile with containing relays', {
pubkey,
kind,
eventId: indexedDbCached.id,
created_at: indexedDbCached.created_at
kind
})
// Check tombstone in background (non-blocking)
const tombstoneKey = isReplaceableEvent(kind)
? getReplaceableCoordinateFromEvent(indexedDbCached)
: indexedDbCached.id
indexedDb.isTombstoned(tombstoneKey).then(isTombstoned => {
if (isTombstoned) {
// Event is tombstoned - will be handled by IndexedDB cleanup
logger.warn('[ReplaceableEventService] Event is tombstoned', {
try {
const indexedDbCached = await indexedDb.getReplaceableEvent(pubkey, kind, d)
if (indexedDbCached) {
logger.info('[ReplaceableEventService] Found in IndexedDB', {
pubkey,
kind,
tombstoneKey
eventId: indexedDbCached.id
})
// Refresh in background
this.refreshInBackground(pubkey, kind, d).catch(() => {})
return indexedDbCached
}
}).catch(() => {
// If tombstone check fails, keep it in cache (better to show stale than nothing)
})
} catch (error) {
logger.warn('[ReplaceableEventService] IndexedDB error', {
pubkey,
kind,
error: error instanceof Error ? error.message : String(error)
})
}
// Fetch in background to update cache if newer version exists
this.refreshInBackground(pubkey, kind, d).catch(() => {})
return indexedDbCached
}
logger.info('[ReplaceableEventService] Not found in IndexedDB', {
pubkey,
kind
})
} catch (error) {
// IndexedDB error - continue to network fetch
logger.warn('[ReplaceableEventService] IndexedDB error', {
pubkey,
kind,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined
})
}
// 2. Not in cache, fetch from network
logger.info('[ReplaceableEventService] Fetching from network', {
pubkey,
kind,
usingContainingRelays: containingEventRelays.length > 0 && kind === kinds.Metadata && !d
})
try {
// If we have containing event relays and this is a profile, we need to use a custom relay list
// Otherwise, use DataLoader (which uses comprehensive relay list)
let event: NEvent | undefined
if (containingEventRelays.length > 0 && kind === kinds.Metadata && !d) {
// For profiles with containing event relays (author's relay list), build custom relay list and query directly
// Not in IndexedDB, fetch from network with custom relay list
logger.info('[ReplaceableEventService] Building relay list with containing event relays', {
pubkey,
containingRelayCount: containingEventRelays.length
@ -218,7 +152,7 @@ export class ReplaceableEventService { @@ -218,7 +152,7 @@ export class ReplaceableEventService {
logger.info('[ReplaceableEventService] Querying relays', {
pubkey,
relayCount: relayUrls.length,
relays: relayUrls.slice(0, 5) // Log first 5 for debugging
relays: relayUrls.slice(0, 5)
})
const startTime = Date.now()
const events = await this.queryService.query(relayUrls, {
@ -238,14 +172,11 @@ export class ReplaceableEventService { @@ -238,14 +172,11 @@ export class ReplaceableEventService {
const sortedEvents = events.sort((a, b) => b.created_at - a.created_at)
event = sortedEvents.length > 0 ? sortedEvents[0] : undefined
} else {
// Use DataLoader for batching
logger.info('[ReplaceableEventService] Using DataLoader', {
// Use DataLoader for batching (IndexedDB checks and network fetches are batched)
logger.info('[ReplaceableEventService] Using DataLoader (batches IndexedDB + network)', {
pubkey,
kind,
d,
kindValue: kind,
isMetadata: kind === kinds.Metadata,
expectedMetadata: kinds.Metadata
d
})
const startTime = Date.now()
const loadedEvent = d
@ -261,23 +192,12 @@ export class ReplaceableEventService { @@ -261,23 +192,12 @@ export class ReplaceableEventService {
}
if (event) {
logger.info('[ReplaceableEventService] Event found from network', {
logger.info('[ReplaceableEventService] Event found', {
pubkey,
kind,
eventId: event.id,
created_at: event.created_at
})
// Extract relay hints from the found event (for future related fetches)
const eventRelayHints = this.extractRelayHintsFromEvent(event)
// If we found relay hints, log them (they're already used in the batch load function)
if (eventRelayHints.length > 0) {
logger.debug('[ReplaceableEventService] Found relay hints in event', {
pubkey,
hintCount: eventRelayHints.length
})
}
return event
}
@ -389,6 +309,7 @@ export class ReplaceableEventService { @@ -389,6 +309,7 @@ export class ReplaceableEventService {
/**
* Private: Batch load function for replaceable events from big relays
* Batches IndexedDB checks first, then only fetches missing events from network
*/
private async replaceableEventFromBigRelaysBatchLoadFn(
params: readonly { pubkey: string; kind: number }[]
@ -397,6 +318,8 @@ export class ReplaceableEventService { @@ -397,6 +318,8 @@ export class ReplaceableEventService {
paramCount: params.length,
pubkeys: params.map(p => p.pubkey.substring(0, 8))
})
// Step 1: Batch check IndexedDB for all requested events
const groups = new Map<number, string[]>()
params.forEach(({ pubkey, kind }) => {
if (!groups.has(kind)) {
@ -404,10 +327,84 @@ export class ReplaceableEventService { @@ -404,10 +327,84 @@ export class ReplaceableEventService {
}
groups.get(kind)!.push(pubkey)
})
const results: (NEvent | null)[] = new Array(params.length).fill(null)
const eventsMap = new Map<string, NEvent>()
const missingParams: { pubkey: string; kind: number; index: number }[] = []
// Batch IndexedDB checks by kind
await Promise.allSettled(
Array.from(groups.entries()).map(async ([kind, pubkeys]) => {
try {
// Use batched IndexedDB query
const indexedDbEvents = await indexedDb.getManyReplaceableEvents(pubkeys, kind)
logger.info('[ReplaceableEventService] IndexedDB batch query completed', {
kind,
pubkeyCount: pubkeys.length,
foundCount: indexedDbEvents.filter(e => e !== null && e !== undefined).length
})
// Map IndexedDB results back to params
pubkeys.forEach((pubkey, idx) => {
const paramIndex = params.findIndex(p => p.pubkey === pubkey && p.kind === kind)
if (paramIndex >= 0) {
const event = indexedDbEvents[idx]
if (event && event !== null) {
results[paramIndex] = event
eventsMap.set(`${pubkey}:${kind}`, event)
// Check tombstone in background (non-blocking)
const tombstoneKey = isReplaceableEvent(kind)
? getReplaceableCoordinateFromEvent(event)
: event.id
indexedDb.isTombstoned(tombstoneKey).catch(() => {})
// Refresh in background
this.refreshInBackground(pubkey, kind).catch(() => {})
} else {
missingParams.push({ pubkey, kind, index: paramIndex })
}
}
})
} catch (error) {
logger.warn('[ReplaceableEventService] IndexedDB batch query error', {
kind,
error: error instanceof Error ? error.message : String(error)
})
// If IndexedDB fails, mark all as missing
pubkeys.forEach((pubkey) => {
const paramIndex = params.findIndex(p => p.pubkey === pubkey && p.kind === kind)
if (paramIndex >= 0) {
missingParams.push({ pubkey, kind, index: paramIndex })
}
})
}
})
)
// Step 2: Only fetch missing events from network
if (missingParams.length === 0) {
logger.info('[ReplaceableEventService] All events found in IndexedDB, skipping network fetch', {
totalCount: params.length
})
return results
}
logger.info('[ReplaceableEventService] Fetching missing events from network', {
missingCount: missingParams.length,
totalCount: params.length
})
// Group missing params by kind for network fetch
const missingGroups = new Map<number, { pubkey: string; index: number }[]>()
missingParams.forEach(({ pubkey, kind, index }) => {
if (!missingGroups.has(kind)) {
missingGroups.set(kind, [])
}
missingGroups.get(kind)!.push({ pubkey, index })
})
await Promise.allSettled(
Array.from(missingGroups.entries()).map(async ([kind, missingItems]) => {
const pubkeys = missingItems.map(item => item.pubkey)
// ALWAYS use comprehensive relay list: author's outboxes + user's inboxes + defaults
// For profiles/metadata: includes user's own relays (read/write/local) + PROFILE_FETCH_RELAY_URLS
// For each pubkey, build comprehensive relay list
@ -415,25 +412,13 @@ export class ReplaceableEventService { @@ -415,25 +412,13 @@ export class ReplaceableEventService {
kind,
pubkeyCount: pubkeys.length
})
const relayUrlSets = await Promise.all(
pubkeys.map(async (pubkey) => {
// Build comprehensive relay list for this author
return await this.buildComprehensiveRelayListForAuthor(pubkey, kind, [], [])
})
)
logger.info('[ReplaceableEventService] Relay lists built, merging', {
kind,
pubkeyCount: pubkeys.length,
relayListCount: relayUrlSets.length
})
// Merge all relay sets
const mergedRelays = new Set<string>()
relayUrlSets.forEach(relayList => {
relayList.forEach(url => mergedRelays.add(url))
})
const relayUrls = Array.from(mergedRelays)
// CRITICAL FIX: For batch fetches, use default relays instead of fetching relay lists for each author
// Fetching relay lists for hundreds of authors causes infinite loops and browser crashes
// Use PROFILE_FETCH_RELAY_URLS + FAST_READ_RELAY_URLS for profiles, or FAST_READ_RELAY_URLS for other kinds
const relayUrls = kind === kinds.Metadata
? Array.from(new Set([...PROFILE_FETCH_RELAY_URLS, ...FAST_READ_RELAY_URLS]))
: [...FAST_READ_RELAY_URLS]
logger.info('[ReplaceableEventService] Using comprehensive relay list', {
pubkeyCount: pubkeys.length,
totalRelayCount: relayUrls.length,
@ -476,25 +461,35 @@ export class ReplaceableEventService { @@ -476,25 +461,35 @@ export class ReplaceableEventService {
const existing = eventsMap.get(key)
if (!existing || existing.created_at < event.created_at) {
eventsMap.set(key, event)
// Update results array for this event
const itemIndex = missingItems.findIndex(item => item.pubkey === event.pubkey)
if (itemIndex >= 0) {
const paramIndex = missingItems[itemIndex]!.index
results[paramIndex] = event
}
}
}
})
)
const results = params.map(({ pubkey, kind }) => {
const key = `${pubkey}:${kind}`
const event = eventsMap.get(key)
if (event) {
indexedDb.putReplaceableEvent(event)
return event
} else {
indexedDb.putNullReplaceableEvent(pubkey, kind)
return null
}
})
// Step 3: Save network-fetched events to IndexedDB and mark missing ones as null
await Promise.allSettled(
missingParams.map(async ({ pubkey, kind }) => {
const key = `${pubkey}:${kind}`
const event = eventsMap.get(key)
if (event) {
await indexedDb.putReplaceableEvent(event)
} else {
await indexedDb.putNullReplaceableEvent(pubkey, kind)
}
})
)
logger.info('[ReplaceableEventService] Batch load function completed', {
paramCount: params.length,
foundCount: results.filter(r => r !== null).length
foundCount: results.filter(r => r !== null).length,
indexedDbCount: params.length - missingParams.length,
networkCount: missingParams.length
})
return results
}
@ -613,23 +608,48 @@ export class ReplaceableEventService { @@ -613,23 +608,48 @@ export class ReplaceableEventService {
throw new Error('Invalid id')
}
logger.info('[ReplaceableEventService] Fetching author relay list', {
// CRITICAL: Always use relay hints from bech32 addresses (nprofile, naddr, nevent) when available
// Relay hints should have highest priority and always be included
const relayHints = relays.length > 0 ? [...relays] : []
// Step 1: Try with relay hints + default relays first (checks IndexedDB via DataLoader, then network)
// Always include relay hints if provided, then add default profile fetch relays
const defaultRelays = relayHints.length > 0
? [...new Set([...relayHints, ...PROFILE_FETCH_RELAY_URLS, ...FAST_READ_RELAY_URLS])]
: [...PROFILE_FETCH_RELAY_URLS, ...FAST_READ_RELAY_URLS]
logger.info('[ReplaceableEventService] Step 1: Trying with relay hints + default relays (checks cache first)', {
pubkey,
relayHintCount: relayHints.length,
totalRelayCount: defaultRelays.length,
hasRelayHints: relayHints.length > 0
})
// fetchReplaceableEvent uses DataLoader which checks IndexedDB first, then queries relays
const profileEvent = await this.fetchReplaceableEvent(pubkey, kinds.Metadata, undefined, defaultRelays)
if (profileEvent) {
logger.info('[ReplaceableEventService] Profile found with relay hints + default relays', {
pubkey,
eventId: profileEvent.id
})
await this.indexProfile(profileEvent)
return profileEvent
}
// Step 2: Not found in cache or default relays - fetch author's relay list as fallback
logger.info('[ReplaceableEventService] Step 2: Profile not found, fetching author relay list as fallback', {
pubkey
})
// For profiles: get author's relay list (from cache if available) and use those relays
// Profiles are often on the same relays where the author publishes their events
let authorRelayList: { read?: string[]; write?: string[] } | null = null
try {
const relayListStartTime = Date.now()
logger.info('[ReplaceableEventService] About to call client.fetchRelayList', {
pubkey
})
// Add timeout to prevent hanging - 2 seconds max
const relayListPromise = client.fetchRelayList(pubkey)
const timeoutPromise = new Promise<null>((resolve) => {
setTimeout(() => {
logger.warn('[ReplaceableEventService] fetchRelayList timeout, continuing without author relays', {
logger.warn('[ReplaceableEventService] fetchRelayList timeout, giving up', {
pubkey
})
resolve(null)
@ -637,83 +657,63 @@ export class ReplaceableEventService { @@ -637,83 +657,63 @@ export class ReplaceableEventService {
})
authorRelayList = await Promise.race([relayListPromise, timeoutPromise])
const relayListTime = Date.now() - relayListStartTime
logger.info('[ReplaceableEventService] client.fetchRelayList returned', {
logger.info('[ReplaceableEventService] Author relay list fetched', {
pubkey,
hasRelayList: !!authorRelayList,
fetchTime: `${relayListTime}ms`
})
// Use author's outboxes (write relays) and inboxes (read relays) - profiles are often there
if (authorRelayList) {
const authorRelays = [
...(authorRelayList.write || []).slice(0, 10),
...(authorRelayList.read || []).slice(0, 10)
]
relays = [...new Set([...relays, ...authorRelays])]
logger.info('[ReplaceableEventService] Using author relay list for profile fetch', {
pubkey,
authorRelayCount: authorRelays.length,
totalRelayCount: relays.length,
fetchTime: `${relayListTime}ms`,
writeRelays: authorRelayList.write?.slice(0, 3) || [],
readRelays: authorRelayList.read?.slice(0, 3) || []
})
} else {
logger.info('[ReplaceableEventService] No author relay list available, using default relays', {
pubkey,
existingRelayCount: relays.length
})
}
} catch (error) {
logger.error('[ReplaceableEventService] Failed to fetch author relay list for profile', {
logger.error('[ReplaceableEventService] Failed to fetch author relay list', {
pubkey,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined
error: error instanceof Error ? error.message : String(error)
})
}
// Use fetchReplaceableEvent which checks IndexedDB then network
logger.info('[ReplaceableEventService] Calling fetchReplaceableEvent', {
pubkey,
relayCount: relays.length
})
const profileEvent = await this.fetchReplaceableEvent(pubkey, kinds.Metadata, undefined, relays)
logger.info('[ReplaceableEventService] fetchReplaceableEvent returned', {
pubkey,
hasEvent: !!profileEvent,
eventId: profileEvent?.id
})
if (profileEvent) {
await this.indexProfile(profileEvent)
return profileEvent
}
if (!relays.length) {
return undefined
}
// Try harder with specified relays
const events = await this.queryService.query(
relays,
{
authors: [pubkey],
kinds: [kinds.Metadata],
limit: 1
},
undefined,
{
replaceableRace: true,
eoseTimeout: 200,
globalTimeout: 3000
// Step 3: Try with relay hints + author's relays if we got them
// CRITICAL: Always include relay hints first (highest priority), then author relays, then defaults
if (authorRelayList) {
const authorRelays = [
...(authorRelayList.write || []).slice(0, 10),
...(authorRelayList.read || []).slice(0, 10)
]
// Relay hints first (highest priority), then author relays, then defaults
const allRelays = [...new Set([
...relayHints, // Relay hints from bech32 (highest priority)
...authorRelays, // Author's relays
...PROFILE_FETCH_RELAY_URLS, // Default profile relays
...FAST_READ_RELAY_URLS // Fast read relays
])]
logger.info('[ReplaceableEventService] Step 3: Trying with relay hints + author relays', {
pubkey,
relayHintCount: relayHints.length,
authorRelayCount: authorRelays.length,
totalRelayCount: allRelays.length
})
// Use fetchReplaceableEvent with relay hints + author's relays
const profileEventFromAuthorRelays = await this.fetchReplaceableEvent(
pubkey,
kinds.Metadata,
undefined,
allRelays
)
if (profileEventFromAuthorRelays) {
logger.info('[ReplaceableEventService] Profile found with relay hints + author relays', {
pubkey,
eventId: profileEventFromAuthorRelays.id
})
await this.indexProfile(profileEventFromAuthorRelays)
return profileEventFromAuthorRelays
}
)
const profileEventFromRelays = events[0]
if (profileEventFromRelays) {
await this.indexProfile(profileEventFromRelays)
await indexedDb.putReplaceableEvent(profileEventFromRelays)
}
return profileEventFromRelays
logger.warn('[ReplaceableEventService] Profile not found after trying all relays', {
pubkey,
triedRelayHints: relayHints.length > 0
})
return undefined
}
/**

46
src/services/navigation-event-store.ts

@ -0,0 +1,46 @@ @@ -0,0 +1,46 @@
/**
* Navigation Event Store
* Temporarily stores events when navigating to avoid re-fetching
*/
import { Event } from 'nostr-tools'
class NavigationEventStore {
  // Cap the number of retained events so navigations that are started but
  // never consumed by getEvent() cannot grow the map without bound. The
  // original implementation only deleted entries on retrieval, which leaks
  // every abandoned navigation.
  private static readonly MAX_ENTRIES = 50

  // Keyed by raw (hex) event id. Map preserves insertion order, which we
  // rely on for FIFO eviction when MAX_ENTRIES is exceeded.
  private eventMap = new Map<string, Event>()

  /**
   * Store an event for an upcoming navigation, keyed by its raw event id.
   *
   * Note: bech32 identifiers (nevent/naddr) are NOT indexed here; lookups
   * must use the raw event id embedded in them.
   */
  setEvent(event: Event): void {
    // Delete-then-set moves a re-stored event to the newest position so it
    // is evicted last.
    this.eventMap.delete(event.id)
    this.eventMap.set(event.id, event)
    // FIFO-evict the oldest entries once the cap is exceeded.
    while (this.eventMap.size > NavigationEventStore.MAX_ENTRIES) {
      const oldestKey = this.eventMap.keys().next().value
      if (oldestKey === undefined) break
      this.eventMap.delete(oldestKey)
    }
  }

  /**
   * Retrieve and consume an event by id.
   *
   * The entry is removed on retrieval so the store does not accumulate
   * events that have already been handed off to their destination page.
   */
  getEvent(eventId: string): Event | undefined {
    const event = this.eventMap.get(eventId)
    if (event) {
      // Remove after retrieval to prevent memory leaks
      this.eventMap.delete(eventId)
    }
    return event
  }

  /**
   * Check if an event exists without removing it
   */
  hasEvent(eventId: string): boolean {
    return this.eventMap.has(eventId)
  }

  /**
   * Clear all stored events (cleanup)
   */
  clear(): void {
    this.eventMap.clear()
  }
}

// Module-level singleton shared by the navigation helpers and useFetchEvent.
export const navigationEventStore = new NavigationEventStore()
Loading…
Cancel
Save