Browse Source

speed up spells

imwald
Silberengel 1 month ago
parent
commit
45c61bacd0
  1. 4
      package-lock.json
  2. 2
      package.json
  3. 48
      src/components/Embedded/EmbeddedNote.tsx
  4. 45
      src/components/Embedded/embeddedNotePointer.ts
  5. 1
      src/components/Embedded/index.tsx
  6. 396
      src/components/NoteList/index.tsx
  7. 3
      src/constants.ts
  8. 25
      src/hooks/useFetchProfile.tsx
  9. 115
      src/pages/primary/SpellsPage/fauxSpellFeeds.ts
  10. 234
      src/pages/primary/SpellsPage/index.tsx
  11. 14
      src/providers/NoteFeedProfileContext.tsx
  12. 209
      src/services/client.service.ts
  13. 6
      src/services/spell.service.ts

4
package-lock.json generated

@ -1,12 +1,12 @@
{ {
"name": "jumble-imwald", "name": "jumble-imwald",
"version": "19.1.0", "version": "19.1.1",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "jumble-imwald", "name": "jumble-imwald",
"version": "19.1.0", "version": "19.1.1",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@asciidoctor/core": "^3.0.4", "@asciidoctor/core": "^3.0.4",

2
package.json

@ -1,6 +1,6 @@
{ {
"name": "jumble-imwald", "name": "jumble-imwald",
"version": "19.1.0", "version": "19.1.1",
"description": "A user-friendly Nostr client focused on relay feed browsing and relay discovery, forked from Jumble", "description": "A user-friendly Nostr client focused on relay feed browsing and relay discovery, forked from Jumble",
"private": true, "private": true,
"type": "module", "type": "module",

48
src/components/Embedded/EmbeddedNote.tsx

@ -19,6 +19,10 @@ import { extractBookMetadata } from '@/lib/bookstr-parser'
import { contentParserService } from '@/services/content-parser.service' import { contentParserService } from '@/services/content-parser.service'
import { useSmartNoteNavigation } from '@/PageManager' import { useSmartNoteNavigation } from '@/PageManager'
import { toNote } from '@/lib/link' import { toNote } from '@/lib/link'
import {
type EmbeddedNoteIdValidation,
validateEmbeddedNotePointer
} from './embeddedNotePointer'
/** Embedded `noteId` is often raw hex from parsers; must accept A–F and normalize for REQ `ids`. */ /** Embedded `noteId` is often raw hex from parsers; must accept A–F and normalize for REQ `ids`. */
function hexEventIdFromNoteId(noteId: string): string | null { function hexEventIdFromNoteId(noteId: string): string | null {
@ -46,50 +50,6 @@ function canSearchOnExternalRelays(noteId: string): boolean {
} }
} }
export type EmbeddedNoteIdValidation =
| { valid: true }
| {
valid: false
reason: 'empty' | 'invalid_hex' | 'invalid_bech32' | 'wrong_nip19_type'
decodedType?: string
}
/**
* Only hex (64), note1, nevent1, and naddr1 are valid embedded note targets.
* Malformed bech32, wrong kinds (npub, nprofile, …), or bad hex length fail before fetch/search UI.
*/
export function validateEmbeddedNotePointer(noteId: string): EmbeddedNoteIdValidation {
const s = noteId.trim()
if (!s) return { valid: false, reason: 'empty' }
if (/^[0-9a-f]{64}$/i.test(s)) return { valid: true }
if (/^[0-9a-f]+$/i.test(s)) {
return { valid: false, reason: 'invalid_hex' }
}
const looksLikeNostrBech32 =
s.startsWith('n') && s.includes('1') && /^[a-z0-9]+$/i.test(s) && s.length >= 10
if (looksLikeNostrBech32) {
try {
const { type } = nip19.decode(s)
if (type === 'note' || type === 'nevent' || type === 'naddr') return { valid: true }
return { valid: false, reason: 'wrong_nip19_type', decodedType: type }
} catch {
return { valid: false, reason: 'invalid_bech32' }
}
}
try {
const { type } = nip19.decode(s)
if (type === 'note' || type === 'nevent' || type === 'naddr') return { valid: true }
return { valid: false, reason: 'wrong_nip19_type', decodedType: type }
} catch {
return { valid: false, reason: 'invalid_bech32' }
}
}
export function EmbeddedNote({ export function EmbeddedNote({
noteId, noteId,
className, className,

45
src/components/Embedded/embeddedNotePointer.ts

@ -0,0 +1,45 @@
import { nip19 } from 'nostr-tools'
export type EmbeddedNoteIdValidation =
  | { valid: true }
  | {
      valid: false
      reason: 'empty' | 'invalid_hex' | 'invalid_bech32' | 'wrong_nip19_type'
      decodedType?: string
    }

/**
 * Only hex (64), note1, nevent1, and naddr1 are valid embedded note targets.
 * Malformed bech32, wrong kinds (npub, nprofile, …), or bad hex length fail
 * before any fetch/search UI is shown.
 *
 * @param noteId - raw pointer string from content parsers: a 64-char hex event
 *                 id or a NIP-19 bech32 entity (whitespace is trimmed first).
 * @returns `{ valid: true }` for accepted pointers; otherwise a tagged reason,
 *          plus `decodedType` when the entity decoded but is the wrong kind.
 */
export function validateEmbeddedNotePointer(noteId: string): EmbeddedNoteIdValidation {
  const s = noteId.trim()
  if (!s) return { valid: false, reason: 'empty' }
  // Raw hex event id must be exactly 64 hex chars; any other all-hex string
  // is a truncated/overlong id, reported distinctly from bech32 failures.
  if (/^[0-9a-f]{64}$/i.test(s)) return { valid: true }
  if (/^[0-9a-f]+$/i.test(s)) return { valid: false, reason: 'invalid_hex' }
  // Everything else must decode as an event-like NIP-19 entity. The previous
  // "looks like bech32" pre-check was removed: its try/catch was byte-identical
  // to this fallback, so both branches always ran the exact same logic.
  try {
    const { type } = nip19.decode(s)
    if (type === 'note' || type === 'nevent' || type === 'naddr') return { valid: true }
    return { valid: false, reason: 'wrong_nip19_type', decodedType: type }
  } catch {
    return { valid: false, reason: 'invalid_bech32' }
  }
}

1
src/components/Embedded/index.tsx

@ -4,4 +4,5 @@ export * from './EmbeddedLNInvoice'
export * from './EmbeddedMention' export * from './EmbeddedMention'
export * from './EmbeddedNormalUrl' export * from './EmbeddedNormalUrl'
export * from './EmbeddedNote' export * from './EmbeddedNote'
export * from './embeddedNotePointer'
export * from './EmbeddedWebsocketUrl' export * from './EmbeddedWebsocketUrl'

396
src/components/NoteList/index.tsx

@ -1,6 +1,6 @@
import NewNotesButton from '@/components/NewNotesButton' import NewNotesButton from '@/components/NewNotesButton'
import { Button } from '@/components/ui/button' import { Button } from '@/components/ui/button'
import { ExtendedKind } from '@/constants' import { ExtendedKind, FIRST_RELAY_RESULT_GRACE_MS } from '@/constants'
import { import {
getEmbeddedNoteBech32Ids, getEmbeddedNoteBech32Ids,
getReplaceableCoordinateFromEvent, getReplaceableCoordinateFromEvent,
@ -29,17 +29,23 @@ import {
useCallback, useCallback,
useEffect, useEffect,
useImperativeHandle, useImperativeHandle,
useLayoutEffect,
useMemo, useMemo,
useRef, useRef,
useState useState
} from 'react' } from 'react'
import { useTranslation } from 'react-i18next' import { useTranslation } from 'react-i18next'
import PullToRefresh from 'react-simple-pull-to-refresh' import PullToRefresh from 'react-simple-pull-to-refresh'
import { formatPubkey, pubkeyToNpub } from '@/lib/pubkey'
import { NoteFeedProfileContext, type NoteFeedProfileContextValue } from '@/providers/NoteFeedProfileContext'
import type { TProfile } from '@/types'
import NoteCard, { NoteCardLoadingSkeleton } from '../NoteCard' import NoteCard, { NoteCardLoadingSkeleton } from '../NoteCard'
const LIMIT = 500 // Increased from 200 to load more events per request const LIMIT = 500 // Increased from 200 to load more events per request
const ALGO_LIMIT = 1000 // Increased from 500 for algorithm feeds const ALGO_LIMIT = 1000 // Increased from 500 for algorithm feeds
const SHOW_COUNT = 50 // Increased from 10 to show more events at once, reducing scroll load frequency const SHOW_COUNT = 50 // Increased from 10 to show more events at once, reducing scroll load frequency
const FEED_PROFILE_BATCH_DEBOUNCE_MS = 120
const FEED_PROFILE_CHUNK = 36
const NoteList = forwardRef( const NoteList = forwardRef(
( (
@ -62,7 +68,17 @@ const NoteList = forwardRef(
* When true, hydrate the list from the client timeline cache (IndexedDB-backed) before/at same time as * When true, hydrate the list from the client timeline cache (IndexedDB-backed) before/at same time as
* live REQ, so feeds feel instant on repeat visits. Spells faux feeds use this; home feed stays false. * live REQ, so feeds feel instant on repeat visits. Spells faux feeds use this; home feed stays false.
*/ */
useTimelineCacheBootstrap = false useTimelineCacheBootstrap = false,
/**
* When set (Spells page), passed to `subscribeTimeline` as `firstRelayResultGraceMs` — the ms to wait after
* the first live event before treating initial load as EOSE. Subscribe setup and loading fallback keep
* longer defaults so multi-relay spell feeds do not race-fail and stay blank after refresh.
*/
spellFetchTimeoutMs,
/** Spells page: bumps when user picks a feed; used with {@link onSpellFeedFirstPaint}. */
spellFeedInstrumentToken,
/** Spells page: fired once when the filtered list first has rows after a picker change. */
onSpellFeedFirstPaint
}: { }: {
subRequests: TFeedSubRequest[] subRequests: TFeedSubRequest[]
showKinds: number[] showKinds: number[]
@ -80,6 +96,9 @@ const NoteList = forwardRef(
extraShouldHideEvent?: (evt: Event) => boolean extraShouldHideEvent?: (evt: Event) => boolean
feedSubscriptionKey?: string feedSubscriptionKey?: string
useTimelineCacheBootstrap?: boolean useTimelineCacheBootstrap?: boolean
spellFetchTimeoutMs?: number
spellFeedInstrumentToken?: number
onSpellFeedFirstPaint?: (detail: { eventCount: number; firstEventId: string }) => void
}, },
ref ref
) => { ) => {
@ -91,6 +110,7 @@ const NoteList = forwardRef(
const { isEventDeleted } = useDeletedEvent() const { isEventDeleted } = useDeletedEvent()
const { zapReplyThreshold } = useZap() const { zapReplyThreshold } = useZap()
const [events, setEvents] = useState<Event[]>([]) const [events, setEvents] = useState<Event[]>([])
const eventsRef = useRef<Event[]>([])
const [newEvents, setNewEvents] = useState<Event[]>([]) const [newEvents, setNewEvents] = useState<Event[]>([])
const [hasMore, setHasMore] = useState<boolean>(true) const [hasMore, setHasMore] = useState<boolean>(true)
const [loading, setLoading] = useState(true) const [loading, setLoading] = useState(true)
@ -100,8 +120,29 @@ const NoteList = forwardRef(
const supportTouch = useMemo(() => isTouchDevice(), []) const supportTouch = useMemo(() => isTouchDevice(), [])
const bottomRef = useRef<HTMLDivElement | null>(null) const bottomRef = useRef<HTMLDivElement | null>(null)
const topRef = useRef<HTMLDivElement | null>(null) const topRef = useRef<HTMLDivElement | null>(null)
const spellFeedFirstPaintLoggedKeyRef = useRef('')
const consecutiveEmptyRef = useRef(0) // Track consecutive empty results to prevent infinite retries const consecutiveEmptyRef = useRef(0) // Track consecutive empty results to prevent infinite retries
const loadMoreTimeoutRef = useRef<NodeJS.Timeout | null>(null) // Throttle loadMore calls to prevent stuttering const loadMoreTimeoutRef = useRef<NodeJS.Timeout | null>(null) // Throttle loadMore calls to prevent stuttering
/** Batched profile + embed prefetch after timeline updates (avoids N×9s profile storms while relays stream). */
const timelinePrefetchDebounceRef = useRef<ReturnType<typeof setTimeout> | null>(null)
const lastEventsForTimelinePrefetchRef = useRef<Event[]>([])
const [feedProfileBatch, setFeedProfileBatch] = useState<{
profiles: Map<string, TProfile>
pending: Set<string>
version: number
}>(() => ({ profiles: new Map(), pending: new Set(), version: 0 }))
const feedProfileLoadedRef = useRef<Set<string>>(new Set())
const feedProfileBatchGenRef = useRef(0)
const noteFeedProfileContextValue = useMemo<NoteFeedProfileContextValue>(
() => ({
profiles: feedProfileBatch.profiles,
pendingPubkeys: feedProfileBatch.pending,
version: feedProfileBatch.version
}),
[feedProfileBatch]
)
// Memoize subRequests serialization to avoid expensive JSON.stringify on every render // Memoize subRequests serialization to avoid expensive JSON.stringify on every render
const subRequestsKey = useMemo(() => { const subRequestsKey = useMemo(() => {
@ -115,6 +156,12 @@ const NoteList = forwardRef(
const timelineSubscriptionKey = feedSubscriptionKey ?? subRequestsKey const timelineSubscriptionKey = feedSubscriptionKey ?? subRequestsKey
useEffect(() => {
feedProfileBatchGenRef.current += 1
feedProfileLoadedRef.current.clear()
setFeedProfileBatch({ profiles: new Map(), pending: new Set(), version: 0 })
}, [timelineSubscriptionKey, refreshCount])
const subRequestsRef = useRef(subRequests) const subRequestsRef = useRef(subRequests)
subRequestsRef.current = subRequests subRequestsRef.current = subRequests
@ -232,6 +279,91 @@ const NoteList = forwardRef(
}) })
}, [newEvents, shouldHideEvent, showKinds, showKind1OPs, showKind1Replies, showKind1111]) }, [newEvents, shouldHideEvent, showKinds, showKind1OPs, showKind1Replies, showKind1111])
useLayoutEffect(() => {
if (!onSpellFeedFirstPaint || spellFeedInstrumentToken === undefined) return
if (filteredEvents.length === 0) return
const first = filteredEvents[0]
if (!first) return
const fpKey = `${spellFeedInstrumentToken}|${timelineSubscriptionKey ?? ''}`
if (spellFeedFirstPaintLoggedKeyRef.current === fpKey) return
spellFeedFirstPaintLoggedKeyRef.current = fpKey
onSpellFeedFirstPaint({
eventCount: filteredEvents.length,
firstEventId: first.id
})
}, [
onSpellFeedFirstPaint,
spellFeedInstrumentToken,
timelineSubscriptionKey,
filteredEvents.length,
filteredEvents[0]?.id
])
useEffect(() => {
const handle = window.setTimeout(() => {
const gen = feedProfileBatchGenRef.current
const candidates = new Set<string>()
const addPk = (p: string | undefined) => {
if (p && p.length === 64 && /^[0-9a-f]{64}$/.test(p)) {
candidates.add(p)
}
}
filteredEvents.slice(0, 50).forEach((e) => addPk(e.pubkey))
events.slice(0, 120).forEach((e) => addPk(e.pubkey))
events.slice(showCount, showCount + 60).forEach((e) => addPk(e.pubkey))
const need = [...candidates].filter((pk) => !feedProfileLoadedRef.current.has(pk))
if (need.length === 0) return
need.forEach((pk) => feedProfileLoadedRef.current.add(pk))
setFeedProfileBatch((prev) => {
const pending = new Set(prev.pending)
need.forEach((pk) => pending.add(pk))
return { ...prev, pending, version: prev.version + 1 }
})
void (async () => {
for (let i = 0; i < need.length; i += FEED_PROFILE_CHUNK) {
if (gen !== feedProfileBatchGenRef.current) return
const chunk = need.slice(i, i + FEED_PROFILE_CHUNK)
try {
const profiles = await client.fetchProfilesForPubkeys(chunk)
if (gen !== feedProfileBatchGenRef.current) return
setFeedProfileBatch((prev) => {
const next = new Map(prev.profiles)
const pend = new Set(prev.pending)
for (const p of profiles) {
next.set(p.pubkey, p)
pend.delete(p.pubkey)
}
for (const pk of chunk) {
pend.delete(pk)
if (!next.has(pk)) {
next.set(pk, {
pubkey: pk,
npub: pubkeyToNpub(pk) ?? '',
username: formatPubkey(pk)
})
}
}
return { profiles: next, pending: pend, version: prev.version + 1 }
})
} catch {
chunk.forEach((pk) => feedProfileLoadedRef.current.delete(pk))
if (gen !== feedProfileBatchGenRef.current) return
setFeedProfileBatch((prev) => {
const pend = new Set(prev.pending)
chunk.forEach((pk) => pend.delete(pk))
return { ...prev, pending: pend, version: prev.version + 1 }
})
}
}
})()
}, FEED_PROFILE_BATCH_DEBOUNCE_MS)
return () => window.clearTimeout(handle)
}, [filteredEvents, events, showCount])
const scrollToTop = (behavior: ScrollBehavior = 'instant') => { const scrollToTop = (behavior: ScrollBehavior = 'instant') => {
setTimeout(() => { setTimeout(() => {
topRef.current?.scrollIntoView({ behavior, block: 'start' }) topRef.current?.scrollIntoView({ behavior, block: 'start' })
@ -308,13 +440,17 @@ const NoteList = forwardRef(
| undefined | undefined
try { try {
// Add timeout wrapper to prevent subscribeTimeline from hanging indefinitely // Opening subs + IndexedDB timeline hydration can exceed 2s on spell feeds with many relays; a short race
// rejects, the catch closes the late subscription, and the list stays empty after refresh.
const subscribeSetupRaceMs = 5000
const timeoutPromise = new Promise<never>((_, reject) => { const timeoutPromise = new Promise<never>((_, reject) => {
setTimeout(() => { setTimeout(() => {
reject(new Error('subscribeTimeline timeout after 5 seconds')) reject(new Error(`subscribeTimeline timeout after ${subscribeSetupRaceMs}ms`))
}, 5000) // 5 second timeout }, subscribeSetupRaceMs)
}) })
const firstRelayGraceMs = spellFetchTimeoutMs ?? FIRST_RELAY_RESULT_GRACE_MS
timelineSubscribePromise = client.subscribeTimeline( timelineSubscribePromise = client.subscribeTimeline(
mappedSubRequests, mappedSubRequests,
{ {
@ -325,51 +461,32 @@ const NoteList = forwardRef(
// Do not wait for full EOSE across many relays — otherwise loading/skeleton stays up for 10–30s+ // Do not wait for full EOSE across many relays — otherwise loading/skeleton stays up for 10–30s+
setLoading(false) setLoading(false)
// CRITICAL: Prefetch profiles for initial events (optimized for faster initial load) // Defer profile + embed prefetch: streaming timelines fire onEvents often; starting
// Only prefetch for first 50 events to reduce initial load time // fetchProfilesForPubkeys on every update spams relays (multi-second each) and cancels hooks.
// Additional prefetching happens on scroll via the useEffect hooks lastEventsForTimelinePrefetchRef.current = events
const initialPubkeys = Array.from( if (timelinePrefetchDebounceRef.current) {
new Set(events.slice(0, 50).map((ev: Event) => ev.pubkey).filter((p: string) => p?.length === 64)) clearTimeout(timelinePrefetchDebounceRef.current)
)
if (initialPubkeys.length > 0) {
// Filter out already prefetched pubkeys
const pubkeysToFetch = initialPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (pubkeysToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch in background (non-blocking) with delay to not block initial render
setTimeout(() => {
if (!effectActive) return
client.fetchProfilesForPubkeys(pubkeysToFetch).catch(() => {
// On error, remove from prefetched set so we can retry later
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
}, 100)
}
} }
timelinePrefetchDebounceRef.current = setTimeout(() => {
// CRITICAL: Prefetch embedded events for initial events (reduced scope) timelinePrefetchDebounceRef.current = null
// Only prefetch for first 50 events to reduce initial load time if (!effectActive) return
const initialEmbeddedEventIds = new Set<string>() const evs = lastEventsForTimelinePrefetchRef.current
events.slice(0, 50).forEach((ev: Event) => { if (evs.length === 0) return
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id: string) => initialEmbeddedEventIds.add(id)) const initialEmbeddedEventIds = new Set<string>()
}) evs.slice(0, 50).forEach((ev: Event) => {
const eventIdsToFetch = Array.from(initialEmbeddedEventIds).filter( extractEmbeddedEventIds(ev).forEach((id: string) => initialEmbeddedEventIds.add(id))
(id) => !prefetchedEventIdsRef.current.has(id) })
) const eventIdsToFetch = Array.from(initialEmbeddedEventIds).filter(
if (eventIdsToFetch.length > 0) { (id) => !prefetchedEventIdsRef.current.has(id)
// Mark as prefetched immediately to prevent duplicate requests )
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id)) if (eventIdsToFetch.length > 0) {
// Batch fetch embedded events in background (non-blocking) with delay eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
setTimeout(() => {
if (!effectActive) return
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => { Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
// On error, remove from prefetched set so we can retry later
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id)) eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
}) })
}, 200) }
} }, 450)
} else if (eosed) { } else if (eosed) {
// No events received but EOSE - set empty events array and stop loading // No events received but EOSE - set empty events array and stop loading
setEvents([]) setEvents([])
@ -422,7 +539,8 @@ const NoteList = forwardRef(
startLogin, startLogin,
needSort: !areAlgoRelays, needSort: !areAlgoRelays,
useCache: useTimelineCacheBootstrap, useCache: useTimelineCacheBootstrap,
omitDefaultSinceWhenUseCache: useTimelineCacheBootstrap omitDefaultSinceWhenUseCache: useTimelineCacheBootstrap,
firstRelayResultGraceMs: firstRelayGraceMs
} }
) )
@ -452,6 +570,10 @@ const NoteList = forwardRef(
const promise = init() const promise = init()
return () => { return () => {
effectActive = false effectActive = false
if (timelinePrefetchDebounceRef.current) {
clearTimeout(timelinePrefetchDebounceRef.current)
timelinePrefetchDebounceRef.current = null
}
promise.then((closer) => closer?.()) promise.then((closer) => closer?.())
} }
}, [ }, [
@ -463,15 +585,25 @@ const NoteList = forwardRef(
showKind1111, showKind1111,
useFilterAsIs, useFilterAsIs,
areAlgoRelays, areAlgoRelays,
useTimelineCacheBootstrap useTimelineCacheBootstrap,
spellFetchTimeoutMs
]) ])
useEffect(() => {
eventsRef.current = events
}, [events])
useEffect(() => { useEffect(() => {
if (!subRequestsRef.current.length) return if (!subRequestsRef.current.length) return
let cancelled = false let cancelled = false
const timer = window.setTimeout(() => { const timer = window.setTimeout(() => {
if (cancelled) return if (cancelled) return
setLoading((prev) => (prev ? false : prev)) setLoading((prev) => (prev ? false : prev))
// hasMore defaults true; if timeline never sends eosed (slow/hung relays), we would keep a
// bottom skeleton forever while loading is false — unblock empty state / reload.
if (eventsRef.current.length === 0) {
setHasMore(false)
}
}, 15_000) }, 15_000)
return () => { return () => {
cancelled = true cancelled = true
@ -480,16 +612,11 @@ const NoteList = forwardRef(
}, [timelineSubscriptionKey, refreshCount]) }, [timelineSubscriptionKey, refreshCount])
// Use refs to avoid dependency issues and ensure latest values in async callbacks // Use refs to avoid dependency issues and ensure latest values in async callbacks
const eventsRef = useRef(events)
const showCountRef = useRef(showCount) const showCountRef = useRef(showCount)
const loadingRef = useRef(loading) const loadingRef = useRef(loading)
const hasMoreRef = useRef(hasMore) const hasMoreRef = useRef(hasMore)
const timelineKeyRef = useRef(timelineKey) const timelineKeyRef = useRef(timelineKey)
useEffect(() => {
eventsRef.current = events
}, [events])
useEffect(() => { useEffect(() => {
showCountRef.current = showCount showCountRef.current = showCount
}, [showCount]) }, [showCount])
@ -639,23 +766,6 @@ const NoteList = forwardRef(
} }
schedulePrefetch(() => { schedulePrefetch(() => {
const newPubkeys = Array.from(
new Set(newEvents.map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
if (newPubkeys.length > 0) {
// Filter out already prefetched pubkeys
const pubkeysToFetch = newPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (pubkeysToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch in background (non-blocking)
client.fetchProfilesForPubkeys(pubkeysToFetch).catch(() => {
// On error, remove from prefetched set so we can retry later
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
}
}
// CRITICAL: Prefetch embedded events for newly loaded events (throttled) // CRITICAL: Prefetch embedded events for newly loaded events (throttled)
const newEmbeddedEventIds = new Set<string>() const newEmbeddedEventIds = new Set<string>()
// Only prefetch for first 30 events to reduce load // Only prefetch for first 30 events to reduce load
@ -719,12 +829,6 @@ const NoteList = forwardRef(
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []) }, [])
// CRITICAL: Prefetch profiles for visible authors + upcoming events in one batched request
// This prevents browser crashes during rapid scrolling by pre-loading profiles before they're needed
const visiblePubkeysRef = useRef<Set<string>>(new Set())
const prefetchedPubkeysRef = useRef<Set<string>>(new Set())
const prefetchTimeoutRef = useRef<NodeJS.Timeout | null>(null)
// CRITICAL: Prefetch embedded events (referenced in e tags, a tags, and content) // CRITICAL: Prefetch embedded events (referenced in e tags, a tags, and content)
// This ensures embedded events are ready before user scrolls to them // This ensures embedded events are ready before user scrolls to them
const prefetchedEventIdsRef = useRef<Set<string>>(new Set()) const prefetchedEventIdsRef = useRef<Set<string>>(new Set())
@ -767,76 +871,6 @@ const NoteList = forwardRef(
return Array.from(new Set(eventIds)) // Deduplicate return Array.from(new Set(eventIds)) // Deduplicate
}, []) }, [])
useEffect(() => {
// Throttle profile prefetching to reduce frequency during rapid scrolling
// Clear any existing timeout
if (prefetchTimeoutRef.current) {
clearTimeout(prefetchTimeoutRef.current)
}
// Debounce profile prefetching by 300ms to reduce frequency during rapid scrolling
prefetchTimeoutRef.current = setTimeout(() => {
// Prefetch profiles for:
// 1. Currently visible events (first 40, reduced to reduce stuttering)
// 2. Upcoming events that will be visible when scrolling (next 80, reduced to reduce load)
// This ensures profiles are ready before they're needed during rapid scrolling
const visiblePubkeys = Array.from(
new Set(filteredEvents.slice(0, 40).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
const upcomingPubkeys = Array.from(
new Set(events.slice(0, 80).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
// Combine visible and upcoming, but prioritize visible ones
const allPubkeys = Array.from(new Set([...visiblePubkeys, ...upcomingPubkeys]))
if (allPubkeys.length === 0) return
// Check if we've already prefetched these exact pubkeys
const prev = visiblePubkeysRef.current
const same = allPubkeys.length === prev.size && allPubkeys.every((p) => prev.has(p))
if (same) return
// Find pubkeys that haven't been prefetched yet
const newPubkeys = allPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (newPubkeys.length === 0) {
// All pubkeys already prefetched, just update the ref
visiblePubkeysRef.current = new Set(allPubkeys)
return
}
// Update refs
visiblePubkeysRef.current = new Set(allPubkeys)
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch profiles for new pubkeys (IndexedDB + network in one request)
// This is the key optimization: batch processing prevents individual fetches during scrolling
// Use requestIdleCallback if available to avoid blocking scroll
const scheduleFetch = (callback: () => void) => {
if (typeof requestIdleCallback !== 'undefined') {
requestIdleCallback(callback, { timeout: 500 })
} else {
setTimeout(callback, 0)
}
}
scheduleFetch(() => {
client.fetchProfilesForPubkeys(newPubkeys).catch(() => {
// On error, remove from prefetched set so we can retry later
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
})
}, 300) // Debounce by 300ms to reduce frequency during rapid scrolling
return () => {
if (prefetchTimeoutRef.current) {
clearTimeout(prefetchTimeoutRef.current)
prefetchTimeoutRef.current = null
}
}
}, [filteredEvents, events, extractEmbeddedEventIds])
// CRITICAL: Prefetch embedded events for visible events // CRITICAL: Prefetch embedded events for visible events
useEffect(() => { useEffect(() => {
// Throttle embedded event prefetching to reduce frequency during rapid scrolling // Throttle embedded event prefetching to reduce frequency during rapid scrolling
@ -915,40 +949,8 @@ const NoteList = forwardRef(
clearTimeout(prefetchNewEventsTimeoutRef.current) clearTimeout(prefetchNewEventsTimeoutRef.current)
} }
// Debounce profile prefetching for newly loaded events (optimized to reduce stuttering) // Debounce embedded-event prefetch for newly revealed rows (profiles use NoteFeed batcher above)
prefetchNewEventsTimeoutRef.current = setTimeout(() => { prefetchNewEventsTimeoutRef.current = setTimeout(() => {
// When we have more events loaded, prefetch profiles for the newly loaded ones
// Reduced to 50 to reduce batch size and prevent stuttering
const newlyLoadedPubkeys = Array.from(
new Set(events.slice(showCount, showCount + 50).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
if (newlyLoadedPubkeys.length > 0) {
// Filter out already prefetched pubkeys
const newPubkeys = newlyLoadedPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (newPubkeys.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch in background (non-blocking) using requestIdleCallback
const scheduleFetch = (callback: () => void) => {
if (typeof requestIdleCallback !== 'undefined') {
requestIdleCallback(callback, { timeout: 500 })
} else {
setTimeout(callback, 0)
}
}
scheduleFetch(() => {
client.fetchProfilesForPubkeys(newPubkeys).catch(() => {
// On error, remove from prefetched set so we can retry later
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
})
}
}
// CRITICAL: Prefetch embedded events for newly loaded events (reduced scope) // CRITICAL: Prefetch embedded events for newly loaded events (reduced scope)
const newlyLoadedEmbeddedEventIds = new Set<string>() const newlyLoadedEmbeddedEventIds = new Set<string>()
events.slice(showCount, showCount + 50).forEach((ev) => { events.slice(showCount, showCount + 50).forEach((ev) => {
@ -1005,11 +1007,15 @@ const NoteList = forwardRef(
filterMutedNotes={filterMutedNotes} filterMutedNotes={filterMutedNotes}
/> />
))} ))}
{hasMore || loading ? ( {events.length === 0 && loading ? (
<div ref={bottomRef}>
<NoteCardLoadingSkeleton />
</div>
) : events.length > 0 && (hasMore || loading) ? (
<div ref={bottomRef}> <div ref={bottomRef}>
<NoteCardLoadingSkeleton /> <NoteCardLoadingSkeleton />
</div> </div>
) : events.length ? ( ) : events.length > 0 ? (
<div className="text-center text-sm text-muted-foreground mt-2">{t('no more notes')}</div> <div className="text-center text-sm text-muted-foreground mt-2">{t('no more notes')}</div>
) : ( ) : (
<div className="flex justify-center w-full mt-2"> <div className="flex justify-center w-full mt-2">
@ -1024,19 +1030,21 @@ const NoteList = forwardRef(
return ( return (
<div> <div>
<div ref={topRef} className="scroll-mt-[calc(6rem+1px)]" /> <div ref={topRef} className="scroll-mt-[calc(6rem+1px)]" />
{supportTouch ? ( <NoteFeedProfileContext.Provider value={noteFeedProfileContextValue}>
<PullToRefresh {supportTouch ? (
onRefresh={async () => { <PullToRefresh
refresh() onRefresh={async () => {
await new Promise((resolve) => setTimeout(resolve, 1000)) refresh()
}} await new Promise((resolve) => setTimeout(resolve, 1000))
pullingContent="" }}
> pullingContent=""
{list} >
</PullToRefresh> {list}
) : ( </PullToRefresh>
list ) : (
)} list
)}
</NoteFeedProfileContext.Provider>
<div className="h-40" /> <div className="h-40" />
{filteredNewEvents.length > 0 && ( {filteredNewEvents.length > 0 && (
<NewNotesButton newEvents={filteredNewEvents} onClick={showNewEvents} /> <NewNotesButton newEvents={filteredNewEvents} onClick={showNewEvents} />

3
src/constants.ts

@ -17,6 +17,9 @@ export const DEFAULT_FAVORITE_RELAYS = [
/** Multi-relay queries and timeline initial REQ: after the first event, wait this long then close (query) or finalize EOSE (live feed) while keeping the subscription open for new events. */ /** Multi-relay queries and timeline initial REQ: after the first event, wait this long then close (query) or finalize EOSE (live feed) while keeping the subscription open for new events. */
export const FIRST_RELAY_RESULT_GRACE_MS = 2000 export const FIRST_RELAY_RESULT_GRACE_MS = 2000
/** Spells page feeds: shorter grace so multi-relay spell REQs finalize initial load sooner (still keeps subscription open for `onNew`). */
export const SPELL_FEED_FIRST_RELAY_GRACE_MS = 450
/** /**
* Implicit query feed grace ({@link FIRST_RELAY_RESULT_GRACE_MS}) applies only when the largest `limit` among * Implicit query feed grace ({@link FIRST_RELAY_RESULT_GRACE_MS}) applies only when the largest `limit` among
* filters is at least this value. Omitting `limit` counts as 0 (no implicit grace). * filters is at least this value. Omitting `limit` counts as 0 (no implicit grace).

25
src/hooks/useFetchProfile.tsx

@ -2,6 +2,7 @@ import { PROFILE_FETCH_PROMISE_TIMEOUT_MS } from '@/constants'
import { getProfileFromEvent } from '@/lib/event-metadata' import { getProfileFromEvent } from '@/lib/event-metadata'
import { userIdToPubkey } from '@/lib/pubkey' import { userIdToPubkey } from '@/lib/pubkey'
import { useNostr } from '@/providers/NostrProvider' import { useNostr } from '@/providers/NostrProvider'
import { useNoteFeedProfileContext } from '@/providers/NoteFeedProfileContext'
import { replaceableEventService } from '@/services/client.service' import { replaceableEventService } from '@/services/client.service'
import { TProfile } from '@/types' import { TProfile } from '@/types'
import { useEffect, useState, useRef, useCallback } from 'react' import { useEffect, useState, useRef, useCallback } from 'react'
@ -24,6 +25,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
// }) // })
const { profile: currentAccountProfile } = useNostr() const { profile: currentAccountProfile } = useNostr()
const noteFeed = useNoteFeedProfileContext()
const [isFetching, setIsFetching] = useState(true) const [isFetching, setIsFetching] = useState(true)
const [error, setError] = useState<Error | null>(null) const [error, setError] = useState<Error | null>(null)
const [profile, setProfile] = useState<TProfile | null>(null) const [profile, setProfile] = useState<TProfile | null>(null)
@ -285,6 +287,27 @@ export function useFetchProfile(id?: string, skipCache = false) {
// Extract pubkey early to check if id has changed // Extract pubkey early to check if id has changed
const extractedPubkey = userIdToPubkey(id) const extractedPubkey = userIdToPubkey(id)
// Note feeds: profiles are batch-fetched in NoteList — skip per-row relay storms while pending
if (extractedPubkey && noteFeed && !skipCache) {
const fromBatch = noteFeed.profiles.get(extractedPubkey)
if (fromBatch) {
setProfile(fromBatch)
setPubkey(extractedPubkey)
setIsFetching(false)
setError(null)
processingPubkeyRef.current = extractedPubkey
initializedPubkeysRef.current.add(extractedPubkey)
effectRunCountRef.current.delete(extractedPubkey)
return
}
if (noteFeed.pendingPubkeys.has(extractedPubkey)) {
setPubkey(extractedPubkey)
setIsFetching(false)
setError(null)
return
}
}
// CRITICAL: Early exit if already processing this exact pubkey - prevents infinite loops // CRITICAL: Early exit if already processing this exact pubkey - prevents infinite loops
// This check must happen FIRST, before any other logic // This check must happen FIRST, before any other logic
// Set processingPubkeyRef IMMEDIATELY after extraction to prevent race conditions // Set processingPubkeyRef IMMEDIATELY after extraction to prevent race conditions
@ -568,7 +591,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
} }
} }
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, [id, skipCache]) // checkProfile is memoized and stable, no need to include it }, [id, skipCache, noteFeed?.version]) // checkProfile is memoized; noteFeed.version hydrates batch profiles
useEffect(() => { useEffect(() => {
// CRITICAL: Only use currentAccountProfile if it matches the pubkey we're looking for // CRITICAL: Only use currentAccountProfile if it matches the pubkey we're looking for

115
src/pages/primary/SpellsPage/fauxSpellFeeds.ts

@ -6,7 +6,8 @@ import {
ExtendedKind, ExtendedKind,
FAST_READ_RELAY_URLS, FAST_READ_RELAY_URLS,
FAST_WRITE_RELAY_URLS, FAST_WRITE_RELAY_URLS,
PROFILE_FEED_KINDS PROFILE_FEED_KINDS,
READ_ONLY_RELAY_URLS
} from '@/constants' } from '@/constants'
import { import {
extractHashtagsFromContent, extractHashtagsFromContent,
@ -28,6 +29,44 @@ const MAX_BOOKMARK_IDS = 250
* subscription slots; cap keeps first paint fast. Full coverage remains on /discussions. * subscription slots; cap keeps first paint fast. Full coverage remains on /discussions.
*/ */
const DISCUSSION_FAUX_SPELL_MAX_RELAYS = 32 const DISCUSSION_FAUX_SPELL_MAX_RELAYS = 32
/** Without caps, a long NIP-66 read list consumes the whole 32 slots and fast public relays never get a REQ — discussions stay empty while notifications still work (they blend fast reads). */
const DISCUSSION_SPELL_READ_CAP = 10
const DISCUSSION_SPELL_WRITE_CAP = 8
const DISCUSSION_SPELL_FAV_CAP = 8
function dedupe(urls: string[]): string[] {
const seen = new Set<string>()
const out: string[] = []
for (const u of urls) {
const k = normalizeUrl(u) || u
if (!k || seen.has(k)) continue
seen.add(k)
out.push(k)
}
return out
}
/**
* Append {@link READ_ONLY_RELAY_URLS} (e.g. aggr) after the curated set so every faux REQ includes them unless blocked.
*/
export function appendCuratedReadOnlyRelays(curated: string[], blockedRelays: string[]): string[] {
const blocked = new Set(blockedRelays.map((b) => normalizeUrl(b) || b))
const seen = new Set<string>()
const out: string[] = []
for (const u of curated) {
const k = normalizeUrl(u) || u
if (!k || seen.has(k)) continue
seen.add(k)
out.push(k)
}
for (const u of READ_ONLY_RELAY_URLS) {
const k = normalizeUrl(u) || u
if (!k || blocked.has(k) || seen.has(k)) continue
seen.add(k)
out.push(k)
}
return out
}
export const MEDIA_SPELL_KINDS = [ export const MEDIA_SPELL_KINDS = [
ExtendedKind.PICTURE, ExtendedKind.PICTURE,
@ -136,17 +175,21 @@ export function fauxFavoriteRelayUrls(favoriteRelays: string[], blockedRelays: s
return k && !blocked.has(k) return k && !blocked.has(k)
}) })
const base = visible.length > 0 ? visible : DEFAULT_FAVORITE_RELAYS const base = visible.length > 0 ? visible : DEFAULT_FAVORITE_RELAYS
return dedupe(base.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[]) const curated = dedupe(base.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[])
return appendCuratedReadOnlyRelays(curated, blockedRelays)
} }
/** /**
* Notifications / bookmarks faux spells: prefer inbox (then favorites), but **always** merge FAST_READ. * Notifications / bookmarks faux spells: **fast public relays first**, then inbox/favorites.
* Using only the first N inbox relays meant one dead relay (e.g. offline personal relay) could dominate * `FAST_READ_RELAY_URLS` has 7 entries; the old cap of 6 never subscribed to `wss://aggr.nostr.land`
* connection/EOSE latency while public relays were never asked skeletons until timeout. * (last in the list) a major `#p` indexer so mentions could take tens of seconds or look empty.
* Fast-write relays catch mentions replicated to outboxes (damus/primal/nos.lol) with little overlap.
*/ */
const NOTIFICATION_PRIMARY_MAX = 6 const NOTIFICATION_PRIMARY_MAX = 4
const NOTIFICATION_BLEND_FAST_MAX = 6 /** Must be ≥ FAST_READ length so every default fast read relay is eligible (currently 7). */
const NOTIFICATION_RELAY_CAP = 12 const NOTIFICATION_FAST_READ_MAX = 10
const NOTIFICATION_FAST_WRITE_MAX = 4
const NOTIFICATION_RELAY_CAP = 14
function relayUrlsUpToUnblocked(urls: string[], blocked: Set<string>, max: number): string[] { function relayUrlsUpToUnblocked(urls: string[], blocked: Set<string>, max: number): string[] {
const seen = new Set<string>() const seen = new Set<string>()
@ -198,22 +241,18 @@ export function notificationRelayUrls(
: favoriteRelays.length > 0 : favoriteRelays.length > 0
? relayUrlsUpToUnblocked(favSorted, blocked, NOTIFICATION_PRIMARY_MAX) ? relayUrlsUpToUnblocked(favSorted, blocked, NOTIFICATION_PRIMARY_MAX)
: [] : []
const fromFast = relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_BLEND_FAST_MAX) const fromFastRead = relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_FAST_READ_MAX)
const merged = mergeRelayListsUnique([primary, fromFast], blocked, NOTIFICATION_RELAY_CAP) const fromFastWrite = relayUrlsUpToUnblocked(FAST_WRITE_RELAY_URLS, blocked, NOTIFICATION_FAST_WRITE_MAX)
if (merged.length > 0) return merged const merged = mergeRelayListsUnique(
return relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_RELAY_CAP) [fromFastRead, fromFastWrite, primary],
} blocked,
NOTIFICATION_RELAY_CAP
function dedupe(urls: string[]): string[] { )
const seen = new Set<string>() if (merged.length > 0) return appendCuratedReadOnlyRelays(merged, blockedRelays)
const out: string[] = [] return appendCuratedReadOnlyRelays(
for (const u of urls) { relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_RELAY_CAP),
const k = normalizeUrl(u) || u blockedRelays
if (!k || seen.has(k)) continue )
seen.add(k)
out.push(k)
}
return out
} }
/** Notifications spell: same kind set as profile-style feeds, restricted to `#p` = you on the relay. */ /** Notifications spell: same kind set as profile-style feeds, restricted to `#p` = you on the relay. */
@ -249,16 +288,19 @@ export function discussionRelayUrls(
const fav = tier(favoriteRelays) const fav = tier(favoriteRelays)
const fastR = tier([...FAST_READ_RELAY_URLS]) const fastR = tier([...FAST_READ_RELAY_URLS])
const fastW = tier([...FAST_WRITE_RELAY_URLS]) const fastW = tier([...FAST_WRITE_RELAY_URLS])
const merged = [...read, ...write, ...fav, ...fastR, ...fastW]
const seen = new Set<string>() const curated = mergeRelayListsUnique(
const out: string[] = [] [
for (const k of merged) { read.slice(0, DISCUSSION_SPELL_READ_CAP),
if (seen.has(k)) continue write.slice(0, DISCUSSION_SPELL_WRITE_CAP),
seen.add(k) fav.slice(0, DISCUSSION_SPELL_FAV_CAP),
out.push(k) fastR,
if (out.length >= DISCUSSION_FAUX_SPELL_MAX_RELAYS) break fastW
} ],
return out blocked,
DISCUSSION_FAUX_SPELL_MAX_RELAYS
)
return appendCuratedReadOnlyRelays(curated, blockedRelays)
} }
export function buildDiscussionFilter(): Filter { export function buildDiscussionFilter(): Filter {
@ -283,8 +325,9 @@ const FOLLOW_PACK_LIMIT = 100
/** Kind 39089 follow/starter packs from fast read relays (same scope as the old Follow Packs page). */ /** Kind 39089 follow/starter packs from fast read relays (same scope as the old Follow Packs page). */
export function buildFollowPacksSubRequests(): TFeedSubRequest[] { export function buildFollowPacksSubRequests(): TFeedSubRequest[] {
const urls = FAST_READ_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[] const curated = FAST_READ_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[]
if (!urls.length) return [] if (!curated.length) return []
const urls = appendCuratedReadOnlyRelays(curated, [])
return [ return [
{ {
urls, urls,

234
src/pages/primary/SpellsPage/index.tsx

@ -1,5 +1,5 @@
import HideUntrustedContentButton from '@/components/HideUntrustedContentButton' import HideUntrustedContentButton from '@/components/HideUntrustedContentButton'
import NoteList from '@/components/NoteList' import NoteList, { type TNoteListRef } from '@/components/NoteList'
import { Button } from '@/components/ui/button' import { Button } from '@/components/ui/button'
import { import {
Dialog, Dialog,
@ -32,7 +32,13 @@ import { useUserTrust } from '@/providers/UserTrustProvider'
import client from '@/services/client.service' import client from '@/services/client.service'
import indexedDb from '@/services/indexed-db.service' import indexedDb from '@/services/indexed-db.service'
import storage from '@/services/local-storage.service' import storage from '@/services/local-storage.service'
import { ExtendedKind, FAUX_SPELL_ORDER, PROFILE_FEED_KINDS } from '@/constants' import {
ExtendedKind,
FAUX_SPELL_ORDER,
FIRST_RELAY_RESULT_GRACE_MS,
PROFILE_FEED_KINDS,
SPELL_FEED_FIRST_RELAY_GRACE_MS
} from '@/constants'
import { isUserInEventMentions } from '@/lib/event' import { isUserInEventMentions } from '@/lib/event'
import { formatPubkey } from '@/lib/pubkey' import { formatPubkey } from '@/lib/pubkey'
import { computeSpellSubRequestsIdentityKey } from '@/lib/spell-feed-request-identity' import { computeSpellSubRequestsIdentityKey } from '@/lib/spell-feed-request-identity'
@ -45,6 +51,7 @@ import {
isSpellEvent, isSpellEvent,
SPELL_CATALOG_SYNC_LIMIT, SPELL_CATALOG_SYNC_LIMIT,
SPELL_CATALOG_SYNC_LIMIT_WITH_FOLLOWS, SPELL_CATALOG_SYNC_LIMIT_WITH_FOLLOWS,
SPELL_CATALOG_SYNC_TIMEOUT_MS,
spellEventToFilter spellEventToFilter
} from '@/services/spell.service' } from '@/services/spell.service'
import { TFeedSubRequest } from '@/types' import { TFeedSubRequest } from '@/types'
@ -63,6 +70,7 @@ import {
MoreVertical, MoreVertical,
Pencil, Pencil,
Plus, Plus,
RefreshCw,
Star, Star,
Trash2, Trash2,
Users, Users,
@ -74,6 +82,7 @@ import { forwardRef, useCallback, useEffect, useMemo, useRef, useState } from 'r
import { useTranslation } from 'react-i18next' import { useTranslation } from 'react-i18next'
import CreateSpellDialog from './CreateSpellDialog' import CreateSpellDialog from './CreateSpellDialog'
import { import {
appendCuratedReadOnlyRelays,
buildBookmarksSubRequests, buildBookmarksSubRequests,
buildCalendarSpellFilter, buildCalendarSpellFilter,
buildDiscussionFilter, buildDiscussionFilter,
@ -263,42 +272,56 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
/** True while fetching kind 777 authored by the user from write relays into IndexedDB */ /** True while fetching kind 777 authored by the user from write relays into IndexedDB */
const [spellsCatalogSyncing, setSpellsCatalogSyncing] = useState(false) const [spellsCatalogSyncing, setSpellsCatalogSyncing] = useState(false)
const spellCatalogCloserRef = useRef<(() => void) | null>(null) const spellCatalogCloserRef = useRef<(() => void) | null>(null)
/** Bumps spell catalog relay re-sync when the user taps refresh in the titlebar. */
const [spellCatalogManualRefreshKey, setSpellCatalogManualRefreshKey] = useState(0)
const spellFeedListRef = useRef<TNoteListRef>(null)
const [titlebarRefreshSpin, setTitlebarRefreshSpin] = useState(false)
const [spellPickerOpen, setSpellPickerOpen] = useState(false) const [spellPickerOpen, setSpellPickerOpen] = useState(false)
/** Monotonic token + wall time for spell-feed latency instrumentation (picker → first rows). */
const spellFeedInstrTokenRef = useRef(0)
const spellFeedInstrT0Ref = useRef(0)
const spellFeedInstrLabelRef = useRef('')
const [spellFeedInstrumentToken, setSpellFeedInstrumentToken] = useState(0)
const logSpellFeedPickerSelection = useCallback((label: string, extra?: Record<string, unknown>) => {
spellFeedInstrT0Ref.current = performance.now()
spellFeedInstrLabelRef.current = label
spellFeedInstrTokenRef.current += 1
const instrumentToken = spellFeedInstrTokenRef.current
setSpellFeedInstrumentToken(instrumentToken)
logger.info('[SpellsPage] Spell feed — picker selection', {
label,
instrumentToken,
...extra
})
}, [])
const urlFauxSpellInstrumentedRef = useRef<string | null>(null)
/** Set when picker calls `navigatePrimary(..., { spell })` so URL effect does not log/bump token again. */
const fauxSpellUrlSyncFromPickerRef = useRef<string | null>(null)
useEffect(() => { useEffect(() => {
if (spellProp && isFauxSpellName(spellProp)) { if (spellProp && isFauxSpellName(spellProp)) {
if (fauxSpellUrlSyncFromPickerRef.current === spellProp) {
fauxSpellUrlSyncFromPickerRef.current = null
urlFauxSpellInstrumentedRef.current = spellProp
setSelectedFauxSpell(spellProp)
setSelectedSpell(null)
return
}
if (urlFauxSpellInstrumentedRef.current === spellProp) return
urlFauxSpellInstrumentedRef.current = spellProp
logSpellFeedPickerSelection(`faux:${spellProp} (from URL)`, { fauxSpell: spellProp, fromUrl: true })
setSelectedFauxSpell(spellProp) setSelectedFauxSpell(spellProp)
setSelectedSpell(null) setSelectedSpell(null)
} else {
urlFauxSpellInstrumentedRef.current = null
} }
}, [spellProp]) }, [spellProp, logSpellFeedPickerSelection])
const [followingSubRequests, setFollowingSubRequests] = useState<TFeedSubRequest[]>([]) const [followingSubRequests, setFollowingSubRequests] = useState<TFeedSubRequest[]>([])
const [followingFeedLoading, setFollowingFeedLoading] = useState(false) const [followingFeedLoading, setFollowingFeedLoading] = useState(false)
useEffect(() => {
if (selectedFauxSpell !== 'following' || !pubkey) {
setFollowingSubRequests([])
setFollowingFeedLoading(false)
return
}
let cancelled = false
setFollowingFeedLoading(true)
void (async () => {
try {
const followings = await client.fetchFollowings(pubkey)
const req = await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey)
if (!cancelled) setFollowingSubRequests(req)
} catch {
if (!cancelled) setFollowingSubRequests([])
} finally {
if (!cancelled) setFollowingFeedLoading(false)
}
})()
return () => {
cancelled = true
}
}, [selectedFauxSpell, pubkey])
const loadSpells = useCallback(async () => { const loadSpells = useCallback(async () => {
const [events, ids] = await Promise.all([ const [events, ids] = await Promise.all([
indexedDb.getSpellEvents(), indexedDb.getSpellEvents(),
@ -308,6 +331,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
setFavoriteIds(new Set(ids)) setFavoriteIds(new Set(ids))
}, []) }, [])
const refreshSpellsFeedAndCatalog = useCallback(() => {
setTitlebarRefreshSpin(true)
window.setTimeout(() => setTitlebarRefreshSpin(false), 600)
void loadSpells()
if (pubkey) setSpellCatalogManualRefreshKey((k) => k + 1)
spellFeedListRef.current?.refresh()
}, [loadSpells, pubkey])
/** /**
* Fingerprint by value `relayList` from NostrProvider often gets a new object ref each render. * Fingerprint by value `relayList` from NostrProvider often gets a new object ref each render.
* Using `[relayList]` in useMemo deps was invalidating every tick new subRequests browse-relay * Using `[relayList]` in useMemo deps was invalidating every tick new subRequests browse-relay
@ -372,7 +403,16 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
spellCatalogCloserRef.current?.() spellCatalogCloserRef.current?.()
spellCatalogCloserRef.current = null spellCatalogCloserRef.current = null
setSpellsCatalogSyncing(false) setSpellsCatalogSyncing(false)
}, 40_000) }, SPELL_CATALOG_SYNC_TIMEOUT_MS)
let afterFirstBatchTimer: ReturnType<typeof setTimeout> | null = null
let catalogSyncDone = false
const clearAfterFirstBatchTimer = () => {
if (afterFirstBatchTimer != null) {
clearTimeout(afterFirstBatchTimer)
afterFirstBatchTimer = null
}
}
void (async () => { void (async () => {
try { try {
@ -394,7 +434,28 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
} }
} }
if (wrote) scheduleLoadSpells() if (wrote) scheduleLoadSpells()
if (wrote && afterFirstBatchTimer == null) {
afterFirstBatchTimer = setTimeout(() => {
afterFirstBatchTimer = null
if (cancelled || catalogSyncDone) return
catalogSyncDone = true
window.clearTimeout(syncTimeout)
if (loadSpellsDebounce != null) {
clearTimeout(loadSpellsDebounce)
loadSpellsDebounce = null
}
void (async () => {
if (!cancelled) await loadSpells()
if (!cancelled) setSpellsCatalogSyncing(false)
})()
closer()
spellCatalogCloserRef.current = null
}, FIRST_RELAY_RESULT_GRACE_MS)
}
if (eosed) { if (eosed) {
clearAfterFirstBatchTimer()
if (cancelled || catalogSyncDone) return
catalogSyncDone = true
window.clearTimeout(syncTimeout) window.clearTimeout(syncTimeout)
if (loadSpellsDebounce != null) { if (loadSpellsDebounce != null) {
clearTimeout(loadSpellsDebounce) clearTimeout(loadSpellsDebounce)
@ -410,7 +471,8 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
}, },
{ {
useCache: true, useCache: true,
omitDefaultSinceWhenUseCache: true omitDefaultSinceWhenUseCache: true,
firstRelayResultGraceMs: FIRST_RELAY_RESULT_GRACE_MS
} }
) )
if (cancelled) { if (cancelled) {
@ -427,13 +489,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
return () => { return () => {
cancelled = true cancelled = true
clearAfterFirstBatchTimer()
if (loadSpellsDebounce != null) clearTimeout(loadSpellsDebounce) if (loadSpellsDebounce != null) clearTimeout(loadSpellsDebounce)
window.clearTimeout(syncTimeout) window.clearTimeout(syncTimeout)
spellCatalogCloserRef.current?.() spellCatalogCloserRef.current?.()
spellCatalogCloserRef.current = null spellCatalogCloserRef.current = null
setSpellsCatalogSyncing(false) setSpellsCatalogSyncing(false)
} }
}, [pubkey, relayMailboxStableKey, loadSpells, contactsSyncKey]) }, [pubkey, relayMailboxStableKey, loadSpells, contactsSyncKey, spellCatalogManualRefreshKey])
useEffect(() => { useEffect(() => {
if (!pubkey) { if (!pubkey) {
@ -451,6 +514,34 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
[...blockedRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b)) [...blockedRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
) )
useEffect(() => {
if (selectedFauxSpell !== 'following' || !pubkey) {
setFollowingSubRequests([])
setFollowingFeedLoading(false)
return
}
let cancelled = false
setFollowingFeedLoading(true)
void (async () => {
try {
const followings = await client.fetchFollowings(pubkey)
const req = await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey)
const withReadOnly = req.map((r) => ({
...r,
urls: appendCuratedReadOnlyRelays(r.urls, blockedRelays)
}))
if (!cancelled) setFollowingSubRequests(withReadOnly)
} catch {
if (!cancelled) setFollowingSubRequests([])
} finally {
if (!cancelled) setFollowingFeedLoading(false)
}
})()
return () => {
cancelled = true
}
}, [selectedFauxSpell, pubkey, sortedBlockedRelaysKey])
const interestTagsStableKey = interestListEvent const interestTagsStableKey = interestListEvent
? JSON.stringify( ? JSON.stringify(
[...interestListEvent.tags].sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b))) [...interestListEvent.tags].sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)))
@ -683,34 +774,63 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const pickSpell = useCallback( const pickSpell = useCallback(
(spell: Event | null) => { (spell: Event | null) => {
if (spell) {
logSpellFeedPickerSelection(`kind777:${getSpellName(spell)}`, {
spellId: spell.id,
spellAuthorPubkey: spell.pubkey,
kind777: true
})
}
setSelectedSpell(spell) setSelectedSpell(spell)
setSelectedFauxSpell(null) setSelectedFauxSpell(null)
setSpellPickerOpen(false) setSpellPickerOpen(false)
navigatePrimary('spells') navigatePrimary('spells')
}, },
[navigatePrimary] [logSpellFeedPickerSelection, navigatePrimary]
) )
const clearSpellSelection = useCallback(() => { const clearSpellSelection = useCallback(() => {
logSpellFeedPickerSelection('(cleared)', { cleared: true })
setSelectedSpell(null) setSelectedSpell(null)
setSelectedFauxSpell(null) setSelectedFauxSpell(null)
setSpellPickerOpen(false) setSpellPickerOpen(false)
navigatePrimary('spells') navigatePrimary('spells')
}, [navigatePrimary]) }, [logSpellFeedPickerSelection, navigatePrimary])
const pickFauxSpell = useCallback( const pickFauxSpell = useCallback(
(name: FauxSpellName | null) => { (name: FauxSpellName | null) => {
if (name) {
logSpellFeedPickerSelection(`faux:${name}`, { fauxSpell: name })
fauxSpellUrlSyncFromPickerRef.current = name
} else {
logSpellFeedPickerSelection('(cleared faux)', { clearedFaux: true })
fauxSpellUrlSyncFromPickerRef.current = null
}
setSelectedFauxSpell(name) setSelectedFauxSpell(name)
setSelectedSpell(null) setSelectedSpell(null)
setSpellPickerOpen(false) setSpellPickerOpen(false)
if (name) navigatePrimary('spells', { spell: name }) if (name) navigatePrimary('spells', { spell: name })
else navigatePrimary('spells') else navigatePrimary('spells')
}, },
[navigatePrimary] [logSpellFeedPickerSelection, navigatePrimary]
) )
const selectedSpellIsOwn = !!(pubkey && selectedSpell && selectedSpell.pubkey === pubkey) const selectedSpellIsOwn = !!(pubkey && selectedSpell && selectedSpell.pubkey === pubkey)
const handleSpellFeedFirstPaint = useCallback(
(detail: { eventCount: number; firstEventId: string }) => {
const elapsedMsSincePickerMs = Math.round(performance.now() - spellFeedInstrT0Ref.current)
logger.info('[SpellsPage] Spell feed — first events rendered (list has rows)', {
...detail,
eventCountMeaning: 'filtered visible rows (slice), not full relay buffer',
elapsedMsSincePickerMs,
selectionLabel: spellFeedInstrLabelRef.current,
instrumentToken: spellFeedInstrTokenRef.current
})
},
[]
)
const fauxNoteListUseFilterAsIs = useMemo(() => { const fauxNoteListUseFilterAsIs = useMemo(() => {
if (!selectedFauxSpell) return true if (!selectedFauxSpell) return true
return selectedFauxSpell !== 'following' && selectedFauxSpell !== 'bookmarks' return selectedFauxSpell !== 'following' && selectedFauxSpell !== 'bookmarks'
@ -890,20 +1010,32 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
ref={ref} ref={ref}
pageName="spells" pageName="spells"
titlebar={ titlebar={
<div className="flex w-full items-center justify-between gap-2"> <div className="flex h-full w-full items-center justify-between gap-2 pr-1">
<div className="font-semibold">{t('Spells')}</div> <div className="pl-3 text-lg font-semibold">{t('Spells')}</div>
<Button <div className="flex shrink-0 items-center gap-1">
variant="ghost" <Button
size="titlebar-icon" type="button"
onClick={() => { variant="ghost"
setSpellToEdit(null) size="titlebar-icon"
setSpellToClone(null) title={t('Refresh')}
setCreateOpen(true) aria-label={t('Refresh')}
}} onClick={refreshSpellsFeedAndCatalog}
title={t('Create a Spell')} >
> <RefreshCw className={`size-5 ${titlebarRefreshSpin ? 'animate-spin' : ''}`} />
<Plus className="size-5" /> </Button>
</Button> <Button
variant="ghost"
size="titlebar-icon"
onClick={() => {
setSpellToEdit(null)
setSpellToClone(null)
setCreateOpen(true)
}}
title={t('Create a Spell')}
>
<Plus className="size-5" />
</Button>
</div>
</div> </div>
} }
displayScrollToTopButton displayScrollToTopButton
@ -1095,10 +1227,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
) : null} ) : null}
<div className="min-h-0 min-w-0 flex-1"> <div className="min-h-0 min-w-0 flex-1">
<NoteList <NoteList
ref={spellFeedListRef}
subRequests={subRequests} subRequests={subRequests}
feedSubscriptionKey={spellFeedSubscriptionKey} feedSubscriptionKey={spellFeedSubscriptionKey}
showKinds={showKinds} showKinds={showKinds}
useTimelineCacheBootstrap useTimelineCacheBootstrap
spellFetchTimeoutMs={SPELL_FEED_FIRST_RELAY_GRACE_MS}
spellFeedInstrumentToken={spellFeedInstrumentToken}
onSpellFeedFirstPaint={handleSpellFeedFirstPaint}
useFilterAsIs={fauxNoteListUseFilterAsIs} useFilterAsIs={fauxNoteListUseFilterAsIs}
showKind1OPs={selectedFauxSpell === 'following' ? showKind1OPs : true} showKind1OPs={selectedFauxSpell === 'following' ? showKind1OPs : true}
showKind1Replies={selectedFauxSpell === 'following' ? showKind1Replies : true} showKind1Replies={selectedFauxSpell === 'following' ? showKind1Replies : true}
@ -1120,10 +1256,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
) : selectedSpell ? ( ) : selectedSpell ? (
subRequests.length > 0 ? ( subRequests.length > 0 ? (
<NoteList <NoteList
ref={spellFeedListRef}
subRequests={subRequests} subRequests={subRequests}
feedSubscriptionKey={spellFeedSubscriptionKey} feedSubscriptionKey={spellFeedSubscriptionKey}
showKinds={showKinds} showKinds={showKinds}
useTimelineCacheBootstrap useTimelineCacheBootstrap
spellFetchTimeoutMs={SPELL_FEED_FIRST_RELAY_GRACE_MS}
spellFeedInstrumentToken={spellFeedInstrumentToken}
onSpellFeedFirstPaint={handleSpellFeedFirstPaint}
useFilterAsIs useFilterAsIs
/> />
) : !pubkey && ) : !pubkey &&

14
src/providers/NoteFeedProfileContext.tsx

@ -0,0 +1,14 @@
import { TProfile } from '@/types'
import { createContext, useContext } from 'react'
export type NoteFeedProfileContextValue = {
profiles: ReadonlyMap<string, TProfile>
pendingPubkeys: ReadonlySet<string>
version: number
}
export const NoteFeedProfileContext = createContext<NoteFeedProfileContextValue | null>(null)
export function useNoteFeedProfileContext() {
return useContext(NoteFeedProfileContext)
}

209
src/services/client.service.ts

@ -54,6 +54,13 @@ import { MacroService, createBookstrService } from './client-macro.service'
type TTimelineRef = [string, number] type TTimelineRef = [string, number]
/**
* Timeline bootstrap used to await up to `filter.limit` IndexedDB reads before opening a live REQ,
* which blocked first paint for many seconds. We only prefetch this many newest refs; the subscription
* streams the rest immediately.
*/
const TIMELINE_CACHE_PREFETCH_CAP = 48
class ClientService extends EventTarget { class ClientService extends EventTarget {
static instance: ClientService static instance: ClientService
@ -854,25 +861,51 @@ class ClientService extends EventTarget {
startLogin, startLogin,
needSort = true, needSort = true,
useCache = false, useCache = false,
omitDefaultSinceWhenUseCache = false omitDefaultSinceWhenUseCache = false,
firstRelayResultGraceMs = FIRST_RELAY_RESULT_GRACE_MS
}: { }: {
startLogin?: () => void startLogin?: () => void
needSort?: boolean needSort?: boolean
useCache?: boolean useCache?: boolean
/** When useCache is true but there are no timeline refs yet, skip the default 24h `since` so REQ stays unbounded (spell feeds / catalog). */ /** When useCache is true but there are no timeline refs yet, skip the default 24h `since` so REQ stays unbounded (spell feeds / catalog). */
omitDefaultSinceWhenUseCache?: boolean omitDefaultSinceWhenUseCache?: boolean
/**
* After the first live event before EOSE, wait this long then treat initial load as EOSE (query-style finalize).
* Spells pass {@link FIRST_RELAY_RESULT_GRACE_MS} explicitly; feeds may override.
*/
firstRelayResultGraceMs?: number
} = {} } = {}
) { ) {
const newEventIdSet = new Set<string>() const newEventIdSet = new Set<string>()
const requestCount = subRequests.length const requestCount = subRequests.length
// For requestCount===1, floor(1/2)=0 makes eosedCount>=threshold true from the first inner
// callback, so every progressive update forwards to the outer onEvents → setState storms and
// stuck feeds (e.g. Spells Discussions). Require at least one EOSE before opening the gate.
const threshold = requestCount <= 1 ? 1 : Math.floor(requestCount / 2)
let eventIdSet = new Set<string>() let eventIdSet = new Set<string>()
let events: NEvent[] = [] let events: NEvent[] = []
let eosedCount = 0 let eosedCount = 0
let progressiveDelivered = false
/** First merged batch goes out synchronously so the list paints without waiting a frame. */
let outerMergedDelivered = false
/** One React update per animation frame after the first paint — limits setEvents/profile churn. */
let outerFlushRaf: number | null = null
const scheduleOuterFlush = () => {
const snapshot = events.length ? [...events] : []
const allEosed = eosedCount >= requestCount
if (!outerMergedDelivered && (snapshot.length > 0 || allEosed)) {
outerMergedDelivered = true
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
}
onEvents(snapshot, allEosed)
return
}
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
}
outerFlushRaf = requestAnimationFrame(() => {
outerFlushRaf = null
onEvents(events.length ? [...events] : [], eosedCount >= requestCount)
})
}
const subs = await Promise.all( const subs = await Promise.all(
subRequests.map(({ urls, filter }) => { subRequests.map(({ urls, filter }) => {
@ -893,12 +926,7 @@ class ClientService extends EventTarget {
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit) events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
eventIdSet = new Set(events.map((evt) => evt.id)) eventIdSet = new Set(events.map((evt) => evt.id))
if (eosedCount >= threshold) { scheduleOuterFlush()
onEvents(events, eosedCount >= requestCount)
} else if (!progressiveDelivered && events.length > 0) {
progressiveDelivered = true
onEvents(events, false)
}
}, },
onNew: (evt) => { onNew: (evt) => {
if (newEventIdSet.has(evt.id)) return if (newEventIdSet.has(evt.id)) return
@ -907,7 +935,7 @@ class ClientService extends EventTarget {
}, },
onClose onClose
}, },
{ startLogin, needSort, useCache, omitDefaultSinceWhenUseCache } { startLogin, needSort, useCache, omitDefaultSinceWhenUseCache, firstRelayResultGraceMs }
) )
}) })
) )
@ -915,8 +943,18 @@ class ClientService extends EventTarget {
const key = this.generateMultipleTimelinesKey(subRequests) const key = this.generateMultipleTimelinesKey(subRequests)
this.timelines[key] = subs.map((sub) => sub.timelineKey) this.timelines[key] = subs.map((sub) => sub.timelineKey)
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
onEvents(events.length ? [...events] : [], eosedCount >= requestCount)
}
return { return {
closer: () => { closer: () => {
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
}
onEvents = () => {} onEvents = () => {}
onNew = () => {} onNew = () => {}
subs.forEach((sub) => { subs.forEach((sub) => {
@ -1198,12 +1236,14 @@ class ClientService extends EventTarget {
startLogin, startLogin,
needSort = true, needSort = true,
useCache = false, useCache = false,
omitDefaultSinceWhenUseCache = false omitDefaultSinceWhenUseCache = false,
firstRelayResultGraceMs = FIRST_RELAY_RESULT_GRACE_MS
}: { }: {
startLogin?: () => void startLogin?: () => void
needSort?: boolean needSort?: boolean
useCache?: boolean useCache?: boolean
omitDefaultSinceWhenUseCache?: boolean omitDefaultSinceWhenUseCache?: boolean
firstRelayResultGraceMs?: number
} = {} } = {}
) { ) {
const relays = Array.from(new Set(urls)) const relays = Array.from(new Set(urls))
@ -1223,42 +1263,7 @@ class ClientService extends EventTarget {
let cachedEvents: NEvent[] = [] let cachedEvents: NEvent[] = []
let since: number | undefined let since: number | undefined
// CRITICAL: Only use cache if explicitly enabled (for profile timelines) const oneDayAgo = dayjs().subtract(24, 'hours').unix()
// Main feeds (home, notifications) should always fetch fresh from relays
if (useCache && timeline && !Array.isArray(timeline) && timeline.refs.length && needSort) {
cachedEvents = (
await Promise.all(timeline.refs.slice(0, filter.limit).map(([id]) => this.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (cachedEvents.length) {
// Sort cached events by newest first
cachedEvents.sort((a, b) => b.created_at - a.created_at)
// CRITICAL FIX: Filter out very old cached events (older than 24 hours)
// This prevents showing 15+ hour old events when the cache is stale
const oneDayAgo = dayjs().subtract(24, 'hours').unix()
const recentCachedEvents = cachedEvents.filter(evt => evt.created_at >= oneDayAgo)
if (recentCachedEvents.length > 0) {
// Only show cached events if they're recent
onEvents([...recentCachedEvents], false)
// Use the NEWEST cached event's timestamp + 1 to fetch only newer events
since = recentCachedEvents[0].created_at + 1
} else {
// All cached events are too old, ignore them and start fresh
cachedEvents = []
}
}
}
// CRITICAL FIX: Only set since parameter if caching is enabled
// When useCache is false, we want to stream raw from relays without time restrictions
// This allows relay feeds to show all available events, not just recent ones
if (!since && needSort && useCache && !omitDefaultSinceWhenUseCache) {
// Default to last 24 hours if no recent cached events (only when caching is enabled)
// This ensures we get recent content even if relays are slow
const oneDayAgo = dayjs().subtract(24, 'hours').unix()
since = oneDayAgo
}
// eslint-disable-next-line @typescript-eslint/no-this-alias // eslint-disable-next-line @typescript-eslint/no-this-alias
const that = this const that = this
@ -1266,30 +1271,92 @@ class ClientService extends EventTarget {
let eosedAt: number | null = null let eosedAt: number | null = null
let initialBatchScheduled = false let initialBatchScheduled = false
let lastDeliveredCount = 0 let lastDeliveredCount = 0
// Progressive loading: show the first event(s) as soon as they arrive (not only after 5+ events)
const PROGRESSIVE_INTERVAL_MS = 100 // Poll for more events while relays are still streaming
const MIN_NEW_EVENTS_AFTER_FIRST = 5 // After first paint, batch updates to limit re-renders
let progressiveIntervalId: ReturnType<typeof setInterval> | null = null let progressiveIntervalId: ReturnType<typeof setInterval> | null = null
let firstRelayResultGraceTimer: ReturnType<typeof setTimeout> | null = null let firstRelayResultGraceTimer: ReturnType<typeof setTimeout> | null = null
const deliverProgressive = () => { const PROGRESSIVE_INTERVAL_MS = 100 // Backup tick while relays stream without new onevent bursts
if (eosedAt || events.length === 0) return const MIN_NEW_EVENTS_AFTER_FIRST = 1
const sortedEvents = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
const newEventCount = sortedEvents.length - lastDeliveredCount const mergeTimelineLiveAndCache = (): NEvent[] => {
const sortedLive = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
if (!needSort || !useCache || cachedEvents.length === 0) {
return sortedLive
}
const byId = new Map<string, NEvent>()
for (const e of cachedEvents) {
byId.set(e.id, e)
}
for (const e of sortedLive) {
byId.set(e.id, e)
}
return [...byId.values()].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
}
const deliverProgressive = () => {
if (eosedAt) return
const combined = mergeTimelineLiveAndCache()
if (combined.length === 0) return
const newEventCount = combined.length - lastDeliveredCount
const isFirstPaint = lastDeliveredCount === 0 const isFirstPaint = lastDeliveredCount === 0
const shouldDeliver = const shouldDeliver =
isFirstPaint isFirstPaint
? sortedEvents.length >= 1 ? combined.length >= 1
: newEventCount >= MIN_NEW_EVENTS_AFTER_FIRST || sortedEvents.length >= filter.limit * 0.5 : newEventCount >= MIN_NEW_EVENTS_AFTER_FIRST || combined.length >= filter.limit * 0.5
if (shouldDeliver) { if (shouldDeliver) {
lastDeliveredCount = sortedEvents.length lastDeliveredCount = combined.length
const snap = sortedEvents onEvents(combined, false)
// Only include cached events if caching is enabled }
onEvents(needSort && useCache ? snap.concat(cachedEvents).slice(0, filter.limit) : snap, false) }
// CRITICAL: Only use cache if explicitly enabled (for profile timelines)
// Main feeds (home, notifications) should always fetch fresh from relays
if (useCache && timeline && !Array.isArray(timeline) && timeline.refs.length && needSort) {
const refs = timeline.refs
const prefetchN = Math.min(refs.length, filter.limit, TIMELINE_CACHE_PREFETCH_CAP)
// Spell / catalog feeds: refs already carry created_at — set `since` immediately and open the live REQ
// without awaiting dozens of IndexedDB reads (that delayed first events by seconds).
if (omitDefaultSinceWhenUseCache && refs[0]![1] >= oneDayAgo) {
since = refs[0]![1] + 1
void (async () => {
try {
const loaded = (
await Promise.all(refs.slice(0, prefetchN).map(([id]) => that.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (!loaded.length) return
loaded.sort((a, b) => b.created_at - a.created_at)
const recent = loaded.filter((evt) => evt.created_at >= oneDayAgo)
if (!recent.length) return
cachedEvents = recent
deliverProgressive()
} catch {
// ignore
}
})()
} else if (!omitDefaultSinceWhenUseCache) {
cachedEvents = (
await Promise.all(refs.slice(0, prefetchN).map(([id]) => this.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (cachedEvents.length) {
cachedEvents.sort((a, b) => b.created_at - a.created_at)
const recentCachedEvents = cachedEvents.filter((evt) => evt.created_at >= oneDayAgo)
if (recentCachedEvents.length > 0) {
onEvents([...recentCachedEvents], false)
since = recentCachedEvents[0].created_at + 1
} else {
cachedEvents = []
}
}
} }
} }
// CRITICAL FIX: Only set since parameter if caching is enabled
// When useCache is false, we want to stream raw from relays without time restrictions
// This allows relay feeds to show all available events, not just recent ones
if (!since && needSort && useCache && !omitDefaultSinceWhenUseCache) {
since = oneDayAgo
}
const handleTimelineEose = (eosed: boolean) => { const handleTimelineEose = (eosed: boolean) => {
if (eosed && eosedAt != null) return if (eosed && eosedAt != null) return
@ -1382,15 +1449,17 @@ class ClientService extends EventTarget {
firstRelayResultGraceTimer = setTimeout(() => { firstRelayResultGraceTimer = setTimeout(() => {
firstRelayResultGraceTimer = null firstRelayResultGraceTimer = null
handleTimelineEose(true) handleTimelineEose(true)
}, FIRST_RELAY_RESULT_GRACE_MS) }, firstRelayResultGraceMs)
} }
// Deliver as soon as we have any event while waiting for EOSE (then batch further updates) // Deliver on every live event before EOSE (plus interval as a safety net)
if (needSort && events.length >= 1 && !initialBatchScheduled) { if (needSort && events.length >= 1) {
initialBatchScheduled = true if (!initialBatchScheduled) {
deliverProgressive() initialBatchScheduled = true
if (!progressiveIntervalId) { if (!progressiveIntervalId) {
progressiveIntervalId = setInterval(deliverProgressive, PROGRESSIVE_INTERVAL_MS) progressiveIntervalId = setInterval(deliverProgressive, PROGRESSIVE_INTERVAL_MS)
}
} }
deliverProgressive()
} }
return return
} }

6
src/services/spell.service.ts

@ -65,6 +65,12 @@ export const SPELL_CATALOG_SYNC_LIMIT_WITH_FOLLOWS = 600
/** Max distinct pubkeys in one catalog REQ (relay compatibility). Your pubkey is always first. */ /** Max distinct pubkeys in one catalog REQ (relay compatibility). Your pubkey is always first. */
export const SPELL_CATALOG_MAX_AUTHORS = 400 export const SPELL_CATALOG_MAX_AUTHORS = 400
/**
* If no relay sends EOSE, stop showing the catalog sync state and close the sub after this long.
* Keeps the UI from feeling stuck when relays are slow or silent.
*/
export const SPELL_CATALOG_SYNC_TIMEOUT_MS = 12_000
/** Build author list for spell catalog sync: always include `pubkey`, then follows, deduped. */ /** Build author list for spell catalog sync: always include `pubkey`, then follows, deduped. */
export function buildSpellCatalogAuthors(pubkey: string, contacts: string[]): string[] { export function buildSpellCatalogAuthors(pubkey: string, contacts: string[]): string[] {
const rest = contacts.filter((c) => typeof c === 'string' && c.length > 0 && c !== pubkey) const rest = contacts.filter((c) => typeof c === 'string' && c.length > 0 && c !== pubkey)

Loading…
Cancel
Save