Browse Source

speed up spells

imwald
Silberengel 1 month ago
parent
commit
45c61bacd0
  1. 4
      package-lock.json
  2. 2
      package.json
  3. 48
      src/components/Embedded/EmbeddedNote.tsx
  4. 45
      src/components/Embedded/embeddedNotePointer.ts
  5. 1
      src/components/Embedded/index.tsx
  6. 352
      src/components/NoteList/index.tsx
  7. 3
      src/constants.ts
  8. 25
      src/hooks/useFetchProfile.tsx
  9. 115
      src/pages/primary/SpellsPage/fauxSpellFeeds.ts
  10. 210
      src/pages/primary/SpellsPage/index.tsx
  11. 14
      src/providers/NoteFeedProfileContext.tsx
  12. 193
      src/services/client.service.ts
  13. 6
      src/services/spell.service.ts

4
package-lock.json generated

@@ -1,12 +1,12 @@
{
"name": "jumble-imwald",
"version": "19.1.0",
"version": "19.1.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "jumble-imwald",
"version": "19.1.0",
"version": "19.1.1",
"license": "MIT",
"dependencies": {
"@asciidoctor/core": "^3.0.4",

2
package.json

@ -1,6 +1,6 @@ @@ -1,6 +1,6 @@
{
"name": "jumble-imwald",
"version": "19.1.0",
"version": "19.1.1",
"description": "A user-friendly Nostr client focused on relay feed browsing and relay discovery, forked from Jumble",
"private": true,
"type": "module",

48
src/components/Embedded/EmbeddedNote.tsx

@ -19,6 +19,10 @@ import { extractBookMetadata } from '@/lib/bookstr-parser' @@ -19,6 +19,10 @@ import { extractBookMetadata } from '@/lib/bookstr-parser'
import { contentParserService } from '@/services/content-parser.service'
import { useSmartNoteNavigation } from '@/PageManager'
import { toNote } from '@/lib/link'
import {
type EmbeddedNoteIdValidation,
validateEmbeddedNotePointer
} from './embeddedNotePointer'
/** Embedded `noteId` is often raw hex from parsers; must accept A–F and normalize for REQ `ids`. */
function hexEventIdFromNoteId(noteId: string): string | null {
@ -46,50 +50,6 @@ function canSearchOnExternalRelays(noteId: string): boolean { @@ -46,50 +50,6 @@ function canSearchOnExternalRelays(noteId: string): boolean {
}
}
export type EmbeddedNoteIdValidation =
| { valid: true }
| {
valid: false
reason: 'empty' | 'invalid_hex' | 'invalid_bech32' | 'wrong_nip19_type'
decodedType?: string
}
/**
* Only hex (64), note1, nevent1, and naddr1 are valid embedded note targets.
* Malformed bech32, wrong kinds (e.g. npub, nprofile), or bad hex length fail before fetch/search UI.
*/
export function validateEmbeddedNotePointer(noteId: string): EmbeddedNoteIdValidation {
const s = noteId.trim()
if (!s) return { valid: false, reason: 'empty' }
if (/^[0-9a-f]{64}$/i.test(s)) return { valid: true }
if (/^[0-9a-f]+$/i.test(s)) {
return { valid: false, reason: 'invalid_hex' }
}
const looksLikeNostrBech32 =
s.startsWith('n') && s.includes('1') && /^[a-z0-9]+$/i.test(s) && s.length >= 10
if (looksLikeNostrBech32) {
try {
const { type } = nip19.decode(s)
if (type === 'note' || type === 'nevent' || type === 'naddr') return { valid: true }
return { valid: false, reason: 'wrong_nip19_type', decodedType: type }
} catch {
return { valid: false, reason: 'invalid_bech32' }
}
}
try {
const { type } = nip19.decode(s)
if (type === 'note' || type === 'nevent' || type === 'naddr') return { valid: true }
return { valid: false, reason: 'wrong_nip19_type', decodedType: type }
} catch {
return { valid: false, reason: 'invalid_bech32' }
}
}
export function EmbeddedNote({
noteId,
className,

45
src/components/Embedded/embeddedNotePointer.ts

@ -0,0 +1,45 @@ @@ -0,0 +1,45 @@
import { nip19 } from 'nostr-tools'
export type EmbeddedNoteIdValidation =
| { valid: true }
| {
valid: false
reason: 'empty' | 'invalid_hex' | 'invalid_bech32' | 'wrong_nip19_type'
decodedType?: string
}
/**
 * Validate that `noteId` points at an embeddable note before any fetch/search UI runs.
 *
 * Only raw 64-char hex, `note1`, `nevent1`, and `naddr1` are valid embedded note targets.
 * Malformed bech32, wrong NIP-19 kinds (e.g. npub, nprofile), or bad hex length all fail fast.
 *
 * @param noteId - raw pointer text from the content parser (hex event id or NIP-19 bech32)
 * @returns `{ valid: true }` or `{ valid: false, reason, decodedType? }` describing why it was rejected
 */
export function validateEmbeddedNotePointer(noteId: string): EmbeddedNoteIdValidation {
  const s = noteId.trim()
  if (!s) return { valid: false, reason: 'empty' }
  // Raw hex: exactly 64 hex chars name an event id; any other all-hex string is a bad length.
  if (/^[0-9a-f]{64}$/i.test(s)) return { valid: true }
  if (/^[0-9a-f]+$/i.test(s)) {
    return { valid: false, reason: 'invalid_hex' }
  }
  // Everything else must decode as an event-like NIP-19 entity. The previous
  // `looksLikeNostrBech32` pre-check gated a branch identical to this fallthrough,
  // so both paths ran the same decode logic — collapsed into one.
  try {
    const { type } = nip19.decode(s)
    if (type === 'note' || type === 'nevent' || type === 'naddr') return { valid: true }
    return { valid: false, reason: 'wrong_nip19_type', decodedType: type }
  } catch {
    return { valid: false, reason: 'invalid_bech32' }
  }
}

1
src/components/Embedded/index.tsx

@ -4,4 +4,5 @@ export * from './EmbeddedLNInvoice' @@ -4,4 +4,5 @@ export * from './EmbeddedLNInvoice'
export * from './EmbeddedMention'
export * from './EmbeddedNormalUrl'
export * from './EmbeddedNote'
export * from './embeddedNotePointer'
export * from './EmbeddedWebsocketUrl'

352
src/components/NoteList/index.tsx

@ -1,6 +1,6 @@ @@ -1,6 +1,6 @@
import NewNotesButton from '@/components/NewNotesButton'
import { Button } from '@/components/ui/button'
import { ExtendedKind } from '@/constants'
import { ExtendedKind, FIRST_RELAY_RESULT_GRACE_MS } from '@/constants'
import {
getEmbeddedNoteBech32Ids,
getReplaceableCoordinateFromEvent,
@ -29,17 +29,23 @@ import { @@ -29,17 +29,23 @@ import {
useCallback,
useEffect,
useImperativeHandle,
useLayoutEffect,
useMemo,
useRef,
useState
} from 'react'
import { useTranslation } from 'react-i18next'
import PullToRefresh from 'react-simple-pull-to-refresh'
import { formatPubkey, pubkeyToNpub } from '@/lib/pubkey'
import { NoteFeedProfileContext, type NoteFeedProfileContextValue } from '@/providers/NoteFeedProfileContext'
import type { TProfile } from '@/types'
import NoteCard, { NoteCardLoadingSkeleton } from '../NoteCard'
const LIMIT = 500 // Increased from 200 to load more events per request
const ALGO_LIMIT = 1000 // Increased from 500 for algorithm feeds
const SHOW_COUNT = 50 // Increased from 10 to show more events at once, reducing scroll load frequency
const FEED_PROFILE_BATCH_DEBOUNCE_MS = 120
const FEED_PROFILE_CHUNK = 36
const NoteList = forwardRef(
(
@ -62,7 +68,17 @@ const NoteList = forwardRef( @@ -62,7 +68,17 @@ const NoteList = forwardRef(
* When true, hydrate the list from the client timeline cache (IndexedDB-backed) before/at same time as
* live REQ, so feeds feel instant on repeat visits. Spells faux feeds use this; home feed stays false.
*/
useTimelineCacheBootstrap = false
useTimelineCacheBootstrap = false,
/**
* When set (Spells page), passed to `subscribeTimeline` as `firstRelayResultGraceMs` — the number of ms to wait after
* the first live event before treating initial load as EOSE. Subscribe setup and loading fallback keep
* longer defaults so multi-relay spell feeds do not race-fail and stay blank after refresh.
*/
spellFetchTimeoutMs,
/** Spells page: bumps when user picks a feed; used with {@link onSpellFeedFirstPaint}. */
spellFeedInstrumentToken,
/** Spells page: fired once when the filtered list first has rows after a picker change. */
onSpellFeedFirstPaint
}: {
subRequests: TFeedSubRequest[]
showKinds: number[]
@ -80,6 +96,9 @@ const NoteList = forwardRef( @@ -80,6 +96,9 @@ const NoteList = forwardRef(
extraShouldHideEvent?: (evt: Event) => boolean
feedSubscriptionKey?: string
useTimelineCacheBootstrap?: boolean
spellFetchTimeoutMs?: number
spellFeedInstrumentToken?: number
onSpellFeedFirstPaint?: (detail: { eventCount: number; firstEventId: string }) => void
},
ref
) => {
@ -91,6 +110,7 @@ const NoteList = forwardRef( @@ -91,6 +110,7 @@ const NoteList = forwardRef(
const { isEventDeleted } = useDeletedEvent()
const { zapReplyThreshold } = useZap()
const [events, setEvents] = useState<Event[]>([])
const eventsRef = useRef<Event[]>([])
const [newEvents, setNewEvents] = useState<Event[]>([])
const [hasMore, setHasMore] = useState<boolean>(true)
const [loading, setLoading] = useState(true)
@ -100,8 +120,29 @@ const NoteList = forwardRef( @@ -100,8 +120,29 @@ const NoteList = forwardRef(
const supportTouch = useMemo(() => isTouchDevice(), [])
const bottomRef = useRef<HTMLDivElement | null>(null)
const topRef = useRef<HTMLDivElement | null>(null)
const spellFeedFirstPaintLoggedKeyRef = useRef('')
const consecutiveEmptyRef = useRef(0) // Track consecutive empty results to prevent infinite retries
const loadMoreTimeoutRef = useRef<NodeJS.Timeout | null>(null) // Throttle loadMore calls to prevent stuttering
/** Batched profile + embed prefetch after timeline updates (avoids N×9s profile storms while relays stream). */
const timelinePrefetchDebounceRef = useRef<ReturnType<typeof setTimeout> | null>(null)
const lastEventsForTimelinePrefetchRef = useRef<Event[]>([])
const [feedProfileBatch, setFeedProfileBatch] = useState<{
profiles: Map<string, TProfile>
pending: Set<string>
version: number
}>(() => ({ profiles: new Map(), pending: new Set(), version: 0 }))
const feedProfileLoadedRef = useRef<Set<string>>(new Set())
const feedProfileBatchGenRef = useRef(0)
const noteFeedProfileContextValue = useMemo<NoteFeedProfileContextValue>(
() => ({
profiles: feedProfileBatch.profiles,
pendingPubkeys: feedProfileBatch.pending,
version: feedProfileBatch.version
}),
[feedProfileBatch]
)
// Memoize subRequests serialization to avoid expensive JSON.stringify on every render
const subRequestsKey = useMemo(() => {
@ -115,6 +156,12 @@ const NoteList = forwardRef( @@ -115,6 +156,12 @@ const NoteList = forwardRef(
const timelineSubscriptionKey = feedSubscriptionKey ?? subRequestsKey
useEffect(() => {
feedProfileBatchGenRef.current += 1
feedProfileLoadedRef.current.clear()
setFeedProfileBatch({ profiles: new Map(), pending: new Set(), version: 0 })
}, [timelineSubscriptionKey, refreshCount])
const subRequestsRef = useRef(subRequests)
subRequestsRef.current = subRequests
@ -232,6 +279,91 @@ const NoteList = forwardRef( @@ -232,6 +279,91 @@ const NoteList = forwardRef(
})
}, [newEvents, shouldHideEvent, showKinds, showKind1OPs, showKind1Replies, showKind1111])
useLayoutEffect(() => {
if (!onSpellFeedFirstPaint || spellFeedInstrumentToken === undefined) return
if (filteredEvents.length === 0) return
const first = filteredEvents[0]
if (!first) return
const fpKey = `${spellFeedInstrumentToken}|${timelineSubscriptionKey ?? ''}`
if (spellFeedFirstPaintLoggedKeyRef.current === fpKey) return
spellFeedFirstPaintLoggedKeyRef.current = fpKey
onSpellFeedFirstPaint({
eventCount: filteredEvents.length,
firstEventId: first.id
})
}, [
onSpellFeedFirstPaint,
spellFeedInstrumentToken,
timelineSubscriptionKey,
filteredEvents.length,
filteredEvents[0]?.id
])
useEffect(() => {
const handle = window.setTimeout(() => {
const gen = feedProfileBatchGenRef.current
const candidates = new Set<string>()
const addPk = (p: string | undefined) => {
if (p && p.length === 64 && /^[0-9a-f]{64}$/.test(p)) {
candidates.add(p)
}
}
filteredEvents.slice(0, 50).forEach((e) => addPk(e.pubkey))
events.slice(0, 120).forEach((e) => addPk(e.pubkey))
events.slice(showCount, showCount + 60).forEach((e) => addPk(e.pubkey))
const need = [...candidates].filter((pk) => !feedProfileLoadedRef.current.has(pk))
if (need.length === 0) return
need.forEach((pk) => feedProfileLoadedRef.current.add(pk))
setFeedProfileBatch((prev) => {
const pending = new Set(prev.pending)
need.forEach((pk) => pending.add(pk))
return { ...prev, pending, version: prev.version + 1 }
})
void (async () => {
for (let i = 0; i < need.length; i += FEED_PROFILE_CHUNK) {
if (gen !== feedProfileBatchGenRef.current) return
const chunk = need.slice(i, i + FEED_PROFILE_CHUNK)
try {
const profiles = await client.fetchProfilesForPubkeys(chunk)
if (gen !== feedProfileBatchGenRef.current) return
setFeedProfileBatch((prev) => {
const next = new Map(prev.profiles)
const pend = new Set(prev.pending)
for (const p of profiles) {
next.set(p.pubkey, p)
pend.delete(p.pubkey)
}
for (const pk of chunk) {
pend.delete(pk)
if (!next.has(pk)) {
next.set(pk, {
pubkey: pk,
npub: pubkeyToNpub(pk) ?? '',
username: formatPubkey(pk)
})
}
}
return { profiles: next, pending: pend, version: prev.version + 1 }
})
} catch {
chunk.forEach((pk) => feedProfileLoadedRef.current.delete(pk))
if (gen !== feedProfileBatchGenRef.current) return
setFeedProfileBatch((prev) => {
const pend = new Set(prev.pending)
chunk.forEach((pk) => pend.delete(pk))
return { ...prev, pending: pend, version: prev.version + 1 }
})
}
}
})()
}, FEED_PROFILE_BATCH_DEBOUNCE_MS)
return () => window.clearTimeout(handle)
}, [filteredEvents, events, showCount])
const scrollToTop = (behavior: ScrollBehavior = 'instant') => {
setTimeout(() => {
topRef.current?.scrollIntoView({ behavior, block: 'start' })
@ -308,13 +440,17 @@ const NoteList = forwardRef( @@ -308,13 +440,17 @@ const NoteList = forwardRef(
| undefined
try {
// Add timeout wrapper to prevent subscribeTimeline from hanging indefinitely
// Opening subs + IndexedDB timeline hydration can exceed 2s on spell feeds with many relays; a short race
// rejects, the catch closes the late subscription, and the list stays empty after refresh.
const subscribeSetupRaceMs = 5000
const timeoutPromise = new Promise<never>((_, reject) => {
setTimeout(() => {
reject(new Error('subscribeTimeline timeout after 5 seconds'))
}, 5000) // 5 second timeout
reject(new Error(`subscribeTimeline timeout after ${subscribeSetupRaceMs}ms`))
}, subscribeSetupRaceMs)
})
const firstRelayGraceMs = spellFetchTimeoutMs ?? FIRST_RELAY_RESULT_GRACE_MS
timelineSubscribePromise = client.subscribeTimeline(
mappedSubRequests,
{
@ -325,51 +461,32 @@ const NoteList = forwardRef( @@ -325,51 +461,32 @@ const NoteList = forwardRef(
// Do not wait for full EOSE across many relays — otherwise loading/skeleton stays up for 10–30s+
setLoading(false)
// CRITICAL: Prefetch profiles for initial events (optimized for faster initial load)
// Only prefetch for first 50 events to reduce initial load time
// Additional prefetching happens on scroll via the useEffect hooks
const initialPubkeys = Array.from(
new Set(events.slice(0, 50).map((ev: Event) => ev.pubkey).filter((p: string) => p?.length === 64))
)
if (initialPubkeys.length > 0) {
// Filter out already prefetched pubkeys
const pubkeysToFetch = initialPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (pubkeysToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch in background (non-blocking) with delay to not block initial render
setTimeout(() => {
if (!effectActive) return
client.fetchProfilesForPubkeys(pubkeysToFetch).catch(() => {
// On error, remove from prefetched set so we can retry later
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
}, 100)
}
// Defer profile + embed prefetch: streaming timelines fire onEvents often; starting
// fetchProfilesForPubkeys on every update spams relays (multi-second each) and cancels hooks.
lastEventsForTimelinePrefetchRef.current = events
if (timelinePrefetchDebounceRef.current) {
clearTimeout(timelinePrefetchDebounceRef.current)
}
timelinePrefetchDebounceRef.current = setTimeout(() => {
timelinePrefetchDebounceRef.current = null
if (!effectActive) return
const evs = lastEventsForTimelinePrefetchRef.current
if (evs.length === 0) return
// CRITICAL: Prefetch embedded events for initial events (reduced scope)
// Only prefetch for first 50 events to reduce initial load time
const initialEmbeddedEventIds = new Set<string>()
events.slice(0, 50).forEach((ev: Event) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id: string) => initialEmbeddedEventIds.add(id))
evs.slice(0, 50).forEach((ev: Event) => {
extractEmbeddedEventIds(ev).forEach((id: string) => initialEmbeddedEventIds.add(id))
})
const eventIdsToFetch = Array.from(initialEmbeddedEventIds).filter(
(id) => !prefetchedEventIdsRef.current.has(id)
)
if (eventIdsToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
// Batch fetch embedded events in background (non-blocking) with delay
setTimeout(() => {
if (!effectActive) return
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
// On error, remove from prefetched set so we can retry later
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
})
}, 200)
}
}, 450)
} else if (eosed) {
// No events received but EOSE - set empty events array and stop loading
setEvents([])
@ -422,7 +539,8 @@ const NoteList = forwardRef( @@ -422,7 +539,8 @@ const NoteList = forwardRef(
startLogin,
needSort: !areAlgoRelays,
useCache: useTimelineCacheBootstrap,
omitDefaultSinceWhenUseCache: useTimelineCacheBootstrap
omitDefaultSinceWhenUseCache: useTimelineCacheBootstrap,
firstRelayResultGraceMs: firstRelayGraceMs
}
)
@ -452,6 +570,10 @@ const NoteList = forwardRef( @@ -452,6 +570,10 @@ const NoteList = forwardRef(
const promise = init()
return () => {
effectActive = false
if (timelinePrefetchDebounceRef.current) {
clearTimeout(timelinePrefetchDebounceRef.current)
timelinePrefetchDebounceRef.current = null
}
promise.then((closer) => closer?.())
}
}, [
@ -463,15 +585,25 @@ const NoteList = forwardRef( @@ -463,15 +585,25 @@ const NoteList = forwardRef(
showKind1111,
useFilterAsIs,
areAlgoRelays,
useTimelineCacheBootstrap
useTimelineCacheBootstrap,
spellFetchTimeoutMs
])
useEffect(() => {
eventsRef.current = events
}, [events])
useEffect(() => {
if (!subRequestsRef.current.length) return
let cancelled = false
const timer = window.setTimeout(() => {
if (cancelled) return
setLoading((prev) => (prev ? false : prev))
// hasMore defaults true; if timeline never sends eosed (slow/hung relays), we would keep a
// bottom skeleton forever while loading is false — unblock empty state / reload.
if (eventsRef.current.length === 0) {
setHasMore(false)
}
}, 15_000)
return () => {
cancelled = true
@ -480,16 +612,11 @@ const NoteList = forwardRef( @@ -480,16 +612,11 @@ const NoteList = forwardRef(
}, [timelineSubscriptionKey, refreshCount])
// Use refs to avoid dependency issues and ensure latest values in async callbacks
const eventsRef = useRef(events)
const showCountRef = useRef(showCount)
const loadingRef = useRef(loading)
const hasMoreRef = useRef(hasMore)
const timelineKeyRef = useRef(timelineKey)
useEffect(() => {
eventsRef.current = events
}, [events])
useEffect(() => {
showCountRef.current = showCount
}, [showCount])
@ -639,23 +766,6 @@ const NoteList = forwardRef( @@ -639,23 +766,6 @@ const NoteList = forwardRef(
}
schedulePrefetch(() => {
const newPubkeys = Array.from(
new Set(newEvents.map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
if (newPubkeys.length > 0) {
// Filter out already prefetched pubkeys
const pubkeysToFetch = newPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (pubkeysToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch in background (non-blocking)
client.fetchProfilesForPubkeys(pubkeysToFetch).catch(() => {
// On error, remove from prefetched set so we can retry later
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
}
}
// CRITICAL: Prefetch embedded events for newly loaded events (throttled)
const newEmbeddedEventIds = new Set<string>()
// Only prefetch for first 30 events to reduce load
@ -719,12 +829,6 @@ const NoteList = forwardRef( @@ -719,12 +829,6 @@ const NoteList = forwardRef(
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
// CRITICAL: Prefetch profiles for visible authors + upcoming events in one batched request
// This prevents browser crashes during rapid scrolling by pre-loading profiles before they're needed
const visiblePubkeysRef = useRef<Set<string>>(new Set())
const prefetchedPubkeysRef = useRef<Set<string>>(new Set())
const prefetchTimeoutRef = useRef<NodeJS.Timeout | null>(null)
// CRITICAL: Prefetch embedded events (referenced in e tags, a tags, and content)
// This ensures embedded events are ready before user scrolls to them
const prefetchedEventIdsRef = useRef<Set<string>>(new Set())
@ -767,76 +871,6 @@ const NoteList = forwardRef( @@ -767,76 +871,6 @@ const NoteList = forwardRef(
return Array.from(new Set(eventIds)) // Deduplicate
}, [])
useEffect(() => {
// Throttle profile prefetching to reduce frequency during rapid scrolling
// Clear any existing timeout
if (prefetchTimeoutRef.current) {
clearTimeout(prefetchTimeoutRef.current)
}
// Debounce profile prefetching by 300ms to reduce frequency during rapid scrolling
prefetchTimeoutRef.current = setTimeout(() => {
// Prefetch profiles for:
// 1. Currently visible events (first 40, reduced to reduce stuttering)
// 2. Upcoming events that will be visible when scrolling (next 80, reduced to reduce load)
// This ensures profiles are ready before they're needed during rapid scrolling
const visiblePubkeys = Array.from(
new Set(filteredEvents.slice(0, 40).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
const upcomingPubkeys = Array.from(
new Set(events.slice(0, 80).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
// Combine visible and upcoming, but prioritize visible ones
const allPubkeys = Array.from(new Set([...visiblePubkeys, ...upcomingPubkeys]))
if (allPubkeys.length === 0) return
// Check if we've already prefetched these exact pubkeys
const prev = visiblePubkeysRef.current
const same = allPubkeys.length === prev.size && allPubkeys.every((p) => prev.has(p))
if (same) return
// Find pubkeys that haven't been prefetched yet
const newPubkeys = allPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (newPubkeys.length === 0) {
// All pubkeys already prefetched, just update the ref
visiblePubkeysRef.current = new Set(allPubkeys)
return
}
// Update refs
visiblePubkeysRef.current = new Set(allPubkeys)
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch profiles for new pubkeys (IndexedDB + network in one request)
// This is the key optimization: batch processing prevents individual fetches during scrolling
// Use requestIdleCallback if available to avoid blocking scroll
const scheduleFetch = (callback: () => void) => {
if (typeof requestIdleCallback !== 'undefined') {
requestIdleCallback(callback, { timeout: 500 })
} else {
setTimeout(callback, 0)
}
}
scheduleFetch(() => {
client.fetchProfilesForPubkeys(newPubkeys).catch(() => {
// On error, remove from prefetched set so we can retry later
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
})
}, 300) // Debounce by 300ms to reduce frequency during rapid scrolling
return () => {
if (prefetchTimeoutRef.current) {
clearTimeout(prefetchTimeoutRef.current)
prefetchTimeoutRef.current = null
}
}
}, [filteredEvents, events, extractEmbeddedEventIds])
// CRITICAL: Prefetch embedded events for visible events
useEffect(() => {
// Throttle embedded event prefetching to reduce frequency during rapid scrolling
@ -915,40 +949,8 @@ const NoteList = forwardRef( @@ -915,40 +949,8 @@ const NoteList = forwardRef(
clearTimeout(prefetchNewEventsTimeoutRef.current)
}
// Debounce profile prefetching for newly loaded events (optimized to reduce stuttering)
// Debounce embedded-event prefetch for newly revealed rows (profiles use NoteFeed batcher above)
prefetchNewEventsTimeoutRef.current = setTimeout(() => {
// When we have more events loaded, prefetch profiles for the newly loaded ones
// Reduced to 50 to reduce batch size and prevent stuttering
const newlyLoadedPubkeys = Array.from(
new Set(events.slice(showCount, showCount + 50).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
if (newlyLoadedPubkeys.length > 0) {
// Filter out already prefetched pubkeys
const newPubkeys = newlyLoadedPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (newPubkeys.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch in background (non-blocking) using requestIdleCallback
const scheduleFetch = (callback: () => void) => {
if (typeof requestIdleCallback !== 'undefined') {
requestIdleCallback(callback, { timeout: 500 })
} else {
setTimeout(callback, 0)
}
}
scheduleFetch(() => {
client.fetchProfilesForPubkeys(newPubkeys).catch(() => {
// On error, remove from prefetched set so we can retry later
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
})
}
}
// CRITICAL: Prefetch embedded events for newly loaded events (reduced scope)
const newlyLoadedEmbeddedEventIds = new Set<string>()
events.slice(showCount, showCount + 50).forEach((ev) => {
@ -1005,11 +1007,15 @@ const NoteList = forwardRef( @@ -1005,11 +1007,15 @@ const NoteList = forwardRef(
filterMutedNotes={filterMutedNotes}
/>
))}
{hasMore || loading ? (
{events.length === 0 && loading ? (
<div ref={bottomRef}>
<NoteCardLoadingSkeleton />
</div>
) : events.length > 0 && (hasMore || loading) ? (
<div ref={bottomRef}>
<NoteCardLoadingSkeleton />
</div>
) : events.length ? (
) : events.length > 0 ? (
<div className="text-center text-sm text-muted-foreground mt-2">{t('no more notes')}</div>
) : (
<div className="flex justify-center w-full mt-2">
@ -1024,6 +1030,7 @@ const NoteList = forwardRef( @@ -1024,6 +1030,7 @@ const NoteList = forwardRef(
return (
<div>
<div ref={topRef} className="scroll-mt-[calc(6rem+1px)]" />
<NoteFeedProfileContext.Provider value={noteFeedProfileContextValue}>
{supportTouch ? (
<PullToRefresh
onRefresh={async () => {
@ -1037,6 +1044,7 @@ const NoteList = forwardRef( @@ -1037,6 +1044,7 @@ const NoteList = forwardRef(
) : (
list
)}
</NoteFeedProfileContext.Provider>
<div className="h-40" />
{filteredNewEvents.length > 0 && (
<NewNotesButton newEvents={filteredNewEvents} onClick={showNewEvents} />

3
src/constants.ts

@ -17,6 +17,9 @@ export const DEFAULT_FAVORITE_RELAYS = [ @@ -17,6 +17,9 @@ export const DEFAULT_FAVORITE_RELAYS = [
/** Multi-relay queries and timeline initial REQ: after the first event, wait this long then close (query) or finalize EOSE (live feed) while keeping the subscription open for new events. */
export const FIRST_RELAY_RESULT_GRACE_MS = 2000
/** Spells page feeds: shorter grace so multi-relay spell REQs finalize initial load sooner (still keeps subscription open for `onNew`). */
export const SPELL_FEED_FIRST_RELAY_GRACE_MS = 450
/**
* Implicit query feed grace ({@link FIRST_RELAY_RESULT_GRACE_MS}) applies only when the largest `limit` among
* filters is at least this value. Omitting `limit` counts as 0 (no implicit grace).

25
src/hooks/useFetchProfile.tsx

@ -2,6 +2,7 @@ import { PROFILE_FETCH_PROMISE_TIMEOUT_MS } from '@/constants' @@ -2,6 +2,7 @@ import { PROFILE_FETCH_PROMISE_TIMEOUT_MS } from '@/constants'
import { getProfileFromEvent } from '@/lib/event-metadata'
import { userIdToPubkey } from '@/lib/pubkey'
import { useNostr } from '@/providers/NostrProvider'
import { useNoteFeedProfileContext } from '@/providers/NoteFeedProfileContext'
import { replaceableEventService } from '@/services/client.service'
import { TProfile } from '@/types'
import { useEffect, useState, useRef, useCallback } from 'react'
@ -24,6 +25,7 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -24,6 +25,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
// })
const { profile: currentAccountProfile } = useNostr()
const noteFeed = useNoteFeedProfileContext()
const [isFetching, setIsFetching] = useState(true)
const [error, setError] = useState<Error | null>(null)
const [profile, setProfile] = useState<TProfile | null>(null)
@ -285,6 +287,27 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -285,6 +287,27 @@ export function useFetchProfile(id?: string, skipCache = false) {
// Extract pubkey early to check if id has changed
const extractedPubkey = userIdToPubkey(id)
// Note feeds: profiles are batch-fetched in NoteList — skip per-row relay storms while pending
if (extractedPubkey && noteFeed && !skipCache) {
const fromBatch = noteFeed.profiles.get(extractedPubkey)
if (fromBatch) {
setProfile(fromBatch)
setPubkey(extractedPubkey)
setIsFetching(false)
setError(null)
processingPubkeyRef.current = extractedPubkey
initializedPubkeysRef.current.add(extractedPubkey)
effectRunCountRef.current.delete(extractedPubkey)
return
}
if (noteFeed.pendingPubkeys.has(extractedPubkey)) {
setPubkey(extractedPubkey)
setIsFetching(false)
setError(null)
return
}
}
// CRITICAL: Early exit if already processing this exact pubkey - prevents infinite loops
// This check must happen FIRST, before any other logic
// Set processingPubkeyRef IMMEDIATELY after extraction to prevent race conditions
@ -568,7 +591,7 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -568,7 +591,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [id, skipCache]) // checkProfile is memoized and stable, no need to include it
}, [id, skipCache, noteFeed?.version]) // checkProfile is memoized; noteFeed.version hydrates batch profiles
useEffect(() => {
// CRITICAL: Only use currentAccountProfile if it matches the pubkey we're looking for

115
src/pages/primary/SpellsPage/fauxSpellFeeds.ts

@ -6,7 +6,8 @@ import { @@ -6,7 +6,8 @@ import {
ExtendedKind,
FAST_READ_RELAY_URLS,
FAST_WRITE_RELAY_URLS,
PROFILE_FEED_KINDS
PROFILE_FEED_KINDS,
READ_ONLY_RELAY_URLS
} from '@/constants'
import {
extractHashtagsFromContent,
@ -28,6 +29,44 @@ const MAX_BOOKMARK_IDS = 250 @@ -28,6 +29,44 @@ const MAX_BOOKMARK_IDS = 250
* subscription slots; cap keeps first paint fast. Full coverage remains on /discussions.
*/
const DISCUSSION_FAUX_SPELL_MAX_RELAYS = 32
/** Without caps, a long NIP-66 read list consumes the whole 32 slots and fast public relays never get a REQ — discussions stay empty while notifications still work (they blend fast reads). */
const DISCUSSION_SPELL_READ_CAP = 10
const DISCUSSION_SPELL_WRITE_CAP = 8
const DISCUSSION_SPELL_FAV_CAP = 8
/** Normalize each relay URL and drop empties/duplicates, preserving first-seen order. */
function dedupe(urls: string[]): string[] {
  const known = new Set<string>()
  const result: string[] = []
  for (const raw of urls) {
    // Fall back to the raw string when normalization yields nothing usable.
    const key = normalizeUrl(raw) || raw
    if (key && !known.has(key)) {
      known.add(key)
      result.push(key)
    }
  }
  return result
}
/**
* Append {@link READ_ONLY_RELAY_URLS} (e.g. aggr) after the curated set so every faux REQ includes them unless blocked.
*/
export function appendCuratedReadOnlyRelays(curated: string[], blockedRelays: string[]): string[] {
const blocked = new Set(blockedRelays.map((b) => normalizeUrl(b) || b))
const seen = new Set<string>()
const out: string[] = []
for (const u of curated) {
const k = normalizeUrl(u) || u
if (!k || seen.has(k)) continue
seen.add(k)
out.push(k)
}
for (const u of READ_ONLY_RELAY_URLS) {
const k = normalizeUrl(u) || u
if (!k || blocked.has(k) || seen.has(k)) continue
seen.add(k)
out.push(k)
}
return out
}
export const MEDIA_SPELL_KINDS = [
ExtendedKind.PICTURE,
@ -136,17 +175,21 @@ export function fauxFavoriteRelayUrls(favoriteRelays: string[], blockedRelays: s @@ -136,17 +175,21 @@ export function fauxFavoriteRelayUrls(favoriteRelays: string[], blockedRelays: s
return k && !blocked.has(k)
})
const base = visible.length > 0 ? visible : DEFAULT_FAVORITE_RELAYS
return dedupe(base.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[])
const curated = dedupe(base.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[])
return appendCuratedReadOnlyRelays(curated, blockedRelays)
}
/**
* Notifications / bookmarks faux spells: prefer inbox (then favorites), but **always** merge FAST_READ.
* Using only the first N inbox relays meant one dead relay (e.g. offline personal relay) could dominate
* connection/EOSE latency while public relays were never asked — skeletons until timeout.
* Notifications / bookmarks faux spells: **fast public relays first**, then inbox/favorites.
* `FAST_READ_RELAY_URLS` has 7 entries; the old cap of 6 never subscribed to `wss://aggr.nostr.land`
* (last in the list) — a major `#p` indexer — so mentions could take tens of seconds or look empty.
* Fast-write relays catch mentions replicated to outboxes (damus/primal/nos.lol) with little overlap.
*/
const NOTIFICATION_PRIMARY_MAX = 6
const NOTIFICATION_BLEND_FAST_MAX = 6
const NOTIFICATION_RELAY_CAP = 12
const NOTIFICATION_PRIMARY_MAX = 4
/** Must be ≥ FAST_READ length so every default fast read relay is eligible (currently 7). */
const NOTIFICATION_FAST_READ_MAX = 10
const NOTIFICATION_FAST_WRITE_MAX = 4
const NOTIFICATION_RELAY_CAP = 14
function relayUrlsUpToUnblocked(urls: string[], blocked: Set<string>, max: number): string[] {
const seen = new Set<string>()
@ -198,22 +241,18 @@ export function notificationRelayUrls( @@ -198,22 +241,18 @@ export function notificationRelayUrls(
: favoriteRelays.length > 0
? relayUrlsUpToUnblocked(favSorted, blocked, NOTIFICATION_PRIMARY_MAX)
: []
const fromFast = relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_BLEND_FAST_MAX)
const merged = mergeRelayListsUnique([primary, fromFast], blocked, NOTIFICATION_RELAY_CAP)
if (merged.length > 0) return merged
return relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_RELAY_CAP)
}
function dedupe(urls: string[]): string[] {
const seen = new Set<string>()
const out: string[] = []
for (const u of urls) {
const k = normalizeUrl(u) || u
if (!k || seen.has(k)) continue
seen.add(k)
out.push(k)
}
return out
const fromFastRead = relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_FAST_READ_MAX)
const fromFastWrite = relayUrlsUpToUnblocked(FAST_WRITE_RELAY_URLS, blocked, NOTIFICATION_FAST_WRITE_MAX)
const merged = mergeRelayListsUnique(
[fromFastRead, fromFastWrite, primary],
blocked,
NOTIFICATION_RELAY_CAP
)
if (merged.length > 0) return appendCuratedReadOnlyRelays(merged, blockedRelays)
return appendCuratedReadOnlyRelays(
relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_RELAY_CAP),
blockedRelays
)
}
/** Notifications spell: same kind set as profile-style feeds, restricted to `#p` = you on the relay. */
@ -249,16 +288,19 @@ export function discussionRelayUrls( @@ -249,16 +288,19 @@ export function discussionRelayUrls(
const fav = tier(favoriteRelays)
const fastR = tier([...FAST_READ_RELAY_URLS])
const fastW = tier([...FAST_WRITE_RELAY_URLS])
const merged = [...read, ...write, ...fav, ...fastR, ...fastW]
const seen = new Set<string>()
const out: string[] = []
for (const k of merged) {
if (seen.has(k)) continue
seen.add(k)
out.push(k)
if (out.length >= DISCUSSION_FAUX_SPELL_MAX_RELAYS) break
}
return out
const curated = mergeRelayListsUnique(
[
read.slice(0, DISCUSSION_SPELL_READ_CAP),
write.slice(0, DISCUSSION_SPELL_WRITE_CAP),
fav.slice(0, DISCUSSION_SPELL_FAV_CAP),
fastR,
fastW
],
blocked,
DISCUSSION_FAUX_SPELL_MAX_RELAYS
)
return appendCuratedReadOnlyRelays(curated, blockedRelays)
}
export function buildDiscussionFilter(): Filter {
@ -283,8 +325,9 @@ const FOLLOW_PACK_LIMIT = 100 @@ -283,8 +325,9 @@ const FOLLOW_PACK_LIMIT = 100
/** Kind 39089 follow/starter packs from fast read relays (same scope as the old Follow Packs page). */
export function buildFollowPacksSubRequests(): TFeedSubRequest[] {
const urls = FAST_READ_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[]
if (!urls.length) return []
const curated = FAST_READ_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[]
if (!curated.length) return []
const urls = appendCuratedReadOnlyRelays(curated, [])
return [
{
urls,

210
src/pages/primary/SpellsPage/index.tsx

@ -1,5 +1,5 @@ @@ -1,5 +1,5 @@
import HideUntrustedContentButton from '@/components/HideUntrustedContentButton'
import NoteList from '@/components/NoteList'
import NoteList, { type TNoteListRef } from '@/components/NoteList'
import { Button } from '@/components/ui/button'
import {
Dialog,
@ -32,7 +32,13 @@ import { useUserTrust } from '@/providers/UserTrustProvider' @@ -32,7 +32,13 @@ import { useUserTrust } from '@/providers/UserTrustProvider'
import client from '@/services/client.service'
import indexedDb from '@/services/indexed-db.service'
import storage from '@/services/local-storage.service'
import { ExtendedKind, FAUX_SPELL_ORDER, PROFILE_FEED_KINDS } from '@/constants'
import {
ExtendedKind,
FAUX_SPELL_ORDER,
FIRST_RELAY_RESULT_GRACE_MS,
PROFILE_FEED_KINDS,
SPELL_FEED_FIRST_RELAY_GRACE_MS
} from '@/constants'
import { isUserInEventMentions } from '@/lib/event'
import { formatPubkey } from '@/lib/pubkey'
import { computeSpellSubRequestsIdentityKey } from '@/lib/spell-feed-request-identity'
@ -45,6 +51,7 @@ import { @@ -45,6 +51,7 @@ import {
isSpellEvent,
SPELL_CATALOG_SYNC_LIMIT,
SPELL_CATALOG_SYNC_LIMIT_WITH_FOLLOWS,
SPELL_CATALOG_SYNC_TIMEOUT_MS,
spellEventToFilter
} from '@/services/spell.service'
import { TFeedSubRequest } from '@/types'
@ -63,6 +70,7 @@ import { @@ -63,6 +70,7 @@ import {
MoreVertical,
Pencil,
Plus,
RefreshCw,
Star,
Trash2,
Users,
@ -74,6 +82,7 @@ import { forwardRef, useCallback, useEffect, useMemo, useRef, useState } from 'r @@ -74,6 +82,7 @@ import { forwardRef, useCallback, useEffect, useMemo, useRef, useState } from 'r
import { useTranslation } from 'react-i18next'
import CreateSpellDialog from './CreateSpellDialog'
import {
appendCuratedReadOnlyRelays,
buildBookmarksSubRequests,
buildCalendarSpellFilter,
buildDiscussionFilter,
@ -263,42 +272,56 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -263,42 +272,56 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
/** True while fetching kind 777 authored by the user from write relays into IndexedDB */
const [spellsCatalogSyncing, setSpellsCatalogSyncing] = useState(false)
const spellCatalogCloserRef = useRef<(() => void) | null>(null)
/** Bumps spell catalog relay re-sync when the user taps refresh in the titlebar. */
const [spellCatalogManualRefreshKey, setSpellCatalogManualRefreshKey] = useState(0)
const spellFeedListRef = useRef<TNoteListRef>(null)
const [titlebarRefreshSpin, setTitlebarRefreshSpin] = useState(false)
const [spellPickerOpen, setSpellPickerOpen] = useState(false)
/** Monotonic token + wall time for spell-feed latency instrumentation (picker → first rows). */
const spellFeedInstrTokenRef = useRef(0)
const spellFeedInstrT0Ref = useRef(0)
const spellFeedInstrLabelRef = useRef('')
const [spellFeedInstrumentToken, setSpellFeedInstrumentToken] = useState(0)
const logSpellFeedPickerSelection = useCallback((label: string, extra?: Record<string, unknown>) => {
spellFeedInstrT0Ref.current = performance.now()
spellFeedInstrLabelRef.current = label
spellFeedInstrTokenRef.current += 1
const instrumentToken = spellFeedInstrTokenRef.current
setSpellFeedInstrumentToken(instrumentToken)
logger.info('[SpellsPage] Spell feed — picker selection', {
label,
instrumentToken,
...extra
})
}, [])
const urlFauxSpellInstrumentedRef = useRef<string | null>(null)
/** Set when picker calls `navigatePrimary(..., { spell })` so URL effect does not log/bump token again. */
const fauxSpellUrlSyncFromPickerRef = useRef<string | null>(null)
useEffect(() => {
if (spellProp && isFauxSpellName(spellProp)) {
if (fauxSpellUrlSyncFromPickerRef.current === spellProp) {
fauxSpellUrlSyncFromPickerRef.current = null
urlFauxSpellInstrumentedRef.current = spellProp
setSelectedFauxSpell(spellProp)
setSelectedSpell(null)
return
}
}, [spellProp])
if (urlFauxSpellInstrumentedRef.current === spellProp) return
urlFauxSpellInstrumentedRef.current = spellProp
logSpellFeedPickerSelection(`faux:${spellProp} (from URL)`, { fauxSpell: spellProp, fromUrl: true })
setSelectedFauxSpell(spellProp)
setSelectedSpell(null)
} else {
urlFauxSpellInstrumentedRef.current = null
}
}, [spellProp, logSpellFeedPickerSelection])
const [followingSubRequests, setFollowingSubRequests] = useState<TFeedSubRequest[]>([])
const [followingFeedLoading, setFollowingFeedLoading] = useState(false)
useEffect(() => {
if (selectedFauxSpell !== 'following' || !pubkey) {
setFollowingSubRequests([])
setFollowingFeedLoading(false)
return
}
let cancelled = false
setFollowingFeedLoading(true)
void (async () => {
try {
const followings = await client.fetchFollowings(pubkey)
const req = await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey)
if (!cancelled) setFollowingSubRequests(req)
} catch {
if (!cancelled) setFollowingSubRequests([])
} finally {
if (!cancelled) setFollowingFeedLoading(false)
}
})()
return () => {
cancelled = true
}
}, [selectedFauxSpell, pubkey])
const loadSpells = useCallback(async () => {
const [events, ids] = await Promise.all([
indexedDb.getSpellEvents(),
@ -308,6 +331,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -308,6 +331,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
setFavoriteIds(new Set(ids))
}, [])
const refreshSpellsFeedAndCatalog = useCallback(() => {
setTitlebarRefreshSpin(true)
window.setTimeout(() => setTitlebarRefreshSpin(false), 600)
void loadSpells()
if (pubkey) setSpellCatalogManualRefreshKey((k) => k + 1)
spellFeedListRef.current?.refresh()
}, [loadSpells, pubkey])
/**
* Fingerprint by value `relayList` from NostrProvider often gets a new object ref each render.
* Using `[relayList]` in useMemo deps was invalidating every tick new subRequests browse-relay
@ -372,7 +403,16 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -372,7 +403,16 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
spellCatalogCloserRef.current?.()
spellCatalogCloserRef.current = null
setSpellsCatalogSyncing(false)
}, 40_000)
}, SPELL_CATALOG_SYNC_TIMEOUT_MS)
let afterFirstBatchTimer: ReturnType<typeof setTimeout> | null = null
let catalogSyncDone = false
const clearAfterFirstBatchTimer = () => {
if (afterFirstBatchTimer != null) {
clearTimeout(afterFirstBatchTimer)
afterFirstBatchTimer = null
}
}
void (async () => {
try {
@ -394,7 +434,28 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -394,7 +434,28 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
}
}
if (wrote) scheduleLoadSpells()
if (wrote && afterFirstBatchTimer == null) {
afterFirstBatchTimer = setTimeout(() => {
afterFirstBatchTimer = null
if (cancelled || catalogSyncDone) return
catalogSyncDone = true
window.clearTimeout(syncTimeout)
if (loadSpellsDebounce != null) {
clearTimeout(loadSpellsDebounce)
loadSpellsDebounce = null
}
void (async () => {
if (!cancelled) await loadSpells()
if (!cancelled) setSpellsCatalogSyncing(false)
})()
closer()
spellCatalogCloserRef.current = null
}, FIRST_RELAY_RESULT_GRACE_MS)
}
if (eosed) {
clearAfterFirstBatchTimer()
if (cancelled || catalogSyncDone) return
catalogSyncDone = true
window.clearTimeout(syncTimeout)
if (loadSpellsDebounce != null) {
clearTimeout(loadSpellsDebounce)
@ -410,7 +471,8 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -410,7 +471,8 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
},
{
useCache: true,
omitDefaultSinceWhenUseCache: true
omitDefaultSinceWhenUseCache: true,
firstRelayResultGraceMs: FIRST_RELAY_RESULT_GRACE_MS
}
)
if (cancelled) {
@ -427,13 +489,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -427,13 +489,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
return () => {
cancelled = true
clearAfterFirstBatchTimer()
if (loadSpellsDebounce != null) clearTimeout(loadSpellsDebounce)
window.clearTimeout(syncTimeout)
spellCatalogCloserRef.current?.()
spellCatalogCloserRef.current = null
setSpellsCatalogSyncing(false)
}
}, [pubkey, relayMailboxStableKey, loadSpells, contactsSyncKey])
}, [pubkey, relayMailboxStableKey, loadSpells, contactsSyncKey, spellCatalogManualRefreshKey])
useEffect(() => {
if (!pubkey) {
@ -451,6 +514,34 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -451,6 +514,34 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
[...blockedRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
)
useEffect(() => {
if (selectedFauxSpell !== 'following' || !pubkey) {
setFollowingSubRequests([])
setFollowingFeedLoading(false)
return
}
let cancelled = false
setFollowingFeedLoading(true)
void (async () => {
try {
const followings = await client.fetchFollowings(pubkey)
const req = await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey)
const withReadOnly = req.map((r) => ({
...r,
urls: appendCuratedReadOnlyRelays(r.urls, blockedRelays)
}))
if (!cancelled) setFollowingSubRequests(withReadOnly)
} catch {
if (!cancelled) setFollowingSubRequests([])
} finally {
if (!cancelled) setFollowingFeedLoading(false)
}
})()
return () => {
cancelled = true
}
}, [selectedFauxSpell, pubkey, sortedBlockedRelaysKey])
const interestTagsStableKey = interestListEvent
? JSON.stringify(
[...interestListEvent.tags].sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)))
@ -683,34 +774,63 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -683,34 +774,63 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const pickSpell = useCallback(
(spell: Event | null) => {
if (spell) {
logSpellFeedPickerSelection(`kind777:${getSpellName(spell)}`, {
spellId: spell.id,
spellAuthorPubkey: spell.pubkey,
kind777: true
})
}
setSelectedSpell(spell)
setSelectedFauxSpell(null)
setSpellPickerOpen(false)
navigatePrimary('spells')
},
[navigatePrimary]
[logSpellFeedPickerSelection, navigatePrimary]
)
const clearSpellSelection = useCallback(() => {
logSpellFeedPickerSelection('(cleared)', { cleared: true })
setSelectedSpell(null)
setSelectedFauxSpell(null)
setSpellPickerOpen(false)
navigatePrimary('spells')
}, [navigatePrimary])
}, [logSpellFeedPickerSelection, navigatePrimary])
const pickFauxSpell = useCallback(
(name: FauxSpellName | null) => {
if (name) {
logSpellFeedPickerSelection(`faux:${name}`, { fauxSpell: name })
fauxSpellUrlSyncFromPickerRef.current = name
} else {
logSpellFeedPickerSelection('(cleared faux)', { clearedFaux: true })
fauxSpellUrlSyncFromPickerRef.current = null
}
setSelectedFauxSpell(name)
setSelectedSpell(null)
setSpellPickerOpen(false)
if (name) navigatePrimary('spells', { spell: name })
else navigatePrimary('spells')
},
[navigatePrimary]
[logSpellFeedPickerSelection, navigatePrimary]
)
const selectedSpellIsOwn = !!(pubkey && selectedSpell && selectedSpell.pubkey === pubkey)
const handleSpellFeedFirstPaint = useCallback(
(detail: { eventCount: number; firstEventId: string }) => {
const elapsedMsSincePickerMs = Math.round(performance.now() - spellFeedInstrT0Ref.current)
logger.info('[SpellsPage] Spell feed — first events rendered (list has rows)', {
...detail,
eventCountMeaning: 'filtered visible rows (slice), not full relay buffer',
elapsedMsSincePickerMs,
selectionLabel: spellFeedInstrLabelRef.current,
instrumentToken: spellFeedInstrTokenRef.current
})
},
[]
)
const fauxNoteListUseFilterAsIs = useMemo(() => {
if (!selectedFauxSpell) return true
return selectedFauxSpell !== 'following' && selectedFauxSpell !== 'bookmarks'
@ -890,8 +1010,19 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -890,8 +1010,19 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
ref={ref}
pageName="spells"
titlebar={
<div className="flex w-full items-center justify-between gap-2">
<div className="font-semibold">{t('Spells')}</div>
<div className="flex h-full w-full items-center justify-between gap-2 pr-1">
<div className="pl-3 text-lg font-semibold">{t('Spells')}</div>
<div className="flex shrink-0 items-center gap-1">
<Button
type="button"
variant="ghost"
size="titlebar-icon"
title={t('Refresh')}
aria-label={t('Refresh')}
onClick={refreshSpellsFeedAndCatalog}
>
<RefreshCw className={`size-5 ${titlebarRefreshSpin ? 'animate-spin' : ''}`} />
</Button>
<Button
variant="ghost"
size="titlebar-icon"
@ -905,6 +1036,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -905,6 +1036,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
<Plus className="size-5" />
</Button>
</div>
</div>
}
displayScrollToTopButton
>
@ -1095,10 +1227,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -1095,10 +1227,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
) : null}
<div className="min-h-0 min-w-0 flex-1">
<NoteList
ref={spellFeedListRef}
subRequests={subRequests}
feedSubscriptionKey={spellFeedSubscriptionKey}
showKinds={showKinds}
useTimelineCacheBootstrap
spellFetchTimeoutMs={SPELL_FEED_FIRST_RELAY_GRACE_MS}
spellFeedInstrumentToken={spellFeedInstrumentToken}
onSpellFeedFirstPaint={handleSpellFeedFirstPaint}
useFilterAsIs={fauxNoteListUseFilterAsIs}
showKind1OPs={selectedFauxSpell === 'following' ? showKind1OPs : true}
showKind1Replies={selectedFauxSpell === 'following' ? showKind1Replies : true}
@ -1120,10 +1256,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -1120,10 +1256,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
) : selectedSpell ? (
subRequests.length > 0 ? (
<NoteList
ref={spellFeedListRef}
subRequests={subRequests}
feedSubscriptionKey={spellFeedSubscriptionKey}
showKinds={showKinds}
useTimelineCacheBootstrap
spellFetchTimeoutMs={SPELL_FEED_FIRST_RELAY_GRACE_MS}
spellFeedInstrumentToken={spellFeedInstrumentToken}
onSpellFeedFirstPaint={handleSpellFeedFirstPaint}
useFilterAsIs
/>
) : !pubkey &&

14
src/providers/NoteFeedProfileContext.tsx

@ -0,0 +1,14 @@ @@ -0,0 +1,14 @@
import { TProfile } from '@/types'
import { createContext, useContext } from 'react'
export type NoteFeedProfileContextValue = {
profiles: ReadonlyMap<string, TProfile>
pendingPubkeys: ReadonlySet<string>
version: number
}
export const NoteFeedProfileContext = createContext<NoteFeedProfileContextValue | null>(null)
export function useNoteFeedProfileContext() {
return useContext(NoteFeedProfileContext)
}

193
src/services/client.service.ts

@ -54,6 +54,13 @@ import { MacroService, createBookstrService } from './client-macro.service' @@ -54,6 +54,13 @@ import { MacroService, createBookstrService } from './client-macro.service'
type TTimelineRef = [string, number]
/**
* Timeline bootstrap used to await up to `filter.limit` IndexedDB reads before opening a live REQ,
* which blocked first paint for many seconds. We only prefetch this many newest refs; the subscription
* streams the rest immediately.
*/
const TIMELINE_CACHE_PREFETCH_CAP = 48
class ClientService extends EventTarget {
static instance: ClientService
@ -854,25 +861,51 @@ class ClientService extends EventTarget { @@ -854,25 +861,51 @@ class ClientService extends EventTarget {
startLogin,
needSort = true,
useCache = false,
omitDefaultSinceWhenUseCache = false
omitDefaultSinceWhenUseCache = false,
firstRelayResultGraceMs = FIRST_RELAY_RESULT_GRACE_MS
}: {
startLogin?: () => void
needSort?: boolean
useCache?: boolean
/** When useCache is true but there are no timeline refs yet, skip the default 24h `since` so REQ stays unbounded (spell feeds / catalog). */
omitDefaultSinceWhenUseCache?: boolean
/**
* After the first live event before EOSE, wait this long then treat initial load as EOSE (query-style finalize).
* Spells pass {@link FIRST_RELAY_RESULT_GRACE_MS} explicitly; feeds may override.
*/
firstRelayResultGraceMs?: number
} = {}
) {
const newEventIdSet = new Set<string>()
const requestCount = subRequests.length
// For requestCount===1, floor(1/2)=0 makes eosedCount>=threshold true from the first inner
// callback, so every progressive update forwards to the outer onEvents → setState storms and
// stuck feeds (e.g. Spells Discussions). Require at least one EOSE before opening the gate.
const threshold = requestCount <= 1 ? 1 : Math.floor(requestCount / 2)
let eventIdSet = new Set<string>()
let events: NEvent[] = []
let eosedCount = 0
let progressiveDelivered = false
/** First merged batch goes out synchronously so the list paints without waiting a frame. */
let outerMergedDelivered = false
/** One React update per animation frame after the first paint — limits setEvents/profile churn. */
let outerFlushRaf: number | null = null
const scheduleOuterFlush = () => {
const snapshot = events.length ? [...events] : []
const allEosed = eosedCount >= requestCount
if (!outerMergedDelivered && (snapshot.length > 0 || allEosed)) {
outerMergedDelivered = true
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
}
onEvents(snapshot, allEosed)
return
}
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
}
outerFlushRaf = requestAnimationFrame(() => {
outerFlushRaf = null
onEvents(events.length ? [...events] : [], eosedCount >= requestCount)
})
}
const subs = await Promise.all(
subRequests.map(({ urls, filter }) => {
@ -893,12 +926,7 @@ class ClientService extends EventTarget { @@ -893,12 +926,7 @@ class ClientService extends EventTarget {
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
eventIdSet = new Set(events.map((evt) => evt.id))
if (eosedCount >= threshold) {
onEvents(events, eosedCount >= requestCount)
} else if (!progressiveDelivered && events.length > 0) {
progressiveDelivered = true
onEvents(events, false)
}
scheduleOuterFlush()
},
onNew: (evt) => {
if (newEventIdSet.has(evt.id)) return
@ -907,7 +935,7 @@ class ClientService extends EventTarget { @@ -907,7 +935,7 @@ class ClientService extends EventTarget {
},
onClose
},
{ startLogin, needSort, useCache, omitDefaultSinceWhenUseCache }
{ startLogin, needSort, useCache, omitDefaultSinceWhenUseCache, firstRelayResultGraceMs }
)
})
)
@ -915,8 +943,18 @@ class ClientService extends EventTarget { @@ -915,8 +943,18 @@ class ClientService extends EventTarget {
const key = this.generateMultipleTimelinesKey(subRequests)
this.timelines[key] = subs.map((sub) => sub.timelineKey)
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
onEvents(events.length ? [...events] : [], eosedCount >= requestCount)
}
return {
closer: () => {
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
}
onEvents = () => {}
onNew = () => {}
subs.forEach((sub) => {
@ -1198,12 +1236,14 @@ class ClientService extends EventTarget { @@ -1198,12 +1236,14 @@ class ClientService extends EventTarget {
startLogin,
needSort = true,
useCache = false,
omitDefaultSinceWhenUseCache = false
omitDefaultSinceWhenUseCache = false,
firstRelayResultGraceMs = FIRST_RELAY_RESULT_GRACE_MS
}: {
startLogin?: () => void
needSort?: boolean
useCache?: boolean
omitDefaultSinceWhenUseCache?: boolean
firstRelayResultGraceMs?: number
} = {}
) {
const relays = Array.from(new Set(urls))
@ -1223,73 +1263,100 @@ class ClientService extends EventTarget { @@ -1223,73 +1263,100 @@ class ClientService extends EventTarget {
let cachedEvents: NEvent[] = []
let since: number | undefined
const oneDayAgo = dayjs().subtract(24, 'hours').unix()
// eslint-disable-next-line @typescript-eslint/no-this-alias
const that = this
let events: NEvent[] = []
let eosedAt: number | null = null
let initialBatchScheduled = false
let lastDeliveredCount = 0
let progressiveIntervalId: ReturnType<typeof setInterval> | null = null
let firstRelayResultGraceTimer: ReturnType<typeof setTimeout> | null = null
const PROGRESSIVE_INTERVAL_MS = 100 // Backup tick while relays stream without new onevent bursts
const MIN_NEW_EVENTS_AFTER_FIRST = 1
const mergeTimelineLiveAndCache = (): NEvent[] => {
const sortedLive = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
if (!needSort || !useCache || cachedEvents.length === 0) {
return sortedLive
}
const byId = new Map<string, NEvent>()
for (const e of cachedEvents) {
byId.set(e.id, e)
}
for (const e of sortedLive) {
byId.set(e.id, e)
}
return [...byId.values()].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
}
const deliverProgressive = () => {
if (eosedAt) return
const combined = mergeTimelineLiveAndCache()
if (combined.length === 0) return
const newEventCount = combined.length - lastDeliveredCount
const isFirstPaint = lastDeliveredCount === 0
const shouldDeliver =
isFirstPaint
? combined.length >= 1
: newEventCount >= MIN_NEW_EVENTS_AFTER_FIRST || combined.length >= filter.limit * 0.5
if (shouldDeliver) {
lastDeliveredCount = combined.length
onEvents(combined, false)
}
}
// CRITICAL: Only use cache if explicitly enabled (for profile timelines)
// Main feeds (home, notifications) should always fetch fresh from relays
if (useCache && timeline && !Array.isArray(timeline) && timeline.refs.length && needSort) {
const refs = timeline.refs
const prefetchN = Math.min(refs.length, filter.limit, TIMELINE_CACHE_PREFETCH_CAP)
// Spell / catalog feeds: refs already carry created_at — set `since` immediately and open the live REQ
// without awaiting dozens of IndexedDB reads (that delayed first events by seconds).
if (omitDefaultSinceWhenUseCache && refs[0]![1] >= oneDayAgo) {
since = refs[0]![1] + 1
void (async () => {
try {
const loaded = (
await Promise.all(refs.slice(0, prefetchN).map(([id]) => that.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (!loaded.length) return
loaded.sort((a, b) => b.created_at - a.created_at)
const recent = loaded.filter((evt) => evt.created_at >= oneDayAgo)
if (!recent.length) return
cachedEvents = recent
deliverProgressive()
} catch {
// ignore
}
})()
} else if (!omitDefaultSinceWhenUseCache) {
cachedEvents = (
await Promise.all(timeline.refs.slice(0, filter.limit).map(([id]) => this.eventService.fetchEvent(id)))
await Promise.all(refs.slice(0, prefetchN).map(([id]) => this.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (cachedEvents.length) {
// Sort cached events by newest first
cachedEvents.sort((a, b) => b.created_at - a.created_at)
// CRITICAL FIX: Filter out very old cached events (older than 24 hours)
// This prevents showing 15+ hour old events when the cache is stale
const oneDayAgo = dayjs().subtract(24, 'hours').unix()
const recentCachedEvents = cachedEvents.filter(evt => evt.created_at >= oneDayAgo)
const recentCachedEvents = cachedEvents.filter((evt) => evt.created_at >= oneDayAgo)
if (recentCachedEvents.length > 0) {
// Only show cached events if they're recent
onEvents([...recentCachedEvents], false)
// Use the NEWEST cached event's timestamp + 1 to fetch only newer events
since = recentCachedEvents[0].created_at + 1
} else {
// All cached events are too old, ignore them and start fresh
cachedEvents = []
}
}
}
}
// CRITICAL FIX: Only set since parameter if caching is enabled
// When useCache is false, we want to stream raw from relays without time restrictions
// This allows relay feeds to show all available events, not just recent ones
if (!since && needSort && useCache && !omitDefaultSinceWhenUseCache) {
// Default to last 24 hours if no recent cached events (only when caching is enabled)
// This ensures we get recent content even if relays are slow
const oneDayAgo = dayjs().subtract(24, 'hours').unix()
since = oneDayAgo
}
// eslint-disable-next-line @typescript-eslint/no-this-alias
const that = this
let events: NEvent[] = []
let eosedAt: number | null = null
let initialBatchScheduled = false
let lastDeliveredCount = 0
// Progressive loading: show the first event(s) as soon as they arrive (not only after 5+ events)
const PROGRESSIVE_INTERVAL_MS = 100 // Poll for more events while relays are still streaming
const MIN_NEW_EVENTS_AFTER_FIRST = 5 // After first paint, batch updates to limit re-renders
let progressiveIntervalId: ReturnType<typeof setInterval> | null = null
let firstRelayResultGraceTimer: ReturnType<typeof setTimeout> | null = null
const deliverProgressive = () => {
if (eosedAt || events.length === 0) return
const sortedEvents = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
const newEventCount = sortedEvents.length - lastDeliveredCount
const isFirstPaint = lastDeliveredCount === 0
const shouldDeliver =
isFirstPaint
? sortedEvents.length >= 1
: newEventCount >= MIN_NEW_EVENTS_AFTER_FIRST || sortedEvents.length >= filter.limit * 0.5
if (shouldDeliver) {
lastDeliveredCount = sortedEvents.length
const snap = sortedEvents
// Only include cached events if caching is enabled
onEvents(needSort && useCache ? snap.concat(cachedEvents).slice(0, filter.limit) : snap, false)
}
}
const handleTimelineEose = (eosed: boolean) => {
if (eosed && eosedAt != null) return
@ -1382,16 +1449,18 @@ class ClientService extends EventTarget { @@ -1382,16 +1449,18 @@ class ClientService extends EventTarget {
firstRelayResultGraceTimer = setTimeout(() => {
firstRelayResultGraceTimer = null
handleTimelineEose(true)
}, FIRST_RELAY_RESULT_GRACE_MS)
}, firstRelayResultGraceMs)
}
// Deliver as soon as we have any event while waiting for EOSE (then batch further updates)
if (needSort && events.length >= 1 && !initialBatchScheduled) {
// Deliver on every live event before EOSE (plus interval as a safety net)
if (needSort && events.length >= 1) {
if (!initialBatchScheduled) {
initialBatchScheduled = true
deliverProgressive()
if (!progressiveIntervalId) {
progressiveIntervalId = setInterval(deliverProgressive, PROGRESSIVE_INTERVAL_MS)
}
}
deliverProgressive()
}
return
}
// new event

6
src/services/spell.service.ts

@ -65,6 +65,12 @@ export const SPELL_CATALOG_SYNC_LIMIT_WITH_FOLLOWS = 600 @@ -65,6 +65,12 @@ export const SPELL_CATALOG_SYNC_LIMIT_WITH_FOLLOWS = 600
/** Max distinct pubkeys in one catalog REQ (relay compatibility). Your pubkey is always first. */
export const SPELL_CATALOG_MAX_AUTHORS = 400
/**
* If no relay sends EOSE, stop showing the catalog sync state and close the sub after this long.
* Keeps the UI from feeling stuck when relays are slow or silent.
*/
export const SPELL_CATALOG_SYNC_TIMEOUT_MS = 12_000
/** Build author list for spell catalog sync: always include `pubkey`, then follows, deduped. */
export function buildSpellCatalogAuthors(pubkey: string, contacts: string[]): string[] {
const rest = contacts.filter((c) => typeof c === 'string' && c.length > 0 && c !== pubkey)

Loading…
Cancel
Save