Browse Source

speed up timelines

imwald
Silberengel 1 month ago
parent
commit
e5ed682ee5
  1. 31
      src/components/Explore/ExploreRelayReviews.tsx
  2. 2
      src/components/Note/PublicationIndex/PublicationIndex.tsx
  3. 166
      src/components/NoteList/index.tsx
  4. 4
      src/components/NoteStats/LikeButton.tsx
  5. 4
      src/components/NoteStats/Likes.tsx
  6. 4
      src/components/NoteStats/RepostButton.tsx
  7. 8
      src/components/NoteStats/VoteButtons.tsx
  8. 7
      src/components/PostEditor/PostContent.tsx
  9. 3
      src/components/QuoteList/index.tsx
  10. 17
      src/components/Username/index.tsx
  11. 7
      src/constants.ts
  12. 7
      src/hooks/useFetchProfile.tsx
  13. 293
      src/hooks/useProfileTimeline.tsx
  14. 15
      src/lib/event-metadata.ts
  15. 81
      src/lib/favorites-feed-relays.ts
  16. 34
      src/lib/relay-list-builder.ts
  17. 27
      src/lib/spell-feed-request-identity.ts
  18. 12
      src/lib/tag.ts
  19. 19
      src/pages/primary/NoteListPage/FollowingFeed.tsx
  20. 10
      src/pages/primary/SpellsPage/CreateSpellDialog.tsx
  21. 176
      src/pages/primary/SpellsPage/fauxSpellFeeds.ts
  22. 139
      src/pages/primary/SpellsPage/index.tsx
  23. 59
      src/pages/secondary/NoteListPage/index.tsx
  24. 24
      src/providers/FeedProvider.tsx
  25. 252
      src/services/client-replaceable-events.service.ts
  26. 383
      src/services/client.service.ts
  27. 102
      src/services/note-stats.service.ts
  28. 23
      src/services/spell.service.ts

31
src/components/Explore/ExploreRelayReviews.tsx

@ -1,27 +1,28 @@
import NoteList from '@/components/NoteList' import NoteList from '@/components/NoteList'
import { ExtendedKind, PROFILE_FETCH_RELAY_URLS } from '@/constants' import { ExtendedKind } from '@/constants'
import { getRelayUrlsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import { import {
getRelayUrlFromRelayReviewEvent, getRelayUrlFromRelayReviewEvent,
getStarsFromRelayReviewEvent getStarsFromRelayReviewEvent
} from '@/lib/event-metadata' } from '@/lib/event-metadata'
import { buildExploreProfileAndUserRelayList } from '@/lib/relay-list-builder' import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider' import { useNostr } from '@/providers/NostrProvider'
import { Event } from 'nostr-tools' import { Event } from 'nostr-tools'
import { useCallback, useEffect, useMemo, useState } from 'react' import { useCallback, useMemo } from 'react'
export default function ExploreRelayReviews() { export default function ExploreRelayReviews() {
const { pubkey } = useNostr() const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const [relayUrls, setRelayUrls] = useState<string[]>(() => [...PROFILE_FETCH_RELAY_URLS]) const { relayList } = useNostr()
useEffect(() => { const relayUrls = useMemo(
let cancelled = false () =>
buildExploreProfileAndUserRelayList(pubkey ?? null).then((urls) => { getRelayUrlsWithFavoritesFastReadAndInbox(
if (!cancelled) setRelayUrls(urls) favoriteRelays,
}) blockedRelays,
return () => { relayList?.read ?? []
cancelled = true ),
} [favoriteRelays, blockedRelays, relayList]
}, [pubkey]) )
const subRequests = useMemo(() => [{ urls: relayUrls, filter: {} }], [relayUrls]) const subRequests = useMemo(() => [{ urls: relayUrls, filter: {} }], [relayUrls])
@ -34,6 +35,8 @@ export default function ExploreRelayReviews() {
return ( return (
<div className="min-w-0 pt-1"> <div className="min-w-0 pt-1">
<NoteList <NoteList
feedSubscriptionKey="explore-relay-reviews"
preserveTimelineOnSubRequestsChange
showKinds={[ExtendedKind.RELAY_REVIEW]} showKinds={[ExtendedKind.RELAY_REVIEW]}
subRequests={subRequests} subRequests={subRequests}
showKind1OPs={false} showKind1OPs={false}

2
src/components/Note/PublicationIndex/PublicationIndex.tsx

@ -537,7 +537,7 @@ export default function PublicationIndex({
}, },
onNew: () => {} // Not needed for one-time fetch onNew: () => {} // Not needed for one-time fetch
}, },
{ needSort: false, useCache: false } // NO CACHING - stream raw from relays { needSort: false }
) )
// Wait for up to 10 seconds for events to arrive or eosed // Wait for up to 10 seconds for events to arrive or eosed

166
src/components/NoteList/index.tsx

@ -9,7 +9,10 @@ import {
isReplyNoteEvent isReplyNoteEvent
} from '@/lib/event' } from '@/lib/event'
import { shouldFilterEvent } from '@/lib/event-filtering' import { shouldFilterEvent } from '@/lib/event-filtering'
import { stableSpellFeedFilterKey } from '@/lib/spell-feed-request-identity' import {
isRelayUrlStrictSupersetIdentityKey,
stableSpellFeedFilterKey
} from '@/lib/spell-feed-request-identity'
import { syncUserDeletionTombstones } from '@/lib/sync-user-deletions' import { syncUserDeletionTombstones } from '@/lib/sync-user-deletions'
import { normalizeUrl } from '@/lib/url' import { normalizeUrl } from '@/lib/url'
import { getZapInfoFromEvent } from '@/lib/event-metadata' import { getZapInfoFromEvent } from '@/lib/event-metadata'
@ -48,6 +51,19 @@ const SHOW_COUNT = 50 // Increased from 10 to show more events at once, reducing
const FEED_PROFILE_BATCH_DEBOUNCE_MS = 120 const FEED_PROFILE_BATCH_DEBOUNCE_MS = 120
const FEED_PROFILE_CHUNK = 36 const FEED_PROFILE_CHUNK = 36
function mergeEventBatchesById(prev: Event[], incoming: Event[], cap: number): Event[] {
const byId = new Map<string, Event>()
for (const e of prev) {
byId.set(e.id, e)
}
for (const e of incoming) {
byId.set(e.id, e)
}
return Array.from(byId.values())
.sort((a, b) => b.created_at - a.created_at)
.slice(0, cap)
}
const NoteList = forwardRef( const NoteList = forwardRef(
( (
{ {
@ -66,14 +82,12 @@ const NoteList = forwardRef(
/** When set (e.g. Spells page), timeline subscription keys off this string instead of `subRequests` reference churn. */ /** When set (e.g. Spells page), timeline subscription keys off this string instead of `subRequests` reference churn. */
feedSubscriptionKey, feedSubscriptionKey,
/** /**
* When true, hydrate the list from the client timeline cache (IndexedDB-backed) before/at same time as * When true (e.g. Explore relay reviews), `subRequests` may grow after first paint (bootstrap relays full list).
* live REQ, so feeds feel instant on repeat visits. Spells faux feeds use this; home feed stays false. * Re-subscribe when URLs change but **merge** new timeline batches into existing rows by event id instead of clearing.
*/ */
useTimelineCacheBootstrap = false, preserveTimelineOnSubRequestsChange = false,
/** /**
* When set (Spells page), passed to `subscribeTimeline` as `firstRelayResultGraceMs` only ms to wait after * Spells page: after this many ms, clear the loading skeleton so the list area renders; subscription keeps running.
* the first live event before treating initial load as EOSE. Subscribe setup and loading fallback keep
* longer defaults so multi-relay spell feeds do not race-fail and stay blank after refresh.
*/ */
spellFetchTimeoutMs, spellFetchTimeoutMs,
/** Spells page: bumps when user picks a feed; used with {@link onSpellFeedFirstPaint}. */ /** Spells page: bumps when user picks a feed; used with {@link onSpellFeedFirstPaint}. */
@ -96,7 +110,8 @@ const NoteList = forwardRef(
/** When provided and returns true, the event is omitted from the feed (in addition to built-in rules). */ /** When provided and returns true, the event is omitted from the feed (in addition to built-in rules). */
extraShouldHideEvent?: (evt: Event) => boolean extraShouldHideEvent?: (evt: Event) => boolean
feedSubscriptionKey?: string feedSubscriptionKey?: string
useTimelineCacheBootstrap?: boolean preserveTimelineOnSubRequestsChange?: boolean
/** When set (spells), max time to show the initial loading skeleton (ms). */
spellFetchTimeoutMs?: number spellFetchTimeoutMs?: number
spellFeedInstrumentToken?: number spellFeedInstrumentToken?: number
onSpellFeedFirstPaint?: (detail: { eventCount: number; firstEventId: string }) => void onSpellFeedFirstPaint?: (detail: { eventCount: number; firstEventId: string }) => void
@ -156,6 +171,9 @@ const NoteList = forwardRef(
}, [subRequests]) }, [subRequests])
const timelineSubscriptionKey = feedSubscriptionKey ?? subRequestsKey const timelineSubscriptionKey = feedSubscriptionKey ?? subRequestsKey
const prevSubRequestsKeyForTimelineRef = useRef<string | null>(null)
/** Detect pull-to-refresh so preserve-mode feeds still clear; unrelated dep changes must not clear. */
const timelineEffectLastRefreshCountRef = useRef(refreshCount)
useEffect(() => { useEffect(() => {
feedProfileBatchGenRef.current += 1 feedProfileBatchGenRef.current += 1
@ -163,6 +181,35 @@ const NoteList = forwardRef(
setFeedProfileBatch({ profiles: new Map(), pending: new Set(), version: 0 }) setFeedProfileBatch({ profiles: new Map(), pending: new Set(), version: 0 })
}, [timelineSubscriptionKey, refreshCount]) }, [timelineSubscriptionKey, refreshCount])
/** Pending pubkeys sync with rows so useFetchProfile skips per-note fetches before the debounced batch. */
useLayoutEffect(() => {
const candidates = new Set<string>()
const addPk = (p: string | undefined) => {
if (p && p.length === 64 && /^[0-9a-f]{64}$/.test(p)) {
candidates.add(p)
}
}
for (const e of events) {
addPk(e.pubkey)
}
for (const e of newEvents) {
addPk(e.pubkey)
}
setFeedProfileBatch((prev) => {
const pending = new Set(prev.pending)
let changed = false
for (const pk of candidates) {
if (!prev.profiles.has(pk) && !pending.has(pk)) {
pending.add(pk)
changed = true
}
}
if (!changed) return prev
return { ...prev, pending, version: prev.version + 1 }
})
}, [events, newEvents])
const subRequestsRef = useRef(subRequests) const subRequestsRef = useRef(subRequests)
subRequestsRef.current = subRequests subRequestsRef.current = subRequests
@ -309,9 +356,12 @@ const NoteList = forwardRef(
candidates.add(p) candidates.add(p)
} }
} }
filteredEvents.slice(0, 50).forEach((e) => addPk(e.pubkey)) for (const e of events) {
events.slice(0, 120).forEach((e) => addPk(e.pubkey)) addPk(e.pubkey)
events.slice(showCount, showCount + 60).forEach((e) => addPk(e.pubkey)) }
for (const e of newEvents) {
addPk(e.pubkey)
}
const need = [...candidates].filter((pk) => !feedProfileLoadedRef.current.has(pk)) const need = [...candidates].filter((pk) => !feedProfileLoadedRef.current.has(pk))
if (need.length === 0) return if (need.length === 0) return
@ -320,7 +370,14 @@ const NoteList = forwardRef(
setFeedProfileBatch((prev) => { setFeedProfileBatch((prev) => {
const pending = new Set(prev.pending) const pending = new Set(prev.pending)
need.forEach((pk) => pending.add(pk)) let pendingChanged = false
for (const pk of need) {
if (!pending.has(pk)) {
pending.add(pk)
pendingChanged = true
}
}
if (!pendingChanged) return prev
return { ...prev, pending, version: prev.version + 1 } return { ...prev, pending, version: prev.version + 1 }
}) })
@ -363,7 +420,7 @@ const NoteList = forwardRef(
})() })()
}, FEED_PROFILE_BATCH_DEBOUNCE_MS) }, FEED_PROFILE_BATCH_DEBOUNCE_MS)
return () => window.clearTimeout(handle) return () => window.clearTimeout(handle)
}, [filteredEvents, events, showCount]) }, [events, newEvents])
const scrollToTop = useCallback((behavior: ScrollBehavior = 'instant') => { const scrollToTop = useCallback((behavior: ScrollBehavior = 'instant') => {
setTimeout(() => { setTimeout(() => {
@ -392,13 +449,34 @@ const NoteList = forwardRef(
return () => {} return () => {}
} }
const prevSubKey = prevSubRequestsKeyForTimelineRef.current
const userPulledRefresh = refreshCount !== timelineEffectLastRefreshCountRef.current
if (userPulledRefresh) {
timelineEffectLastRefreshCountRef.current = refreshCount
}
const keepExistingTimelineEvents =
preserveTimelineOnSubRequestsChange &&
!userPulledRefresh &&
(prevSubKey === subRequestsKey ||
isRelayUrlStrictSupersetIdentityKey(prevSubKey, subRequestsKey))
prevSubRequestsKeyForTimelineRef.current = subRequestsKey
/** False after cleanup so stale timeline callbacks cannot overwrite state after switching feeds (e.g. Spells discussions → notifications). */ /** False after cleanup so stale timeline callbacks cannot overwrite state after switching feeds (e.g. Spells discussions → notifications). */
let effectActive = true let effectActive = true
async function init() { async function init() {
setLoading(true) // Re-subscribe with rows visible (e.g. relay URL expansion): don't flash global loading / skeleton.
setEvents([]) const keepRowsVisible =
setNewEvents([]) preserveTimelineOnSubRequestsChange &&
keepExistingTimelineEvents &&
eventsRef.current.length > 0
if (!keepRowsVisible) {
setLoading(true)
}
if (!keepExistingTimelineEvents) {
setEvents([])
setNewEvents([])
}
setHasMore(true) setHasMore(true)
consecutiveEmptyRef.current = 0 // Reset counter on refresh consecutiveEmptyRef.current = 0 // Reset counter on refresh
@ -437,6 +515,10 @@ const NoteList = forwardRef(
return () => {} return () => {}
} }
const totalRelayUrls = mappedSubRequests.reduce((n, r) => n + r.urls.length, 0)
// Explore-style feeds merge many read relays; subscribeTimeline awaits every ensureRelay — 5s often loses the race.
const subscribeSetupRaceMs = totalRelayUrls > 24 ? 30_000 : 5000
let closer: (() => void) | undefined let closer: (() => void) | undefined
let timelineKey: string | undefined let timelineKey: string | undefined
let timelineSubscribePromise: let timelineSubscribePromise:
@ -444,30 +526,37 @@ const NoteList = forwardRef(
| undefined | undefined
try { try {
// Opening subs + IndexedDB timeline hydration can exceed 2s on spell feeds with many relays; a short race // Opening many relay subs can exceed 2s on spell feeds; a short race
// rejects, the catch closes the late subscription, and the list stays empty after refresh. // rejects, the catch closes the late subscription, and the list stays empty after refresh.
const subscribeSetupRaceMs = 5000
const timeoutPromise = new Promise<never>((_, reject) => { const timeoutPromise = new Promise<never>((_, reject) => {
setTimeout(() => { setTimeout(() => {
reject(new Error(`subscribeTimeline timeout after ${subscribeSetupRaceMs}ms`)) reject(new Error(`subscribeTimeline timeout after ${subscribeSetupRaceMs}ms`))
}, subscribeSetupRaceMs) }, subscribeSetupRaceMs)
}) })
const firstRelayGraceMs = spellFetchTimeoutMs ?? FIRST_RELAY_RESULT_GRACE_MS const eventCap = areAlgoRelays ? ALGO_LIMIT : LIMIT
timelineSubscribePromise = client.subscribeTimeline( timelineSubscribePromise = client.subscribeTimeline(
mappedSubRequests, mappedSubRequests,
{ {
onEvents: (events: Event[], eosed: boolean) => { onEvents: (batch: Event[], eosed: boolean) => {
if (!effectActive) return if (!effectActive) return
if (events.length > 0) { if (batch.length > 0) {
setEvents(events) if (preserveTimelineOnSubRequestsChange) {
setEvents((prev) => {
const next = mergeEventBatchesById(prev, batch, eventCap)
lastEventsForTimelinePrefetchRef.current = next
return next
})
} else {
setEvents(batch)
lastEventsForTimelinePrefetchRef.current = batch
}
// Do not wait for full EOSE across many relays — otherwise loading/skeleton stays up for 10–30s+ // Do not wait for full EOSE across many relays — otherwise loading/skeleton stays up for 10–30s+
setLoading(false) setLoading(false)
// Defer profile + embed prefetch: streaming timelines fire onEvents often; starting // Defer profile + embed prefetch: streaming timelines fire onEvents often; starting
// fetchProfilesForPubkeys on every update spams relays (multi-second each) and cancels hooks. // fetchProfilesForPubkeys on every update spams relays (multi-second each) and cancels hooks.
lastEventsForTimelinePrefetchRef.current = events
if (timelinePrefetchDebounceRef.current) { if (timelinePrefetchDebounceRef.current) {
clearTimeout(timelinePrefetchDebounceRef.current) clearTimeout(timelinePrefetchDebounceRef.current)
} }
@ -492,11 +581,12 @@ const NoteList = forwardRef(
} }
}, 450) }, 450)
} else if (eosed) { } else if (eosed) {
// No events received but EOSE - set empty events array and stop loading if (!preserveTimelineOnSubRequestsChange) {
setEvents([]) setEvents([])
}
setLoading(false) setLoading(false)
} }
if (areAlgoRelays) { if (areAlgoRelays) {
// Algorithm feeds typically return all results at once // Algorithm feeds typically return all results at once
setHasMore(false) setHasMore(false)
@ -507,7 +597,7 @@ const NoteList = forwardRef(
// We should still try to load more on scroll - the loadMore logic will handle stopping // We should still try to load more on scroll - the loadMore logic will handle stopping
// Only set to false if we explicitly know there are no more events (handled in loadMore) // Only set to false if we explicitly know there are no more events (handled in loadMore)
// If we got a full limit of events, there's likely more available // If we got a full limit of events, there's likely more available
if (events.length >= (areAlgoRelays ? ALGO_LIMIT : LIMIT)) { if (batch.length >= (areAlgoRelays ? ALGO_LIMIT : LIMIT)) {
setHasMore(true) setHasMore(true)
} else { } else {
// Even with fewer events, there might be more (filtering, slow relays, etc.) // Even with fewer events, there might be more (filtering, slow relays, etc.)
@ -542,9 +632,7 @@ const NoteList = forwardRef(
{ {
startLogin, startLogin,
needSort: !areAlgoRelays, needSort: !areAlgoRelays,
useCache: useTimelineCacheBootstrap, firstRelayResultGraceMs: FIRST_RELAY_RESULT_GRACE_MS
omitDefaultSinceWhenUseCache: useTimelineCacheBootstrap,
firstRelayResultGraceMs: firstRelayGraceMs
} }
) )
@ -582,6 +670,8 @@ const NoteList = forwardRef(
} }
}, [ }, [
timelineSubscriptionKey, timelineSubscriptionKey,
subRequestsKey,
preserveTimelineOnSubRequestsChange,
refreshCount, refreshCount,
showKindsKey, showKindsKey,
showKind1OPs, showKind1OPs,
@ -589,7 +679,6 @@ const NoteList = forwardRef(
showKind1111, showKind1111,
useFilterAsIs, useFilterAsIs,
areAlgoRelays, areAlgoRelays,
useTimelineCacheBootstrap,
spellFetchTimeoutMs spellFetchTimeoutMs
]) ])
@ -615,6 +704,21 @@ const NoteList = forwardRef(
} }
}, [timelineSubscriptionKey, refreshCount]) }, [timelineSubscriptionKey, refreshCount])
/** Spells: drop loading skeleton quickly so rows (or empty + reload) appear while REQ continues. */
useEffect(() => {
if (spellFetchTimeoutMs == null || spellFetchTimeoutMs <= 0) return
if (!subRequestsRef.current.length) return
let cancelled = false
const id = window.setTimeout(() => {
if (cancelled) return
setLoading(false)
}, spellFetchTimeoutMs)
return () => {
cancelled = true
clearTimeout(id)
}
}, [timelineSubscriptionKey, refreshCount, spellFetchTimeoutMs])
// Use refs to avoid dependency issues and ensure latest values in async callbacks // Use refs to avoid dependency issues and ensure latest values in async callbacks
const showCountRef = useRef(showCount) const showCountRef = useRef(showCount)
const loadingRef = useRef(loading) const loadingRef = useRef(loading)

4
src/components/NoteStats/LikeButton.tsx

@ -159,7 +159,9 @@ export default function LikeButton({ event, hideCount = false }: { event: Event;
showSimplePublishSuccess(t('Reaction published')) showSimplePublishSuccess(t('Reaction published'))
} }
noteStatsService.updateNoteStatsByEvents([evt]) noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
} }
} catch (error) { } catch (error) {
logger.error('Like failed', { error, eventId: event.id }) logger.error('Like failed', { error, eventId: event.id })

4
src/components/NoteStats/Likes.tsx

@ -71,7 +71,9 @@ export default function Likes({ event }: { event: Event }) {
try { try {
const reaction = createReactionDraftEvent(event, emoji) const reaction = createReactionDraftEvent(event, emoji)
const evt = await publish(reaction) const evt = await publish(reaction)
noteStatsService.updateNoteStatsByEvents([evt]) noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
} catch (error) { } catch (error) {
logger.error('Like failed', { error, eventId: event.id }) logger.error('Like failed', { error, eventId: event.id })
} finally { } finally {

4
src/components/NoteStats/RepostButton.tsx

@ -76,7 +76,9 @@ export default function RepostButton({ event, hideCount = false }: { event: Even
showSimplePublishSuccess(t('Boost published')) showSimplePublishSuccess(t('Boost published'))
} }
noteStatsService.updateNoteStatsByEvents([evt]) noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
} catch (error) { } catch (error) {
logger.error('Boost failed', { error, eventId: event.id }) logger.error('Boost failed', { error, eventId: event.id })
} finally { } finally {

8
src/components/NoteStats/VoteButtons.tsx

@ -81,7 +81,9 @@ export default function VoteButtons({ event }: { event: Event }) {
showSimplePublishSuccess(t('Vote removed')) showSimplePublishSuccess(t('Vote removed'))
} }
noteStatsService.updateNoteStatsByEvents([evt]) noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
} else { } else {
// If user voted the opposite way, first remove the old vote // If user voted the opposite way, first remove the old vote
if (userVote) { if (userVote) {
@ -109,7 +111,9 @@ export default function VoteButtons({ event }: { event: Event }) {
showSimplePublishSuccess(t('Vote published')) showSimplePublishSuccess(t('Vote published'))
} }
noteStatsService.updateNoteStatsByEvents([evt]) noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
} }
} catch (error) { } catch (error) {
logger.error('Vote failed', { error, eventId: event.id }) logger.error('Vote failed', { error, eventId: event.id })

7
src/components/PostEditor/PostContent.tsx

@ -66,6 +66,7 @@ import mediaUpload from '@/services/media-upload.service'
import { successfulPublishRelayUrls, type TRelayPublishStatus } from '@/lib/publish-relay-urls' import { successfulPublishRelayUrls, type TRelayPublishStatus } from '@/lib/publish-relay-urls'
import client, { eventService } from '@/services/client.service' import client, { eventService } from '@/services/client.service'
import discussionFeedCache from '@/services/discussion-feed-cache.service' import discussionFeedCache from '@/services/discussion-feed-cache.service'
import noteStatsService from '@/services/note-stats.service'
import CreateThreadDialog from '@/pages/primary/DiscussionsPage/CreateThreadDialog' import CreateThreadDialog from '@/pages/primary/DiscussionsPage/CreateThreadDialog'
import { getReplaceableCoordinateFromEvent, isProtectedEvent as isEventProtected, isReplaceableEvent, isReplyNoteEvent } from '@/lib/event' import { getReplaceableCoordinateFromEvent, isProtectedEvent as isEventProtected, isReplaceableEvent, isReplyNoteEvent } from '@/lib/event'
import { Event, kinds } from 'nostr-tools' import { Event, kinds } from 'nostr-tools'
@ -112,6 +113,12 @@ export default function PostContent({
const clean = { ...reply } as Event const clean = { ...reply } as Event
delete (clean as any).relayStatuses delete (clean as any).relayStatuses
addReplies([clean]) addReplies([clean])
const isQuotePost = clean.tags.some((t) => t[0] === 'q' && t[1])
noteStatsService.updateNoteStatsByEvents(
[clean],
undefined,
isQuotePost ? undefined : { replyParentNoteId: parentEvent.id }
)
const rootInfo = !isReplaceableEvent(parentEvent.kind) const rootInfo = !isReplaceableEvent(parentEvent.kind)
? { type: 'E' as const, id: parentEvent.id, pubkey: parentEvent.pubkey } ? { type: 'E' as const, id: parentEvent.id, pubkey: parentEvent.pubkey }
: { : {

3
src/components/QuoteList/index.tsx

@ -136,9 +136,6 @@ export default function QuoteList({
[newEvt, ...oldEvents].sort((a, b) => b.created_at - a.created_at) [newEvt, ...oldEvents].sort((a, b) => b.created_at - a.created_at)
) )
} }
},
{
useCache: false // NO CACHING - stream raw from relays
} }
) )
if (cancelled) { if (cancelled) {

17
src/components/Username/index.tsx

@ -23,15 +23,16 @@ export default function Username({
}) { }) {
const { profile, isFetching } = useFetchProfile(userId) const { profile, isFetching } = useFetchProfile(userId)
const { navigateToProfile } = useSmartProfileNavigation() const { navigateToProfile } = useSmartProfileNavigation()
// Get pubkey from userId (works even if profile isn't loaded) // Get pubkey from userId (works even if profile isn't loaded)
const pubkey = useMemo(() => { const pubkey = useMemo(() => {
if (profile?.pubkey) return profile.pubkey if (profile?.pubkey) return profile.pubkey
return userIdToPubkey(userId) || '' return userIdToPubkey(userId) || ''
}, [userId, profile?.pubkey]) }, [userId, profile?.pubkey])
// Show skeleton while fetching (unless withoutSkeleton is true) // Never block on profile fetch when we can already show npub/hex fallback (feeds batch-fetch profiles).
if (isFetching && !withoutSkeleton) { const canShowWithoutProfile = Boolean(pubkey)
if (isFetching && !withoutSkeleton && !canShowWithoutProfile) {
return ( return (
<div className="py-1"> <div className="py-1">
<Skeleton className={cn('w-16', skeletonClassName)} /> <Skeleton className={cn('w-16', skeletonClassName)} />
@ -108,15 +109,15 @@ export function SimpleUsername({
style?: React.CSSProperties style?: React.CSSProperties
}) { }) {
const { profile, isFetching } = useFetchProfile(userId) const { profile, isFetching } = useFetchProfile(userId)
// Get pubkey from userId (works even if profile isn't loaded) // Get pubkey from userId (works even if profile isn't loaded)
const pubkey = useMemo(() => { const pubkey = useMemo(() => {
if (profile?.pubkey) return profile.pubkey if (profile?.pubkey) return profile.pubkey
return userIdToPubkey(userId) || '' return userIdToPubkey(userId) || ''
}, [userId, profile?.pubkey]) }, [userId, profile?.pubkey])
// Show skeleton while fetching (unless withoutSkeleton is true) const canShowWithoutProfile = Boolean(pubkey)
if (isFetching && !withoutSkeleton) { if (isFetching && !withoutSkeleton && !canShowWithoutProfile) {
return ( return (
<div className="py-1"> <div className="py-1">
<Skeleton className={cn('w-16', skeletonClassName)} /> <Skeleton className={cn('w-16', skeletonClassName)} />

7
src/constants.ts

@ -17,8 +17,11 @@ export const DEFAULT_FAVORITE_RELAYS = [
/** Multi-relay queries and timeline initial REQ: after the first event, wait this long then close (query) or finalize EOSE (live feed) while keeping the subscription open for new events. */ /** Multi-relay queries and timeline initial REQ: after the first event, wait this long then close (query) or finalize EOSE (live feed) while keeping the subscription open for new events. */
export const FIRST_RELAY_RESULT_GRACE_MS = 2000 export const FIRST_RELAY_RESULT_GRACE_MS = 2000
/** Spells page feeds: shorter grace so multi-relay spell REQs finalize initial load sooner (still keeps subscription open for `onNew`). */ /** Spells page NoteList: drop the loading skeleton after this long so the feed can render; REQ stays open and rows stream in. */
export const SPELL_FEED_FIRST_RELAY_GRACE_MS = 450 export const SPELL_FEED_LOADING_MAX_MS = 1000
/** @deprecated Use {@link SPELL_FEED_LOADING_MAX_MS}; kept so old imports do not break. */
export const SPELL_FEED_FIRST_RELAY_GRACE_MS = SPELL_FEED_LOADING_MAX_MS
/** /**
* Implicit query feed grace ({@link FIRST_RELAY_RESULT_GRACE_MS}) applies only when the largest `limit` among * Implicit query feed grace ({@link FIRST_RELAY_RESULT_GRACE_MS}) applies only when the largest `limit` among

7
src/hooks/useFetchProfile.tsx

@ -26,7 +26,12 @@ export function useFetchProfile(id?: string, skipCache = false) {
const { profile: currentAccountProfile } = useNostr() const { profile: currentAccountProfile } = useNostr()
const noteFeed = useNoteFeedProfileContext() const noteFeed = useNoteFeedProfileContext()
const [isFetching, setIsFetching] = useState(true) /** Hex/npub ids can show npub fallback immediately; avoid a skeleton frame before the first effect. */
const [isFetching, setIsFetching] = useState(() => {
if (!id) return false
const pk = userIdToPubkey(id)
return !(pk.length === 64 && /^[0-9a-f]{64}$/.test(pk))
})
const [error, setError] = useState<Error | null>(null) const [error, setError] = useState<Error | null>(null)
const [profile, setProfile] = useState<TProfile | null>(null) const [profile, setProfile] = useState<TProfile | null>(null)
const [pubkey, setPubkey] = useState<string | null>(null) const [pubkey, setPubkey] = useState<string | null>(null)

293
src/hooks/useProfileTimeline.tsx

@ -1,18 +1,20 @@
import { useDeletedEvent } from '@/providers/DeletedEventProvider' import { useDeletedEvent } from '@/providers/DeletedEventProvider'
import client from '@/services/client.service' import client from '@/services/client.service'
import { useEffect, useMemo, useRef, useState, useCallback } from 'react' import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Event } from 'nostr-tools' import { Event } from 'nostr-tools'
import { CALENDAR_EVENT_KINDS, ExtendedKind, FAST_READ_RELAY_URLS } from '@/constants' import { CALENDAR_EVENT_KINDS, ExtendedKind } from '@/constants'
import { normalizeUrl } from '@/lib/url' import { getRelayUrlsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider'
type ProfileTimelineCacheEntry = { type ProfileTimelineMemoryEntry = {
events: Event[] events: Event[]
lastUpdated: number lastUpdated: number
} }
const timelineCache = new Map<string, ProfileTimelineCacheEntry>() /** 5-minute in-memory cache for this hook only — not IndexedDB, not client timeline refs. */
const CACHE_DURATION = 5 * 60 * 1000 // 5 minutes - cache is considered fresh for this long const memoryTimelineByKey = new Map<string, ProfileTimelineMemoryEntry>()
const relayGroupCache = new Map<string, string[][]>() const CACHE_DURATION = 5 * 60 * 1000
type UseProfileTimelineOptions = { type UseProfileTimelineOptions = {
pubkey: string pubkey: string
@ -28,55 +30,36 @@ type UseProfileTimelineResult = {
refresh: () => void refresh: () => void
} }
async function getRelayGroups(pubkey: string): Promise<string[][]> { function buildSubRequests(
const cached = relayGroupCache.get(pubkey) groups: string[][],
if (cached) { pubkey: string,
return cached kindsArg: number[],
} limit: number,
hasCalendarKinds: boolean
const [relayList, favoriteRelays] = await Promise.all([ ) {
client.fetchRelayList(pubkey).catch(() => ({ read: [], write: [] })), const authorRequests = groups
client.fetchFavoriteRelays(pubkey).catch(() => []) .map((urls) => ({
]) urls,
filter: {
const groups: string[][] = [] authors: [pubkey],
kinds: kindsArg,
const normalizeList = (urls?: string[]) => limit
Array.from( } as any
new Set( }))
(urls || []) .filter((request) => request.urls.length)
.map((url) => normalizeUrl(url)) const calendarInviteRequests = hasCalendarKinds
.filter((value): value is string => !!value) ? groups
) .map((urls) => ({
) urls,
filter: {
const readRelays = normalizeList(relayList.read) kinds: [ExtendedKind.CALENDAR_EVENT_DATE, ExtendedKind.CALENDAR_EVENT_TIME],
if (readRelays.length) { '#p': [pubkey],
groups.push(readRelays) limit: 100
} } as any
}))
const writeRelays = normalizeList(relayList.write) .filter((request) => request.urls.length)
if (writeRelays.length) { : []
groups.push(writeRelays) return [...authorRequests, ...calendarInviteRequests]
}
const favoriteRelayList = normalizeList(favoriteRelays)
if (favoriteRelayList.length) {
groups.push(favoriteRelayList)
}
const fastReadRelays = normalizeList(FAST_READ_RELAY_URLS)
if (fastReadRelays.length) {
groups.push(fastReadRelays)
}
if (!groups.length) {
relayGroupCache.set(pubkey, [fastReadRelays])
return [fastReadRelays]
}
relayGroupCache.set(pubkey, groups)
return groups
} }
function postProcessEvents( function postProcessEvents(
@ -107,11 +90,18 @@ export function useProfileTimeline({
limit = 200, limit = 200,
filterPredicate filterPredicate
}: UseProfileTimelineOptions): UseProfileTimelineResult { }: UseProfileTimelineOptions): UseProfileTimelineResult {
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const { relayList } = useNostr()
const { isEventDeleted, tombstoneEpoch } = useDeletedEvent() const { isEventDeleted, tombstoneEpoch } = useDeletedEvent()
const isEventDeletedRef = useRef(isEventDeleted) const isEventDeletedRef = useRef(isEventDeleted)
isEventDeletedRef.current = isEventDeleted isEventDeletedRef.current = isEventDeleted
const cachedEntry = useMemo(() => timelineCache.get(cacheKey), [cacheKey]) const filterPredicateRef = useRef(filterPredicate)
filterPredicateRef.current = filterPredicate
const limitRef = useRef(limit)
limitRef.current = limit
const cachedEntry = useMemo(() => memoryTimelineByKey.get(cacheKey), [cacheKey])
const [events, setEvents] = useState<Event[]>(cachedEntry?.events ?? []) const [events, setEvents] = useState<Event[]>(cachedEntry?.events ?? [])
const [isLoading, setIsLoading] = useState(!cachedEntry) const [isLoading, setIsLoading] = useState(!cachedEntry)
const [refreshToken, setRefreshToken] = useState(0) const [refreshToken, setRefreshToken] = useState(0)
@ -121,9 +111,9 @@ export function useProfileTimeline({
setEvents((prev) => { setEvents((prev) => {
const next = prev.filter((e) => !isEventDeletedRef.current(e)) const next = prev.filter((e) => !isEventDeletedRef.current(e))
if (next.length === prev.length) return prev if (next.length === prev.length) return prev
const cached = timelineCache.get(cacheKey) const cached = memoryTimelineByKey.get(cacheKey)
if (cached) { if (cached) {
timelineCache.set(cacheKey, { events: next, lastUpdated: cached.lastUpdated }) memoryTimelineByKey.set(cacheKey, { events: next, lastUpdated: cached.lastUpdated })
} }
return next return next
}) })
@ -131,129 +121,117 @@ export function useProfileTimeline({
useEffect(() => { useEffect(() => {
let cancelled = false let cancelled = false
const closers: (() => void)[] = []
const pool = new Map<string, Event>()
const flushPool = () => {
if (cancelled) return
const processed = postProcessEvents(
Array.from(pool.values()),
filterPredicateRef.current,
limitRef.current,
isEventDeletedRef.current
)
memoryTimelineByKey.set(cacheKey, { events: processed, lastUpdated: Date.now() })
setEvents(processed)
setIsLoading(false)
}
subscriptionRef.current = () => {
closers.forEach((c) => c())
closers.length = 0
}
const registerCloser = (closer: () => void) => {
if (cancelled) {
closer()
return
}
closers.push(closer)
}
const subscribe = async () => { const subscribe = async () => {
// Check if we have fresh cached data const mem = memoryTimelineByKey.get(cacheKey)
const cachedEntry = timelineCache.get(cacheKey) const cacheAge = mem ? Date.now() - mem.lastUpdated : Infinity
const cacheAge = cachedEntry ? Date.now() - cachedEntry.lastUpdated : Infinity
const isCacheFresh = cacheAge < CACHE_DURATION const isCacheFresh = cacheAge < CACHE_DURATION
// If cache is fresh, show it immediately and skip subscribing pool.clear()
if (isCacheFresh && cachedEntry) { if (isCacheFresh && mem) {
setEvents(cachedEntry.events) setEvents(mem.events)
setIsLoading(false) setIsLoading(false)
// Still subscribe in background to get updates, but don't show loading mem.events.forEach((e) => pool.set(e.id, e))
// This ensures we get new events without disrupting the UI
} else { } else {
// Cache is stale or missing - show loading and fetch setIsLoading(!mem)
setIsLoading(!cachedEntry)
} }
try {
const relayGroups = await getRelayGroups(pubkey)
if (cancelled) {
return
}
const hasCalendarKinds = kinds.some((k) => CALENDAR_EVENT_KINDS.includes(k)) const hasCalendarKinds = kinds.some((k) => CALENDAR_EVENT_KINDS.includes(k))
const authorRequests = relayGroups const feedRelayUrls = getRelayUrlsWithFavoritesFastReadAndInbox(
.map((urls) => ({ favoriteRelays,
urls, blockedRelays,
filter: { relayList?.read ?? []
authors: [pubkey], )
kinds,
limit
} as any
}))
.filter((request) => request.urls.length)
// When profile includes calendar event kinds, also subscribe to events where this user is an invitee (#p tag)
const calendarInviteRequests = hasCalendarKinds
? relayGroups
.map((urls) => ({
urls,
filter: {
kinds: [ExtendedKind.CALENDAR_EVENT_DATE, ExtendedKind.CALENDAR_EVENT_TIME],
'#p': [pubkey],
limit: 100
} as any
}))
.filter((request) => request.urls.length)
: []
const subRequests = [...authorRequests, ...calendarInviteRequests]
if (!subRequests.length) {
timelineCache.set(cacheKey, {
events: [],
lastUpdated: Date.now()
})
setEvents([])
setIsLoading(false)
return
}
const { closer } = await client.subscribeTimeline( const startWave = async (subRequests: ReturnType<typeof buildSubRequests>) => {
subRequests, if (cancelled || subRequests.length === 0) return
{ try {
onEvents: (fetchedEvents) => { const { closer } = await client.subscribeTimeline(
if (cancelled) return subRequests,
const processed = postProcessEvents( {
fetchedEvents as Event[], onEvents: (fetched) => {
filterPredicate, if (cancelled) return
limit, for (const e of fetched as Event[]) {
isEventDeletedRef.current pool.set(e.id, e)
) }
timelineCache.set(cacheKey, { flushPool()
events: processed, },
lastUpdated: Date.now() onNew: (evt) => {
}) if (cancelled) return
setEvents(processed) pool.set((evt as Event).id, evt as Event)
setIsLoading(false) flushPool()
}
}, },
onNew: (evt) => { { needSort: true }
if (cancelled) return )
setEvents((prevEvents) => { registerCloser(closer)
const combined = [evt as Event, ...prevEvents] } catch {
const processed = postProcessEvents( if (!cancelled) setIsLoading(false)
combined,
filterPredicate,
limit,
isEventDeletedRef.current
)
timelineCache.set(cacheKey, {
events: processed,
lastUpdated: Date.now()
})
return processed
})
}
},
{ needSort: true, useCache: false } // NO CACHING - stream raw from relays
)
subscriptionRef.current = () => closer()
} catch (error) {
if (!cancelled) {
setIsLoading(false)
} }
} }
if (feedRelayUrls.length === 0) {
if (!cancelled) setIsLoading(false)
return
}
void startWave(buildSubRequests([feedRelayUrls], pubkey, kinds, limit, hasCalendarKinds))
} }
subscribe() void subscribe()
return () => { return () => {
cancelled = true cancelled = true
subscriptionRef.current() subscriptionRef.current()
subscriptionRef.current = () => {} subscriptionRef.current = () => {}
} }
}, [pubkey, cacheKey, JSON.stringify(kinds), limit, filterPredicate, refreshToken]) }, [
pubkey,
cacheKey,
JSON.stringify(kinds),
limit,
filterPredicate,
refreshToken,
favoriteRelays,
blockedRelays,
relayList
])
const refresh = useCallback(() => { const refresh = useCallback(() => {
subscriptionRef.current() subscriptionRef.current()
subscriptionRef.current = () => {} subscriptionRef.current = () => {}
timelineCache.delete(cacheKey) memoryTimelineByKey.delete(cacheKey)
setIsLoading(true) setIsLoading(true)
setRefreshToken((token) => token + 1) setRefreshToken((token) => token + 1)
}, []) }, [cacheKey])
return { return {
events, events,
@ -261,4 +239,3 @@ export function useProfileTimeline({
refresh refresh
} }
} }

15
src/lib/event-metadata.ts

@ -569,12 +569,17 @@ export function getEmojisFromEvent(event: Event): TEmoji[] {
export function getStarsFromRelayReviewEvent(event: Event): number { export function getStarsFromRelayReviewEvent(event: Event): number {
const ratingTag = event.tags.find((t) => t[0] === 'rating') const ratingTag = event.tags.find((t) => t[0] === 'rating')
if (ratingTag) { if (!ratingTag?.[1]?.trim()) return 0
const stars = parseFloat(ratingTag[1]) * 5 const raw = parseFloat(ratingTag[1])
if (stars > 0 && stars <= 5) { if (Number.isNaN(raw) || raw <= 0) return 0
return stars // This app publishes `rating` as stars/5 (e.g. 5★ → "1"); scale back to 1–5.
} if (raw <= 1) {
const scaled = raw * 5
if (scaled > 0 && scaled <= 5) return scaled
return 0
} }
// Many clients use a plain 1–5 value in the tag.
if (raw >= 1 && raw <= 5) return raw
return 0 return 0
} }

81
src/lib/favorites-feed-relays.ts

@ -0,0 +1,81 @@
import { DEFAULT_FAVORITE_RELAYS, FAST_READ_RELAY_URLS } from '@/constants'
import type { TFeedSubRequest } from '@/types'
import { normalizeUrl } from '@/lib/url'
/** Normalized lookup set of blocked relay URLs; falls back to the raw string when normalization yields nothing. */
const blockedSet = (blockedRelays: string[]) => {
  const keys = blockedRelays.map((raw) => normalizeUrl(raw) || raw)
  return new Set(keys)
}
/**
 * Relay URLs for the all favorites home feed only (`FeedProvider` `all-favorites` / that `RelaysFeed` mode).
 * Non-blocked user favorites, or {@link DEFAULT_FAVORITE_RELAYS} when none remain.
 */
export function getFavoritesFeedRelayUrls(
  favoriteRelays: string[],
  blockedRelays: string[]
): string[] {
  const blocked = blockedSet(blockedRelays)
  // Keep only favorites whose normalized form exists and is not blocked.
  const visible: string[] = []
  for (const raw of favoriteRelays) {
    const key = normalizeUrl(raw) || raw
    if (key && !blocked.has(key)) visible.push(raw)
  }
  // NOTE(review): the default fallback is NOT re-checked against the blocklist — confirm that is intended.
  const base = visible.length > 0 ? visible : DEFAULT_FAVORITE_RELAYS
  // Emit normalized URLs, first occurrence wins.
  const emitted = new Set<string>()
  const result: string[] = []
  for (const url of base) {
    const key = normalizeUrl(url) || url
    if (!key || emitted.has(key)) continue
    emitted.add(key)
    result.push(key)
  }
  return result
}
/**
 * Merge relay URL lists in order; first occurrence wins; drops blocked.
 */
export function mergeRelayUrlLayers(layers: string[][], blockedRelays: string[]): string[] {
  const blocked = blockedSet(blockedRelays)
  const emitted = new Set<string>()
  const merged: string[] = []
  // Flattening preserves layer order, so earlier layers take precedence on duplicates.
  for (const candidate of layers.flat()) {
    const key = normalizeUrl(candidate) || candidate
    if (!key || blocked.has(key) || emitted.has(key)) continue
    emitted.add(key)
    merged.push(key)
  }
  return merged
}
/**
 * Favorites (same set as the favorites feed) plus {@link FAST_READ_RELAY_URLS} and the user's NIP-65 **read** / inbox relays.
 * Fast-read URLs are merged first so REQ setup hits responsive indexers early (same deduped set).
 */
export function getRelayUrlsWithFavoritesFastReadAndInbox(
  favoriteRelays: string[],
  blockedRelays: string[],
  userInboxReadRelays: string[]
): string[] {
  const favorites = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
  // `normalizeUrl(u) || u` always yields a string, so the element type is already `string` —
  // no `as string[]` assertion needed; the filter only drops empty results.
  const fast = FAST_READ_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter((u) => u.length > 0)
  return mergeRelayUrlLayers([fast, favorites, userInboxReadRelays], blockedRelays)
}
/** Prefix each subrequest's `urls` with the extended read set (favorites + fast read + inboxes). */
export function augmentSubRequestsWithFavoritesFastReadAndInbox(
  requests: TFeedSubRequest[],
  favoriteRelays: string[],
  blockedRelays: string[],
  userInboxReadRelays: string[]
): TFeedSubRequest[] {
  const prefix = getRelayUrlsWithFavoritesFastReadAndInbox(
    favoriteRelays,
    blockedRelays,
    userInboxReadRelays
  )
  return requests.map((request) => {
    // The shared read set goes first; the request's own relays follow, deduped and unblocked.
    const urls = mergeRelayUrlLayers([prefix, request.urls], blockedRelays)
    return { ...request, urls }
  })
}

34
src/lib/relay-list-builder.ts

@ -15,6 +15,26 @@ import { getCacheRelayUrls } from './private-relays'
import client from '@/services/client.service' import client from '@/services/client.service'
import logger from '@/lib/logger' import logger from '@/lib/logger'
function dedupeNormalizedRelayUrls(urls: string[]): string[] {
const seen = new Set<string>()
const out: string[] = []
for (const u of urls) {
const n = normalizeUrl(u) || u
if (!n || seen.has(n)) continue
seen.add(n)
out.push(n)
}
return out
}
/**
 * Relays to bootstrap Explore replaceable fetches (e.g. kind 10012 batch) before NIP-65 resolves.
 * PROFILE_FETCH + FAST_READ.
 */
export function exploreDiscoveryBootstrapRelayUrls(): string[] {
  const combined = [...PROFILE_FETCH_RELAY_URLS, ...FAST_READ_RELAY_URLS]
  return dedupeNormalizedRelayUrls(combined)
}
export interface RelayListBuilderOptions { export interface RelayListBuilderOptions {
/** Author's pubkey - will include their outboxes (write relays) */ /** Author's pubkey - will include their outboxes (write relays) */
authorPubkey?: string authorPubkey?: string
@ -231,29 +251,31 @@ export async function buildComprehensiveRelayList(options: RelayListBuilderOptio
} }
/** /**
* Explore: Following's Favorites (kind 10012 batch) and Relay reviews tab. * Explore: Following's Favorites (kind 10012 batch) / replaceable discovery.
* PROFILE_FETCH_RELAY_URLS plus the viewer's read/write and cache (10432) relays no FAST_READ. * Bootstrap relays (profile + FAST_READ) plus the viewer's read/write and cache (10432) when logged in.
*/ */
export async function buildExploreProfileAndUserRelayList( export async function buildExploreProfileAndUserRelayList(
userPubkey: string | null | undefined userPubkey: string | null | undefined
): Promise<string[]> { ): Promise<string[]> {
const boot = exploreDiscoveryBootstrapRelayUrls()
if (!userPubkey) { if (!userPubkey) {
return Array.from(new Set([...PROFILE_FETCH_RELAY_URLS])) return boot
} }
try { try {
const built = await buildComprehensiveRelayList({ const built = await buildComprehensiveRelayList({
userPubkey, userPubkey,
includeUserOwnRelays: true, includeUserOwnRelays: true,
includeProfileFetchRelays: true, includeProfileFetchRelays: true,
includeFastReadRelays: false, includeFastReadRelays: true,
includeFavoriteRelays: false, includeFavoriteRelays: false,
includeLocalRelays: true, includeLocalRelays: true,
includeFastWriteRelays: false, includeFastWriteRelays: false,
includeSearchableRelays: false includeSearchableRelays: false
}) })
return built.length > 0 ? built : Array.from(new Set([...PROFILE_FETCH_RELAY_URLS])) if (!built.length) return boot
return dedupeNormalizedRelayUrls([...boot, ...built])
} catch { } catch {
return Array.from(new Set([...PROFILE_FETCH_RELAY_URLS])) return boot
} }
} }

27
src/lib/spell-feed-request-identity.ts

@ -24,3 +24,30 @@ export function computeSpellSubRequestsIdentityKey(subRequests: TFeedSubRequest[
})) }))
) )
} }
/**
 * True when `nextKey` has the same REQ filters as `prevKey` but a strict superset of relay URLs
 * in at least one request slot (e.g. Explore relay reviews: bootstrap relays → full list).
 *
 * Malformed / non-JSON keys and mismatched slot counts yield `false`.
 */
export function isRelayUrlStrictSupersetIdentityKey(prevKey: string | null, nextKey: string): boolean {
  if (!prevKey || prevKey === nextKey) return false
  try {
    type Item = { urls: string[]; filter: string }
    const prevItems = JSON.parse(prevKey) as Item[]
    const nextItems = JSON.parse(nextKey) as Item[]
    if (!Array.isArray(prevItems) || !Array.isArray(nextItems)) return false
    if (prevItems.length !== nextItems.length) return false
    let grew = false
    for (let slot = 0; slot < prevItems.length; slot++) {
      // Filters must be byte-identical per slot; only the relay set may change.
      if (prevItems[slot].filter !== nextItems[slot].filter) return false
      const prevUrls = new Set(prevItems[slot].urls)
      const nextUrls = new Set(nextItems[slot].urls)
      // Every previous URL must survive, otherwise this is not a superset.
      for (const url of prevUrls) {
        if (!nextUrls.has(url)) return false
      }
      if (nextUrls.size > prevUrls.size) grew = true
    }
    return grew
  } catch {
    return false
  }
}

12
src/lib/tag.ts

@ -17,6 +17,18 @@ export function tagNameEquals(tagName: string) {
return (tag: string[]) => tag[0] === tagName return (tag: string[]) => tag[0] === tagName
} }
const NOTE_HEX_ID_RE = /^[0-9a-f]{64}$/i

/** First hex event id on an `e` / `E` tag (reactions, reposts, replies). */
export function getFirstHexEventIdFromETags(tags: string[][]): string | undefined {
  const match = tags.find(
    (tag) => (tag[0] === 'e' || tag[0] === 'E') && !!tag[1] && NOTE_HEX_ID_RE.test(tag[1])
  )
  return match?.[1]
}
export function generateBech32IdFromETag(tag: string[]) { export function generateBech32IdFromETag(tag: string[]) {
try { try {
const [, id, relay, markerOrPubkey, pubkey] = tag const [, id, relay, markerOrPubkey, pubkey] = tag

19
src/pages/primary/NoteListPage/FollowingFeed.tsx

@ -1,6 +1,8 @@
import NormalFeed from '@/components/NormalFeed' import NormalFeed from '@/components/NormalFeed'
import type { TNoteListRef } from '@/components/NoteList' import type { TNoteListRef } from '@/components/NoteList'
import { augmentSubRequestsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import { useFeed } from '@/providers/FeedProvider' import { useFeed } from '@/providers/FeedProvider'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider' import { useNostr } from '@/providers/NostrProvider'
import client from '@/services/client.service' import client from '@/services/client.service'
import { TFeedSubRequest } from '@/types' import { TFeedSubRequest } from '@/types'
@ -13,7 +15,8 @@ const FollowingFeed = forwardRef<
setSubHeader?: (node: ReactNode) => void setSubHeader?: (node: ReactNode) => void
} }
>(function FollowingFeed({ setSubHeader }, ref) { >(function FollowingFeed({ setSubHeader }, ref) {
const { pubkey } = useNostr() const { pubkey, relayList } = useNostr()
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const { feedInfo } = useFeed() const { feedInfo } = useFeed()
const [subRequests, setSubRequests] = useState<TFeedSubRequest[]>([]) const [subRequests, setSubRequests] = useState<TFeedSubRequest[]>([])
@ -25,11 +28,19 @@ const FollowingFeed = forwardRef<
} }
const followings = await client.fetchFollowings(pubkey) const followings = await client.fetchFollowings(pubkey)
setSubRequests(await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey)) const raw = await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey)
setSubRequests(
augmentSubRequestsWithFavoritesFastReadAndInbox(
raw,
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
)
} }
init() void init()
}, [feedInfo.feedType, pubkey]) }, [feedInfo.feedType, pubkey, favoriteRelays, blockedRelays, relayList])
return <NormalFeed ref={ref} subRequests={subRequests} isMainFeed setSubHeader={setSubHeader} /> return <NormalFeed ref={ref} subRequests={subRequests} isMainFeed setSubHeader={setSubHeader} />
}) })

10
src/pages/primary/SpellsPage/CreateSpellDialog.tsx

@ -18,6 +18,7 @@ import {
dedupeAppendIds, dedupeAppendIds,
resolveSpellListATags resolveSpellListATags
} from '@/lib/spell-list-import' } from '@/lib/spell-list-import'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider' import { useNostr } from '@/providers/NostrProvider'
import { showPublishingError, showSimplePublishSuccess } from '@/lib/publishing-feedback' import { showPublishingError, showSimplePublishSuccess } from '@/lib/publishing-feedback'
import { eventService } from '@/services/client.service' import { eventService } from '@/services/client.service'
@ -289,6 +290,7 @@ export default function CreateSpellDialog({
}) { }) {
const { t } = useTranslation() const { t } = useTranslation()
const { pubkey, publish, checkLogin, relayList } = useNostr() const { pubkey, publish, checkLogin, relayList } = useNostr()
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const [form, setForm] = useState<TSpellDraftParams>(DEFAULT_PARAMS) const [form, setForm] = useState<TSpellDraftParams>(DEFAULT_PARAMS)
const [saving, setSaving] = useState(false) const [saving, setSaving] = useState(false)
const scrollBodyRef = useRef<HTMLDivElement>(null) const scrollBodyRef = useRef<HTMLDivElement>(null)
@ -319,7 +321,11 @@ export default function CreateSpellDialog({
const { draft, notices, pendingATags } = applyListEventToSpellDraft(base, ev) const { draft, notices, pendingATags } = applyListEventToSpellDraft(base, ev)
setForm(draft) setForm(draft)
setListImportNotices(notices) setListImportNotices(notices)
const urls = getRelaysForSpellCatalogSync(relayList ?? undefined) const urls = getRelaysForSpellCatalogSync(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
if (pendingATags.length === 0) return if (pendingATags.length === 0) return
void resolveSpellListATags(pendingATags, urls).then(({ ids, notices: extra }) => { void resolveSpellListATags(pendingATags, urls).then(({ ids, notices: extra }) => {
if (ids.length) { if (ids.length) {
@ -328,7 +334,7 @@ export default function CreateSpellDialog({
if (extra.length) setListImportNotices((n) => [...n, ...extra]) if (extra.length) setListImportNotices((n) => [...n, ...extra])
}) })
}, },
[relayList] [favoriteRelays, blockedRelays, relayList]
) )
const handleLoadManualList = useCallback(async () => { const handleLoadManualList = useCallback(async () => {

176
src/pages/primary/SpellsPage/fauxSpellFeeds.ts

@ -1,14 +1,7 @@
/** /**
* Built-in faux spells use the same NoteList path as kind-777 REQ spells. * Built-in faux spells use the same NoteList path as kind-777 REQ spells.
*/ */
import { import { ExtendedKind, PROFILE_FEED_KINDS, READ_ONLY_RELAY_URLS } from '@/constants'
DEFAULT_FAVORITE_RELAYS,
ExtendedKind,
FAST_READ_RELAY_URLS,
FAST_WRITE_RELAY_URLS,
PROFILE_FEED_KINDS,
READ_ONLY_RELAY_URLS
} from '@/constants'
import { import {
extractHashtagsFromContent, extractHashtagsFromContent,
extractTTagsFromEvent, extractTTagsFromEvent,
@ -16,36 +9,13 @@ import {
} from '@/lib/discussion-topics' } from '@/lib/discussion-topics'
import { getImetaInfosFromEvent } from '@/lib/event' import { getImetaInfosFromEvent } from '@/lib/event'
import { normalizeUrl } from '@/lib/url' import { normalizeUrl } from '@/lib/url'
import type { TFeedSubRequest, TRelayList } from '@/types' import type { TFeedSubRequest } from '@/types'
import { type Event, type Filter, kinds } from 'nostr-tools' import { type Event, type Filter, kinds } from 'nostr-tools'
const NOTIFICATION_LIMIT = 500 const NOTIFICATION_LIMIT = 500
const DISCUSSION_LIMIT = 500 const DISCUSSION_LIMIT = 500
const MAX_BOOKMARK_IDS = 250 const MAX_BOOKMARK_IDS = 250
/**
* Spells Discussions uses NoteList subscribeTimeline one live REQ per relay.
* An uncapped merged relay list would open 80+ sockets and exhaust subscription slots;
* cap keeps first paint fast.
*/
const DISCUSSION_FAUX_SPELL_MAX_RELAYS = 10
/** Without caps, a long NIP-66 read list consumes the whole 32 slots and fast public relays never get a REQ — discussions stay empty while notifications still work (they blend fast reads). */
const DISCUSSION_SPELL_READ_CAP = 10
const DISCUSSION_SPELL_WRITE_CAP = 8
const DISCUSSION_SPELL_FAV_CAP = 8
function dedupe(urls: string[]): string[] {
const seen = new Set<string>()
const out: string[] = []
for (const u of urls) {
const k = normalizeUrl(u) || u
if (!k || seen.has(k)) continue
seen.add(k)
out.push(k)
}
return out
}
/** /**
* Append {@link READ_ONLY_RELAY_URLS} (e.g. aggr) after the curated set so every faux REQ includes them unless blocked. * Append {@link READ_ONLY_RELAY_URLS} (e.g. aggr) after the curated set so every faux REQ includes them unless blocked.
*/ */
@ -167,94 +137,6 @@ export function mediaSpellExtraShouldHideEvent(evt: Event): boolean {
return !isKind1MediaSpellEligible(evt) return !isKind1MediaSpellEligible(evt)
} }
/** Relays for “global” faux feeds (media, calendar): visible favorites or defaults. */
export function fauxFavoriteRelayUrls(favoriteRelays: string[], blockedRelays: string[]): string[] {
const blocked = new Set(blockedRelays.map((b) => normalizeUrl(b) || b))
const visible = favoriteRelays.filter((r) => {
const k = normalizeUrl(r) || r
return k && !blocked.has(k)
})
const base = visible.length > 0 ? visible : DEFAULT_FAVORITE_RELAYS
const curated = dedupe(base.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[])
return appendCuratedReadOnlyRelays(curated, blockedRelays)
}
/**
* Notifications / bookmarks faux spells: **fast public relays first**, then inbox/favorites.
* `FAST_READ_RELAY_URLS` has 7 entries; the old cap of 6 never subscribed to `wss://aggr.nostr.land`
* (last in the list) a major `#p` indexer so mentions could take tens of seconds or look empty.
* Fast-write relays catch mentions replicated to outboxes (damus/primal/nos.lol) with little overlap.
*/
const NOTIFICATION_PRIMARY_MAX = 4
/** Must be ≥ FAST_READ length so every default fast read relay is eligible (currently 7). */
const NOTIFICATION_FAST_READ_MAX = 10
const NOTIFICATION_FAST_WRITE_MAX = 4
const NOTIFICATION_RELAY_CAP = 14
function relayUrlsUpToUnblocked(urls: string[], blocked: Set<string>, max: number): string[] {
const seen = new Set<string>()
const out: string[] = []
for (const u of urls) {
const k = normalizeUrl(u) || u
if (!k || blocked.has(k) || seen.has(k)) continue
seen.add(k)
out.push(k)
if (out.length >= max) break
}
return out
}
function mergeRelayListsUnique(
lists: string[][],
blocked: Set<string>,
cap: number
): string[] {
const seen = new Set<string>()
const out: string[] = []
for (const list of lists) {
for (const u of list) {
const k = normalizeUrl(u) || u
if (!k || blocked.has(k) || seen.has(k)) continue
seen.add(k)
out.push(k)
if (out.length >= cap) return out
}
}
return out
}
export function notificationRelayUrls(
relayList: TRelayList | null | undefined,
favoriteRelays: string[],
blockedRelays: string[] = []
): string[] {
const blocked = new Set(blockedRelays.map((b) => normalizeUrl(b) || b))
const read = relayList?.read ?? []
const readSorted = [...read].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
const favSorted = [...favoriteRelays]
.map((u) => normalizeUrl(u) || u)
.filter(Boolean)
.sort((a, b) => a.localeCompare(b))
const primary =
read.length > 0
? relayUrlsUpToUnblocked(readSorted, blocked, NOTIFICATION_PRIMARY_MAX)
: favoriteRelays.length > 0
? relayUrlsUpToUnblocked(favSorted, blocked, NOTIFICATION_PRIMARY_MAX)
: []
const fromFastRead = relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_FAST_READ_MAX)
const fromFastWrite = relayUrlsUpToUnblocked(FAST_WRITE_RELAY_URLS, blocked, NOTIFICATION_FAST_WRITE_MAX)
const merged = mergeRelayListsUnique(
[fromFastRead, fromFastWrite, primary],
blocked,
NOTIFICATION_RELAY_CAP
)
if (merged.length > 0) return appendCuratedReadOnlyRelays(merged, blockedRelays)
return appendCuratedReadOnlyRelays(
relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_RELAY_CAP),
blockedRelays
)
}
/** Notifications spell: same kind set as profile-style feeds, restricted to `#p` = you on the relay. */ /** Notifications spell: same kind set as profile-style feeds, restricted to `#p` = you on the relay. */
export function buildMentionsSpellFilter(pubkey: string): Filter { export function buildMentionsSpellFilter(pubkey: string): Filter {
return { return {
@ -264,45 +146,6 @@ export function buildMentionsSpellFilter(pubkey: string): Filter {
} }
} }
/**
* Relay set for Spells Discussions (kind 11), capped for subscription-based loading
* (see DISCUSSION_FAUX_SPELL_MAX_RELAYS).
*/
/**
* Deterministic relay pick: each tier (read / write / fav / fast) is normalized + sorted so NostrProvider
* array order and NIP-66 ref churn do not change which 32 relays we REQ (prevents subscription identity thrash).
*/
export function discussionRelayUrls(
relayList: TRelayList | null | undefined,
favoriteRelays: string[],
blockedRelays: string[]
): string[] {
const blocked = new Set(blockedRelays.map((b) => normalizeUrl(b) || b))
const tier = (urls: string[]) =>
[...new Set(urls.map((u) => normalizeUrl(u) || u).filter(Boolean))]
.filter((k) => !blocked.has(k))
.sort((a, b) => a.localeCompare(b))
const read = tier(relayList?.read ?? [])
const write = tier(relayList?.write ?? [])
const fav = tier(favoriteRelays)
const fastR = tier([...FAST_READ_RELAY_URLS])
const fastW = tier([...FAST_WRITE_RELAY_URLS])
const curated = mergeRelayListsUnique(
[
read.slice(0, DISCUSSION_SPELL_READ_CAP),
write.slice(0, DISCUSSION_SPELL_WRITE_CAP),
fav.slice(0, DISCUSSION_SPELL_FAV_CAP),
fastR,
fastW
],
blocked,
DISCUSSION_FAUX_SPELL_MAX_RELAYS
)
return appendCuratedReadOnlyRelays(curated, blockedRelays)
}
export function buildDiscussionFilter(): Filter { export function buildDiscussionFilter(): Filter {
return { return {
kinds: [ExtendedKind.DISCUSSION], kinds: [ExtendedKind.DISCUSSION],
@ -321,21 +164,6 @@ export function buildCalendarSpellFilter(): Filter {
} }
} }
const FOLLOW_PACK_LIMIT = 100
/** Kind 39089 follow/starter packs from fast read relays (same scope as the old Follow Packs page). */
export function buildFollowPacksSubRequests(): TFeedSubRequest[] {
const curated = FAST_READ_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[]
if (!curated.length) return []
const urls = appendCuratedReadOnlyRelays(curated, [])
return [
{
urls,
filter: { kinds: [ExtendedKind.FOLLOW_PACK], limit: FOLLOW_PACK_LIMIT }
}
]
}
/** One subrequest per topic (OR). Uses same kind set as the main profile/favorites feed. */ /** One subrequest per topic (OR). Uses same kind set as the main profile/favorites feed. */
export function buildInterestsSubRequests( export function buildInterestsSubRequests(
relayUrls: string[], relayUrls: string[],

139
src/pages/primary/SpellsPage/index.tsx

@ -38,10 +38,14 @@ import {
FAUX_SPELL_ORDER, FAUX_SPELL_ORDER,
FIRST_RELAY_RESULT_GRACE_MS, FIRST_RELAY_RESULT_GRACE_MS,
PROFILE_FEED_KINDS, PROFILE_FEED_KINDS,
SPELL_FEED_FIRST_RELAY_GRACE_MS SPELL_FEED_LOADING_MAX_MS
} from '@/constants' } from '@/constants'
import { isUserInEventMentions } from '@/lib/event' import { isUserInEventMentions } from '@/lib/event'
import { formatPubkey } from '@/lib/pubkey' import { formatPubkey } from '@/lib/pubkey'
import {
augmentSubRequestsWithFavoritesFastReadAndInbox,
getRelayUrlsWithFavoritesFastReadAndInbox
} from '@/lib/favorites-feed-relays'
import { computeSpellSubRequestsIdentityKey } from '@/lib/spell-feed-request-identity' import { computeSpellSubRequestsIdentityKey } from '@/lib/spell-feed-request-identity'
import { normalizeUrl } from '@/lib/url' import { normalizeUrl } from '@/lib/url'
import { import {
@ -86,15 +90,11 @@ import {
buildBookmarksSubRequests, buildBookmarksSubRequests,
buildCalendarSpellFilter, buildCalendarSpellFilter,
buildDiscussionFilter, buildDiscussionFilter,
buildFollowPacksSubRequests,
buildInterestsSubRequests, buildInterestsSubRequests,
buildMediaSpellFilter, buildMediaSpellFilter,
buildMentionsSpellFilter, buildMentionsSpellFilter,
discussionRelayUrls,
fauxFavoriteRelayUrls,
MEDIA_SPELL_SHOW_KINDS, MEDIA_SPELL_SHOW_KINDS,
mediaSpellExtraShouldHideEvent, mediaSpellExtraShouldHideEvent
notificationRelayUrls
} from './fauxSpellFeeds' } from './fauxSpellFeeds'
import type { TPageRef } from '@/types' import type { TPageRef } from '@/types'
@ -370,6 +370,22 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
return JSON.stringify(normalizedWriteSorted) return JSON.stringify(normalizedWriteSorted)
}, [relayMailboxStableKey]) }, [relayMailboxStableKey])
/** Order-independent favorites/blocked — array order from providers must not rebuild subs. */
const sortedFavoriteRelaysKey = useMemo(
() =>
JSON.stringify(
[...favoriteRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
),
[favoriteRelays]
)
const sortedBlockedRelaysKey = useMemo(
() =>
JSON.stringify(
[...blockedRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
),
[blockedRelays]
)
useEffect(() => { useEffect(() => {
loadSpells() loadSpells()
}, [loadSpells]) }, [loadSpells])
@ -378,8 +394,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const contactsSyncKey = useMemo(() => [...contacts].sort().join(','), [contacts]) const contactsSyncKey = useMemo(() => [...contacts].sort().join(','), [contacts])
/** /**
* After showing the cache, pull kind 777 from merged mailbox (10002 + 10432) read/write + fast read. * After showing the cache, pull kind 777 using the same relay set as the favorites feed.
* Deps use `relayMailboxStableKey` only not NIP-66 `originalRelays` so discovery merges dont restart this sub.
*/ */
useEffect(() => { useEffect(() => {
if (!pubkey) { if (!pubkey) {
@ -396,7 +411,11 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
if (!cancelled) void loadSpells() if (!cancelled) void loadSpells()
}, 120) }, 120)
} }
const urls = getRelaysForSpellCatalogSync(relayList ?? undefined) const urls = getRelaysForSpellCatalogSync(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
const catalogAuthors = buildSpellCatalogAuthors(pubkey, contacts) const catalogAuthors = buildSpellCatalogAuthors(pubkey, contacts)
const authorAllowlist = new Set(catalogAuthors) const authorAllowlist = new Set(catalogAuthors)
const filter = { const filter = {
@ -421,10 +440,14 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
} }
} }
void (async () => { /** Defer catalog REQ so faux/kind-777 feed opens sockets and paints first. */
try { const catalogDelayMs = 800
setSpellsCatalogSyncing(true) const delayId = window.setTimeout(() => {
const { closer } = await client.subscribeTimeline( if (cancelled) return
void (async () => {
try {
setSpellsCatalogSyncing(true)
const { closer } = await client.subscribeTimeline(
[{ urls, filter }], [{ urls, filter }],
{ {
onEvents: async (events, eosed) => { onEvents: async (events, eosed) => {
@ -477,8 +500,6 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
onNew: () => {} // Not needed onNew: () => {} // Not needed
}, },
{ {
useCache: true,
omitDefaultSinceWhenUseCache: true,
firstRelayResultGraceMs: FIRST_RELAY_RESULT_GRACE_MS firstRelayResultGraceMs: FIRST_RELAY_RESULT_GRACE_MS
} }
) )
@ -492,10 +513,12 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
logger.warn('[SpellsPage] Spell catalog subscribe failed', e) logger.warn('[SpellsPage] Spell catalog subscribe failed', e)
if (!cancelled) setSpellsCatalogSyncing(false) if (!cancelled) setSpellsCatalogSyncing(false)
} }
})() })()
}, catalogDelayMs)
return () => { return () => {
cancelled = true cancelled = true
window.clearTimeout(delayId)
clearAfterFirstBatchTimer() clearAfterFirstBatchTimer()
if (loadSpellsDebounce != null) clearTimeout(loadSpellsDebounce) if (loadSpellsDebounce != null) clearTimeout(loadSpellsDebounce)
window.clearTimeout(syncTimeout) window.clearTimeout(syncTimeout)
@ -503,7 +526,15 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
spellCatalogCloserRef.current = null spellCatalogCloserRef.current = null
setSpellsCatalogSyncing(false) setSpellsCatalogSyncing(false)
} }
}, [pubkey, relayMailboxStableKey, loadSpells, contactsSyncKey, spellCatalogManualRefreshKey]) }, [
pubkey,
sortedFavoriteRelaysKey,
sortedBlockedRelaysKey,
relayMailboxStableKey,
loadSpells,
contactsSyncKey,
spellCatalogManualRefreshKey
])
useEffect(() => { useEffect(() => {
if (!pubkey) { if (!pubkey) {
@ -513,14 +544,6 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
client.fetchFollowings(pubkey).then(setContacts).catch(() => setContacts([])) client.fetchFollowings(pubkey).then(setContacts).catch(() => setContacts([]))
}, [pubkey]) }, [pubkey])
/** Order-independent favorites/blocked — array order from providers must not rebuild faux subs. */
const sortedFavoriteRelaysKey = JSON.stringify(
[...favoriteRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
)
const sortedBlockedRelaysKey = JSON.stringify(
[...blockedRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
)
useEffect(() => { useEffect(() => {
if (selectedFauxSpell !== 'following' || !pubkey) { if (selectedFauxSpell !== 'following' || !pubkey) {
setFollowingSubRequests([]) setFollowingSubRequests([])
@ -533,7 +556,13 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
try { try {
const followings = await client.fetchFollowings(pubkey) const followings = await client.fetchFollowings(pubkey)
const req = await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey) const req = await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey)
const withReadOnly = req.map((r) => ({ const merged = augmentSubRequestsWithFavoritesFastReadAndInbox(
req,
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
const withReadOnly = merged.map((r) => ({
...r, ...r,
urls: appendCuratedReadOnlyRelays(r.urls, blockedRelays) urls: appendCuratedReadOnlyRelays(r.urls, blockedRelays)
})) }))
@ -547,7 +576,13 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
return () => { return () => {
cancelled = true cancelled = true
} }
}, [selectedFauxSpell, pubkey, sortedBlockedRelaysKey]) }, [
selectedFauxSpell,
pubkey,
sortedFavoriteRelaysKey,
sortedBlockedRelaysKey,
relayMailboxStableKey
])
const interestTagsStableKey = interestListEvent const interestTagsStableKey = interestListEvent
? JSON.stringify( ? JSON.stringify(
@ -574,45 +609,49 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const syncFauxSubRequests = useMemo<TFeedSubRequest[]>(() => { const syncFauxSubRequests = useMemo<TFeedSubRequest[]>(() => {
if (!selectedFauxSpell || selectedFauxSpell === 'following') return [] if (!selectedFauxSpell || selectedFauxSpell === 'following') return []
const feedUrls = getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
if (selectedFauxSpell === 'notifications') { if (selectedFauxSpell === 'notifications') {
if (!pubkey) return [] if (!pubkey || !feedUrls.length) return []
const urls = notificationRelayUrls(relayList, favoriteRelays, blockedRelays) return [{ urls: feedUrls, filter: buildMentionsSpellFilter(pubkey) }]
if (!urls.length) return []
return [{ urls, filter: buildMentionsSpellFilter(pubkey) }]
} }
if (selectedFauxSpell === 'discussions') { if (selectedFauxSpell === 'discussions') {
const urls = discussionRelayUrls(relayList, favoriteRelays, blockedRelays) if (!feedUrls.length) return []
if (!urls.length) return [] return [{ urls: feedUrls, filter: buildDiscussionFilter() }]
return [{ urls, filter: buildDiscussionFilter() }]
} }
if (selectedFauxSpell === 'media') { if (selectedFauxSpell === 'media') {
const urls = fauxFavoriteRelayUrls(favoriteRelays, blockedRelays) if (!feedUrls.length) return []
if (!urls.length) return [] return [{ urls: feedUrls, filter: buildMediaSpellFilter() }]
return [{ urls, filter: buildMediaSpellFilter() }]
} }
if (selectedFauxSpell === 'calendar') { if (selectedFauxSpell === 'calendar') {
const urls = fauxFavoriteRelayUrls(favoriteRelays, blockedRelays) if (!feedUrls.length) return []
if (!urls.length) return [] return [{ urls: feedUrls, filter: buildCalendarSpellFilter() }]
return [{ urls, filter: buildCalendarSpellFilter() }]
} }
if (selectedFauxSpell === 'interests') { if (selectedFauxSpell === 'interests') {
if (!pubkey || !interestListEvent) return [] if (!pubkey || !interestListEvent) return []
const topics = interestListEvent.tags.filter((tag) => tag[0] === 't' && tag[1]).map((tag) => tag[1]!) const topics = interestListEvent.tags.filter((tag) => tag[0] === 't' && tag[1]).map((tag) => tag[1]!)
const urls = fauxFavoriteRelayUrls(favoriteRelays, blockedRelays) return buildInterestsSubRequests(feedUrls, topics, PROFILE_FEED_KINDS)
return buildInterestsSubRequests(urls, topics, PROFILE_FEED_KINDS)
} }
if (selectedFauxSpell === 'bookmarks') { if (selectedFauxSpell === 'bookmarks') {
if (!pubkey) return [] if (!pubkey) return []
const urls = notificationRelayUrls(relayList, favoriteRelays, blockedRelays) return buildBookmarksSubRequests(bookmarkListEvent, feedUrls)
return buildBookmarksSubRequests(bookmarkListEvent, urls)
} }
if (selectedFauxSpell === 'followPacks') { if (selectedFauxSpell === 'followPacks') {
return buildFollowPacksSubRequests() const urls = appendCuratedReadOnlyRelays(feedUrls, blockedRelays)
if (!urls.length) return []
return [
{
urls,
filter: { kinds: [ExtendedKind.FOLLOW_PACK], limit: 100 }
}
]
} }
return [] return []
// relayMailboxStableKey: read/write only — do not tie faux feeds to originalRelays (NIP-66 churn). }, [selectedFauxSpell, pubkey, fauxFeedRelaysDepsKey, relayMailboxStableKey])
}, [selectedFauxSpell, pubkey, relayMailboxStableKey, fauxFeedRelaysDepsKey])
const fauxSubRequests = useMemo<TFeedSubRequest[]>(() => { const fauxSubRequests = useMemo<TFeedSubRequest[]>(() => {
if (selectedFauxSpell === 'following') return followingSubRequests if (selectedFauxSpell === 'following') return followingSubRequests
@ -1229,8 +1268,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
subRequests={subRequests} subRequests={subRequests}
feedSubscriptionKey={spellFeedSubscriptionKey} feedSubscriptionKey={spellFeedSubscriptionKey}
showKinds={showKinds} showKinds={showKinds}
useTimelineCacheBootstrap spellFetchTimeoutMs={SPELL_FEED_LOADING_MAX_MS}
spellFetchTimeoutMs={SPELL_FEED_FIRST_RELAY_GRACE_MS}
spellFeedInstrumentToken={spellFeedInstrumentToken} spellFeedInstrumentToken={spellFeedInstrumentToken}
onSpellFeedFirstPaint={handleSpellFeedFirstPaint} onSpellFeedFirstPaint={handleSpellFeedFirstPaint}
useFilterAsIs={fauxNoteListUseFilterAsIs} useFilterAsIs={fauxNoteListUseFilterAsIs}
@ -1258,8 +1296,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
subRequests={subRequests} subRequests={subRequests}
feedSubscriptionKey={spellFeedSubscriptionKey} feedSubscriptionKey={spellFeedSubscriptionKey}
showKinds={showKinds} showKinds={showKinds}
useTimelineCacheBootstrap spellFetchTimeoutMs={SPELL_FEED_LOADING_MAX_MS}
spellFetchTimeoutMs={SPELL_FEED_FIRST_RELAY_GRACE_MS}
spellFeedInstrumentToken={spellFeedInstrumentToken} spellFeedInstrumentToken={spellFeedInstrumentToken}
onSpellFeedFirstPaint={handleSpellFeedFirstPaint} onSpellFeedFirstPaint={handleSpellFeedFirstPaint}
useFilterAsIs useFilterAsIs

59
src/pages/secondary/NoteListPage/index.tsx

@ -3,12 +3,18 @@ import type { TNoteListRef } from '@/components/NoteList'
import NormalFeed from '@/components/NormalFeed' import NormalFeed from '@/components/NormalFeed'
import { RefreshButton } from '@/components/RefreshButton' import { RefreshButton } from '@/components/RefreshButton'
import { Button } from '@/components/ui/button' import { Button } from '@/components/ui/button'
import { FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' import { SEARCHABLE_RELAY_URLS } from '@/constants'
import {
augmentSubRequestsWithFavoritesFastReadAndInbox,
getRelayUrlsWithFavoritesFastReadAndInbox,
mergeRelayUrlLayers
} from '@/lib/favorites-feed-relays'
import { normalizeUrl } from '@/lib/url' import { normalizeUrl } from '@/lib/url'
import SecondaryPageLayout from '@/layouts/SecondaryPageLayout' import SecondaryPageLayout from '@/layouts/SecondaryPageLayout'
import { toProfileList } from '@/lib/link' import { toProfileList } from '@/lib/link'
import { fetchPubkeysFromDomain, getWellKnownNip05Url } from '@/lib/nip05' import { fetchPubkeysFromDomain, getWellKnownNip05Url } from '@/lib/nip05'
import { usePrimaryNoteView, useSecondaryPage } from '@/PageManager' import { usePrimaryNoteView, useSecondaryPage } from '@/PageManager'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider' import { useNostr } from '@/providers/NostrProvider'
import { useInterestList } from '@/providers/InterestListProvider' import { useInterestList } from '@/providers/InterestListProvider'
import client from '@/services/client.service' import client from '@/services/client.service'
@ -29,6 +35,7 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
const bumpFeed = useCallback(() => feedRef.current?.refresh(), []) const bumpFeed = useCallback(() => feedRef.current?.refresh(), [])
const { push } = useSecondaryPage() const { push } = useSecondaryPage()
const { relayList, pubkey } = useNostr() const { relayList, pubkey } = useNostr()
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const { isSubscribed, subscribe } = useInterestList() const { isSubscribed, subscribe } = useInterestList()
const [title, setTitle] = useState<React.ReactNode>(null) const [title, setTitle] = useState<React.ReactNode>(null)
const [controls, setControls] = useState<React.ReactNode>(null) const [controls, setControls] = useState<React.ReactNode>(null)
@ -84,7 +91,11 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
setSubRequests([ setSubRequests([
{ {
filter: { '#t': [hashtag], ...(kinds.length > 0 ? { kinds } : {}) }, filter: { '#t': [hashtag], ...(kinds.length > 0 ? { kinds } : {}) },
urls: FAST_READ_RELAY_URLS urls: getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
} }
]) ])
// Set controls for hashtag subscribe button - check subscription status // Set controls for hashtag subscribe button - check subscription status
@ -122,10 +133,17 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
setSubRequests([ setSubRequests([
{ {
filter: { '#I': [externalContentId], ...(kinds.length > 0 ? { kinds } : {}) }, filter: { '#I': [externalContentId], ...(kinds.length > 0 ? { kinds } : {}) },
urls: Array.from(new Set([ urls: mergeRelayUrlLayers(
...FAST_READ_RELAY_URLS.map(url => normalizeUrl(url) || url), [
...(relayList?.write || []).map(url => normalizeUrl(url) || url) getRelayUrlsWithFavoritesFastReadAndInbox(
])) favoriteRelays,
blockedRelays,
relayList?.read ?? []
),
(relayList?.write || []).map((url) => normalizeUrl(url) || url).filter(Boolean) as string[]
],
blockedRelays
)
} }
]) ])
return return
@ -149,7 +167,15 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
domain domain
}) })
if (pubkeys.length) { if (pubkeys.length) {
setSubRequests(await client.generateSubRequestsForPubkeys(pubkeys, pubkey)) const raw = await client.generateSubRequestsForPubkeys(pubkeys, pubkey)
setSubRequests(
augmentSubRequestsWithFavoritesFastReadAndInbox(
raw,
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
)
setControls( setControls(
<Button <Button
variant="ghost" variant="ghost"
@ -181,7 +207,11 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
setSubRequests([ setSubRequests([
{ {
filter, filter,
urls: FAST_READ_RELAY_URLS urls: getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
} }
]) ])
} }
@ -191,7 +221,18 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
// Advanced search parameters removed // Advanced search parameters removed
// Note: Only hashtag (t=) and kind (k=) URL parameters are supported // Note: Only hashtag (t=) and kind (k=) URL parameters are supported
// Date searches, pubkey filters, and event filters removed - not supported // Date searches, pubkey filters, and event filters removed - not supported
}, [pubkey, relayList, handleSubscribeHashtag, push, t, isSubscribed, subscribe, client]) }, [
pubkey,
relayList,
favoriteRelays,
blockedRelays,
handleSubscribeHashtag,
push,
t,
isSubscribed,
subscribe,
client
])
// Initialize on mount // Initialize on mount
useEffect(() => { useEffect(() => {

24
src/providers/FeedProvider.tsx

@ -1,4 +1,5 @@
import { DEFAULT_FAVORITE_RELAYS } from '@/constants' import { DEFAULT_FAVORITE_RELAYS } from '@/constants'
import { getFavoritesFeedRelayUrls } from '@/lib/favorites-feed-relays'
import { getRelaySetFromEvent } from '@/lib/event-metadata' import { getRelaySetFromEvent } from '@/lib/event-metadata'
import logger from '@/lib/logger' import logger from '@/lib/logger'
import { isWebsocketUrl, normalizeUrl } from '@/lib/url' import { isWebsocketUrl, normalizeUrl } from '@/lib/url'
@ -113,13 +114,8 @@ export function FeedProvider({ children }: { children: React.ReactNode }) {
return return
} }
if (feedType === 'all-favorites') { if (feedType === 'all-favorites') {
// Filter out blocked relays const finalRelays = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
const visibleRelays = favoriteRelays.filter(relay => !blockedRelays.includes(relay)) logger.debug('Switching to all-favorites, finalRelays:', finalRelays)
// If no visible relays, fall back to default favorite relays
const finalRelays = visibleRelays.length > 0 ? visibleRelays : DEFAULT_FAVORITE_RELAYS
logger.debug('Switching to all-favorites, favoriteRelays:', visibleRelays, 'finalRelays:', finalRelays)
const newFeedInfo = { feedType } const newFeedInfo = { feedType }
setFeedInfo(newFeedInfo) setFeedInfo(newFeedInfo)
feedInfoRef.current = newFeedInfo feedInfoRef.current = newFeedInfo
@ -163,18 +159,17 @@ export function FeedProvider({ children }: { children: React.ReactNode }) {
logger.debug('FeedProvider: favoriteRelays is empty, using defaults') logger.debug('FeedProvider: favoriteRelays is empty, using defaults')
} }
// Get first visible (non-blocked) favorite relay as default const favoritesFeedRelays = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
const visibleRelays = favoriteRelays.filter(relay => !blockedRelays.includes(relay))
let feedInfo: TFeedInfo = { let feedInfo: TFeedInfo = {
feedType: 'relay', feedType: 'relay',
id: visibleRelays[0] ?? DEFAULT_FAVORITE_RELAYS[0] id: favoritesFeedRelays[0] ?? DEFAULT_FAVORITE_RELAYS[0]
} }
// Ensure we always have a valid relay ID // Ensure we always have a valid relay ID
if (!feedInfo.id) { if (!feedInfo.id) {
feedInfo.id = DEFAULT_FAVORITE_RELAYS[0] feedInfo.id = DEFAULT_FAVORITE_RELAYS[0]
} }
logger.debug('Initial feedInfo setup:', { visibleRelays, favoriteRelays, blockedRelays, feedInfo }) logger.debug('Initial feedInfo setup:', { favoritesFeedRelays, favoriteRelays, blockedRelays, feedInfo })
if (pubkey) { if (pubkey) {
const storedFeedInfo = storage.getFeedInfo(pubkey) const storedFeedInfo = storage.getFeedInfo(pubkey)
@ -192,7 +187,7 @@ export function FeedProvider({ children }: { children: React.ReactNode }) {
// Check if the stored relay is blocked, if so use first visible relay instead // Check if the stored relay is blocked, if so use first visible relay instead
if (feedInfo.id && blockedRelays.includes(feedInfo.id)) { if (feedInfo.id && blockedRelays.includes(feedInfo.id)) {
logger.component('FeedProvider', 'Stored relay is blocked, using first visible relay instead') logger.component('FeedProvider', 'Stored relay is blocked, using first visible relay instead')
feedInfo.id = visibleRelays[0] ?? DEFAULT_FAVORITE_RELAYS[0] feedInfo.id = favoritesFeedRelays[0] ?? DEFAULT_FAVORITE_RELAYS[0]
} }
logger.component('FeedProvider', 'Initial relay setup, calling switchFeed', { relayId: feedInfo.id }) logger.component('FeedProvider', 'Initial relay setup, calling switchFeed', { relayId: feedInfo.id })
return await switchFeed('relay', { relay: feedInfo.id }) return await switchFeed('relay', { relay: feedInfo.id })
@ -219,8 +214,7 @@ export function FeedProvider({ children }: { children: React.ReactNode }) {
// Update relay URLs when favoriteRelays change and we're in all-favorites mode // Update relay URLs when favoriteRelays change and we're in all-favorites mode
useEffect(() => { useEffect(() => {
if (feedInfo.feedType !== 'all-favorites') return if (feedInfo.feedType !== 'all-favorites') return
const visibleRelays = favoriteRelays.filter((relay) => !blockedRelays.includes(relay)) const finalRelays = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
const finalRelays = visibleRelays.length > 0 ? visibleRelays : DEFAULT_FAVORITE_RELAYS
logger.debug('Updating relay URLs for all-favorites:', finalRelays) logger.debug('Updating relay URLs for all-favorites:', finalRelays)
setRelayUrls(finalRelays) setRelayUrls(finalRelays)
}, [feedInfo.feedType, favoriteRelays, blockedRelays]) }, [feedInfo.feedType, favoriteRelays, blockedRelays])

252
src/services/client-replaceable-events.service.ts

@ -436,8 +436,8 @@ export class ReplaceableEventService {
// For metadata with a logged-in user, merge defaults with {@link buildComprehensiveRelayList}: inboxes (read), // For metadata with a logged-in user, merge defaults with {@link buildComprehensiveRelayList}: inboxes (read),
// local/cache relays (10432), favorite relays (10012), plus profile + fast read — same idea as favorites feed // local/cache relays (10432), favorite relays (10012), plus profile + fast read — same idea as favorites feed
// / inbox-scoped discovery without per-author relay list fetches. // / inbox-scoped discovery without per-author relay list fetches.
// Following's Favorites (Explore): kind 10012 batch uses PROFILE_FETCH_RELAY_URLS + viewer's own relays only // Following's Favorites (Explore): kind 10012 batch uses {@link buildExploreProfileAndUserRelayList}
// (no FAST_READ), so outbox data is queried where the user actually reads + profile-index relays. // (profile + FAST_READ + viewer read/write/local when logged in).
let relayUrls: string[] let relayUrls: string[]
if (kind === kinds.Metadata) { if (kind === kinds.Metadata) {
const userPk = client.pubkey const userPk = client.pubkey
@ -737,174 +737,134 @@ export class ReplaceableEventService {
const profileEvent = await this.fetchReplaceableEvent(pubkey, kinds.Metadata, undefined, []) const profileEvent = await this.fetchReplaceableEvent(pubkey, kinds.Metadata, undefined, [])
if (profileEvent) { if (profileEvent) {
logger.debug('[ReplaceableEventService] Profile found with relay hints + default relays', { logger.debug('[ReplaceableEventService] Profile found via cache / default relays (DataLoader)', {
pubkey, pubkey,
eventId: profileEvent.id eventId: profileEvent.id
}) })
await this.indexProfile(profileEvent) await this.indexProfile(profileEvent)
return profileEvent return profileEvent
} }
// Step 2: Only fetch author's relay list as fallback if we have relay hints from bech32 // Step 2: Only after cache + default relays miss — NIP-65 relay list (timeout-capped), then hints + outbox/inbox + defaults.
// This prevents creating many individual subscriptions when profiles aren't found logger.debug('[ReplaceableEventService] Step 2: Fetching author relay list as fallback', {
// If we have relay hints, it's worth trying author relays. Otherwise, Step 1 should be sufficient. pubkey,
if (relayHints.length > 0) { relayHintCount: relayHints.length
logger.debug('[ReplaceableEventService] Step 2: Profile not found, but we have relay hints - fetching author relay list as fallback', { })
let authorRelayList: { read?: string[]; write?: string[] } | null = null
try {
const relayListPromise = client.fetchRelayList(pubkey)
const timeoutPromise = new Promise<null>((resolve) => {
setTimeout(() => {
logger.warn('[ReplaceableEventService] fetchRelayList timeout, giving up', { pubkey })
resolve(null)
}, 2000)
})
authorRelayList = await Promise.race([relayListPromise, timeoutPromise])
} catch (error) {
logger.error('[ReplaceableEventService] Failed to fetch author relay list', {
pubkey, pubkey,
relayHintCount: relayHints.length error: error instanceof Error ? error.message : String(error)
}) })
}
let authorRelayList: { read?: string[]; write?: string[] } | null = null
try { const authorRelays = authorRelayList
const relayListStartTime = Date.now() ? [
// Add timeout to prevent hanging - 2 seconds max
const relayListPromise = client.fetchRelayList(pubkey)
const timeoutPromise = new Promise<null>((resolve) => {
setTimeout(() => {
logger.warn('[ReplaceableEventService] fetchRelayList timeout, giving up', {
pubkey
})
resolve(null)
}, 2000)
})
authorRelayList = await Promise.race([relayListPromise, timeoutPromise])
const relayListTime = Date.now() - relayListStartTime
logger.debug('[ReplaceableEventService] Author relay list fetched', {
pubkey,
hasRelayList: !!authorRelayList,
fetchTime: `${relayListTime}ms`
})
} catch (error) {
logger.error('[ReplaceableEventService] Failed to fetch author relay list', {
pubkey,
error: error instanceof Error ? error.message : String(error)
})
}
// Step 3: Try with relay hints + author's relays if we got them
// CRITICAL: Always include relay hints first (highest priority), then author relays, then defaults
if (authorRelayList) {
const authorRelays = [
...(authorRelayList.write || []).slice(0, 10), ...(authorRelayList.write || []).slice(0, 10),
...(authorRelayList.read || []).slice(0, 10) ...(authorRelayList.read || []).slice(0, 10)
] ]
// Relay hints first (highest priority), then author relays, then defaults : []
const allRelays = [...new Set([
...relayHints, // Relay hints from bech32 (highest priority) const expandedRelays = [
...authorRelays, // Author's relays ...new Set([
...PROFILE_FETCH_RELAY_URLS, // Default profile relays ...relayHints,
...FAST_READ_RELAY_URLS // Fast read relays ...authorRelays,
])] ...PROFILE_FETCH_RELAY_URLS,
...FAST_READ_RELAY_URLS
logger.debug('[ReplaceableEventService] Step 3: Trying with relay hints + author relays', { ])
pubkey, ]
relayHintCount: relayHints.length,
authorRelayCount: authorRelays.length, const profileFromExpanded = await this.fetchReplaceableEvent(
totalRelayCount: allRelays.length pubkey,
}) kinds.Metadata,
undefined,
// Use fetchReplaceableEvent with relay hints + author's relays expandedRelays
const profileEventFromAuthorRelays = await this.fetchReplaceableEvent( )
pubkey, if (profileFromExpanded) {
kinds.Metadata, logger.debug('[ReplaceableEventService] Profile found after relay-list fallback', {
undefined, pubkey,
allRelays eventId: profileFromExpanded.id
)
if (profileEventFromAuthorRelays) {
logger.debug('[ReplaceableEventService] Profile found with relay hints + author relays', {
pubkey,
eventId: profileEventFromAuthorRelays.id
})
await this.indexProfile(profileEventFromAuthorRelays)
return profileEventFromAuthorRelays
}
}
} else {
// No relay hints - Step 1 with default relays should be sufficient
// Skip Step 2/3 to avoid creating individual subscriptions
logger.debug('[ReplaceableEventService] Profile not found, but no relay hints - skipping author relay fallback to avoid individual subscriptions', {
pubkey
}) })
await this.indexProfile(profileFromExpanded)
return profileFromExpanded
} }
// Step 3: Comprehensive search across ALL available relays before giving up // Step 3: Last resort — broad relay query (timeout-bounded in query layer)
// OPTIMIZATION: Skip comprehensive search for batch profile fetches (when called from DataLoader) logger.debug('[ReplaceableEventService] Step 3: Comprehensive relay query (last resort)', { pubkey })
// Comprehensive search is expensive (10s timeout) and should only be used for individual profile fetches try {
// when user explicitly navigates to a profile page. For feed rendering, missing profiles are acceptable. const userPubkey = client.pubkey
// Only run comprehensive search if we have relay hints (suggesting user intent to find this specific profile) const comprehensiveRelays = await buildComprehensiveRelayList({
if (relayHints.length > 0) { authorPubkey: pubkey,
logger.debug('[ReplaceableEventService] Step 3: Profile not found, trying comprehensive relay list (all available relays)', { userPubkey: userPubkey || undefined,
relayHints: relayHints.length > 0 ? relayHints : undefined,
includeUserOwnRelays: true,
includeFavoriteRelays: true,
includeProfileFetchRelays: true,
includeFastReadRelays: true,
includeFastWriteRelays: true,
includeSearchableRelays: true,
includeLocalRelays: true
})
logger.debug('[ReplaceableEventService] Comprehensive relay list built', {
pubkey, pubkey,
hasRelayHints: relayHints.length > 0 relayCount: comprehensiveRelays.length,
relays: comprehensiveRelays.slice(0, 10)
}) })
try { if (comprehensiveRelays.length > 0) {
const userPubkey = client.pubkey const startTime = Date.now()
const comprehensiveRelays = await buildComprehensiveRelayList({ const events = await this.queryService.query(
authorPubkey: pubkey, comprehensiveRelays,
userPubkey: userPubkey || undefined, {
relayHints: relayHints.length > 0 ? relayHints : undefined,
includeUserOwnRelays: true, // Include user's read/write relays
includeFavoriteRelays: true, // Include user's favorite relays (kind 10012)
includeProfileFetchRelays: true, // Include PROFILE_FETCH_RELAY_URLS
includeFastReadRelays: true, // Include FAST_READ_RELAY_URLS
includeFastWriteRelays: true, // Include FAST_WRITE_RELAY_URLS
includeSearchableRelays: true, // Include SEARCHABLE_RELAY_URLS
includeLocalRelays: true // Include local/cache relays
})
logger.debug('[ReplaceableEventService] Comprehensive relay list built', {
pubkey,
relayCount: comprehensiveRelays.length,
relays: comprehensiveRelays.slice(0, 10) // Log first 10 for debugging
})
if (comprehensiveRelays.length > 0) {
// Query the comprehensive relay list with reduced timeout for faster failure
const startTime = Date.now()
const events = await this.queryService.query(comprehensiveRelays, {
authors: [pubkey], authors: [pubkey],
kinds: [kinds.Metadata] kinds: [kinds.Metadata]
}, undefined, { },
undefined,
{
replaceableRace: true, replaceableRace: true,
eoseTimeout: 300, // Reduced from 500ms eoseTimeout: 300,
globalTimeout: 5000 // Reduced from 10000ms to prevent 10s waits globalTimeout: 5000
})
const queryTime = Date.now() - startTime
logger.debug('[ReplaceableEventService] Comprehensive search completed', {
pubkey,
eventCount: events.length,
queryTime: `${queryTime}ms`,
relayCount: comprehensiveRelays.length
})
if (events.length > 0) {
const sortedEvents = events.sort((a, b) => b.created_at - a.created_at)
const profileEvent = sortedEvents[0]
logger.debug('[ReplaceableEventService] Profile found via comprehensive search', {
pubkey,
eventId: profileEvent.id
})
await this.indexProfile(profileEvent)
return profileEvent
} }
} )
} catch (error) { const queryTime = Date.now() - startTime
logger.error('[ReplaceableEventService] Comprehensive search failed', {
logger.debug('[ReplaceableEventService] Comprehensive search completed', {
pubkey, pubkey,
error: error instanceof Error ? error.message : String(error) eventCount: events.length,
queryTime: `${queryTime}ms`,
relayCount: comprehensiveRelays.length
}) })
// Continue to return undefined below
if (events.length > 0) {
const sortedEvents = events.sort((a, b) => b.created_at - a.created_at)
const found = sortedEvents[0]!
logger.debug('[ReplaceableEventService] Profile found via comprehensive search', {
pubkey,
eventId: found.id
})
await this.indexProfile(found)
return found
}
} }
} else { } catch (error) {
logger.debug('[ReplaceableEventService] Skipping comprehensive search (no relay hints, likely batch fetch)', { logger.error('[ReplaceableEventService] Comprehensive search failed', {
pubkey pubkey,
error: error instanceof Error ? error.message : String(error)
}) })
} }
logger.warn('[ReplaceableEventService] Profile not found after trying all relays (including comprehensive search)', { logger.warn('[ReplaceableEventService] Profile not found after cache, relay-list fallback, and comprehensive search', {
pubkey, pubkey,
triedRelayHints: relayHints.length > 0 triedRelayHints: relayHints.length > 0
}) })

383
src/services/client.service.ts

@ -49,19 +49,16 @@ import { AbstractRelay } from 'nostr-tools/abstract-relay'
import indexedDb from './indexed-db.service' import indexedDb from './indexed-db.service'
import nip66Service from './nip66.service' import nip66Service from './nip66.service'
import { QueryService } from './client-query.service' import { QueryService } from './client-query.service'
/** Live timeline REQ: dead relays fail fast; EOSE caps “connected but silent” relays. */
const SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS = 2800
const SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS = 4800
import { EventService } from './client-events.service' import { EventService } from './client-events.service'
import { ReplaceableEventService } from './client-replaceable-events.service' import { ReplaceableEventService } from './client-replaceable-events.service'
import { MacroService, createBookstrService } from './client-macro.service' import { MacroService, createBookstrService } from './client-macro.service'
type TTimelineRef = [string, number] type TTimelineRef = [string, number]
/**
* Timeline bootstrap used to await up to `filter.limit` IndexedDB reads before opening a live REQ,
* which blocked first paint for many seconds. We only prefetch this many newest refs; the subscription
* streams the rest immediately.
*/
const TIMELINE_CACHE_PREFETCH_CAP = 48
class ClientService extends EventTarget { class ClientService extends EventTarget {
static instance: ClientService static instance: ClientService
@ -861,18 +858,13 @@ class ClientService extends EventTarget {
{ {
startLogin, startLogin,
needSort = true, needSort = true,
useCache = false,
omitDefaultSinceWhenUseCache = false,
firstRelayResultGraceMs = FIRST_RELAY_RESULT_GRACE_MS firstRelayResultGraceMs = FIRST_RELAY_RESULT_GRACE_MS
}: { }: {
startLogin?: () => void startLogin?: () => void
needSort?: boolean needSort?: boolean
useCache?: boolean
/** When useCache is true but there are no timeline refs yet, skip the default 24h `since` so REQ stays unbounded (spell feeds / catalog). */
omitDefaultSinceWhenUseCache?: boolean
/** /**
* After the first live event before EOSE, wait this long then treat initial load as EOSE (query-style finalize). * Ignored by {@link ClientService.subscribeTimeline} (kept for compatibility). Initial completion is
* Spells pass {@link FIRST_RELAY_RESULT_GRACE_MS} explicitly; feeds may override. * aggregate relay EOSE only; per-event results stream via `onEvents` without faking EOSE.
*/ */
firstRelayResultGraceMs?: number firstRelayResultGraceMs?: number
} = {} } = {}
@ -882,30 +874,37 @@ class ClientService extends EventTarget {
let eventIdSet = new Set<string>() let eventIdSet = new Set<string>()
let events: NEvent[] = [] let events: NEvent[] = []
let eosedCount = 0 let eosedCount = 0
/** One merged buffer — slice using the largest child `limit` so a later child with a smaller limit cannot drop other relays’ events. */
const mergedTimelineLimit = Math.max(
500,
...subRequests.map(({ filter }) =>
typeof filter.limit === 'number' && filter.limit > 0 ? filter.limit : 0
)
)
/** First merged batch goes out synchronously so the list paints without waiting a frame. */ let outerFlushQueued = false
let outerMergedDelivered = false let outerFlushBump = 0
/** One React update per animation frame after the first paint — limits setEvents/profile churn. */ const scheduleOuterFlush = (immediate = false) => {
let outerFlushRaf: number | null = null const run = () => {
const scheduleOuterFlush = () => { outerFlushQueued = false
const snapshot = events.length ? [...events] : [] const snapshot = events.length ? [...events] : []
const allEosed = eosedCount >= requestCount const allEosed = eosedCount >= requestCount
if (!outerMergedDelivered && (snapshot.length > 0 || allEosed)) {
outerMergedDelivered = true
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
}
onEvents(snapshot, allEosed) onEvents(snapshot, allEosed)
}
if (immediate || eosedCount >= requestCount || events.length <= 1) {
outerFlushBump++
outerFlushQueued = false
run()
return return
} }
if (outerFlushRaf != null) { if (!outerFlushQueued) {
cancelAnimationFrame(outerFlushRaf) outerFlushQueued = true
const b = outerFlushBump
queueMicrotask(() => {
if (b !== outerFlushBump) return
run()
})
} }
outerFlushRaf = requestAnimationFrame(() => {
outerFlushRaf = null
onEvents(events.length ? [...events] : [], eosedCount >= requestCount)
})
} }
const subs = await Promise.all( const subs = await Promise.all(
@ -924,10 +923,12 @@ class ClientService extends EventTarget {
eventIdSet.add(evt.id) eventIdSet.add(evt.id)
events.push(evt) events.push(evt)
}) })
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit) events = events
.sort((a, b) => b.created_at - a.created_at)
.slice(0, mergedTimelineLimit)
eventIdSet = new Set(events.map((evt) => evt.id)) eventIdSet = new Set(events.map((evt) => evt.id))
scheduleOuterFlush() scheduleOuterFlush(!!_eosed)
}, },
onNew: (evt) => { onNew: (evt) => {
if (newEventIdSet.has(evt.id)) return if (newEventIdSet.has(evt.id)) return
@ -936,7 +937,7 @@ class ClientService extends EventTarget {
}, },
onClose onClose
}, },
{ startLogin, needSort, useCache, omitDefaultSinceWhenUseCache, firstRelayResultGraceMs } { startLogin, needSort, firstRelayResultGraceMs }
) )
}) })
) )
@ -944,18 +945,8 @@ class ClientService extends EventTarget {
const key = this.generateMultipleTimelinesKey(subRequests) const key = this.generateMultipleTimelinesKey(subRequests)
this.timelines[key] = subs.map((sub) => sub.timelineKey) this.timelines[key] = subs.map((sub) => sub.timelineKey)
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
onEvents(events.length ? [...events] : [], eosedCount >= requestCount)
}
return { return {
closer: () => { closer: () => {
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
}
onEvents = () => {} onEvents = () => {}
onNew = () => {} onNew = () => {}
subs.forEach((sub) => { subs.forEach((sub) => {
@ -1102,7 +1093,7 @@ class ClientService extends EventTarget {
await that.queryService.acquireSubSlot(relayKey) await that.queryService.acquireSubSlot(relayKey)
let relay: AbstractRelay let relay: AbstractRelay
try { try {
relay = await that.pool.ensureRelay(url, { connectionTimeout: 5000 }) relay = await that.pool.ensureRelay(url, { connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS })
} catch (err) { } catch (err) {
that.queryService.releaseSubSlot(relayKey) that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err)) handleClose(i, (err as Error)?.message ?? String(err))
@ -1136,7 +1127,9 @@ class ClientService extends EventTarget {
// resubscribe on a fresh connection from ensureRelay (fixes SendingOnClosedConnection). // resubscribe on a fresh connection from ensureRelay (fixes SendingOnClosedConnection).
let liveRelay: AbstractRelay let liveRelay: AbstractRelay
try { try {
liveRelay = await that.pool.ensureRelay(url, { connectionTimeout: 5000 }) liveRelay = await that.pool.ensureRelay(url, {
connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS
})
} catch (err) { } catch (err) {
that.queryService.releaseSubSlot(relayKey) that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err)) handleClose(i, (err as Error)?.message ?? String(err))
@ -1159,7 +1152,7 @@ class ClientService extends EventTarget {
handleClose(i, reason2) handleClose(i, reason2)
}, },
alreadyHaveEvent: localAlreadyHaveEvent, alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: 10_000 eoseTimeout: SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS
}) })
subs.push({ subs.push({
relayKey, relayKey,
@ -1184,7 +1177,7 @@ class ClientService extends EventTarget {
handleClose(i, reason) handleClose(i, reason)
}, },
alreadyHaveEvent: localAlreadyHaveEvent, alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: 10_000 eoseTimeout: SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS
}) })
subs.push({ subs.push({
relayKey, relayKey,
@ -1236,23 +1229,19 @@ class ClientService extends EventTarget {
{ {
startLogin, startLogin,
needSort = true, needSort = true,
useCache = false, /** @deprecated No longer used; streaming does not fake EOSE (see flushStreamingSnapshot). Kept for call-site compatibility. */
omitDefaultSinceWhenUseCache = false, firstRelayResultGraceMs: _unusedFirstRelayGraceMs = FIRST_RELAY_RESULT_GRACE_MS
firstRelayResultGraceMs = FIRST_RELAY_RESULT_GRACE_MS
}: { }: {
startLogin?: () => void startLogin?: () => void
needSort?: boolean needSort?: boolean
useCache?: boolean
omitDefaultSinceWhenUseCache?: boolean
firstRelayResultGraceMs?: number firstRelayResultGraceMs?: number
} = {} } = {}
) { ) {
void _unusedFirstRelayGraceMs
const relays = Array.from(new Set(urls)) const relays = Array.from(new Set(urls))
const key = this.generateTimelineKey(relays, filter) const key = this.generateTimelineKey(relays, filter)
let timeline = this.timelines[key] let timeline = this.timelines[key]
// CRITICAL FIX: Always initialize timeline object, even when useCache is false
// This ensures refs are always available for pagination tracking
if (!timeline || Array.isArray(timeline)) { if (!timeline || Array.isArray(timeline)) {
this.timelines[key] = { this.timelines[key] = {
refs: [], refs: [],
@ -1261,207 +1250,83 @@ class ClientService extends EventTarget {
} }
timeline = this.timelines[key] timeline = this.timelines[key]
} }
let cachedEvents: NEvent[] = []
let since: number | undefined
const oneDayAgo = dayjs().subtract(24, 'hours').unix()
// eslint-disable-next-line @typescript-eslint/no-this-alias // eslint-disable-next-line @typescript-eslint/no-this-alias
const that = this const that = this
let events: NEvent[] = [] let events: NEvent[] = []
let eosedAt: number | null = null let eosedAt: number | null = null
let initialBatchScheduled = false
let lastDeliveredCount = 0
let progressiveIntervalId: ReturnType<typeof setInterval> | null = null
let firstRelayResultGraceTimer: ReturnType<typeof setTimeout> | null = null
const PROGRESSIVE_INTERVAL_MS = 100 // Backup tick while relays stream without new onevent bursts
const MIN_NEW_EVENTS_AFTER_FIRST = 1
const mergeTimelineLiveAndCache = (): NEvent[] => {
const sortedLive = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
if (!needSort || !useCache || cachedEvents.length === 0) {
return sortedLive
}
const byId = new Map<string, NEvent>()
for (const e of cachedEvents) {
byId.set(e.id, e)
}
for (const e of sortedLive) {
byId.set(e.id, e)
}
return [...byId.values()].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
}
const deliverProgressive = () => { /**
* Stream every matching event to the UI immediately. Do **not** use a "grace EOSE" timer: it set `eosedAt`
* to wall-clock time while relays were still returning historical rows, so `evt.created_at > eosedAt` was
* almost always false and later relay results were dropped until the feed looked empty/slow.
* Real initial completion is only when {@link ClientService.subscribe} fires aggregate `oneose` (all relays).
*/
let streamFlushMicrotask = false
const flushStreamingSnapshot = () => {
if (eosedAt) return if (eosedAt) return
const combined = mergeTimelineLiveAndCache() const emit = () => {
if (combined.length === 0) return streamFlushMicrotask = false
const newEventCount = combined.length - lastDeliveredCount if (eosedAt) return
const isFirstPaint = lastDeliveredCount === 0 if (needSort) {
const shouldDeliver = const sorted = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
isFirstPaint onEvents(sorted, false)
? combined.length >= 1 } else {
: newEventCount >= MIN_NEW_EVENTS_AFTER_FIRST || combined.length >= filter.limit * 0.5 onEvents([...events], false)
if (shouldDeliver) {
lastDeliveredCount = combined.length
onEvents(combined, false)
}
}
// CRITICAL: Only use cache if explicitly enabled (for profile timelines)
// Main feeds (home, notifications) should always fetch fresh from relays
if (useCache && timeline && !Array.isArray(timeline) && timeline.refs.length && needSort) {
const refs = timeline.refs
const prefetchN = Math.min(refs.length, filter.limit, TIMELINE_CACHE_PREFETCH_CAP)
// Spell / catalog feeds: refs already carry created_at — set `since` immediately and open the live REQ
// without awaiting dozens of IndexedDB reads (that delayed first events by seconds).
if (omitDefaultSinceWhenUseCache && refs[0]![1] >= oneDayAgo) {
since = refs[0]![1] + 1
void (async () => {
try {
const loaded = (
await Promise.all(refs.slice(0, prefetchN).map(([id]) => that.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (!loaded.length) return
loaded.sort((a, b) => b.created_at - a.created_at)
const recent = loaded.filter((evt) => evt.created_at >= oneDayAgo)
if (!recent.length) return
cachedEvents = recent
deliverProgressive()
} catch {
// ignore
}
})()
} else if (!omitDefaultSinceWhenUseCache) {
cachedEvents = (
await Promise.all(refs.slice(0, prefetchN).map(([id]) => this.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (cachedEvents.length) {
cachedEvents.sort((a, b) => b.created_at - a.created_at)
const recentCachedEvents = cachedEvents.filter((evt) => evt.created_at >= oneDayAgo)
if (recentCachedEvents.length > 0) {
onEvents([...recentCachedEvents], false)
since = recentCachedEvents[0].created_at + 1
} else {
cachedEvents = []
}
} }
} }
} if (events.length <= 1) {
streamFlushMicrotask = false
// CRITICAL FIX: Only set since parameter if caching is enabled emit()
// When useCache is false, we want to stream raw from relays without time restrictions return
// This allows relay feeds to show all available events, not just recent ones }
if (!since && needSort && useCache && !omitDefaultSinceWhenUseCache) { if (!streamFlushMicrotask) {
since = oneDayAgo streamFlushMicrotask = true
queueMicrotask(emit)
}
} }
const handleTimelineEose = (eosed: boolean) => { const handleTimelineEose = (eosed: boolean) => {
if (eosed && eosedAt != null) return if (!eosed) return
if (eosedAt != null) return
eosedAt = dayjs().unix()
if (eosed && !eosedAt) {
if (firstRelayResultGraceTimer != null) {
clearTimeout(firstRelayResultGraceTimer)
firstRelayResultGraceTimer = null
}
eosedAt = dayjs().unix()
if (progressiveIntervalId) {
clearInterval(progressiveIntervalId)
progressiveIntervalId = null
}
}
// (algo feeds) no need to sort and cache
if (!needSort) { if (!needSort) {
return onEvents([...events], !!eosedAt) return onEvents([...events], true)
}
if (!eosed) {
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
// Only include cached events if caching is enabled
return onEvents([...(useCache ? events.concat(cachedEvents).slice(0, filter.limit) : events)], false)
} }
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit) events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
// Only update timeline cache if caching is enabled const tl = that.timelines[key]
if (useCache) { if (!tl || Array.isArray(tl)) {
const timeline = that.timelines[key] that.timelines[key] = {
// no cache yet refs: events.map((evt) => [evt.id, evt.created_at]),
if (!timeline || Array.isArray(timeline) || !timeline.refs.length) { filter,
that.timelines[key] = { urls
refs: events.map((evt) => [evt.id, evt.created_at]),
filter,
urls
}
return onEvents([...events], true)
} }
} else {
// Prevent concurrent requests from duplicating the same event const firstRefCreatedAt = tl.refs.length > 0 ? tl.refs[0][1] : dayjs().unix()
const firstRefCreatedAt = timeline.refs[0][1]
const newRefs = events const newRefs = events
.filter((evt) => evt.created_at > firstRefCreatedAt) .filter((evt) => evt.created_at > firstRefCreatedAt)
.map((evt) => [evt.id, evt.created_at] as TTimelineRef) .map((evt) => [evt.id, evt.created_at] as TTimelineRef)
if (events.length >= filter.limit) { if (events.length >= filter.limit) {
// if new refs are more than limit, means old refs are too old, replace them tl.refs = newRefs
timeline.refs = newRefs
onEvents([...events], true)
} else {
// merge new refs with old refs
timeline.refs = newRefs.concat(timeline.refs)
onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], true)
}
} else {
// No caching for initial load, but still need to initialize timeline.refs for loadMoreTimeline pagination
const timeline = that.timelines[key]
if (!timeline || Array.isArray(timeline)) {
// Initialize timeline with refs for pagination (even though we don't use cache for initial load)
that.timelines[key] = {
refs: events.map((evt) => [evt.id, evt.created_at]),
filter,
urls
}
} else { } else {
// Update refs with new events for pagination tracking tl.refs = newRefs.concat(tl.refs)
const firstRefCreatedAt = timeline.refs.length > 0 ? timeline.refs[0][1] : dayjs().unix()
const newRefs = events
.filter((evt) => evt.created_at > firstRefCreatedAt)
.map((evt) => [evt.id, evt.created_at] as TTimelineRef)
if (events.length >= filter.limit) {
timeline.refs = newRefs
} else {
timeline.refs = newRefs.concat(timeline.refs)
}
} }
// Return events directly (no cache concatenation)
onEvents([...events], true)
} }
onEvents([...events], true)
} }
const subCloser = this.subscribe(relays, since ? { ...filter, since } : filter, { const subCloser = this.subscribe(relays, filter, {
startLogin, startLogin,
onevent: (evt: NEvent) => { onevent: (evt: NEvent) => {
that.addEventToCache(evt) that.addEventToCache(evt)
// not eosed yet, push to events // not eosed yet, push to events
if (!eosedAt) { if (!eosedAt) {
events.push(evt) events.push(evt)
if (firstRelayResultGraceTimer == null) { flushStreamingSnapshot()
firstRelayResultGraceTimer = setTimeout(() => {
firstRelayResultGraceTimer = null
handleTimelineEose(true)
}, firstRelayResultGraceMs)
}
// Deliver on every live event before EOSE (plus interval as a safety net)
if (needSort && events.length >= 1) {
if (!initialBatchScheduled) {
initialBatchScheduled = true
if (!progressiveIntervalId) {
progressiveIntervalId = setInterval(deliverProgressive, PROGRESSIVE_INTERVAL_MS)
}
}
deliverProgressive()
}
return return
} }
// new event // new event
@ -1469,7 +1334,7 @@ class ClientService extends EventTarget {
onNew(evt) onNew(evt)
} }
// Update timeline refs for pagination tracking (even when useCache is false) // Update timeline refs for pagination tracking
// This is needed for loadMoreTimeline to know what events have been loaded // This is needed for loadMoreTimeline to know what events have been loaded
const timeline = that.timelines[key] const timeline = that.timelines[key]
if (!timeline || Array.isArray(timeline)) { if (!timeline || Array.isArray(timeline)) {
@ -1506,14 +1371,6 @@ class ClientService extends EventTarget {
return { return {
timelineKey: key, timelineKey: key,
closer: () => { closer: () => {
if (firstRelayResultGraceTimer != null) {
clearTimeout(firstRelayResultGraceTimer)
firstRelayResultGraceTimer = null
}
if (progressiveIntervalId) {
clearInterval(progressiveIntervalId)
progressiveIntervalId = null
}
onEvents = () => {} onEvents = () => {}
onNew = () => {} onNew = () => {}
subCloser.close() subCloser.close()
@ -1525,86 +1382,38 @@ class ClientService extends EventTarget {
const timeline = this.timelines[key] const timeline = this.timelines[key]
if (!timeline || Array.isArray(timeline)) return [] if (!timeline || Array.isArray(timeline)) return []
const { filter, urls, refs } = timeline const { filter, urls } = timeline
// Try to load from cache if refs exist
let cachedEvents: NEvent[] = []
if (refs && refs.length > 0) {
const startIdx = refs.findIndex(([, createdAt]) => createdAt <= until)
if (startIdx >= 0) {
cachedEvents = (
await Promise.all(
refs.slice(startIdx, startIdx + limit).map(([id]) => this.eventService.fetchEvent(id))
)
).filter((evt): evt is NEvent => !!evt) as NEvent[]
}
if (cachedEvents.length >= limit) {
return cachedEvents
}
}
// CRITICAL FIX: Always query relay for more events, even if we have some cached
// This ensures we continue fetching from relays when scrolling, not just from cache
// Calculate the correct until timestamp based on what we already have
until = cachedEvents.length ? cachedEvents[cachedEvents.length - 1].created_at - 1 : until
limit = limit - cachedEvents.length
// CRITICAL: Ensure we always query the relay, even if limit is small
// This prevents the feed from stopping when we have few cached events
if (limit <= 0) {
limit = 100 // Minimum limit to ensure we get more events from relay
}
// Query relay for more events with proper until parameter for pagination
let events = await this.query(urls, { ...filter, until, limit }) let events = await this.query(urls, { ...filter, until, limit })
events.forEach((evt) => { events.forEach((evt) => {
this.addEventToCache(evt) this.addEventToCache(evt)
}) })
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, limit) events = events.sort((a, b) => b.created_at - a.created_at).slice(0, limit)
// Update refs for pagination tracking (even when useCache is false)
// Initialize refs if empty
if (!timeline.refs) { if (!timeline.refs) {
timeline.refs = [] timeline.refs = []
} }
// Prevent duplicate events in refs
const existingRefIds = new Set(timeline.refs.map(([id]) => id)) const existingRefIds = new Set(timeline.refs.map(([id]) => id))
const newRefs: TTimelineRef[] = [] const newRefs: TTimelineRef[] = []
// Add cached events to refs if not already present
for (const evt of cachedEvents) {
if (!existingRefIds.has(evt.id)) {
newRefs.push([evt.id, evt.created_at])
existingRefIds.add(evt.id)
}
}
// Add new events from relay to refs
for (const evt of events) { for (const evt of events) {
if (!existingRefIds.has(evt.id)) { if (!existingRefIds.has(evt.id)) {
newRefs.push([evt.id, evt.created_at]) newRefs.push([evt.id, evt.created_at])
existingRefIds.add(evt.id) existingRefIds.add(evt.id)
} }
} }
// Sort new refs by created_at descending and merge with existing refs
newRefs.sort((a, b) => b[1] - a[1]) newRefs.sort((a, b) => b[1] - a[1])
// Merge with existing refs, maintaining sorted order
if (timeline.refs.length > 0) { if (timeline.refs.length > 0) {
const lastRefCreatedAt = timeline.refs[timeline.refs.length - 1][1] const lastRefCreatedAt = timeline.refs[timeline.refs.length - 1][1]
// Only add events that are older than the last ref (for pagination)
const olderRefs = newRefs.filter(([, createdAt]) => createdAt < lastRefCreatedAt) const olderRefs = newRefs.filter(([, createdAt]) => createdAt < lastRefCreatedAt)
timeline.refs.push(...olderRefs) timeline.refs.push(...olderRefs)
// Keep refs sorted
timeline.refs.sort((a, b) => b[1] - a[1]) timeline.refs.sort((a, b) => b[1] - a[1])
} else { } else {
// No existing refs, add all new refs
timeline.refs.push(...newRefs) timeline.refs.push(...newRefs)
} }
return [...cachedEvents, ...events] return events
} }
/** =========== Event =========== */ /** =========== Event =========== */

102
src/services/note-stats.service.ts

@ -2,7 +2,7 @@ import { ExtendedKind, FAST_READ_RELAY_URLS } from '@/constants'
import { getReplaceableCoordinateFromEvent, isReplaceableEvent } from '@/lib/event' import { getReplaceableCoordinateFromEvent, isReplaceableEvent } from '@/lib/event'
import { getZapInfoFromEvent } from '@/lib/event-metadata' import { getZapInfoFromEvent } from '@/lib/event-metadata'
import logger from '@/lib/logger' import logger from '@/lib/logger'
import { getEmojiInfosFromEmojiTags, tagNameEquals } from '@/lib/tag' import { getEmojiInfosFromEmojiTags, getFirstHexEventIdFromETags, tagNameEquals } from '@/lib/tag'
import { normalizeUrl } from '@/lib/url' import { normalizeUrl } from '@/lib/url'
import { eventService } from '@/services/client.service' import { eventService } from '@/services/client.service'
import { TEmoji } from '@/types' import { TEmoji } from '@/types'
@ -241,7 +241,18 @@ class NoteStatsService {
return eventId return eventId
} }
updateNoteStatsByEvents(events: Event[], originalEventAuthor?: string) { /**
* @param mergeOpts When the UI just published a single interaction, pass the note id the user acted on
* so stats merge even if `e` tag shape varies (extensions, multiple ancestors).
*/
updateNoteStatsByEvents(
events: Event[],
originalEventAuthor?: string,
mergeOpts?: {
interactionTargetNoteId?: string
replyParentNoteId?: string
}
) {
const updatedEventIdSet = new Set<string>() const updatedEventIdSet = new Set<string>()
// Process events in batches for better performance // Process events in batches for better performance
@ -249,7 +260,7 @@ class NoteStatsService {
for (let i = 0; i < events.length; i += batchSize) { for (let i = 0; i < events.length; i += batchSize) {
const batch = events.slice(i, i + batchSize) const batch = events.slice(i, i + batchSize)
batch.forEach((evt) => { batch.forEach((evt) => {
const updatedEventId = this.processEvent(evt, originalEventAuthor) const updatedEventId = this.processEvent(evt, originalEventAuthor, mergeOpts)
if (updatedEventId) { if (updatedEventId) {
updatedEventIdSet.add(updatedEventId) updatedEventIdSet.add(updatedEventId)
} }
@ -261,19 +272,25 @@ class NoteStatsService {
}) })
} }
private processEvent(evt: Event, originalEventAuthor?: string): string | undefined { private processEvent(
evt: Event,
originalEventAuthor?: string,
mergeOpts?: { interactionTargetNoteId?: string; replyParentNoteId?: string }
): string | undefined {
let updatedEventId: string | undefined let updatedEventId: string | undefined
if (evt.kind === kinds.Reaction) { if (evt.kind === kinds.Reaction) {
updatedEventId = this.addLikeByEvent(evt, originalEventAuthor) updatedEventId = this.addLikeByEvent(evt, originalEventAuthor, mergeOpts?.interactionTargetNoteId)
} else if (evt.kind === kinds.Repost) { } else if (evt.kind === kinds.Repost) {
updatedEventId = this.addRepostByEvent(evt, originalEventAuthor) updatedEventId = this.addRepostByEvent(evt, originalEventAuthor, mergeOpts?.interactionTargetNoteId)
} else if (evt.kind === kinds.Zap) { } else if (evt.kind === kinds.Zap) {
updatedEventId = this.addZapByEvent(evt, originalEventAuthor) updatedEventId = this.addZapByEvent(evt, originalEventAuthor)
} else if (evt.kind === kinds.ShortTextNote || evt.kind === ExtendedKind.COMMENT || evt.kind === ExtendedKind.VOICE_COMMENT) { } else if (evt.kind === kinds.ShortTextNote || evt.kind === ExtendedKind.COMMENT || evt.kind === ExtendedKind.VOICE_COMMENT) {
const isQuote = this.isQuoteByEvent(evt) const isQuote = this.isQuoteByEvent(evt)
if (isQuote) { if (isQuote) {
updatedEventId = this.addQuoteByEvent(evt, originalEventAuthor) updatedEventId = this.addQuoteByEvent(evt, originalEventAuthor)
} else if (mergeOpts?.replyParentNoteId) {
updatedEventId = this.addReplyByEvent(evt, originalEventAuthor, mergeOpts.replyParentNoteId)
} else { } else {
updatedEventId = this.addReplyByEvent(evt, originalEventAuthor) updatedEventId = this.addReplyByEvent(evt, originalEventAuthor)
} }
@ -284,8 +301,8 @@ class NoteStatsService {
return updatedEventId return updatedEventId
} }
private addLikeByEvent(evt: Event, originalEventAuthor?: string) { private addLikeByEvent(evt: Event, originalEventAuthor?: string, forcedTargetEventId?: string) {
const targetEventId = evt.tags.findLast(tagNameEquals('e'))?.[1] const targetEventId = forcedTargetEventId ?? getFirstHexEventIdFromETags(evt.tags)
if (!targetEventId) return if (!targetEventId) return
const old = this.noteStatsMap.get(targetEventId) || {} const old = this.noteStatsMap.get(targetEventId) || {}
@ -298,7 +315,14 @@ class NoteStatsService {
} }
let emoji: TEmoji | string = evt.content.trim() let emoji: TEmoji | string = evt.content.trim()
if (!emoji) return if (!emoji) {
const fromTags = getEmojiInfosFromEmojiTags(evt.tags)
if (fromTags.length) {
emoji = fromTags[0]
} else {
emoji = '+'
}
}
if (emoji.startsWith(':') && emoji.endsWith(':')) { if (emoji.startsWith(':') && emoji.endsWith(':')) {
const emojiInfos = getEmojiInfosFromEmojiTags(evt.tags) const emojiInfos = getEmojiInfosFromEmojiTags(evt.tags)
@ -331,8 +355,8 @@ class NoteStatsService {
return eventId return eventId
} }
private addRepostByEvent(evt: Event, originalEventAuthor?: string) { private addRepostByEvent(evt: Event, originalEventAuthor?: string, forcedTargetEventId?: string) {
const eventId = evt.tags.find(tagNameEquals('e'))?.[1] const eventId = forcedTargetEventId ?? getFirstHexEventIdFromETags(evt.tags)
if (!eventId) return if (!eventId) return
const old = this.noteStatsMap.get(eventId) || {} const old = this.noteStatsMap.get(eventId) || {}
@ -371,34 +395,36 @@ class NoteStatsService {
) )
} }
private addReplyByEvent(evt: Event, originalEventAuthor?: string) { private addReplyByEvent(evt: Event, originalEventAuthor?: string, forcedOriginalEventId?: string) {
let originalEventId: string | undefined let originalEventId: string | undefined = forcedOriginalEventId
if (evt.kind === ExtendedKind.COMMENT || evt.kind === ExtendedKind.VOICE_COMMENT) { if (!originalEventId) {
const eTag = evt.tags.find(tagNameEquals('e')) ?? evt.tags.find(tagNameEquals('E')) if (evt.kind === ExtendedKind.COMMENT || evt.kind === ExtendedKind.VOICE_COMMENT) {
originalEventId = eTag?.[1] const eTag = evt.tags.find(tagNameEquals('e')) ?? evt.tags.find(tagNameEquals('E'))
} else if (evt.kind === kinds.ShortTextNote) { originalEventId = eTag?.[1]
const parentETag = evt.tags.find(([tagName, , , marker]) => { } else if (evt.kind === kinds.ShortTextNote) {
return tagName === 'e' && (marker === 'reply' || marker === 'root') const parentETag = evt.tags.find(([tagName, , , marker]) => {
}) return tagName === 'e' && (marker === 'reply' || marker === 'root')
if (parentETag) { })
originalEventId = parentETag[1] if (parentETag) {
} else { originalEventId = parentETag[1]
const lastETag = evt.tags.findLast( } else {
([tagName, tagValue, , marker]) => const lastETag = evt.tags.findLast(
tagName === 'e' && ([tagName, tagValue, , marker]) =>
!!tagValue && tagName === 'e' &&
marker !== 'mention' !!tagValue &&
) marker !== 'mention'
if (lastETag) { )
originalEventId = lastETag[1] if (lastETag) {
originalEventId = lastETag[1]
}
} }
}
if (!originalEventId) {
if (!originalEventId) { const aTag = evt.tags.find(tagNameEquals('a'))
const aTag = evt.tags.find(tagNameEquals('a')) if (aTag) {
if (aTag) { originalEventId = aTag[1]
originalEventId = aTag[1] }
} }
} }
} }

23
src/services/spell.service.ts

@ -2,7 +2,8 @@
* NIP-A7 Spells: parse and execute kind 777 events as portable relay query filters. * NIP-A7 Spells: parse and execute kind 777 events as portable relay query filters.
*/ */
import { ExtendedKind, FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' import { ExtendedKind, FAST_WRITE_RELAY_URLS } from '@/constants'
import { getRelayUrlsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import { tagNameEquals } from '@/lib/tag' import { tagNameEquals } from '@/lib/tag'
import logger from '@/lib/logger' import logger from '@/lib/logger'
import type { TRelayList } from '@/types' import type { TRelayList } from '@/types'
@ -80,19 +81,15 @@ export function buildSpellCatalogAuthors(pubkey: string, contacts: string[]): st
} }
/** /**
* Relays to fetch the user's kind-777 spells: **read** (inboxes), **write** (outboxes), and * Relays to fetch the user's kind-777 spells: favorites + default fast-read relays + user read/inboxes
* {@link FAST_READ_RELAY_URLS}. * (same extension as other nonfavorites-feed reads; not the favorites-only home list).
*
* Pass `relayList` from {@link ClientService.fetchRelayList} / NostrProvider it already merges
* kind **10002** and kind **10432** (CACHE_RELAYS / local relays in the app). Do not infer local
* relays from hostnames.
*/ */
export function getRelaysForSpellCatalogSync(relayList: TRelayList | null | undefined): string[] { export function getRelaysForSpellCatalogSync(
return dedupeRelayUrls([ favoriteRelays: string[],
...(relayList?.read ?? []), blockedRelays: string[],
...(relayList?.write ?? []), userInboxReadRelays: string[]
...FAST_READ_RELAY_URLS ): string[] {
]) return getRelayUrlsWithFavoritesFastReadAndInbox(favoriteRelays, blockedRelays, userInboxReadRelays)
} }
function dedupeRelayUrls(urls: string[]): string[] { function dedupeRelayUrls(urls: string[]): string[] {

Loading…
Cancel
Save