Browse Source

speed up timelines

imwald
Silberengel 1 month ago
parent
commit
e5ed682ee5
  1. 31
      src/components/Explore/ExploreRelayReviews.tsx
  2. 2
      src/components/Note/PublicationIndex/PublicationIndex.tsx
  3. 156
      src/components/NoteList/index.tsx
  4. 4
      src/components/NoteStats/LikeButton.tsx
  5. 4
      src/components/NoteStats/Likes.tsx
  6. 4
      src/components/NoteStats/RepostButton.tsx
  7. 8
      src/components/NoteStats/VoteButtons.tsx
  8. 7
      src/components/PostEditor/PostContent.tsx
  9. 3
      src/components/QuoteList/index.tsx
  10. 9
      src/components/Username/index.tsx
  11. 7
      src/constants.ts
  12. 7
      src/hooks/useFetchProfile.tsx
  13. 267
      src/hooks/useProfileTimeline.tsx
  14. 15
      src/lib/event-metadata.ts
  15. 81
      src/lib/favorites-feed-relays.ts
  16. 34
      src/lib/relay-list-builder.ts
  17. 27
      src/lib/spell-feed-request-identity.ts
  18. 12
      src/lib/tag.ts
  19. 19
      src/pages/primary/NoteListPage/FollowingFeed.tsx
  20. 10
      src/pages/primary/SpellsPage/CreateSpellDialog.tsx
  21. 176
      src/pages/primary/SpellsPage/fauxSpellFeeds.ts
  22. 129
      src/pages/primary/SpellsPage/index.tsx
  23. 59
      src/pages/secondary/NoteListPage/index.tsx
  24. 22
      src/providers/FeedProvider.tsx
  25. 140
      src/services/client-replaceable-events.service.ts
  26. 351
      src/services/client.service.ts
  27. 52
      src/services/note-stats.service.ts
  28. 23
      src/services/spell.service.ts

31
src/components/Explore/ExploreRelayReviews.tsx

@ -1,27 +1,28 @@ @@ -1,27 +1,28 @@
import NoteList from '@/components/NoteList'
import { ExtendedKind, PROFILE_FETCH_RELAY_URLS } from '@/constants'
import { ExtendedKind } from '@/constants'
import { getRelayUrlsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import {
getRelayUrlFromRelayReviewEvent,
getStarsFromRelayReviewEvent
} from '@/lib/event-metadata'
import { buildExploreProfileAndUserRelayList } from '@/lib/relay-list-builder'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider'
import { Event } from 'nostr-tools'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useCallback, useMemo } from 'react'
export default function ExploreRelayReviews() {
const { pubkey } = useNostr()
const [relayUrls, setRelayUrls] = useState<string[]>(() => [...PROFILE_FETCH_RELAY_URLS])
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const { relayList } = useNostr()
useEffect(() => {
let cancelled = false
buildExploreProfileAndUserRelayList(pubkey ?? null).then((urls) => {
if (!cancelled) setRelayUrls(urls)
})
return () => {
cancelled = true
}
}, [pubkey])
const relayUrls = useMemo(
() =>
getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
),
[favoriteRelays, blockedRelays, relayList]
)
const subRequests = useMemo(() => [{ urls: relayUrls, filter: {} }], [relayUrls])
@ -34,6 +35,8 @@ export default function ExploreRelayReviews() { @@ -34,6 +35,8 @@ export default function ExploreRelayReviews() {
return (
<div className="min-w-0 pt-1">
<NoteList
feedSubscriptionKey="explore-relay-reviews"
preserveTimelineOnSubRequestsChange
showKinds={[ExtendedKind.RELAY_REVIEW]}
subRequests={subRequests}
showKind1OPs={false}

2
src/components/Note/PublicationIndex/PublicationIndex.tsx

@ -537,7 +537,7 @@ export default function PublicationIndex({ @@ -537,7 +537,7 @@ export default function PublicationIndex({
},
onNew: () => {} // Not needed for one-time fetch
},
{ needSort: false, useCache: false } // NO CACHING - stream raw from relays
{ needSort: false }
)
// Wait for up to 10 seconds for events to arrive or eosed

156
src/components/NoteList/index.tsx

@ -9,7 +9,10 @@ import { @@ -9,7 +9,10 @@ import {
isReplyNoteEvent
} from '@/lib/event'
import { shouldFilterEvent } from '@/lib/event-filtering'
import { stableSpellFeedFilterKey } from '@/lib/spell-feed-request-identity'
import {
isRelayUrlStrictSupersetIdentityKey,
stableSpellFeedFilterKey
} from '@/lib/spell-feed-request-identity'
import { syncUserDeletionTombstones } from '@/lib/sync-user-deletions'
import { normalizeUrl } from '@/lib/url'
import { getZapInfoFromEvent } from '@/lib/event-metadata'
@ -48,6 +51,19 @@ const SHOW_COUNT = 50 // Increased from 10 to show more events at once, reducing @@ -48,6 +51,19 @@ const SHOW_COUNT = 50 // Increased from 10 to show more events at once, reducing
const FEED_PROFILE_BATCH_DEBOUNCE_MS = 120
const FEED_PROFILE_CHUNK = 36
/**
 * Merge two timeline batches into one list, deduplicated by event id
 * (an incoming event replaces a previous one with the same id), sorted
 * newest-first by `created_at`, and truncated to at most `cap` rows.
 */
function mergeEventBatchesById(prev: Event[], incoming: Event[], cap: number): Event[] {
  const merged = new Map<string, Event>()
  // Later entries win, so incoming batches overwrite stale copies from prev.
  for (const evt of [...prev, ...incoming]) {
    merged.set(evt.id, evt)
  }
  const newestFirst = [...merged.values()].sort((a, b) => b.created_at - a.created_at)
  return newestFirst.slice(0, cap)
}
const NoteList = forwardRef(
(
{
@ -66,14 +82,12 @@ const NoteList = forwardRef( @@ -66,14 +82,12 @@ const NoteList = forwardRef(
/** When set (e.g. Spells page), timeline subscription keys off this string instead of `subRequests` reference churn. */
feedSubscriptionKey,
/**
* When true, hydrate the list from the client timeline cache (IndexedDB-backed) before/at same time as
* live REQ, so feeds feel instant on repeat visits. Spells faux feeds use this; home feed stays false.
* When true (e.g. Explore relay reviews), `subRequests` may grow after first paint (bootstrap relays full list).
* Re-subscribe when URLs change but **merge** new timeline batches into existing rows by event id instead of clearing.
*/
useTimelineCacheBootstrap = false,
preserveTimelineOnSubRequestsChange = false,
/**
* When set (Spells page), passed to `subscribeTimeline` as `firstRelayResultGraceMs` only ms to wait after
* the first live event before treating initial load as EOSE. Subscribe setup and loading fallback keep
* longer defaults so multi-relay spell feeds do not race-fail and stay blank after refresh.
* Spells page: after this many ms, clear the loading skeleton so the list area renders; subscription keeps running.
*/
spellFetchTimeoutMs,
/** Spells page: bumps when user picks a feed; used with {@link onSpellFeedFirstPaint}. */
@ -96,7 +110,8 @@ const NoteList = forwardRef( @@ -96,7 +110,8 @@ const NoteList = forwardRef(
/** When provided and returns true, the event is omitted from the feed (in addition to built-in rules). */
extraShouldHideEvent?: (evt: Event) => boolean
feedSubscriptionKey?: string
useTimelineCacheBootstrap?: boolean
preserveTimelineOnSubRequestsChange?: boolean
/** When set (spells), max time to show the initial loading skeleton (ms). */
spellFetchTimeoutMs?: number
spellFeedInstrumentToken?: number
onSpellFeedFirstPaint?: (detail: { eventCount: number; firstEventId: string }) => void
@ -156,6 +171,9 @@ const NoteList = forwardRef( @@ -156,6 +171,9 @@ const NoteList = forwardRef(
}, [subRequests])
const timelineSubscriptionKey = feedSubscriptionKey ?? subRequestsKey
const prevSubRequestsKeyForTimelineRef = useRef<string | null>(null)
/** Detect pull-to-refresh so preserve-mode feeds still clear; unrelated dep changes must not clear. */
const timelineEffectLastRefreshCountRef = useRef(refreshCount)
useEffect(() => {
feedProfileBatchGenRef.current += 1
@ -163,6 +181,35 @@ const NoteList = forwardRef( @@ -163,6 +181,35 @@ const NoteList = forwardRef(
setFeedProfileBatch({ profiles: new Map(), pending: new Set(), version: 0 })
}, [timelineSubscriptionKey, refreshCount])
/** Pending pubkeys sync with rows so useFetchProfile skips per-note fetches before the debounced batch. */
useLayoutEffect(() => {
const candidates = new Set<string>()
const addPk = (p: string | undefined) => {
if (p && p.length === 64 && /^[0-9a-f]{64}$/.test(p)) {
candidates.add(p)
}
}
for (const e of events) {
addPk(e.pubkey)
}
for (const e of newEvents) {
addPk(e.pubkey)
}
setFeedProfileBatch((prev) => {
const pending = new Set(prev.pending)
let changed = false
for (const pk of candidates) {
if (!prev.profiles.has(pk) && !pending.has(pk)) {
pending.add(pk)
changed = true
}
}
if (!changed) return prev
return { ...prev, pending, version: prev.version + 1 }
})
}, [events, newEvents])
const subRequestsRef = useRef(subRequests)
subRequestsRef.current = subRequests
@ -309,9 +356,12 @@ const NoteList = forwardRef( @@ -309,9 +356,12 @@ const NoteList = forwardRef(
candidates.add(p)
}
}
filteredEvents.slice(0, 50).forEach((e) => addPk(e.pubkey))
events.slice(0, 120).forEach((e) => addPk(e.pubkey))
events.slice(showCount, showCount + 60).forEach((e) => addPk(e.pubkey))
for (const e of events) {
addPk(e.pubkey)
}
for (const e of newEvents) {
addPk(e.pubkey)
}
const need = [...candidates].filter((pk) => !feedProfileLoadedRef.current.has(pk))
if (need.length === 0) return
@ -320,7 +370,14 @@ const NoteList = forwardRef( @@ -320,7 +370,14 @@ const NoteList = forwardRef(
setFeedProfileBatch((prev) => {
const pending = new Set(prev.pending)
need.forEach((pk) => pending.add(pk))
let pendingChanged = false
for (const pk of need) {
if (!pending.has(pk)) {
pending.add(pk)
pendingChanged = true
}
}
if (!pendingChanged) return prev
return { ...prev, pending, version: prev.version + 1 }
})
@ -363,7 +420,7 @@ const NoteList = forwardRef( @@ -363,7 +420,7 @@ const NoteList = forwardRef(
})()
}, FEED_PROFILE_BATCH_DEBOUNCE_MS)
return () => window.clearTimeout(handle)
}, [filteredEvents, events, showCount])
}, [events, newEvents])
const scrollToTop = useCallback((behavior: ScrollBehavior = 'instant') => {
setTimeout(() => {
@ -392,13 +449,34 @@ const NoteList = forwardRef( @@ -392,13 +449,34 @@ const NoteList = forwardRef(
return () => {}
}
const prevSubKey = prevSubRequestsKeyForTimelineRef.current
const userPulledRefresh = refreshCount !== timelineEffectLastRefreshCountRef.current
if (userPulledRefresh) {
timelineEffectLastRefreshCountRef.current = refreshCount
}
const keepExistingTimelineEvents =
preserveTimelineOnSubRequestsChange &&
!userPulledRefresh &&
(prevSubKey === subRequestsKey ||
isRelayUrlStrictSupersetIdentityKey(prevSubKey, subRequestsKey))
prevSubRequestsKeyForTimelineRef.current = subRequestsKey
/** False after cleanup so stale timeline callbacks cannot overwrite state after switching feeds (e.g. Spells discussions → notifications). */
let effectActive = true
async function init() {
// Re-subscribe with rows visible (e.g. relay URL expansion): don't flash global loading / skeleton.
const keepRowsVisible =
preserveTimelineOnSubRequestsChange &&
keepExistingTimelineEvents &&
eventsRef.current.length > 0
if (!keepRowsVisible) {
setLoading(true)
}
if (!keepExistingTimelineEvents) {
setEvents([])
setNewEvents([])
}
setHasMore(true)
consecutiveEmptyRef.current = 0 // Reset counter on refresh
@ -437,6 +515,10 @@ const NoteList = forwardRef( @@ -437,6 +515,10 @@ const NoteList = forwardRef(
return () => {}
}
const totalRelayUrls = mappedSubRequests.reduce((n, r) => n + r.urls.length, 0)
// Explore-style feeds merge many read relays; subscribeTimeline awaits every ensureRelay — 5s often loses the race.
const subscribeSetupRaceMs = totalRelayUrls > 24 ? 30_000 : 5000
let closer: (() => void) | undefined
let timelineKey: string | undefined
let timelineSubscribePromise:
@ -444,30 +526,37 @@ const NoteList = forwardRef( @@ -444,30 +526,37 @@ const NoteList = forwardRef(
| undefined
try {
// Opening subs + IndexedDB timeline hydration can exceed 2s on spell feeds with many relays; a short race
// Opening many relay subs can exceed 2s on spell feeds; a short race
// rejects, the catch closes the late subscription, and the list stays empty after refresh.
const subscribeSetupRaceMs = 5000
const timeoutPromise = new Promise<never>((_, reject) => {
setTimeout(() => {
reject(new Error(`subscribeTimeline timeout after ${subscribeSetupRaceMs}ms`))
}, subscribeSetupRaceMs)
})
const firstRelayGraceMs = spellFetchTimeoutMs ?? FIRST_RELAY_RESULT_GRACE_MS
const eventCap = areAlgoRelays ? ALGO_LIMIT : LIMIT
timelineSubscribePromise = client.subscribeTimeline(
mappedSubRequests,
{
onEvents: (events: Event[], eosed: boolean) => {
onEvents: (batch: Event[], eosed: boolean) => {
if (!effectActive) return
if (events.length > 0) {
setEvents(events)
if (batch.length > 0) {
if (preserveTimelineOnSubRequestsChange) {
setEvents((prev) => {
const next = mergeEventBatchesById(prev, batch, eventCap)
lastEventsForTimelinePrefetchRef.current = next
return next
})
} else {
setEvents(batch)
lastEventsForTimelinePrefetchRef.current = batch
}
// Do not wait for full EOSE across many relays — otherwise loading/skeleton stays up for 10–30s+
setLoading(false)
// Defer profile + embed prefetch: streaming timelines fire onEvents often; starting
// fetchProfilesForPubkeys on every update spams relays (multi-second each) and cancels hooks.
lastEventsForTimelinePrefetchRef.current = events
if (timelinePrefetchDebounceRef.current) {
clearTimeout(timelinePrefetchDebounceRef.current)
}
@ -492,8 +581,9 @@ const NoteList = forwardRef( @@ -492,8 +581,9 @@ const NoteList = forwardRef(
}
}, 450)
} else if (eosed) {
// No events received but EOSE - set empty events array and stop loading
if (!preserveTimelineOnSubRequestsChange) {
setEvents([])
}
setLoading(false)
}
@ -507,7 +597,7 @@ const NoteList = forwardRef( @@ -507,7 +597,7 @@ const NoteList = forwardRef(
// We should still try to load more on scroll - the loadMore logic will handle stopping
// Only set to false if we explicitly know there are no more events (handled in loadMore)
// If we got a full limit of events, there's likely more available
if (events.length >= (areAlgoRelays ? ALGO_LIMIT : LIMIT)) {
if (batch.length >= (areAlgoRelays ? ALGO_LIMIT : LIMIT)) {
setHasMore(true)
} else {
// Even with fewer events, there might be more (filtering, slow relays, etc.)
@ -542,9 +632,7 @@ const NoteList = forwardRef( @@ -542,9 +632,7 @@ const NoteList = forwardRef(
{
startLogin,
needSort: !areAlgoRelays,
useCache: useTimelineCacheBootstrap,
omitDefaultSinceWhenUseCache: useTimelineCacheBootstrap,
firstRelayResultGraceMs: firstRelayGraceMs
firstRelayResultGraceMs: FIRST_RELAY_RESULT_GRACE_MS
}
)
@ -582,6 +670,8 @@ const NoteList = forwardRef( @@ -582,6 +670,8 @@ const NoteList = forwardRef(
}
}, [
timelineSubscriptionKey,
subRequestsKey,
preserveTimelineOnSubRequestsChange,
refreshCount,
showKindsKey,
showKind1OPs,
@ -589,7 +679,6 @@ const NoteList = forwardRef( @@ -589,7 +679,6 @@ const NoteList = forwardRef(
showKind1111,
useFilterAsIs,
areAlgoRelays,
useTimelineCacheBootstrap,
spellFetchTimeoutMs
])
@ -615,6 +704,21 @@ const NoteList = forwardRef( @@ -615,6 +704,21 @@ const NoteList = forwardRef(
}
}, [timelineSubscriptionKey, refreshCount])
/** Spells: drop loading skeleton quickly so rows (or empty + reload) appear while REQ continues. */
useEffect(() => {
if (spellFetchTimeoutMs == null || spellFetchTimeoutMs <= 0) return
if (!subRequestsRef.current.length) return
let cancelled = false
const id = window.setTimeout(() => {
if (cancelled) return
setLoading(false)
}, spellFetchTimeoutMs)
return () => {
cancelled = true
clearTimeout(id)
}
}, [timelineSubscriptionKey, refreshCount, spellFetchTimeoutMs])
// Use refs to avoid dependency issues and ensure latest values in async callbacks
const showCountRef = useRef(showCount)
const loadingRef = useRef(loading)

4
src/components/NoteStats/LikeButton.tsx

@ -159,7 +159,9 @@ export default function LikeButton({ event, hideCount = false }: { event: Event; @@ -159,7 +159,9 @@ export default function LikeButton({ event, hideCount = false }: { event: Event;
showSimplePublishSuccess(t('Reaction published'))
}
noteStatsService.updateNoteStatsByEvents([evt])
noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
}
} catch (error) {
logger.error('Like failed', { error, eventId: event.id })

4
src/components/NoteStats/Likes.tsx

@ -71,7 +71,9 @@ export default function Likes({ event }: { event: Event }) { @@ -71,7 +71,9 @@ export default function Likes({ event }: { event: Event }) {
try {
const reaction = createReactionDraftEvent(event, emoji)
const evt = await publish(reaction)
noteStatsService.updateNoteStatsByEvents([evt])
noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
} catch (error) {
logger.error('Like failed', { error, eventId: event.id })
} finally {

4
src/components/NoteStats/RepostButton.tsx

@ -76,7 +76,9 @@ export default function RepostButton({ event, hideCount = false }: { event: Even @@ -76,7 +76,9 @@ export default function RepostButton({ event, hideCount = false }: { event: Even
showSimplePublishSuccess(t('Boost published'))
}
noteStatsService.updateNoteStatsByEvents([evt])
noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
} catch (error) {
logger.error('Boost failed', { error, eventId: event.id })
} finally {

8
src/components/NoteStats/VoteButtons.tsx

@ -81,7 +81,9 @@ export default function VoteButtons({ event }: { event: Event }) { @@ -81,7 +81,9 @@ export default function VoteButtons({ event }: { event: Event }) {
showSimplePublishSuccess(t('Vote removed'))
}
noteStatsService.updateNoteStatsByEvents([evt])
noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
} else {
// If user voted the opposite way, first remove the old vote
if (userVote) {
@ -109,7 +111,9 @@ export default function VoteButtons({ event }: { event: Event }) { @@ -109,7 +111,9 @@ export default function VoteButtons({ event }: { event: Event }) {
showSimplePublishSuccess(t('Vote published'))
}
noteStatsService.updateNoteStatsByEvents([evt])
noteStatsService.updateNoteStatsByEvents([evt], undefined, {
interactionTargetNoteId: event.id
})
}
} catch (error) {
logger.error('Vote failed', { error, eventId: event.id })

7
src/components/PostEditor/PostContent.tsx

@ -66,6 +66,7 @@ import mediaUpload from '@/services/media-upload.service' @@ -66,6 +66,7 @@ import mediaUpload from '@/services/media-upload.service'
import { successfulPublishRelayUrls, type TRelayPublishStatus } from '@/lib/publish-relay-urls'
import client, { eventService } from '@/services/client.service'
import discussionFeedCache from '@/services/discussion-feed-cache.service'
import noteStatsService from '@/services/note-stats.service'
import CreateThreadDialog from '@/pages/primary/DiscussionsPage/CreateThreadDialog'
import { getReplaceableCoordinateFromEvent, isProtectedEvent as isEventProtected, isReplaceableEvent, isReplyNoteEvent } from '@/lib/event'
import { Event, kinds } from 'nostr-tools'
@ -112,6 +113,12 @@ export default function PostContent({ @@ -112,6 +113,12 @@ export default function PostContent({
const clean = { ...reply } as Event
delete (clean as any).relayStatuses
addReplies([clean])
const isQuotePost = clean.tags.some((t) => t[0] === 'q' && t[1])
noteStatsService.updateNoteStatsByEvents(
[clean],
undefined,
isQuotePost ? undefined : { replyParentNoteId: parentEvent.id }
)
const rootInfo = !isReplaceableEvent(parentEvent.kind)
? { type: 'E' as const, id: parentEvent.id, pubkey: parentEvent.pubkey }
: {

3
src/components/QuoteList/index.tsx

@ -136,9 +136,6 @@ export default function QuoteList({ @@ -136,9 +136,6 @@ export default function QuoteList({
[newEvt, ...oldEvents].sort((a, b) => b.created_at - a.created_at)
)
}
},
{
useCache: false // NO CACHING - stream raw from relays
}
)
if (cancelled) {

9
src/components/Username/index.tsx

@ -30,8 +30,9 @@ export default function Username({ @@ -30,8 +30,9 @@ export default function Username({
return userIdToPubkey(userId) || ''
}, [userId, profile?.pubkey])
// Show skeleton while fetching (unless withoutSkeleton is true)
if (isFetching && !withoutSkeleton) {
// Never block on profile fetch when we can already show npub/hex fallback (feeds batch-fetch profiles).
const canShowWithoutProfile = Boolean(pubkey)
if (isFetching && !withoutSkeleton && !canShowWithoutProfile) {
return (
<div className="py-1">
<Skeleton className={cn('w-16', skeletonClassName)} />
@ -115,8 +116,8 @@ export function SimpleUsername({ @@ -115,8 +116,8 @@ export function SimpleUsername({
return userIdToPubkey(userId) || ''
}, [userId, profile?.pubkey])
// Show skeleton while fetching (unless withoutSkeleton is true)
if (isFetching && !withoutSkeleton) {
const canShowWithoutProfile = Boolean(pubkey)
if (isFetching && !withoutSkeleton && !canShowWithoutProfile) {
return (
<div className="py-1">
<Skeleton className={cn('w-16', skeletonClassName)} />

7
src/constants.ts

@ -17,8 +17,11 @@ export const DEFAULT_FAVORITE_RELAYS = [ @@ -17,8 +17,11 @@ export const DEFAULT_FAVORITE_RELAYS = [
/** Multi-relay queries and timeline initial REQ: after the first event, wait this long then close (query) or finalize EOSE (live feed) while keeping the subscription open for new events. */
export const FIRST_RELAY_RESULT_GRACE_MS = 2000
/** Spells page feeds: shorter grace so multi-relay spell REQs finalize initial load sooner (still keeps subscription open for `onNew`). */
export const SPELL_FEED_FIRST_RELAY_GRACE_MS = 450
/** Spells page NoteList: drop the loading skeleton after this long so the feed can render; REQ stays open and rows stream in. */
export const SPELL_FEED_LOADING_MAX_MS = 1000
/** @deprecated Use {@link SPELL_FEED_LOADING_MAX_MS}; kept so old imports do not break. */
export const SPELL_FEED_FIRST_RELAY_GRACE_MS = SPELL_FEED_LOADING_MAX_MS
/**
* Implicit query feed grace ({@link FIRST_RELAY_RESULT_GRACE_MS}) applies only when the largest `limit` among

7
src/hooks/useFetchProfile.tsx

@ -26,7 +26,12 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -26,7 +26,12 @@ export function useFetchProfile(id?: string, skipCache = false) {
const { profile: currentAccountProfile } = useNostr()
const noteFeed = useNoteFeedProfileContext()
const [isFetching, setIsFetching] = useState(true)
/** Hex/npub ids can show npub fallback immediately; avoid a skeleton frame before the first effect. */
const [isFetching, setIsFetching] = useState(() => {
if (!id) return false
const pk = userIdToPubkey(id)
return !(pk.length === 64 && /^[0-9a-f]{64}$/.test(pk))
})
const [error, setError] = useState<Error | null>(null)
const [profile, setProfile] = useState<TProfile | null>(null)
const [pubkey, setPubkey] = useState<string | null>(null)

267
src/hooks/useProfileTimeline.tsx

@ -1,18 +1,20 @@ @@ -1,18 +1,20 @@
import { useDeletedEvent } from '@/providers/DeletedEventProvider'
import client from '@/services/client.service'
import { useEffect, useMemo, useRef, useState, useCallback } from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Event } from 'nostr-tools'
import { CALENDAR_EVENT_KINDS, ExtendedKind, FAST_READ_RELAY_URLS } from '@/constants'
import { normalizeUrl } from '@/lib/url'
import { CALENDAR_EVENT_KINDS, ExtendedKind } from '@/constants'
import { getRelayUrlsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider'
type ProfileTimelineCacheEntry = {
type ProfileTimelineMemoryEntry = {
events: Event[]
lastUpdated: number
}
const timelineCache = new Map<string, ProfileTimelineCacheEntry>()
const CACHE_DURATION = 5 * 60 * 1000 // 5 minutes - cache is considered fresh for this long
const relayGroupCache = new Map<string, string[][]>()
/** 5-minute in-memory cache for this hook only — not IndexedDB, not client timeline refs. */
const memoryTimelineByKey = new Map<string, ProfileTimelineMemoryEntry>()
const CACHE_DURATION = 5 * 60 * 1000
type UseProfileTimelineOptions = {
pubkey: string
@ -28,55 +30,36 @@ type UseProfileTimelineResult = { @@ -28,55 +30,36 @@ type UseProfileTimelineResult = {
refresh: () => void
}
async function getRelayGroups(pubkey: string): Promise<string[][]> {
const cached = relayGroupCache.get(pubkey)
if (cached) {
return cached
}
const [relayList, favoriteRelays] = await Promise.all([
client.fetchRelayList(pubkey).catch(() => ({ read: [], write: [] })),
client.fetchFavoriteRelays(pubkey).catch(() => [])
])
const groups: string[][] = []
const normalizeList = (urls?: string[]) =>
Array.from(
new Set(
(urls || [])
.map((url) => normalizeUrl(url))
.filter((value): value is string => !!value)
)
)
const readRelays = normalizeList(relayList.read)
if (readRelays.length) {
groups.push(readRelays)
}
const writeRelays = normalizeList(relayList.write)
if (writeRelays.length) {
groups.push(writeRelays)
}
const favoriteRelayList = normalizeList(favoriteRelays)
if (favoriteRelayList.length) {
groups.push(favoriteRelayList)
}
const fastReadRelays = normalizeList(FAST_READ_RELAY_URLS)
if (fastReadRelays.length) {
groups.push(fastReadRelays)
}
if (!groups.length) {
relayGroupCache.set(pubkey, [fastReadRelays])
return [fastReadRelays]
}
relayGroupCache.set(pubkey, groups)
return groups
/**
 * Build the timeline sub-requests for a profile feed: one authors-filter
 * request per relay group, plus — when the feed shows calendar kinds —
 * one `#p` invite request per group so events the user is invited to appear.
 * Empty relay groups produce no requests.
 */
function buildSubRequests(
  groups: string[][],
  pubkey: string,
  kindsArg: number[],
  limit: number,
  hasCalendarKinds: boolean
) {
  const nonEmptyGroups = groups.filter((urls) => urls.length)
  const requests = nonEmptyGroups.map((urls) => ({
    urls,
    filter: {
      authors: [pubkey],
      kinds: kindsArg,
      limit
    } as any
  }))
  if (hasCalendarKinds) {
    // Calendar invites tag the user with #p rather than authoring the event.
    for (const urls of nonEmptyGroups) {
      requests.push({
        urls,
        filter: {
          kinds: [ExtendedKind.CALENDAR_EVENT_DATE, ExtendedKind.CALENDAR_EVENT_TIME],
          '#p': [pubkey],
          limit: 100
        } as any
      })
    }
  }
  return requests
}
function postProcessEvents(
@ -107,11 +90,18 @@ export function useProfileTimeline({ @@ -107,11 +90,18 @@ export function useProfileTimeline({
limit = 200,
filterPredicate
}: UseProfileTimelineOptions): UseProfileTimelineResult {
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const { relayList } = useNostr()
const { isEventDeleted, tombstoneEpoch } = useDeletedEvent()
const isEventDeletedRef = useRef(isEventDeleted)
isEventDeletedRef.current = isEventDeleted
const cachedEntry = useMemo(() => timelineCache.get(cacheKey), [cacheKey])
const filterPredicateRef = useRef(filterPredicate)
filterPredicateRef.current = filterPredicate
const limitRef = useRef(limit)
limitRef.current = limit
const cachedEntry = useMemo(() => memoryTimelineByKey.get(cacheKey), [cacheKey])
const [events, setEvents] = useState<Event[]>(cachedEntry?.events ?? [])
const [isLoading, setIsLoading] = useState(!cachedEntry)
const [refreshToken, setRefreshToken] = useState(0)
@ -121,9 +111,9 @@ export function useProfileTimeline({ @@ -121,9 +111,9 @@ export function useProfileTimeline({
setEvents((prev) => {
const next = prev.filter((e) => !isEventDeletedRef.current(e))
if (next.length === prev.length) return prev
const cached = timelineCache.get(cacheKey)
const cached = memoryTimelineByKey.get(cacheKey)
if (cached) {
timelineCache.set(cacheKey, { events: next, lastUpdated: cached.lastUpdated })
memoryTimelineByKey.set(cacheKey, { events: next, lastUpdated: cached.lastUpdated })
}
return next
})
@ -131,129 +121,117 @@ export function useProfileTimeline({ @@ -131,129 +121,117 @@ export function useProfileTimeline({
useEffect(() => {
let cancelled = false
const closers: (() => void)[] = []
const pool = new Map<string, Event>()
const subscribe = async () => {
// Check if we have fresh cached data
const cachedEntry = timelineCache.get(cacheKey)
const cacheAge = cachedEntry ? Date.now() - cachedEntry.lastUpdated : Infinity
const isCacheFresh = cacheAge < CACHE_DURATION
// If cache is fresh, show it immediately and skip subscribing
if (isCacheFresh && cachedEntry) {
setEvents(cachedEntry.events)
const flushPool = () => {
if (cancelled) return
const processed = postProcessEvents(
Array.from(pool.values()),
filterPredicateRef.current,
limitRef.current,
isEventDeletedRef.current
)
memoryTimelineByKey.set(cacheKey, { events: processed, lastUpdated: Date.now() })
setEvents(processed)
setIsLoading(false)
// Still subscribe in background to get updates, but don't show loading
// This ensures we get new events without disrupting the UI
} else {
// Cache is stale or missing - show loading and fetch
setIsLoading(!cachedEntry)
}
try {
const relayGroups = await getRelayGroups(pubkey)
subscriptionRef.current = () => {
closers.forEach((c) => c())
closers.length = 0
}
const registerCloser = (closer: () => void) => {
if (cancelled) {
closer()
return
}
closers.push(closer)
}
const hasCalendarKinds = kinds.some((k) => CALENDAR_EVENT_KINDS.includes(k))
const authorRequests = relayGroups
.map((urls) => ({
urls,
filter: {
authors: [pubkey],
kinds,
limit
} as any
}))
.filter((request) => request.urls.length)
// When profile includes calendar event kinds, also subscribe to events where this user is an invitee (#p tag)
const calendarInviteRequests = hasCalendarKinds
? relayGroups
.map((urls) => ({
urls,
filter: {
kinds: [ExtendedKind.CALENDAR_EVENT_DATE, ExtendedKind.CALENDAR_EVENT_TIME],
'#p': [pubkey],
limit: 100
} as any
}))
.filter((request) => request.urls.length)
: []
const subRequests = [...authorRequests, ...calendarInviteRequests]
const subscribe = async () => {
const mem = memoryTimelineByKey.get(cacheKey)
const cacheAge = mem ? Date.now() - mem.lastUpdated : Infinity
const isCacheFresh = cacheAge < CACHE_DURATION
if (!subRequests.length) {
timelineCache.set(cacheKey, {
events: [],
lastUpdated: Date.now()
})
setEvents([])
pool.clear()
if (isCacheFresh && mem) {
setEvents(mem.events)
setIsLoading(false)
return
mem.events.forEach((e) => pool.set(e.id, e))
} else {
setIsLoading(!mem)
}
const hasCalendarKinds = kinds.some((k) => CALENDAR_EVENT_KINDS.includes(k))
const feedRelayUrls = getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
const startWave = async (subRequests: ReturnType<typeof buildSubRequests>) => {
if (cancelled || subRequests.length === 0) return
try {
const { closer } = await client.subscribeTimeline(
subRequests,
{
onEvents: (fetchedEvents) => {
onEvents: (fetched) => {
if (cancelled) return
const processed = postProcessEvents(
fetchedEvents as Event[],
filterPredicate,
limit,
isEventDeletedRef.current
)
timelineCache.set(cacheKey, {
events: processed,
lastUpdated: Date.now()
})
setEvents(processed)
setIsLoading(false)
for (const e of fetched as Event[]) {
pool.set(e.id, e)
}
flushPool()
},
onNew: (evt) => {
if (cancelled) return
setEvents((prevEvents) => {
const combined = [evt as Event, ...prevEvents]
const processed = postProcessEvents(
combined,
filterPredicate,
limit,
isEventDeletedRef.current
)
timelineCache.set(cacheKey, {
events: processed,
lastUpdated: Date.now()
})
return processed
})
pool.set((evt as Event).id, evt as Event)
flushPool()
}
},
{ needSort: true, useCache: false } // NO CACHING - stream raw from relays
{ needSort: true }
)
subscriptionRef.current = () => closer()
} catch (error) {
if (!cancelled) {
setIsLoading(false)
registerCloser(closer)
} catch {
if (!cancelled) setIsLoading(false)
}
}
if (feedRelayUrls.length === 0) {
if (!cancelled) setIsLoading(false)
return
}
void startWave(buildSubRequests([feedRelayUrls], pubkey, kinds, limit, hasCalendarKinds))
}
subscribe()
void subscribe()
return () => {
cancelled = true
subscriptionRef.current()
subscriptionRef.current = () => {}
}
}, [pubkey, cacheKey, JSON.stringify(kinds), limit, filterPredicate, refreshToken])
}, [
pubkey,
cacheKey,
JSON.stringify(kinds),
limit,
filterPredicate,
refreshToken,
favoriteRelays,
blockedRelays,
relayList
])
const refresh = useCallback(() => {
subscriptionRef.current()
subscriptionRef.current = () => {}
timelineCache.delete(cacheKey)
memoryTimelineByKey.delete(cacheKey)
setIsLoading(true)
setRefreshToken((token) => token + 1)
}, [])
}, [cacheKey])
return {
events,
@ -261,4 +239,3 @@ export function useProfileTimeline({ @@ -261,4 +239,3 @@ export function useProfileTimeline({
refresh
}
}

15
src/lib/event-metadata.ts

@ -569,12 +569,17 @@ export function getEmojisFromEvent(event: Event): TEmoji[] { @@ -569,12 +569,17 @@ export function getEmojisFromEvent(event: Event): TEmoji[] {
export function getStarsFromRelayReviewEvent(event: Event): number {
const ratingTag = event.tags.find((t) => t[0] === 'rating')
if (ratingTag) {
const stars = parseFloat(ratingTag[1]) * 5
if (stars > 0 && stars <= 5) {
return stars
}
if (!ratingTag?.[1]?.trim()) return 0
const raw = parseFloat(ratingTag[1])
if (Number.isNaN(raw) || raw <= 0) return 0
// This app publishes `rating` as stars/5 (e.g. 5★ → "1"); scale back to 1–5.
if (raw <= 1) {
const scaled = raw * 5
if (scaled > 0 && scaled <= 5) return scaled
return 0
}
// Many clients use a plain 1–5 value in the tag.
if (raw >= 1 && raw <= 5) return raw
return 0
}

81
src/lib/favorites-feed-relays.ts

@ -0,0 +1,81 @@ @@ -0,0 +1,81 @@
import { DEFAULT_FAVORITE_RELAYS, FAST_READ_RELAY_URLS } from '@/constants'
import type { TFeedSubRequest } from '@/types'
import { normalizeUrl } from '@/lib/url'
/** Normalized lookup set of blocked relay URLs (raw string kept when normalization yields nothing). */
const blockedSet = (blockedRelays: string[]) => {
  const keys = blockedRelays.map((url) => normalizeUrl(url) || url)
  return new Set(keys)
}
/**
 * Relay URLs for the all favorites home feed only (`FeedProvider` `all-favorites` / that `RelaysFeed` mode).
 * Non-blocked user favorites, or {@link DEFAULT_FAVORITE_RELAYS} when none remain.
 */
export function getFavoritesFeedRelayUrls(
  favoriteRelays: string[],
  blockedRelays: string[]
): string[] {
  const blocked = blockedSet(blockedRelays)
  // Favorites that survive the block list; when none do, fall back to the defaults.
  const visible = favoriteRelays.filter((url) => {
    const key = normalizeUrl(url) || url
    return Boolean(key) && !blocked.has(key)
  })
  // NOTE(review): the default fallback is NOT re-checked against the block list — confirm intended.
  const source = visible.length > 0 ? visible : DEFAULT_FAVORITE_RELAYS
  const seen = new Set<string>()
  const result: string[] = []
  for (const url of source) {
    const key = normalizeUrl(url) || url
    if (!key || seen.has(key)) continue
    seen.add(key)
    result.push(key)
  }
  return result
}
/**
 * Merge relay URL lists in order; first occurrence wins; drops blocked.
 */
export function mergeRelayUrlLayers(layers: string[][], blockedRelays: string[]): string[] {
  const blocked = blockedSet(blockedRelays)
  const seen = new Set<string>()
  const merged: string[] = []
  for (const urls of layers) {
    for (const url of urls) {
      const key = normalizeUrl(url) || url
      if (!key) continue
      if (blocked.has(key) || seen.has(key)) continue
      seen.add(key)
      merged.push(key)
    }
  }
  return merged
}
/**
 * Favorites (same set as the favorites feed) plus {@link FAST_READ_RELAY_URLS} and the user's NIP-65 **read** / inbox relays.
 * Fast-read URLs are merged first so REQ setup hits responsive indexers early (same deduped set).
 */
export function getRelayUrlsWithFavoritesFastReadAndInbox(
  favoriteRelays: string[],
  blockedRelays: string[],
  userInboxReadRelays: string[]
): string[] {
  const fastRead: string[] = []
  for (const url of FAST_READ_RELAY_URLS) {
    const key = normalizeUrl(url) || url
    if (key) fastRead.push(key)
  }
  const favorites = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
  return mergeRelayUrlLayers([fastRead, favorites, userInboxReadRelays], blockedRelays)
}
/** Prefix each subrequest’s `urls` with the extended read set (favorites + fast read + inboxes). */
export function augmentSubRequestsWithFavoritesFastReadAndInbox(
  requests: TFeedSubRequest[],
  favoriteRelays: string[],
  blockedRelays: string[],
  userInboxReadRelays: string[]
): TFeedSubRequest[] {
  const extendedReadSet = getRelayUrlsWithFavoritesFastReadAndInbox(
    favoriteRelays,
    blockedRelays,
    userInboxReadRelays
  )
  return requests.map((request) => {
    // Extended set first so the fast/favorite relays get the REQ before the request's own relays.
    const urls = mergeRelayUrlLayers([extendedReadSet, request.urls], blockedRelays)
    return { ...request, urls }
  })
}

34
src/lib/relay-list-builder.ts

@ -15,6 +15,26 @@ import { getCacheRelayUrls } from './private-relays' @@ -15,6 +15,26 @@ import { getCacheRelayUrls } from './private-relays'
import client from '@/services/client.service'
import logger from '@/lib/logger'
/** Normalize every URL and keep the first occurrence of each; empty results are dropped. */
function dedupeNormalizedRelayUrls(urls: string[]): string[] {
  const seen = new Set<string>()
  const unique: string[] = []
  urls.forEach((url) => {
    const key = normalizeUrl(url) || url
    if (!key || seen.has(key)) return
    seen.add(key)
    unique.push(key)
  })
  return unique
}
/**
 * Relays to bootstrap Explore replaceable fetches (e.g. kind 10012 batch) before NIP-65 resolves.
 * PROFILE_FETCH + FAST_READ.
 */
export function exploreDiscoveryBootstrapRelayUrls(): string[] {
  const combined = [...PROFILE_FETCH_RELAY_URLS, ...FAST_READ_RELAY_URLS]
  return dedupeNormalizedRelayUrls(combined)
}
export interface RelayListBuilderOptions {
/** Author's pubkey - will include their outboxes (write relays) */
authorPubkey?: string
@ -231,29 +251,31 @@ export async function buildComprehensiveRelayList(options: RelayListBuilderOptio @@ -231,29 +251,31 @@ export async function buildComprehensiveRelayList(options: RelayListBuilderOptio
}
/**
* Explore: Following's Favorites (kind 10012 batch) and Relay reviews tab.
 * PROFILE_FETCH_RELAY_URLS plus the viewer's read/write and cache (10432) relays — no FAST_READ.
* Explore: Following's Favorites (kind 10012 batch) / replaceable discovery.
* Bootstrap relays (profile + FAST_READ) plus the viewer's read/write and cache (10432) when logged in.
*/
export async function buildExploreProfileAndUserRelayList(
userPubkey: string | null | undefined
): Promise<string[]> {
const boot = exploreDiscoveryBootstrapRelayUrls()
if (!userPubkey) {
return Array.from(new Set([...PROFILE_FETCH_RELAY_URLS]))
return boot
}
try {
const built = await buildComprehensiveRelayList({
userPubkey,
includeUserOwnRelays: true,
includeProfileFetchRelays: true,
includeFastReadRelays: false,
includeFastReadRelays: true,
includeFavoriteRelays: false,
includeLocalRelays: true,
includeFastWriteRelays: false,
includeSearchableRelays: false
})
return built.length > 0 ? built : Array.from(new Set([...PROFILE_FETCH_RELAY_URLS]))
if (!built.length) return boot
return dedupeNormalizedRelayUrls([...boot, ...built])
} catch {
return Array.from(new Set([...PROFILE_FETCH_RELAY_URLS]))
return boot
}
}

27
src/lib/spell-feed-request-identity.ts

@ -24,3 +24,30 @@ export function computeSpellSubRequestsIdentityKey(subRequests: TFeedSubRequest[ @@ -24,3 +24,30 @@ export function computeSpellSubRequestsIdentityKey(subRequests: TFeedSubRequest[
}))
)
}
/**
 * True when `nextKey` is the same REQ filters as `prevKey` but with a strict superset of relay URLs
 * in at least one request slot (e.g. Explore relay reviews: bootstrap relays → full list).
 */
export function isRelayUrlStrictSupersetIdentityKey(prevKey: string | null, nextKey: string): boolean {
  if (!prevKey || prevKey === nextKey) return false
  try {
    type Item = { urls: string[]; filter: string }
    const prevItems = JSON.parse(prevKey) as Item[]
    const nextItems = JSON.parse(nextKey) as Item[]
    const comparable =
      Array.isArray(prevItems) && Array.isArray(nextItems) && prevItems.length === nextItems.length
    if (!comparable) return false
    let grewSomewhere = false
    for (let slot = 0; slot < prevItems.length; slot++) {
      // Filters must be byte-identical; only the relay URL set may differ.
      if (prevItems[slot].filter !== nextItems[slot].filter) return false
      const before = new Set(prevItems[slot].urls)
      const after = new Set(nextItems[slot].urls)
      // Every old URL must survive (subset check) …
      for (const url of before) {
        if (!after.has(url)) return false
      }
      // … and at least one slot must have gained URLs for the superset to be strict.
      if (after.size > before.size) grewSomewhere = true
    }
    return grewSomewhere
  } catch {
    // Malformed / non-JSON identity keys never count as supersets.
    return false
  }
}

12
src/lib/tag.ts

@ -17,6 +17,18 @@ export function tagNameEquals(tagName: string) { @@ -17,6 +17,18 @@ export function tagNameEquals(tagName: string) {
return (tag: string[]) => tag[0] === tagName
}
// 64-char lowercase-or-uppercase hex — the canonical nostr event id shape.
const NOTE_HEX_ID_RE = /^[0-9a-f]{64}$/i

/** First hex event id on an `e` / `E` tag (reactions, reposts, replies). */
export function getFirstHexEventIdFromETags(tags: string[][]): string | undefined {
  const hit = tags.find((tag) => {
    if (tag[0] !== 'e' && tag[0] !== 'E') return false
    const candidate = tag[1]
    return candidate != null && NOTE_HEX_ID_RE.test(candidate)
  })
  return hit?.[1]
}
export function generateBech32IdFromETag(tag: string[]) {
try {
const [, id, relay, markerOrPubkey, pubkey] = tag

19
src/pages/primary/NoteListPage/FollowingFeed.tsx

@ -1,6 +1,8 @@ @@ -1,6 +1,8 @@
import NormalFeed from '@/components/NormalFeed'
import type { TNoteListRef } from '@/components/NoteList'
import { augmentSubRequestsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import { useFeed } from '@/providers/FeedProvider'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider'
import client from '@/services/client.service'
import { TFeedSubRequest } from '@/types'
@ -13,7 +15,8 @@ const FollowingFeed = forwardRef< @@ -13,7 +15,8 @@ const FollowingFeed = forwardRef<
setSubHeader?: (node: ReactNode) => void
}
>(function FollowingFeed({ setSubHeader }, ref) {
const { pubkey } = useNostr()
const { pubkey, relayList } = useNostr()
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const { feedInfo } = useFeed()
const [subRequests, setSubRequests] = useState<TFeedSubRequest[]>([])
@ -25,11 +28,19 @@ const FollowingFeed = forwardRef< @@ -25,11 +28,19 @@ const FollowingFeed = forwardRef<
}
const followings = await client.fetchFollowings(pubkey)
setSubRequests(await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey))
const raw = await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey)
setSubRequests(
augmentSubRequestsWithFavoritesFastReadAndInbox(
raw,
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
)
}
init()
}, [feedInfo.feedType, pubkey])
void init()
}, [feedInfo.feedType, pubkey, favoriteRelays, blockedRelays, relayList])
return <NormalFeed ref={ref} subRequests={subRequests} isMainFeed setSubHeader={setSubHeader} />
})

10
src/pages/primary/SpellsPage/CreateSpellDialog.tsx

@ -18,6 +18,7 @@ import { @@ -18,6 +18,7 @@ import {
dedupeAppendIds,
resolveSpellListATags
} from '@/lib/spell-list-import'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider'
import { showPublishingError, showSimplePublishSuccess } from '@/lib/publishing-feedback'
import { eventService } from '@/services/client.service'
@ -289,6 +290,7 @@ export default function CreateSpellDialog({ @@ -289,6 +290,7 @@ export default function CreateSpellDialog({
}) {
const { t } = useTranslation()
const { pubkey, publish, checkLogin, relayList } = useNostr()
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const [form, setForm] = useState<TSpellDraftParams>(DEFAULT_PARAMS)
const [saving, setSaving] = useState(false)
const scrollBodyRef = useRef<HTMLDivElement>(null)
@ -319,7 +321,11 @@ export default function CreateSpellDialog({ @@ -319,7 +321,11 @@ export default function CreateSpellDialog({
const { draft, notices, pendingATags } = applyListEventToSpellDraft(base, ev)
setForm(draft)
setListImportNotices(notices)
const urls = getRelaysForSpellCatalogSync(relayList ?? undefined)
const urls = getRelaysForSpellCatalogSync(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
if (pendingATags.length === 0) return
void resolveSpellListATags(pendingATags, urls).then(({ ids, notices: extra }) => {
if (ids.length) {
@ -328,7 +334,7 @@ export default function CreateSpellDialog({ @@ -328,7 +334,7 @@ export default function CreateSpellDialog({
if (extra.length) setListImportNotices((n) => [...n, ...extra])
})
},
[relayList]
[favoriteRelays, blockedRelays, relayList]
)
const handleLoadManualList = useCallback(async () => {

176
src/pages/primary/SpellsPage/fauxSpellFeeds.ts

@ -1,14 +1,7 @@ @@ -1,14 +1,7 @@
/**
* Built-in faux spells use the same NoteList path as kind-777 REQ spells.
*/
import {
DEFAULT_FAVORITE_RELAYS,
ExtendedKind,
FAST_READ_RELAY_URLS,
FAST_WRITE_RELAY_URLS,
PROFILE_FEED_KINDS,
READ_ONLY_RELAY_URLS
} from '@/constants'
import { ExtendedKind, PROFILE_FEED_KINDS, READ_ONLY_RELAY_URLS } from '@/constants'
import {
extractHashtagsFromContent,
extractTTagsFromEvent,
@ -16,36 +9,13 @@ import { @@ -16,36 +9,13 @@ import {
} from '@/lib/discussion-topics'
import { getImetaInfosFromEvent } from '@/lib/event'
import { normalizeUrl } from '@/lib/url'
import type { TFeedSubRequest, TRelayList } from '@/types'
import type { TFeedSubRequest } from '@/types'
import { type Event, type Filter, kinds } from 'nostr-tools'
const NOTIFICATION_LIMIT = 500
const DISCUSSION_LIMIT = 500
const MAX_BOOKMARK_IDS = 250
/**
* Spells Discussions uses NoteList subscribeTimeline one live REQ per relay.
* An uncapped merged relay list would open 80+ sockets and exhaust subscription slots;
* cap keeps first paint fast.
*/
const DISCUSSION_FAUX_SPELL_MAX_RELAYS = 10
/** Without caps, a long NIP-66 read list consumes the whole 32 slots and fast public relays never get a REQ — discussions stay empty while notifications still work (they blend fast reads). */
const DISCUSSION_SPELL_READ_CAP = 10
const DISCUSSION_SPELL_WRITE_CAP = 8
const DISCUSSION_SPELL_FAV_CAP = 8
/** Normalize URLs and drop duplicates / empty results, preserving first-seen order. */
function dedupe(urls: string[]): string[] {
  const seen = new Set<string>()
  const unique: string[] = []
  urls.forEach((url) => {
    const key = normalizeUrl(url) || url
    if (!key || seen.has(key)) return
    seen.add(key)
    unique.push(key)
  })
  return unique
}
/**
* Append {@link READ_ONLY_RELAY_URLS} (e.g. aggr) after the curated set so every faux REQ includes them unless blocked.
*/
@ -167,94 +137,6 @@ export function mediaSpellExtraShouldHideEvent(evt: Event): boolean { @@ -167,94 +137,6 @@ export function mediaSpellExtraShouldHideEvent(evt: Event): boolean {
return !isKind1MediaSpellEligible(evt)
}
/** Relays for “global” faux feeds (media, calendar): visible favorites or defaults. */
export function fauxFavoriteRelayUrls(favoriteRelays: string[], blockedRelays: string[]): string[] {
const blocked = new Set(blockedRelays.map((b) => normalizeUrl(b) || b))
const visible = favoriteRelays.filter((r) => {
const k = normalizeUrl(r) || r
return k && !blocked.has(k)
})
const base = visible.length > 0 ? visible : DEFAULT_FAVORITE_RELAYS
const curated = dedupe(base.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[])
return appendCuratedReadOnlyRelays(curated, blockedRelays)
}
/**
* Notifications / bookmarks faux spells: **fast public relays first**, then inbox/favorites.
* `FAST_READ_RELAY_URLS` has 7 entries; the old cap of 6 never subscribed to `wss://aggr.nostr.land`
 * (last in the list) — a major `#p` indexer — so mentions could take tens of seconds or look empty.
* Fast-write relays catch mentions replicated to outboxes (damus/primal/nos.lol) with little overlap.
*/
const NOTIFICATION_PRIMARY_MAX = 4
/** Must be ≥ FAST_READ length so every default fast read relay is eligible (currently 7). */
const NOTIFICATION_FAST_READ_MAX = 10
const NOTIFICATION_FAST_WRITE_MAX = 4
const NOTIFICATION_RELAY_CAP = 14
/** Up to `max` unique, non-blocked, normalized relay URLs, preserving input order. */
function relayUrlsUpToUnblocked(urls: string[], blocked: Set<string>, max: number): string[] {
  const seen = new Set<string>()
  const picked: string[] = []
  for (const url of urls) {
    const key = normalizeUrl(url) || url
    if (!key || blocked.has(key) || seen.has(key)) continue
    seen.add(key)
    picked.push(key)
    if (picked.length >= max) break
  }
  return picked
}
/** Concatenate lists in priority order, dropping blocked/duplicate URLs; stops once `cap` entries are collected. */
function mergeRelayListsUnique(
  lists: string[][],
  blocked: Set<string>,
  cap: number
): string[] {
  const seen = new Set<string>()
  const merged: string[] = []
  for (const list of lists) {
    for (const url of list) {
      const key = normalizeUrl(url) || url
      if (!key || blocked.has(key) || seen.has(key)) continue
      seen.add(key)
      merged.push(key)
      if (merged.length >= cap) return merged
    }
  }
  return merged
}
export function notificationRelayUrls(
  relayList: TRelayList | null | undefined,
  favoriteRelays: string[],
  blockedRelays: string[] = []
): string[] {
  const blocked = new Set(blockedRelays.map((url) => normalizeUrl(url) || url))
  // Normalize + sort so provider array order cannot change which relays are picked.
  const normalizeAndSort = (urls: string[]) =>
    urls
      .map((url) => normalizeUrl(url) || url)
      .filter(Boolean)
      .sort((a, b) => a.localeCompare(b))
  const read = relayList?.read ?? []
  const readSorted = normalizeAndSort([...read])
  const favSorted = normalizeAndSort([...favoriteRelays])
  // Primary tier: the user's NIP-65 read relays when present, else favorites, else nothing.
  let primary: string[] = []
  if (read.length > 0) {
    primary = relayUrlsUpToUnblocked(readSorted, blocked, NOTIFICATION_PRIMARY_MAX)
  } else if (favoriteRelays.length > 0) {
    primary = relayUrlsUpToUnblocked(favSorted, blocked, NOTIFICATION_PRIMARY_MAX)
  }
  const fromFastRead = relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_FAST_READ_MAX)
  const fromFastWrite = relayUrlsUpToUnblocked(FAST_WRITE_RELAY_URLS, blocked, NOTIFICATION_FAST_WRITE_MAX)
  // Fast public relays first (see the JSDoc above) so mentions show up quickly.
  const merged = mergeRelayListsUnique(
    [fromFastRead, fromFastWrite, primary],
    blocked,
    NOTIFICATION_RELAY_CAP
  )
  if (merged.length > 0) return appendCuratedReadOnlyRelays(merged, blockedRelays)
  // Everything was filtered out: fall back to the fast-read defaults alone.
  return appendCuratedReadOnlyRelays(
    relayUrlsUpToUnblocked(FAST_READ_RELAY_URLS, blocked, NOTIFICATION_RELAY_CAP),
    blockedRelays
  )
}
/** Notifications spell: same kind set as profile-style feeds, restricted to `#p` = you on the relay. */
export function buildMentionsSpellFilter(pubkey: string): Filter {
return {
@ -264,45 +146,6 @@ export function buildMentionsSpellFilter(pubkey: string): Filter { @@ -264,45 +146,6 @@ export function buildMentionsSpellFilter(pubkey: string): Filter {
}
}
/**
* Relay set for Spells Discussions (kind 11), capped for subscription-based loading
* (see DISCUSSION_FAUX_SPELL_MAX_RELAYS).
*/
/**
* Deterministic relay pick: each tier (read / write / fav / fast) is normalized + sorted so NostrProvider
* array order and NIP-66 ref churn do not change which 32 relays we REQ (prevents subscription identity thrash).
*/
export function discussionRelayUrls(
  relayList: TRelayList | null | undefined,
  favoriteRelays: string[],
  blockedRelays: string[]
): string[] {
  const blocked = new Set(blockedRelays.map((url) => normalizeUrl(url) || url))
  // Each tier is normalized, deduped, unblocked and sorted so array order / ref churn cannot change the pick.
  const tier = (urls: string[]): string[] => {
    const normalized = urls.map((url) => normalizeUrl(url) || url).filter(Boolean)
    return [...new Set(normalized)]
      .filter((key) => !blocked.has(key))
      .sort((a, b) => a.localeCompare(b))
  }
  const curated = mergeRelayListsUnique(
    [
      tier(relayList?.read ?? []).slice(0, DISCUSSION_SPELL_READ_CAP),
      tier(relayList?.write ?? []).slice(0, DISCUSSION_SPELL_WRITE_CAP),
      tier(favoriteRelays).slice(0, DISCUSSION_SPELL_FAV_CAP),
      tier([...FAST_READ_RELAY_URLS]),
      tier([...FAST_WRITE_RELAY_URLS])
    ],
    blocked,
    DISCUSSION_FAUX_SPELL_MAX_RELAYS
  )
  return appendCuratedReadOnlyRelays(curated, blockedRelays)
}
export function buildDiscussionFilter(): Filter {
return {
kinds: [ExtendedKind.DISCUSSION],
@ -321,21 +164,6 @@ export function buildCalendarSpellFilter(): Filter { @@ -321,21 +164,6 @@ export function buildCalendarSpellFilter(): Filter {
}
}
const FOLLOW_PACK_LIMIT = 100
/** Kind 39089 follow/starter packs from fast read relays (same scope as the old Follow Packs page). */
export function buildFollowPacksSubRequests(): TFeedSubRequest[] {
  const curated: string[] = []
  for (const url of FAST_READ_RELAY_URLS) {
    const key = normalizeUrl(url) || url
    if (key) curated.push(key)
  }
  if (curated.length === 0) return []
  const urls = appendCuratedReadOnlyRelays(curated, [])
  return [
    {
      urls,
      filter: { kinds: [ExtendedKind.FOLLOW_PACK], limit: FOLLOW_PACK_LIMIT }
    }
  ]
}
/** One subrequest per topic (OR). Uses same kind set as the main profile/favorites feed. */
export function buildInterestsSubRequests(
relayUrls: string[],

129
src/pages/primary/SpellsPage/index.tsx

@ -38,10 +38,14 @@ import { @@ -38,10 +38,14 @@ import {
FAUX_SPELL_ORDER,
FIRST_RELAY_RESULT_GRACE_MS,
PROFILE_FEED_KINDS,
SPELL_FEED_FIRST_RELAY_GRACE_MS
SPELL_FEED_LOADING_MAX_MS
} from '@/constants'
import { isUserInEventMentions } from '@/lib/event'
import { formatPubkey } from '@/lib/pubkey'
import {
augmentSubRequestsWithFavoritesFastReadAndInbox,
getRelayUrlsWithFavoritesFastReadAndInbox
} from '@/lib/favorites-feed-relays'
import { computeSpellSubRequestsIdentityKey } from '@/lib/spell-feed-request-identity'
import { normalizeUrl } from '@/lib/url'
import {
@ -86,15 +90,11 @@ import { @@ -86,15 +90,11 @@ import {
buildBookmarksSubRequests,
buildCalendarSpellFilter,
buildDiscussionFilter,
buildFollowPacksSubRequests,
buildInterestsSubRequests,
buildMediaSpellFilter,
buildMentionsSpellFilter,
discussionRelayUrls,
fauxFavoriteRelayUrls,
MEDIA_SPELL_SHOW_KINDS,
mediaSpellExtraShouldHideEvent,
notificationRelayUrls
mediaSpellExtraShouldHideEvent
} from './fauxSpellFeeds'
import type { TPageRef } from '@/types'
@ -370,6 +370,22 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -370,6 +370,22 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
return JSON.stringify(normalizedWriteSorted)
}, [relayMailboxStableKey])
/** Order-independent favorites/blocked — array order from providers must not rebuild subs. */
const sortedFavoriteRelaysKey = useMemo(
() =>
JSON.stringify(
[...favoriteRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
),
[favoriteRelays]
)
const sortedBlockedRelaysKey = useMemo(
() =>
JSON.stringify(
[...blockedRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
),
[blockedRelays]
)
useEffect(() => {
loadSpells()
}, [loadSpells])
@ -378,8 +394,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -378,8 +394,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const contactsSyncKey = useMemo(() => [...contacts].sort().join(','), [contacts])
/**
* After showing the cache, pull kind 777 from merged mailbox (10002 + 10432) read/write + fast read.
* Deps use `relayMailboxStableKey` only not NIP-66 `originalRelays` so discovery merges dont restart this sub.
* After showing the cache, pull kind 777 using the same relay set as the favorites feed.
*/
useEffect(() => {
if (!pubkey) {
@ -396,7 +411,11 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -396,7 +411,11 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
if (!cancelled) void loadSpells()
}, 120)
}
const urls = getRelaysForSpellCatalogSync(relayList ?? undefined)
const urls = getRelaysForSpellCatalogSync(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
const catalogAuthors = buildSpellCatalogAuthors(pubkey, contacts)
const authorAllowlist = new Set(catalogAuthors)
const filter = {
@ -421,6 +440,10 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -421,6 +440,10 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
}
}
/** Defer catalog REQ so faux/kind-777 feed opens sockets and paints first. */
const catalogDelayMs = 800
const delayId = window.setTimeout(() => {
if (cancelled) return
void (async () => {
try {
setSpellsCatalogSyncing(true)
@ -477,8 +500,6 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -477,8 +500,6 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
onNew: () => {} // Not needed
},
{
useCache: true,
omitDefaultSinceWhenUseCache: true,
firstRelayResultGraceMs: FIRST_RELAY_RESULT_GRACE_MS
}
)
@ -493,9 +514,11 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -493,9 +514,11 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
if (!cancelled) setSpellsCatalogSyncing(false)
}
})()
}, catalogDelayMs)
return () => {
cancelled = true
window.clearTimeout(delayId)
clearAfterFirstBatchTimer()
if (loadSpellsDebounce != null) clearTimeout(loadSpellsDebounce)
window.clearTimeout(syncTimeout)
@ -503,7 +526,15 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -503,7 +526,15 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
spellCatalogCloserRef.current = null
setSpellsCatalogSyncing(false)
}
}, [pubkey, relayMailboxStableKey, loadSpells, contactsSyncKey, spellCatalogManualRefreshKey])
}, [
pubkey,
sortedFavoriteRelaysKey,
sortedBlockedRelaysKey,
relayMailboxStableKey,
loadSpells,
contactsSyncKey,
spellCatalogManualRefreshKey
])
useEffect(() => {
if (!pubkey) {
@ -513,14 +544,6 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -513,14 +544,6 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
client.fetchFollowings(pubkey).then(setContacts).catch(() => setContacts([]))
}, [pubkey])
/** Order-independent favorites/blocked — array order from providers must not rebuild faux subs. */
const sortedFavoriteRelaysKey = JSON.stringify(
[...favoriteRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
)
const sortedBlockedRelaysKey = JSON.stringify(
[...blockedRelays].map((u) => normalizeUrl(u) || u).filter(Boolean).sort((a, b) => a.localeCompare(b))
)
useEffect(() => {
if (selectedFauxSpell !== 'following' || !pubkey) {
setFollowingSubRequests([])
@ -533,7 +556,13 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -533,7 +556,13 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
try {
const followings = await client.fetchFollowings(pubkey)
const req = await client.generateSubRequestsForPubkeys([pubkey, ...followings], pubkey)
const withReadOnly = req.map((r) => ({
const merged = augmentSubRequestsWithFavoritesFastReadAndInbox(
req,
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
const withReadOnly = merged.map((r) => ({
...r,
urls: appendCuratedReadOnlyRelays(r.urls, blockedRelays)
}))
@ -547,7 +576,13 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -547,7 +576,13 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
return () => {
cancelled = true
}
}, [selectedFauxSpell, pubkey, sortedBlockedRelaysKey])
}, [
selectedFauxSpell,
pubkey,
sortedFavoriteRelaysKey,
sortedBlockedRelaysKey,
relayMailboxStableKey
])
const interestTagsStableKey = interestListEvent
? JSON.stringify(
@ -574,45 +609,49 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -574,45 +609,49 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const syncFauxSubRequests = useMemo<TFeedSubRequest[]>(() => {
if (!selectedFauxSpell || selectedFauxSpell === 'following') return []
const feedUrls = getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
if (selectedFauxSpell === 'notifications') {
if (!pubkey) return []
const urls = notificationRelayUrls(relayList, favoriteRelays, blockedRelays)
if (!urls.length) return []
return [{ urls, filter: buildMentionsSpellFilter(pubkey) }]
if (!pubkey || !feedUrls.length) return []
return [{ urls: feedUrls, filter: buildMentionsSpellFilter(pubkey) }]
}
if (selectedFauxSpell === 'discussions') {
const urls = discussionRelayUrls(relayList, favoriteRelays, blockedRelays)
if (!urls.length) return []
return [{ urls, filter: buildDiscussionFilter() }]
if (!feedUrls.length) return []
return [{ urls: feedUrls, filter: buildDiscussionFilter() }]
}
if (selectedFauxSpell === 'media') {
const urls = fauxFavoriteRelayUrls(favoriteRelays, blockedRelays)
if (!urls.length) return []
return [{ urls, filter: buildMediaSpellFilter() }]
if (!feedUrls.length) return []
return [{ urls: feedUrls, filter: buildMediaSpellFilter() }]
}
if (selectedFauxSpell === 'calendar') {
const urls = fauxFavoriteRelayUrls(favoriteRelays, blockedRelays)
if (!urls.length) return []
return [{ urls, filter: buildCalendarSpellFilter() }]
if (!feedUrls.length) return []
return [{ urls: feedUrls, filter: buildCalendarSpellFilter() }]
}
if (selectedFauxSpell === 'interests') {
if (!pubkey || !interestListEvent) return []
const topics = interestListEvent.tags.filter((tag) => tag[0] === 't' && tag[1]).map((tag) => tag[1]!)
const urls = fauxFavoriteRelayUrls(favoriteRelays, blockedRelays)
return buildInterestsSubRequests(urls, topics, PROFILE_FEED_KINDS)
return buildInterestsSubRequests(feedUrls, topics, PROFILE_FEED_KINDS)
}
if (selectedFauxSpell === 'bookmarks') {
if (!pubkey) return []
const urls = notificationRelayUrls(relayList, favoriteRelays, blockedRelays)
return buildBookmarksSubRequests(bookmarkListEvent, urls)
return buildBookmarksSubRequests(bookmarkListEvent, feedUrls)
}
if (selectedFauxSpell === 'followPacks') {
return buildFollowPacksSubRequests()
const urls = appendCuratedReadOnlyRelays(feedUrls, blockedRelays)
if (!urls.length) return []
return [
{
urls,
filter: { kinds: [ExtendedKind.FOLLOW_PACK], limit: 100 }
}
]
}
return []
// relayMailboxStableKey: read/write only — do not tie faux feeds to originalRelays (NIP-66 churn).
}, [selectedFauxSpell, pubkey, relayMailboxStableKey, fauxFeedRelaysDepsKey])
}, [selectedFauxSpell, pubkey, fauxFeedRelaysDepsKey, relayMailboxStableKey])
const fauxSubRequests = useMemo<TFeedSubRequest[]>(() => {
if (selectedFauxSpell === 'following') return followingSubRequests
@ -1229,8 +1268,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -1229,8 +1268,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
subRequests={subRequests}
feedSubscriptionKey={spellFeedSubscriptionKey}
showKinds={showKinds}
useTimelineCacheBootstrap
spellFetchTimeoutMs={SPELL_FEED_FIRST_RELAY_GRACE_MS}
spellFetchTimeoutMs={SPELL_FEED_LOADING_MAX_MS}
spellFeedInstrumentToken={spellFeedInstrumentToken}
onSpellFeedFirstPaint={handleSpellFeedFirstPaint}
useFilterAsIs={fauxNoteListUseFilterAsIs}
@ -1258,8 +1296,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -1258,8 +1296,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
subRequests={subRequests}
feedSubscriptionKey={spellFeedSubscriptionKey}
showKinds={showKinds}
useTimelineCacheBootstrap
spellFetchTimeoutMs={SPELL_FEED_FIRST_RELAY_GRACE_MS}
spellFetchTimeoutMs={SPELL_FEED_LOADING_MAX_MS}
spellFeedInstrumentToken={spellFeedInstrumentToken}
onSpellFeedFirstPaint={handleSpellFeedFirstPaint}
useFilterAsIs

59
src/pages/secondary/NoteListPage/index.tsx

@ -3,12 +3,18 @@ import type { TNoteListRef } from '@/components/NoteList' @@ -3,12 +3,18 @@ import type { TNoteListRef } from '@/components/NoteList'
import NormalFeed from '@/components/NormalFeed'
import { RefreshButton } from '@/components/RefreshButton'
import { Button } from '@/components/ui/button'
import { FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants'
import { SEARCHABLE_RELAY_URLS } from '@/constants'
import {
augmentSubRequestsWithFavoritesFastReadAndInbox,
getRelayUrlsWithFavoritesFastReadAndInbox,
mergeRelayUrlLayers
} from '@/lib/favorites-feed-relays'
import { normalizeUrl } from '@/lib/url'
import SecondaryPageLayout from '@/layouts/SecondaryPageLayout'
import { toProfileList } from '@/lib/link'
import { fetchPubkeysFromDomain, getWellKnownNip05Url } from '@/lib/nip05'
import { usePrimaryNoteView, useSecondaryPage } from '@/PageManager'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider'
import { useInterestList } from '@/providers/InterestListProvider'
import client from '@/services/client.service'
@ -29,6 +35,7 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -29,6 +35,7 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
const bumpFeed = useCallback(() => feedRef.current?.refresh(), [])
const { push } = useSecondaryPage()
const { relayList, pubkey } = useNostr()
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const { isSubscribed, subscribe } = useInterestList()
const [title, setTitle] = useState<React.ReactNode>(null)
const [controls, setControls] = useState<React.ReactNode>(null)
@ -84,7 +91,11 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -84,7 +91,11 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
setSubRequests([
{
filter: { '#t': [hashtag], ...(kinds.length > 0 ? { kinds } : {}) },
urls: FAST_READ_RELAY_URLS
urls: getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
}
])
// Set controls for hashtag subscribe button - check subscription status
@ -122,10 +133,17 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -122,10 +133,17 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
setSubRequests([
{
filter: { '#I': [externalContentId], ...(kinds.length > 0 ? { kinds } : {}) },
urls: Array.from(new Set([
...FAST_READ_RELAY_URLS.map(url => normalizeUrl(url) || url),
...(relayList?.write || []).map(url => normalizeUrl(url) || url)
]))
urls: mergeRelayUrlLayers(
[
getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
),
(relayList?.write || []).map((url) => normalizeUrl(url) || url).filter(Boolean) as string[]
],
blockedRelays
)
}
])
return
@ -149,7 +167,15 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -149,7 +167,15 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
domain
})
if (pubkeys.length) {
setSubRequests(await client.generateSubRequestsForPubkeys(pubkeys, pubkey))
const raw = await client.generateSubRequestsForPubkeys(pubkeys, pubkey)
setSubRequests(
augmentSubRequestsWithFavoritesFastReadAndInbox(
raw,
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
)
setControls(
<Button
variant="ghost"
@ -181,7 +207,11 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -181,7 +207,11 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
setSubRequests([
{
filter,
urls: FAST_READ_RELAY_URLS
urls: getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
}
])
}
@ -191,7 +221,18 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -191,7 +221,18 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
// Advanced search parameters removed
// Note: Only hashtag (t=) and kind (k=) URL parameters are supported
// Date searches, pubkey filters, and event filters removed - not supported
}, [pubkey, relayList, handleSubscribeHashtag, push, t, isSubscribed, subscribe, client])
}, [
pubkey,
relayList,
favoriteRelays,
blockedRelays,
handleSubscribeHashtag,
push,
t,
isSubscribed,
subscribe,
client
])
// Initialize on mount
useEffect(() => {

22
src/providers/FeedProvider.tsx

@ -1,4 +1,5 @@ @@ -1,4 +1,5 @@
import { DEFAULT_FAVORITE_RELAYS } from '@/constants'
import { getFavoritesFeedRelayUrls } from '@/lib/favorites-feed-relays'
import { getRelaySetFromEvent } from '@/lib/event-metadata'
import logger from '@/lib/logger'
import { isWebsocketUrl, normalizeUrl } from '@/lib/url'
@ -113,13 +114,8 @@ export function FeedProvider({ children }: { children: React.ReactNode }) { @@ -113,13 +114,8 @@ export function FeedProvider({ children }: { children: React.ReactNode }) {
return
}
if (feedType === 'all-favorites') {
// Filter out blocked relays
const visibleRelays = favoriteRelays.filter(relay => !blockedRelays.includes(relay))
// If no visible relays, fall back to default favorite relays
const finalRelays = visibleRelays.length > 0 ? visibleRelays : DEFAULT_FAVORITE_RELAYS
logger.debug('Switching to all-favorites, favoriteRelays:', visibleRelays, 'finalRelays:', finalRelays)
const finalRelays = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
logger.debug('Switching to all-favorites, finalRelays:', finalRelays)
const newFeedInfo = { feedType }
setFeedInfo(newFeedInfo)
feedInfoRef.current = newFeedInfo
@ -163,18 +159,17 @@ export function FeedProvider({ children }: { children: React.ReactNode }) { @@ -163,18 +159,17 @@ export function FeedProvider({ children }: { children: React.ReactNode }) {
logger.debug('FeedProvider: favoriteRelays is empty, using defaults')
}
// Get first visible (non-blocked) favorite relay as default
const visibleRelays = favoriteRelays.filter(relay => !blockedRelays.includes(relay))
const favoritesFeedRelays = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
let feedInfo: TFeedInfo = {
feedType: 'relay',
id: visibleRelays[0] ?? DEFAULT_FAVORITE_RELAYS[0]
id: favoritesFeedRelays[0] ?? DEFAULT_FAVORITE_RELAYS[0]
}
// Ensure we always have a valid relay ID
if (!feedInfo.id) {
feedInfo.id = DEFAULT_FAVORITE_RELAYS[0]
}
logger.debug('Initial feedInfo setup:', { visibleRelays, favoriteRelays, blockedRelays, feedInfo })
logger.debug('Initial feedInfo setup:', { favoritesFeedRelays, favoriteRelays, blockedRelays, feedInfo })
if (pubkey) {
const storedFeedInfo = storage.getFeedInfo(pubkey)
@ -192,7 +187,7 @@ export function FeedProvider({ children }: { children: React.ReactNode }) { @@ -192,7 +187,7 @@ export function FeedProvider({ children }: { children: React.ReactNode }) {
// Check if the stored relay is blocked, if so use first visible relay instead
if (feedInfo.id && blockedRelays.includes(feedInfo.id)) {
logger.component('FeedProvider', 'Stored relay is blocked, using first visible relay instead')
feedInfo.id = visibleRelays[0] ?? DEFAULT_FAVORITE_RELAYS[0]
feedInfo.id = favoritesFeedRelays[0] ?? DEFAULT_FAVORITE_RELAYS[0]
}
logger.component('FeedProvider', 'Initial relay setup, calling switchFeed', { relayId: feedInfo.id })
return await switchFeed('relay', { relay: feedInfo.id })
@ -219,8 +214,7 @@ export function FeedProvider({ children }: { children: React.ReactNode }) { @@ -219,8 +214,7 @@ export function FeedProvider({ children }: { children: React.ReactNode }) {
// Update relay URLs when favoriteRelays change and we're in all-favorites mode
useEffect(() => {
if (feedInfo.feedType !== 'all-favorites') return
const visibleRelays = favoriteRelays.filter((relay) => !blockedRelays.includes(relay))
const finalRelays = visibleRelays.length > 0 ? visibleRelays : DEFAULT_FAVORITE_RELAYS
const finalRelays = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
logger.debug('Updating relay URLs for all-favorites:', finalRelays)
setRelayUrls(finalRelays)
}, [feedInfo.feedType, favoriteRelays, blockedRelays])

140
src/services/client-replaceable-events.service.ts

@ -436,8 +436,8 @@ export class ReplaceableEventService { @@ -436,8 +436,8 @@ export class ReplaceableEventService {
// For metadata with a logged-in user, merge defaults with {@link buildComprehensiveRelayList}: inboxes (read),
// local/cache relays (10432), favorite relays (10012), plus profile + fast read — same idea as favorites feed
// / inbox-scoped discovery without per-author relay list fetches.
// Following's Favorites (Explore): kind 10012 batch uses PROFILE_FETCH_RELAY_URLS + viewer's own relays only
// (no FAST_READ), so outbox data is queried where the user actually reads + profile-index relays.
// Following's Favorites (Explore): kind 10012 batch uses {@link buildExploreProfileAndUserRelayList}
// (profile + FAST_READ + viewer read/write/local when logged in).
let relayUrls: string[]
if (kind === kinds.Metadata) {
const userPk = client.pubkey
@ -737,7 +737,7 @@ export class ReplaceableEventService { @@ -737,7 +737,7 @@ export class ReplaceableEventService {
const profileEvent = await this.fetchReplaceableEvent(pubkey, kinds.Metadata, undefined, [])
if (profileEvent) {
logger.debug('[ReplaceableEventService] Profile found with relay hints + default relays', {
logger.debug('[ReplaceableEventService] Profile found via cache / default relays (DataLoader)', {
pubkey,
eventId: profileEvent.id
})
@ -745,35 +745,22 @@ export class ReplaceableEventService { @@ -745,35 +745,22 @@ export class ReplaceableEventService {
return profileEvent
}
// Step 2: Only fetch author's relay list as fallback if we have relay hints from bech32
// This prevents creating many individual subscriptions when profiles aren't found
// If we have relay hints, it's worth trying author relays. Otherwise, Step 1 should be sufficient.
if (relayHints.length > 0) {
logger.debug('[ReplaceableEventService] Step 2: Profile not found, but we have relay hints - fetching author relay list as fallback', {
// Step 2: Only after cache + default relays miss — NIP-65 relay list (timeout-capped), then hints + outbox/inbox + defaults.
logger.debug('[ReplaceableEventService] Step 2: Fetching author relay list as fallback', {
pubkey,
relayHintCount: relayHints.length
})
let authorRelayList: { read?: string[]; write?: string[] } | null = null
try {
const relayListStartTime = Date.now()
// Add timeout to prevent hanging - 2 seconds max
const relayListPromise = client.fetchRelayList(pubkey)
const timeoutPromise = new Promise<null>((resolve) => {
setTimeout(() => {
logger.warn('[ReplaceableEventService] fetchRelayList timeout, giving up', {
pubkey
})
logger.warn('[ReplaceableEventService] fetchRelayList timeout, giving up', { pubkey })
resolve(null)
}, 2000)
})
authorRelayList = await Promise.race([relayListPromise, timeoutPromise])
const relayListTime = Date.now() - relayListStartTime
logger.debug('[ReplaceableEventService] Author relay list fetched', {
pubkey,
hasRelayList: !!authorRelayList,
fetchTime: `${relayListTime}ms`
})
} catch (error) {
logger.error('[ReplaceableEventService] Failed to fetch author relay list', {
pubkey,
@ -781,96 +768,75 @@ export class ReplaceableEventService { @@ -781,96 +768,75 @@ export class ReplaceableEventService {
})
}
// Step 3: Try with relay hints + author's relays if we got them
// CRITICAL: Always include relay hints first (highest priority), then author relays, then defaults
if (authorRelayList) {
const authorRelays = [
const authorRelays = authorRelayList
? [
...(authorRelayList.write || []).slice(0, 10),
...(authorRelayList.read || []).slice(0, 10)
]
// Relay hints first (highest priority), then author relays, then defaults
const allRelays = [...new Set([
...relayHints, // Relay hints from bech32 (highest priority)
...authorRelays, // Author's relays
...PROFILE_FETCH_RELAY_URLS, // Default profile relays
...FAST_READ_RELAY_URLS // Fast read relays
])]
logger.debug('[ReplaceableEventService] Step 3: Trying with relay hints + author relays', {
pubkey,
relayHintCount: relayHints.length,
authorRelayCount: authorRelays.length,
totalRelayCount: allRelays.length
})
: []
const expandedRelays = [
...new Set([
...relayHints,
...authorRelays,
...PROFILE_FETCH_RELAY_URLS,
...FAST_READ_RELAY_URLS
])
]
// Use fetchReplaceableEvent with relay hints + author's relays
const profileEventFromAuthorRelays = await this.fetchReplaceableEvent(
const profileFromExpanded = await this.fetchReplaceableEvent(
pubkey,
kinds.Metadata,
undefined,
allRelays
expandedRelays
)
if (profileEventFromAuthorRelays) {
logger.debug('[ReplaceableEventService] Profile found with relay hints + author relays', {
if (profileFromExpanded) {
logger.debug('[ReplaceableEventService] Profile found after relay-list fallback', {
pubkey,
eventId: profileEventFromAuthorRelays.id
})
await this.indexProfile(profileEventFromAuthorRelays)
return profileEventFromAuthorRelays
}
}
} else {
// No relay hints - Step 1 with default relays should be sufficient
// Skip Step 2/3 to avoid creating individual subscriptions
logger.debug('[ReplaceableEventService] Profile not found, but no relay hints - skipping author relay fallback to avoid individual subscriptions', {
pubkey
eventId: profileFromExpanded.id
})
await this.indexProfile(profileFromExpanded)
return profileFromExpanded
}
// Step 3: Comprehensive search across ALL available relays before giving up
// OPTIMIZATION: Skip comprehensive search for batch profile fetches (when called from DataLoader)
// Comprehensive search is expensive (10s timeout) and should only be used for individual profile fetches
// when user explicitly navigates to a profile page. For feed rendering, missing profiles are acceptable.
// Only run comprehensive search if we have relay hints (suggesting user intent to find this specific profile)
if (relayHints.length > 0) {
logger.debug('[ReplaceableEventService] Step 3: Profile not found, trying comprehensive relay list (all available relays)', {
pubkey,
hasRelayHints: relayHints.length > 0
})
// Step 3: Last resort — broad relay query (timeout-bounded in query layer)
logger.debug('[ReplaceableEventService] Step 3: Comprehensive relay query (last resort)', { pubkey })
try {
const userPubkey = client.pubkey
const comprehensiveRelays = await buildComprehensiveRelayList({
authorPubkey: pubkey,
userPubkey: userPubkey || undefined,
relayHints: relayHints.length > 0 ? relayHints : undefined,
includeUserOwnRelays: true, // Include user's read/write relays
includeFavoriteRelays: true, // Include user's favorite relays (kind 10012)
includeProfileFetchRelays: true, // Include PROFILE_FETCH_RELAY_URLS
includeFastReadRelays: true, // Include FAST_READ_RELAY_URLS
includeFastWriteRelays: true, // Include FAST_WRITE_RELAY_URLS
includeSearchableRelays: true, // Include SEARCHABLE_RELAY_URLS
includeLocalRelays: true // Include local/cache relays
includeUserOwnRelays: true,
includeFavoriteRelays: true,
includeProfileFetchRelays: true,
includeFastReadRelays: true,
includeFastWriteRelays: true,
includeSearchableRelays: true,
includeLocalRelays: true
})
logger.debug('[ReplaceableEventService] Comprehensive relay list built', {
pubkey,
relayCount: comprehensiveRelays.length,
relays: comprehensiveRelays.slice(0, 10) // Log first 10 for debugging
relays: comprehensiveRelays.slice(0, 10)
})
if (comprehensiveRelays.length > 0) {
// Query the comprehensive relay list with reduced timeout for faster failure
const startTime = Date.now()
const events = await this.queryService.query(comprehensiveRelays, {
const events = await this.queryService.query(
comprehensiveRelays,
{
authors: [pubkey],
kinds: [kinds.Metadata]
}, undefined, {
},
undefined,
{
replaceableRace: true,
eoseTimeout: 300, // Reduced from 500ms
globalTimeout: 5000 // Reduced from 10000ms to prevent 10s waits
})
eoseTimeout: 300,
globalTimeout: 5000
}
)
const queryTime = Date.now() - startTime
logger.debug('[ReplaceableEventService] Comprehensive search completed', {
@ -882,13 +848,13 @@ export class ReplaceableEventService { @@ -882,13 +848,13 @@ export class ReplaceableEventService {
if (events.length > 0) {
const sortedEvents = events.sort((a, b) => b.created_at - a.created_at)
const profileEvent = sortedEvents[0]
const found = sortedEvents[0]!
logger.debug('[ReplaceableEventService] Profile found via comprehensive search', {
pubkey,
eventId: profileEvent.id
eventId: found.id
})
await this.indexProfile(profileEvent)
return profileEvent
await this.indexProfile(found)
return found
}
}
} catch (error) {
@ -896,15 +862,9 @@ export class ReplaceableEventService { @@ -896,15 +862,9 @@ export class ReplaceableEventService {
pubkey,
error: error instanceof Error ? error.message : String(error)
})
// Continue to return undefined below
}
} else {
logger.debug('[ReplaceableEventService] Skipping comprehensive search (no relay hints, likely batch fetch)', {
pubkey
})
}
logger.warn('[ReplaceableEventService] Profile not found after trying all relays (including comprehensive search)', {
logger.warn('[ReplaceableEventService] Profile not found after cache, relay-list fallback, and comprehensive search', {
pubkey,
triedRelayHints: relayHints.length > 0
})

351
src/services/client.service.ts

@ -49,19 +49,16 @@ import { AbstractRelay } from 'nostr-tools/abstract-relay' @@ -49,19 +49,16 @@ import { AbstractRelay } from 'nostr-tools/abstract-relay'
import indexedDb from './indexed-db.service'
import nip66Service from './nip66.service'
import { QueryService } from './client-query.service'
/** Live timeline REQ: dead relays fail fast; EOSE caps “connected but silent” relays. */
const SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS = 2800
const SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS = 4800
import { EventService } from './client-events.service'
import { ReplaceableEventService } from './client-replaceable-events.service'
import { MacroService, createBookstrService } from './client-macro.service'
type TTimelineRef = [string, number]
/**
* Timeline bootstrap used to await up to `filter.limit` IndexedDB reads before opening a live REQ,
* which blocked first paint for many seconds. We only prefetch this many newest refs; the subscription
* streams the rest immediately.
*/
const TIMELINE_CACHE_PREFETCH_CAP = 48
class ClientService extends EventTarget {
static instance: ClientService
@ -861,18 +858,13 @@ class ClientService extends EventTarget { @@ -861,18 +858,13 @@ class ClientService extends EventTarget {
{
startLogin,
needSort = true,
useCache = false,
omitDefaultSinceWhenUseCache = false,
firstRelayResultGraceMs = FIRST_RELAY_RESULT_GRACE_MS
}: {
startLogin?: () => void
needSort?: boolean
useCache?: boolean
/** When useCache is true but there are no timeline refs yet, skip the default 24h `since` so REQ stays unbounded (spell feeds / catalog). */
omitDefaultSinceWhenUseCache?: boolean
/**
* After the first live event before EOSE, wait this long then treat initial load as EOSE (query-style finalize).
* Spells pass {@link FIRST_RELAY_RESULT_GRACE_MS} explicitly; feeds may override.
* Ignored by {@link ClientService.subscribeTimeline} (kept for compatibility). Initial completion is
* aggregate relay EOSE only; per-event results stream via `onEvents` without faking EOSE.
*/
firstRelayResultGraceMs?: number
} = {}
@ -882,31 +874,38 @@ class ClientService extends EventTarget { @@ -882,31 +874,38 @@ class ClientService extends EventTarget {
let eventIdSet = new Set<string>()
let events: NEvent[] = []
let eosedCount = 0
/** One merged buffer — slice using the largest child `limit` so a later child with a smaller limit cannot drop other relays’ events. */
const mergedTimelineLimit = Math.max(
500,
...subRequests.map(({ filter }) =>
typeof filter.limit === 'number' && filter.limit > 0 ? filter.limit : 0
)
)
/** First merged batch goes out synchronously so the list paints without waiting a frame. */
let outerMergedDelivered = false
/** One React update per animation frame after the first paint — limits setEvents/profile churn. */
let outerFlushRaf: number | null = null
const scheduleOuterFlush = () => {
let outerFlushQueued = false
let outerFlushBump = 0
const scheduleOuterFlush = (immediate = false) => {
const run = () => {
outerFlushQueued = false
const snapshot = events.length ? [...events] : []
const allEosed = eosedCount >= requestCount
if (!outerMergedDelivered && (snapshot.length > 0 || allEosed)) {
outerMergedDelivered = true
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
}
onEvents(snapshot, allEosed)
return
}
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
if (immediate || eosedCount >= requestCount || events.length <= 1) {
outerFlushBump++
outerFlushQueued = false
run()
return
}
outerFlushRaf = requestAnimationFrame(() => {
outerFlushRaf = null
onEvents(events.length ? [...events] : [], eosedCount >= requestCount)
if (!outerFlushQueued) {
outerFlushQueued = true
const b = outerFlushBump
queueMicrotask(() => {
if (b !== outerFlushBump) return
run()
})
}
}
const subs = await Promise.all(
subRequests.map(({ urls, filter }) => {
@ -924,10 +923,12 @@ class ClientService extends EventTarget { @@ -924,10 +923,12 @@ class ClientService extends EventTarget {
eventIdSet.add(evt.id)
events.push(evt)
})
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
events = events
.sort((a, b) => b.created_at - a.created_at)
.slice(0, mergedTimelineLimit)
eventIdSet = new Set(events.map((evt) => evt.id))
scheduleOuterFlush()
scheduleOuterFlush(!!_eosed)
},
onNew: (evt) => {
if (newEventIdSet.has(evt.id)) return
@ -936,7 +937,7 @@ class ClientService extends EventTarget { @@ -936,7 +937,7 @@ class ClientService extends EventTarget {
},
onClose
},
{ startLogin, needSort, useCache, omitDefaultSinceWhenUseCache, firstRelayResultGraceMs }
{ startLogin, needSort, firstRelayResultGraceMs }
)
})
)
@ -944,18 +945,8 @@ class ClientService extends EventTarget { @@ -944,18 +945,8 @@ class ClientService extends EventTarget {
const key = this.generateMultipleTimelinesKey(subRequests)
this.timelines[key] = subs.map((sub) => sub.timelineKey)
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
onEvents(events.length ? [...events] : [], eosedCount >= requestCount)
}
return {
closer: () => {
if (outerFlushRaf != null) {
cancelAnimationFrame(outerFlushRaf)
outerFlushRaf = null
}
onEvents = () => {}
onNew = () => {}
subs.forEach((sub) => {
@ -1102,7 +1093,7 @@ class ClientService extends EventTarget { @@ -1102,7 +1093,7 @@ class ClientService extends EventTarget {
await that.queryService.acquireSubSlot(relayKey)
let relay: AbstractRelay
try {
relay = await that.pool.ensureRelay(url, { connectionTimeout: 5000 })
relay = await that.pool.ensureRelay(url, { connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS })
} catch (err) {
that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
@ -1136,7 +1127,9 @@ class ClientService extends EventTarget { @@ -1136,7 +1127,9 @@ class ClientService extends EventTarget {
// resubscribe on a fresh connection from ensureRelay (fixes SendingOnClosedConnection).
let liveRelay: AbstractRelay
try {
liveRelay = await that.pool.ensureRelay(url, { connectionTimeout: 5000 })
liveRelay = await that.pool.ensureRelay(url, {
connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS
})
} catch (err) {
that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
@ -1159,7 +1152,7 @@ class ClientService extends EventTarget { @@ -1159,7 +1152,7 @@ class ClientService extends EventTarget {
handleClose(i, reason2)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: 10_000
eoseTimeout: SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS
})
subs.push({
relayKey,
@ -1184,7 +1177,7 @@ class ClientService extends EventTarget { @@ -1184,7 +1177,7 @@ class ClientService extends EventTarget {
handleClose(i, reason)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: 10_000
eoseTimeout: SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS
})
subs.push({
relayKey,
@ -1236,23 +1229,19 @@ class ClientService extends EventTarget { @@ -1236,23 +1229,19 @@ class ClientService extends EventTarget {
{
startLogin,
needSort = true,
useCache = false,
omitDefaultSinceWhenUseCache = false,
firstRelayResultGraceMs = FIRST_RELAY_RESULT_GRACE_MS
/** @deprecated No longer used; streaming does not fake EOSE (see flushStreamingSnapshot). Kept for call-site compatibility. */
firstRelayResultGraceMs: _unusedFirstRelayGraceMs = FIRST_RELAY_RESULT_GRACE_MS
}: {
startLogin?: () => void
needSort?: boolean
useCache?: boolean
omitDefaultSinceWhenUseCache?: boolean
firstRelayResultGraceMs?: number
} = {}
) {
void _unusedFirstRelayGraceMs
const relays = Array.from(new Set(urls))
const key = this.generateTimelineKey(relays, filter)
let timeline = this.timelines[key]
// CRITICAL FIX: Always initialize timeline object, even when useCache is false
// This ensures refs are always available for pagination tracking
if (!timeline || Array.isArray(timeline)) {
this.timelines[key] = {
refs: [],
@ -1262,206 +1251,82 @@ class ClientService extends EventTarget { @@ -1262,206 +1251,82 @@ class ClientService extends EventTarget {
timeline = this.timelines[key]
}
let cachedEvents: NEvent[] = []
let since: number | undefined
const oneDayAgo = dayjs().subtract(24, 'hours').unix()
// eslint-disable-next-line @typescript-eslint/no-this-alias
const that = this
let events: NEvent[] = []
let eosedAt: number | null = null
let initialBatchScheduled = false
let lastDeliveredCount = 0
let progressiveIntervalId: ReturnType<typeof setInterval> | null = null
let firstRelayResultGraceTimer: ReturnType<typeof setTimeout> | null = null
const PROGRESSIVE_INTERVAL_MS = 100 // Backup tick while relays stream without new onevent bursts
const MIN_NEW_EVENTS_AFTER_FIRST = 1
const mergeTimelineLiveAndCache = (): NEvent[] => {
const sortedLive = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
if (!needSort || !useCache || cachedEvents.length === 0) {
return sortedLive
}
const byId = new Map<string, NEvent>()
for (const e of cachedEvents) {
byId.set(e.id, e)
}
for (const e of sortedLive) {
byId.set(e.id, e)
}
return [...byId.values()].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
}
const deliverProgressive = () => {
/**
* Stream every matching event to the UI immediately. Do **not** use a "grace EOSE" timer: it set `eosedAt`
* to wall-clock time while relays were still returning historical rows, so `evt.created_at > eosedAt` was
* almost always false and later relay results were dropped until the feed looked empty/slow.
* Real initial completion is only when {@link ClientService.subscribe} fires aggregate `oneose` (all relays).
*/
let streamFlushMicrotask = false
const flushStreamingSnapshot = () => {
if (eosedAt) return
const combined = mergeTimelineLiveAndCache()
if (combined.length === 0) return
const newEventCount = combined.length - lastDeliveredCount
const isFirstPaint = lastDeliveredCount === 0
const shouldDeliver =
isFirstPaint
? combined.length >= 1
: newEventCount >= MIN_NEW_EVENTS_AFTER_FIRST || combined.length >= filter.limit * 0.5
if (shouldDeliver) {
lastDeliveredCount = combined.length
onEvents(combined, false)
}
}
// CRITICAL: Only use cache if explicitly enabled (for profile timelines)
// Main feeds (home, notifications) should always fetch fresh from relays
if (useCache && timeline && !Array.isArray(timeline) && timeline.refs.length && needSort) {
const refs = timeline.refs
const prefetchN = Math.min(refs.length, filter.limit, TIMELINE_CACHE_PREFETCH_CAP)
// Spell / catalog feeds: refs already carry created_at — set `since` immediately and open the live REQ
// without awaiting dozens of IndexedDB reads (that delayed first events by seconds).
if (omitDefaultSinceWhenUseCache && refs[0]![1] >= oneDayAgo) {
since = refs[0]![1] + 1
void (async () => {
try {
const loaded = (
await Promise.all(refs.slice(0, prefetchN).map(([id]) => that.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (!loaded.length) return
loaded.sort((a, b) => b.created_at - a.created_at)
const recent = loaded.filter((evt) => evt.created_at >= oneDayAgo)
if (!recent.length) return
cachedEvents = recent
deliverProgressive()
} catch {
// ignore
}
})()
} else if (!omitDefaultSinceWhenUseCache) {
cachedEvents = (
await Promise.all(refs.slice(0, prefetchN).map(([id]) => this.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (cachedEvents.length) {
cachedEvents.sort((a, b) => b.created_at - a.created_at)
const recentCachedEvents = cachedEvents.filter((evt) => evt.created_at >= oneDayAgo)
if (recentCachedEvents.length > 0) {
onEvents([...recentCachedEvents], false)
since = recentCachedEvents[0].created_at + 1
const emit = () => {
streamFlushMicrotask = false
if (eosedAt) return
if (needSort) {
const sorted = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
onEvents(sorted, false)
} else {
cachedEvents = []
onEvents([...events], false)
}
}
if (events.length <= 1) {
streamFlushMicrotask = false
emit()
return
}
if (!streamFlushMicrotask) {
streamFlushMicrotask = true
queueMicrotask(emit)
}
// CRITICAL FIX: Only set since parameter if caching is enabled
// When useCache is false, we want to stream raw from relays without time restrictions
// This allows relay feeds to show all available events, not just recent ones
if (!since && needSort && useCache && !omitDefaultSinceWhenUseCache) {
since = oneDayAgo
}
const handleTimelineEose = (eosed: boolean) => {
if (eosed && eosedAt != null) return
if (!eosed) return
if (eosedAt != null) return
if (eosed && !eosedAt) {
if (firstRelayResultGraceTimer != null) {
clearTimeout(firstRelayResultGraceTimer)
firstRelayResultGraceTimer = null
}
eosedAt = dayjs().unix()
if (progressiveIntervalId) {
clearInterval(progressiveIntervalId)
progressiveIntervalId = null
}
}
// (algo feeds) no need to sort and cache
if (!needSort) {
return onEvents([...events], !!eosedAt)
}
if (!eosed) {
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
// Only include cached events if caching is enabled
return onEvents([...(useCache ? events.concat(cachedEvents).slice(0, filter.limit) : events)], false)
}
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
// Only update timeline cache if caching is enabled
if (useCache) {
const timeline = that.timelines[key]
// no cache yet
if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
that.timelines[key] = {
refs: events.map((evt) => [evt.id, evt.created_at]),
filter,
urls
}
if (!needSort) {
return onEvents([...events], true)
}
// Prevent concurrent requests from duplicating the same event
const firstRefCreatedAt = timeline.refs[0][1]
const newRefs = events
.filter((evt) => evt.created_at > firstRefCreatedAt)
.map((evt) => [evt.id, evt.created_at] as TTimelineRef)
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
if (events.length >= filter.limit) {
// if new refs are more than limit, means old refs are too old, replace them
timeline.refs = newRefs
onEvents([...events], true)
} else {
// merge new refs with old refs
timeline.refs = newRefs.concat(timeline.refs)
onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], true)
}
} else {
// No caching for initial load, but still need to initialize timeline.refs for loadMoreTimeline pagination
const timeline = that.timelines[key]
if (!timeline || Array.isArray(timeline)) {
// Initialize timeline with refs for pagination (even though we don't use cache for initial load)
const tl = that.timelines[key]
if (!tl || Array.isArray(tl)) {
that.timelines[key] = {
refs: events.map((evt) => [evt.id, evt.created_at]),
filter,
urls
}
} else {
// Update refs with new events for pagination tracking
const firstRefCreatedAt = timeline.refs.length > 0 ? timeline.refs[0][1] : dayjs().unix()
const firstRefCreatedAt = tl.refs.length > 0 ? tl.refs[0][1] : dayjs().unix()
const newRefs = events
.filter((evt) => evt.created_at > firstRefCreatedAt)
.map((evt) => [evt.id, evt.created_at] as TTimelineRef)
if (events.length >= filter.limit) {
timeline.refs = newRefs
tl.refs = newRefs
} else {
timeline.refs = newRefs.concat(timeline.refs)
tl.refs = newRefs.concat(tl.refs)
}
}
// Return events directly (no cache concatenation)
onEvents([...events], true)
}
}
const subCloser = this.subscribe(relays, since ? { ...filter, since } : filter, {
const subCloser = this.subscribe(relays, filter, {
startLogin,
onevent: (evt: NEvent) => {
that.addEventToCache(evt)
// not eosed yet, push to events
if (!eosedAt) {
events.push(evt)
if (firstRelayResultGraceTimer == null) {
firstRelayResultGraceTimer = setTimeout(() => {
firstRelayResultGraceTimer = null
handleTimelineEose(true)
}, firstRelayResultGraceMs)
}
// Deliver on every live event before EOSE (plus interval as a safety net)
if (needSort && events.length >= 1) {
if (!initialBatchScheduled) {
initialBatchScheduled = true
if (!progressiveIntervalId) {
progressiveIntervalId = setInterval(deliverProgressive, PROGRESSIVE_INTERVAL_MS)
}
}
deliverProgressive()
}
flushStreamingSnapshot()
return
}
// new event
@ -1469,7 +1334,7 @@ class ClientService extends EventTarget { @@ -1469,7 +1334,7 @@ class ClientService extends EventTarget {
onNew(evt)
}
// Update timeline refs for pagination tracking (even when useCache is false)
// Update timeline refs for pagination tracking
// This is needed for loadMoreTimeline to know what events have been loaded
const timeline = that.timelines[key]
if (!timeline || Array.isArray(timeline)) {
@ -1506,14 +1371,6 @@ class ClientService extends EventTarget { @@ -1506,14 +1371,6 @@ class ClientService extends EventTarget {
return {
timelineKey: key,
closer: () => {
if (firstRelayResultGraceTimer != null) {
clearTimeout(firstRelayResultGraceTimer)
firstRelayResultGraceTimer = null
}
if (progressiveIntervalId) {
clearInterval(progressiveIntervalId)
progressiveIntervalId = null
}
onEvents = () => {}
onNew = () => {}
subCloser.close()
@ -1525,86 +1382,38 @@ class ClientService extends EventTarget { @@ -1525,86 +1382,38 @@ class ClientService extends EventTarget {
const timeline = this.timelines[key]
if (!timeline || Array.isArray(timeline)) return []
const { filter, urls, refs } = timeline
// Try to load from cache if refs exist
let cachedEvents: NEvent[] = []
if (refs && refs.length > 0) {
const startIdx = refs.findIndex(([, createdAt]) => createdAt <= until)
if (startIdx >= 0) {
cachedEvents = (
await Promise.all(
refs.slice(startIdx, startIdx + limit).map(([id]) => this.eventService.fetchEvent(id))
)
).filter((evt): evt is NEvent => !!evt) as NEvent[]
}
if (cachedEvents.length >= limit) {
return cachedEvents
}
}
// CRITICAL FIX: Always query relay for more events, even if we have some cached
// This ensures we continue fetching from relays when scrolling, not just from cache
// Calculate the correct until timestamp based on what we already have
until = cachedEvents.length ? cachedEvents[cachedEvents.length - 1].created_at - 1 : until
limit = limit - cachedEvents.length
// CRITICAL: Ensure we always query the relay, even if limit is small
// This prevents the feed from stopping when we have few cached events
if (limit <= 0) {
limit = 100 // Minimum limit to ensure we get more events from relay
}
const { filter, urls } = timeline
// Query relay for more events with proper until parameter for pagination
let events = await this.query(urls, { ...filter, until, limit })
events.forEach((evt) => {
this.addEventToCache(evt)
})
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, limit)
// Update refs for pagination tracking (even when useCache is false)
// Initialize refs if empty
if (!timeline.refs) {
timeline.refs = []
}
// Prevent duplicate events in refs
const existingRefIds = new Set(timeline.refs.map(([id]) => id))
const newRefs: TTimelineRef[] = []
// Add cached events to refs if not already present
for (const evt of cachedEvents) {
if (!existingRefIds.has(evt.id)) {
newRefs.push([evt.id, evt.created_at])
existingRefIds.add(evt.id)
}
}
// Add new events from relay to refs
for (const evt of events) {
if (!existingRefIds.has(evt.id)) {
newRefs.push([evt.id, evt.created_at])
existingRefIds.add(evt.id)
}
}
// Sort new refs by created_at descending and merge with existing refs
newRefs.sort((a, b) => b[1] - a[1])
// Merge with existing refs, maintaining sorted order
if (timeline.refs.length > 0) {
const lastRefCreatedAt = timeline.refs[timeline.refs.length - 1][1]
// Only add events that are older than the last ref (for pagination)
const olderRefs = newRefs.filter(([, createdAt]) => createdAt < lastRefCreatedAt)
timeline.refs.push(...olderRefs)
// Keep refs sorted
timeline.refs.sort((a, b) => b[1] - a[1])
} else {
// No existing refs, add all new refs
timeline.refs.push(...newRefs)
}
return [...cachedEvents, ...events]
return events
}
/** =========== Event =========== */

52
src/services/note-stats.service.ts

@ -2,7 +2,7 @@ import { ExtendedKind, FAST_READ_RELAY_URLS } from '@/constants' @@ -2,7 +2,7 @@ import { ExtendedKind, FAST_READ_RELAY_URLS } from '@/constants'
import { getReplaceableCoordinateFromEvent, isReplaceableEvent } from '@/lib/event'
import { getZapInfoFromEvent } from '@/lib/event-metadata'
import logger from '@/lib/logger'
import { getEmojiInfosFromEmojiTags, tagNameEquals } from '@/lib/tag'
import { getEmojiInfosFromEmojiTags, getFirstHexEventIdFromETags, tagNameEquals } from '@/lib/tag'
import { normalizeUrl } from '@/lib/url'
import { eventService } from '@/services/client.service'
import { TEmoji } from '@/types'
@ -241,7 +241,18 @@ class NoteStatsService { @@ -241,7 +241,18 @@ class NoteStatsService {
return eventId
}
updateNoteStatsByEvents(events: Event[], originalEventAuthor?: string) {
/**
* @param mergeOpts When the UI just published a single interaction, pass the note id the user acted on
* so stats merge even if `e` tag shape varies (extensions, multiple ancestors).
*/
updateNoteStatsByEvents(
events: Event[],
originalEventAuthor?: string,
mergeOpts?: {
interactionTargetNoteId?: string
replyParentNoteId?: string
}
) {
const updatedEventIdSet = new Set<string>()
// Process events in batches for better performance
@ -249,7 +260,7 @@ class NoteStatsService { @@ -249,7 +260,7 @@ class NoteStatsService {
for (let i = 0; i < events.length; i += batchSize) {
const batch = events.slice(i, i + batchSize)
batch.forEach((evt) => {
const updatedEventId = this.processEvent(evt, originalEventAuthor)
const updatedEventId = this.processEvent(evt, originalEventAuthor, mergeOpts)
if (updatedEventId) {
updatedEventIdSet.add(updatedEventId)
}
@ -261,19 +272,25 @@ class NoteStatsService { @@ -261,19 +272,25 @@ class NoteStatsService {
})
}
private processEvent(evt: Event, originalEventAuthor?: string): string | undefined {
private processEvent(
evt: Event,
originalEventAuthor?: string,
mergeOpts?: { interactionTargetNoteId?: string; replyParentNoteId?: string }
): string | undefined {
let updatedEventId: string | undefined
if (evt.kind === kinds.Reaction) {
updatedEventId = this.addLikeByEvent(evt, originalEventAuthor)
updatedEventId = this.addLikeByEvent(evt, originalEventAuthor, mergeOpts?.interactionTargetNoteId)
} else if (evt.kind === kinds.Repost) {
updatedEventId = this.addRepostByEvent(evt, originalEventAuthor)
updatedEventId = this.addRepostByEvent(evt, originalEventAuthor, mergeOpts?.interactionTargetNoteId)
} else if (evt.kind === kinds.Zap) {
updatedEventId = this.addZapByEvent(evt, originalEventAuthor)
} else if (evt.kind === kinds.ShortTextNote || evt.kind === ExtendedKind.COMMENT || evt.kind === ExtendedKind.VOICE_COMMENT) {
const isQuote = this.isQuoteByEvent(evt)
if (isQuote) {
updatedEventId = this.addQuoteByEvent(evt, originalEventAuthor)
} else if (mergeOpts?.replyParentNoteId) {
updatedEventId = this.addReplyByEvent(evt, originalEventAuthor, mergeOpts.replyParentNoteId)
} else {
updatedEventId = this.addReplyByEvent(evt, originalEventAuthor)
}
@ -284,8 +301,8 @@ class NoteStatsService { @@ -284,8 +301,8 @@ class NoteStatsService {
return updatedEventId
}
private addLikeByEvent(evt: Event, originalEventAuthor?: string) {
const targetEventId = evt.tags.findLast(tagNameEquals('e'))?.[1]
private addLikeByEvent(evt: Event, originalEventAuthor?: string, forcedTargetEventId?: string) {
const targetEventId = forcedTargetEventId ?? getFirstHexEventIdFromETags(evt.tags)
if (!targetEventId) return
const old = this.noteStatsMap.get(targetEventId) || {}
@ -298,7 +315,14 @@ class NoteStatsService { @@ -298,7 +315,14 @@ class NoteStatsService {
}
let emoji: TEmoji | string = evt.content.trim()
if (!emoji) return
if (!emoji) {
const fromTags = getEmojiInfosFromEmojiTags(evt.tags)
if (fromTags.length) {
emoji = fromTags[0]
} else {
emoji = '+'
}
}
if (emoji.startsWith(':') && emoji.endsWith(':')) {
const emojiInfos = getEmojiInfosFromEmojiTags(evt.tags)
@ -331,8 +355,8 @@ class NoteStatsService { @@ -331,8 +355,8 @@ class NoteStatsService {
return eventId
}
private addRepostByEvent(evt: Event, originalEventAuthor?: string) {
const eventId = evt.tags.find(tagNameEquals('e'))?.[1]
private addRepostByEvent(evt: Event, originalEventAuthor?: string, forcedTargetEventId?: string) {
const eventId = forcedTargetEventId ?? getFirstHexEventIdFromETags(evt.tags)
if (!eventId) return
const old = this.noteStatsMap.get(eventId) || {}
@ -371,9 +395,10 @@ class NoteStatsService { @@ -371,9 +395,10 @@ class NoteStatsService {
)
}
private addReplyByEvent(evt: Event, originalEventAuthor?: string) {
let originalEventId: string | undefined
private addReplyByEvent(evt: Event, originalEventAuthor?: string, forcedOriginalEventId?: string) {
let originalEventId: string | undefined = forcedOriginalEventId
if (!originalEventId) {
if (evt.kind === ExtendedKind.COMMENT || evt.kind === ExtendedKind.VOICE_COMMENT) {
const eTag = evt.tags.find(tagNameEquals('e')) ?? evt.tags.find(tagNameEquals('E'))
originalEventId = eTag?.[1]
@ -402,6 +427,7 @@ class NoteStatsService { @@ -402,6 +427,7 @@ class NoteStatsService {
}
}
}
}
if (!originalEventId) return

23
src/services/spell.service.ts

@ -2,7 +2,8 @@ @@ -2,7 +2,8 @@
* NIP-A7 Spells: parse and execute kind 777 events as portable relay query filters.
*/
import { ExtendedKind, FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants'
import { ExtendedKind, FAST_WRITE_RELAY_URLS } from '@/constants'
import { getRelayUrlsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import { tagNameEquals } from '@/lib/tag'
import logger from '@/lib/logger'
import type { TRelayList } from '@/types'
@ -80,19 +81,15 @@ export function buildSpellCatalogAuthors(pubkey: string, contacts: string[]): st @@ -80,19 +81,15 @@ export function buildSpellCatalogAuthors(pubkey: string, contacts: string[]): st
}
/**
* Relays to fetch the user's kind-777 spells: **read** (inboxes), **write** (outboxes), and
* {@link FAST_READ_RELAY_URLS}.
*
* Pass `relayList` from {@link ClientService.fetchRelayList} / NostrProvider it already merges
* kind **10002** and kind **10432** (CACHE_RELAYS / local relays in the app). Do not infer local
* relays from hostnames.
* Relays to fetch the user's kind-777 spells: favorites + default fast-read relays + user read/inboxes
* (same extension as other nonfavorites-feed reads; not the favorites-only home list).
*/
export function getRelaysForSpellCatalogSync(relayList: TRelayList | null | undefined): string[] {
return dedupeRelayUrls([
...(relayList?.read ?? []),
...(relayList?.write ?? []),
...FAST_READ_RELAY_URLS
])
export function getRelaysForSpellCatalogSync(
favoriteRelays: string[],
blockedRelays: string[],
userInboxReadRelays: string[]
): string[] {
return getRelayUrlsWithFavoritesFastReadAndInbox(favoriteRelays, blockedRelays, userInboxReadRelays)
}
function dedupeRelayUrls(urls: string[]): string[] {

Loading…
Cancel
Save