Browse Source

speed up feeds

imwald
Silberengel 1 month ago
parent
commit
2c12b822fb
  1. 38
      src/components/CacheRelaysSetting/index.tsx
  2. 4
      src/components/Explore/ExploreRelayReviews.tsx
  3. 8
      src/components/NoteList/index.tsx
  4. 10
      src/constants.ts
  5. 42
      src/pages/primary/NoteListPage/FollowingFeed.tsx
  6. 49
      src/pages/primary/NoteListPage/RelaysFeed.tsx
  7. 20
      src/pages/secondary/ProfileEditorPage/index.tsx
  8. 270
      src/providers/NostrProvider/index.tsx
  9. 5
      src/providers/nostr-context.tsx
  10. 139
      src/services/client.service.ts
  11. 24
      src/services/local-storage.service.ts

38
src/components/CacheRelaysSetting/index.tsx

@ -41,12 +41,21 @@ import { Drawer, DrawerContent, DrawerHeader, DrawerTitle, DrawerDescription } f @@ -41,12 +41,21 @@ import { Drawer, DrawerContent, DrawerHeader, DrawerTitle, DrawerDescription } f
import { HoverCard, HoverCardContent, HoverCardTrigger } from '@/components/ui/hover-card'
import { useScreenSize } from '@/providers/ScreenSizeProvider'
import { toast } from 'sonner'
import { syncUserDeletionTombstones } from '@/lib/sync-user-deletions'
import { Event } from 'nostr-tools'
export default function CacheRelaysSetting() {
const { t } = useTranslation()
const { isSmallScreen } = useScreenSize()
const { pubkey, cacheRelayListEvent, checkLogin, publish, updateCacheRelayListEvent } = useNostr()
const {
pubkey,
cacheRelayListEvent,
checkLogin,
publish,
updateCacheRelayListEvent,
relayList,
requestAccountNetworkHydrate
} = useNostr()
const [relays, setRelays] = useState<TMailboxRelay[]>([])
const [hasChange, setHasChange] = useState(false)
const [pushing, setPushing] = useState(false)
@ -62,6 +71,7 @@ export default function CacheRelaysSetting() { @@ -62,6 +71,7 @@ export default function CacheRelaysSetting() {
const [showConsoleLogs, setShowConsoleLogs] = useState(false)
const [consoleLogSearch, setConsoleLogSearch] = useState('')
const [consoleLogLevel, setConsoleLogLevel] = useState<'errors-warnings' | 'all'>('all')
const [cacheRefreshBusy, setCacheRefreshBusy] = useState(false)
const consoleLogRef = useRef<Array<{ type: string; message: string; formattedParts?: Array<{ text: string; style?: string }>; timestamp: number }>>([])
const sensors = useSensors(
@ -282,16 +292,19 @@ export default function CacheRelaysSetting() { @@ -282,16 +292,19 @@ export default function CacheRelaysSetting() {
const handleRefreshCache = async () => {
try {
// Force database upgrade to update structure
setCacheRefreshBusy(true)
await indexedDb.forceDatabaseUpgrade()
// Reload cache info
await loadCacheInfo()
if (pubkey) {
await requestAccountNetworkHydrate()
await syncUserDeletionTombstones(pubkey, relayList)
}
toast.success(t('Cache refreshed successfully'))
} catch (error) {
logger.error('Failed to refresh cache', { error })
toast.error(t('Failed to refresh cache'))
} finally {
setCacheRefreshBusy(false)
}
}
@ -848,14 +861,25 @@ export default function CacheRelaysSetting() { @@ -848,14 +861,25 @@ export default function CacheRelaysSetting() {
<h3 className="text-sm font-semibold">{t('In-Browser Cache')}</h3>
<div className="text-xs text-muted-foreground space-y-1">
<div>{t('Clear cached data stored in your browser, including IndexedDB events, localStorage settings, and service worker caches.')}</div>
<div>
{t('refreshCacheButtonExplainer', {
defaultValue:
'Refresh Cache runs an IndexedDB upgrade check, re-fetches your relay lists and profile-related events from the network (same work as the automatic startup sync), syncs kind-5 deletions into tombstones and removes deleted items from the local cache, then refreshes the store counts below.'
})}
</div>
</div>
<div className="flex min-w-0 flex-wrap gap-2">
<Button variant="outline" className="shrink-0" onClick={handleClearCache}>
<Trash2 className="mr-2 h-4 w-4" />
{t('Clear Cache')}
</Button>
<Button variant="outline" className="shrink-0" onClick={handleRefreshCache}>
<RefreshCw className="mr-2 h-4 w-4" />
<Button
variant="outline"
className="shrink-0"
onClick={handleRefreshCache}
disabled={cacheRefreshBusy}
>
<RefreshCw className={`mr-2 h-4 w-4 ${cacheRefreshBusy ? 'animate-spin' : ''}`} />
{t('Refresh Cache')}
</Button>
<Button variant="outline" className="shrink-0" onClick={handleBrowseCache}>

4
src/components/Explore/ExploreRelayReviews.tsx

@ -1,6 +1,6 @@ @@ -1,6 +1,6 @@
import RelayReviewCard from '@/components/RelayInfo/RelayReviewCard'
import { Skeleton } from '@/components/ui/skeleton'
import { ExtendedKind, FIRST_RELAY_RESULT_GRACE_MS } from '@/constants'
import { ExtendedKind } from '@/constants'
import { getReplaceableCoordinateFromEvent, isReplaceableEvent } from '@/lib/event'
import { getRelayUrlFromRelayReviewEvent } from '@/lib/event-metadata'
import { getRelayUrlsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
@ -81,7 +81,7 @@ export default function ExploreRelayReviews() { @@ -81,7 +81,7 @@ export default function ExploreRelayReviews() {
setEvents((prev) => dedupeRelayReviewsNewestFirst([...prev, e]))
}
},
firstRelayResultGraceMs: FIRST_RELAY_RESULT_GRACE_MS,
firstRelayResultGraceMs: false,
globalTimeout: 12_000,
eoseTimeout: EXPLORE_REVIEWS_EOSE_TAIL_MS,
cache: true

8
src/components/NoteList/index.tsx

@ -638,9 +638,11 @@ const NoteList = forwardRef( @@ -638,9 +638,11 @@ const NoteList = forwardRef(
}
const totalRelayUrls = mappedSubRequests.reduce((n, r) => n + r.urls.length, 0)
// Wide REQ batches open many sockets; a short race rejects and drops the subscription before first paint.
const subscribeSetupRaceMs =
totalRelayUrls > 24 ? 30_000 : totalRelayUrls > 8 ? 15_000 : 5000
// Many relays are opened under MAX_CONCURRENT_RELAY_CONNECTIONS; a short race aborts the whole feed.
const subscribeSetupRaceMs = Math.min(
300_000,
Math.max(90_000, 25_000 + totalRelayUrls * 2_500)
)
let closer: (() => void) | undefined
let timelineKey: string | undefined

10
src/constants.ts

@ -34,7 +34,7 @@ export const MAX_PUBLISH_RELAYS = MAX_CONCURRENT_RELAY_CONNECTIONS @@ -34,7 +34,7 @@ export const MAX_PUBLISH_RELAYS = MAX_CONCURRENT_RELAY_CONNECTIONS
export const MAX_REQ_RELAY_URLS = MAX_CONCURRENT_RELAY_CONNECTIONS
/** Multi-relay queries and timeline initial REQ: after the first event, wait this long then close (query) or finalize EOSE (live feed) while keeping the subscription open for new events. */
export const FIRST_RELAY_RESULT_GRACE_MS = 2000
export const FIRST_RELAY_RESULT_GRACE_MS = 5000
/** Legacy name: was used to cap spell NoteList skeleton time; loading now ends on EOSE / first events / safety timeouts. Kept for forks. */
export const SPELL_FEED_LOADING_MAX_MS = 1000
@ -48,6 +48,12 @@ export const SPELL_FEED_FIRST_RELAY_GRACE_MS = SPELL_FEED_LOADING_MAX_MS @@ -48,6 +48,12 @@ export const SPELL_FEED_FIRST_RELAY_GRACE_MS = SPELL_FEED_LOADING_MAX_MS
*/
export const FEED_FIRST_RELAY_RESULT_GRACE_MIN_LIMIT = 200
/**
* Minimum time between full account network hydrates (NostrProvider: relay + replaceable fetch from relays).
* IndexedDB cache still applies on every load; this only skips redundant network merges after a recent run.
*/
export const ACCOUNT_SESSION_NETWORK_HYDRATE_MIN_INTERVAL_MS = 24 * 60 * 60 * 1000
/**
* Batched kind-0 queries (ReplaceableEventService) over many relays (inbox, favorites, cache, defaults).
* Too low causes empty profiles and NIP-05 gaps when relays are slow or many URLs are queried.
@ -86,6 +92,8 @@ export const StorageKey = { @@ -86,6 +92,8 @@ export const StorageKey = {
QUICK_ZAP: 'quickZap',
ZAP_REPLY_THRESHOLD: 'zapReplyThreshold',
ACCOUNT_FEED_INFO_MAP: 'accountFeedInfoMap',
/** Per-pubkey ms timestamps: last full network hydrate (see ACCOUNT_SESSION_NETWORK_HYDRATE_MIN_INTERVAL_MS). */
ACCOUNT_NETWORK_HYDRATE_AT_MAP: 'accountNetworkHydrateAtMap',
AUTOPLAY: 'autoplay',
HIDE_UNTRUSTED_INTERACTIONS: 'hideUntrustedInteractions',
HIDE_UNTRUSTED_NOTIFICATIONS: 'hideUntrustedNotifications',

42
src/pages/primary/NoteListPage/FollowingFeed.tsx

@ -1,13 +1,14 @@ @@ -1,13 +1,14 @@
import NormalFeed from '@/components/NormalFeed'
import type { TNoteListRef } from '@/components/NoteList'
import { augmentSubRequestsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import { normalizeUrl } from '@/lib/url'
import { useFeed } from '@/providers/FeedProvider'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { useNostr } from '@/providers/NostrProvider'
import client from '@/services/client.service'
import { TFeedSubRequest } from '@/types'
import type { ReactNode } from 'react'
import { forwardRef, useEffect, useState } from 'react'
import { forwardRef, useEffect, useMemo, useState } from 'react'
const FollowingFeed = forwardRef<
TNoteListRef,
@ -21,6 +22,43 @@ const FollowingFeed = forwardRef< @@ -21,6 +22,43 @@ const FollowingFeed = forwardRef<
const { feedInfo } = useFeed()
const [subRequests, setSubRequests] = useState<TFeedSubRequest[]>([])
const favoriteRelaysKey = useMemo(
() =>
[...favoriteRelays]
.map((u) => normalizeUrl(u) || u)
.filter(Boolean)
.sort()
.join('\0'),
[favoriteRelays]
)
const blockedRelaysKey = useMemo(
() =>
[...blockedRelays]
.map((u) => normalizeUrl(u) || u)
.filter(Boolean)
.sort()
.join('\0'),
[blockedRelays]
)
const relayReadKey = useMemo(
() =>
[...(relayList?.read ?? [])]
.map((u) => normalizeUrl(u) || u)
.filter(Boolean)
.sort()
.join('\0'),
[relayList?.read]
)
const relayWriteKey = useMemo(
() =>
[...(relayList?.write ?? [])]
.map((u) => normalizeUrl(u) || u)
.filter(Boolean)
.sort()
.join('\0'),
[relayList?.write]
)
useEffect(() => {
async function init() {
if (feedInfo.feedType !== 'following' || !pubkey) {
@ -42,7 +80,7 @@ const FollowingFeed = forwardRef< @@ -42,7 +80,7 @@ const FollowingFeed = forwardRef<
}
void init()
}, [feedInfo.feedType, pubkey, favoriteRelays, blockedRelays, relayList])
}, [feedInfo.feedType, pubkey, favoriteRelaysKey, blockedRelaysKey, relayReadKey, relayWriteKey])
return (
<NormalFeed

49
src/pages/primary/NoteListPage/RelaysFeed.tsx

@ -1,11 +1,12 @@ @@ -1,11 +1,12 @@
import NormalFeed from '@/components/NormalFeed'
import type { TNoteListRef } from '@/components/NoteList'
import { checkAlgoRelay } from '@/lib/relay'
import { normalizeUrl } from '@/lib/url'
import { useFeed } from '@/providers/FeedProvider'
import { useKindFilter } from '@/providers/KindFilterProvider'
import relayInfoService from '@/services/relay-info.service'
import { kinds } from 'nostr-tools'
import React, { forwardRef, useEffect, useMemo, useState, useRef } from 'react'
import React, { forwardRef, useEffect, useMemo, useState } from 'react'
const RelaysFeed = forwardRef<
TNoteListRef,
@ -19,48 +20,46 @@ const RelaysFeed = forwardRef< @@ -19,48 +20,46 @@ const RelaysFeed = forwardRef<
const { feedInfo, relayUrls } = useFeed()
const { showKinds } = useKindFilter()
const [areAlgoRelays, setAreAlgoRelays] = useState(false)
const relayInfoFetchedRef = useRef(false)
// Fetch relay info in background (non-blocking) - don't wait for it to render
const relayUrlsKey = useMemo(
() =>
[...relayUrls]
.map((u) => normalizeUrl(u) || u)
.filter(Boolean)
.sort()
.join('|'),
[relayUrls]
)
useEffect(() => {
// Only fetch once per relayUrls change
if (relayInfoFetchedRef.current || relayUrls.length === 0) {
return
}
if (relayUrls.length === 0) return
let cancelled = false
const init = async () => {
relayInfoFetchedRef.current = true
// Add aggressive timeout to prevent hanging (reduced from 5s to 2s)
const timeoutPromise = new Promise<never>((_, reject) => {
setTimeout(() => {
reject(new Error('getRelayInfos timeout after 2 seconds'))
}, 2000)
reject(new Error('getRelayInfos timeout after 8 seconds'))
}, 8000)
})
try {
const relayInfos = await Promise.race([
relayInfoService.getRelayInfos(relayUrls),
timeoutPromise
])
if (cancelled) return
const areAlgo = relayInfos.every((relayInfo) => checkAlgoRelay(relayInfo))
setAreAlgoRelays(areAlgo)
} catch (_error) {
// Default to false - feed will work without this info
setAreAlgoRelays(false)
if (!cancelled) setAreAlgoRelays(false)
}
}
// Don't await - let it run in background
init().catch(() => {
setAreAlgoRelays(false)
})
}, [relayUrls])
// Reset fetch flag when relayUrls change
useEffect(() => {
relayInfoFetchedRef.current = false
}, [relayUrls])
void init()
return () => {
cancelled = true
}
}, [relayUrlsKey, relayUrls.length])
const defaultKinds =
kindsOverride && kindsOverride.length > 0

20
src/pages/secondary/ProfileEditorPage/index.tsx

@ -22,6 +22,7 @@ import SecondaryPageLayout from '@/layouts/SecondaryPageLayout' @@ -22,6 +22,7 @@ import SecondaryPageLayout from '@/layouts/SecondaryPageLayout'
import { createPaymentInfoDraftEvent, createProfileDraftEvent } from '@/lib/draft-event'
import { generateImageByPubkey } from '@/lib/pubkey'
import { isEmail } from '@/lib/utils'
import { syncUserDeletionTombstones } from '@/lib/sync-user-deletions'
import { useSecondaryPage } from '@/PageManager'
import { useNostr } from '@/providers/NostrProvider'
import client from '@/services/client.service'
@ -34,7 +35,15 @@ import { toast } from 'sonner' @@ -34,7 +35,15 @@ import { toast } from 'sonner'
const ProfileEditorPage = forwardRef(({ index }: { index?: number }, ref) => {
const { t } = useTranslation()
const { pop } = useSecondaryPage()
const { account, profile, profileEvent, publish, updateProfileEvent } = useNostr()
const {
account,
profile,
profileEvent,
publish,
updateProfileEvent,
relayList,
requestAccountNetworkHydrate
} = useNostr()
const [banner, setBanner] = useState<string>('')
const [avatar, setAvatar] = useState<string>('')
const [username, setUsername] = useState<string>('')
@ -239,6 +248,8 @@ const ProfileEditorPage = forwardRef(({ index }: { index?: number }, ref) => { @@ -239,6 +248,8 @@ const ProfileEditorPage = forwardRef(({ index }: { index?: number }, ref) => {
if (!account?.pubkey) return
setRefreshingCache(true)
try {
await requestAccountNetworkHydrate()
await syncUserDeletionTombstones(account.pubkey, relayList)
await client.forceRefreshProfileAndPaymentInfoCache(account.pubkey)
const [profileEvt, paymentEvt] = await Promise.all([
client.fetchProfileEvent(account.pubkey),
@ -252,7 +263,7 @@ const ProfileEditorPage = forwardRef(({ index }: { index?: number }, ref) => { @@ -252,7 +263,7 @@ const ProfileEditorPage = forwardRef(({ index }: { index?: number }, ref) => {
} finally {
setRefreshingCache(false)
}
}, [account?.pubkey, updateProfileEvent, t])
}, [account?.pubkey, relayList, requestAccountNetworkHydrate, updateProfileEvent, t])
if (!account || !profile) return null
@ -298,7 +309,10 @@ const ProfileEditorPage = forwardRef(({ index }: { index?: number }, ref) => { @@ -298,7 +309,10 @@ const ProfileEditorPage = forwardRef(({ index }: { index?: number }, ref) => {
onClick={forceRefreshProfileAndPaymentCache}
disabled={refreshingCache}
className="gap-1.5"
title={t('Force-refresh profile and payment info from relays')}
title={t('profileEditorRefreshCacheHint', {
defaultValue:
'Full account sync from relays (like Settings → Cache), deletion tombstones, then profile and payment info.'
})}
>
{refreshingCache ? <Skeleton className="size-3.5 shrink-0 rounded-sm" aria-hidden /> : <RefreshCw className="h-3.5 w-3.5" />}
{t('Refresh cache')}

270
src/providers/NostrProvider/index.tsx

@ -1,5 +1,6 @@ @@ -1,5 +1,6 @@
import LoginDialog from '@/components/LoginDialog'
import {
ACCOUNT_SESSION_NETWORK_HYDRATE_MIN_INTERVAL_MS,
DEFAULT_FAVORITE_RELAYS,
FAST_READ_RELAY_URLS,
ExtendedKind,
@ -43,7 +44,7 @@ import { Event, kinds, VerifiedEvent, validateEvent } from 'nostr-tools' @@ -43,7 +44,7 @@ import { Event, kinds, VerifiedEvent, validateEvent } from 'nostr-tools'
import * as nip19 from 'nostr-tools/nip19'
import * as nip49 from 'nostr-tools/nip49'
import { NostrContext } from '@/providers/nostr-context'
import { useEffect, useRef, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { toast } from 'sonner'
import { BunkerSigner } from './bunker.signer'
@ -145,6 +146,10 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -145,6 +146,10 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
const [isAccountSessionHydrating, setIsAccountSessionHydrating] = useState(false)
/** Bumps on each account hydration run so stale async completions cannot clear {@link isAccountSessionHydrating}. */
const accountHydrationGenerationRef = useRef(0)
/** When true, next hydrate run performs a full network merge without clearing UI state from IndexedDB first. */
const forceNextAccountNetworkHydrateRef = useRef(false)
const manualNetworkHydrateResolveRef = useRef<(() => void) | null>(null)
const [accountNetworkHydrateBump, setAccountNetworkHydrateBump] = useState(0)
useEffect(() => {
const init = async () => {
@ -191,21 +196,39 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -191,21 +196,39 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
useEffect(() => {
let hydrationGenForThisRun = -1
const init = async () => {
setRelayList(null)
setProfile(null)
setProfileEvent(null)
setNsec(null)
setFavoriteRelaysEvent(null)
setFollowListEvent(null)
setMuteListEvent(null)
setBookmarkListEvent(null)
setRssFeedListEvent(null)
if (!account) {
accountHydrationGenerationRef.current += 1
setIsAccountSessionHydrating(false)
forceNextAccountNetworkHydrateRef.current = false
setRelayList(null)
setProfile(null)
setProfileEvent(null)
setNsec(null)
setFavoriteRelaysEvent(null)
setFollowListEvent(null)
setMuteListEvent(null)
setBookmarkListEvent(null)
setRssFeedListEvent(null)
return undefined
}
const userForcedAccountNetworkHydrate = forceNextAccountNetworkHydrateRef.current
if (userForcedAccountNetworkHydrate) {
forceNextAccountNetworkHydrateRef.current = false
}
if (!userForcedAccountNetworkHydrate) {
setRelayList(null)
setProfile(null)
setProfileEvent(null)
setNsec(null)
setFavoriteRelaysEvent(null)
setFollowListEvent(null)
setMuteListEvent(null)
setBookmarkListEvent(null)
setRssFeedListEvent(null)
}
hydrationGenForThisRun = accountHydrationGenerationRef.current += 1
setIsAccountSessionHydrating(true)
logger.info('[NostrProvider] Account session hydrate: loading cache and relays…', {
@ -226,6 +249,8 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -226,6 +249,8 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
setNcryptsec(null)
}
const INTEREST_LIST_KIND = 10015
const [
storedRelayListEvent,
storedCacheRelayListEvent,
@ -236,7 +261,9 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -236,7 +261,9 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
storedFavoriteRelaysEvent,
storedBlockedRelaysEvent,
storedUserEmojiListEvent,
storedRssFeedListEvent
storedRssFeedListEvent,
storedInterestListEvent,
storedBlossomServerListEvent
] = await Promise.all([
indexedDb.getReplaceableEvent(account.pubkey, kinds.RelayList),
indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.CACHE_RELAYS),
@ -247,7 +274,9 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -247,7 +274,9 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.FAVORITE_RELAYS),
indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.BLOCKED_RELAYS),
indexedDb.getReplaceableEvent(account.pubkey, kinds.UserEmojiList),
indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.RSS_FEED_LIST)
indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.RSS_FEED_LIST),
indexedDb.getReplaceableEvent(account.pubkey, INTEREST_LIST_KIND),
indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.BLOSSOM_SERVER_LIST)
])
// Extract blocked relays from event
@ -261,12 +290,14 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -261,12 +290,14 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
}
}
})
setBlockedRelaysEvent(storedBlockedRelaysEvent)
if (!userForcedAccountNetworkHydrate) {
setBlockedRelaysEvent(storedBlockedRelaysEvent)
}
}
// Set initial relay list from stored events (will be updated with merged list later)
// Merge cache relays even at initial load so cache relays are available immediately
if (storedRelayListEvent || storedCacheRelayListEvent) {
if (!userForcedAccountNetworkHydrate && (storedRelayListEvent || storedCacheRelayListEvent)) {
const baseRelayList = storedRelayListEvent
? getRelayListFromEvent(storedRelayListEvent, blockedRelays)
: { write: [], read: [], originalRelays: [] }
@ -300,82 +331,105 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -300,82 +331,105 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
setRelayList(baseRelayList)
}
}
if (storedProfileEvent) {
setProfileEvent(storedProfileEvent)
setProfile(getProfileFromEvent(storedProfileEvent))
}
if (storedFollowListEvent) {
setFollowListEvent(storedFollowListEvent)
}
if (storedMuteListEvent) {
setMuteListEvent(storedMuteListEvent)
}
if (storedBookmarkListEvent) {
setBookmarkListEvent(storedBookmarkListEvent)
}
if (storedFavoriteRelaysEvent) {
setFavoriteRelaysEvent(storedFavoriteRelaysEvent)
}
if (storedUserEmojiListEvent) {
setUserEmojiListEvent(storedUserEmojiListEvent)
}
if (storedRssFeedListEvent) {
setRssFeedListEvent(storedRssFeedListEvent)
logger.debug('[NostrProvider] Loaded RSS feed list event from cache', {
eventId: storedRssFeedListEvent.id,
created_at: storedRssFeedListEvent.created_at
})
if (!userForcedAccountNetworkHydrate) {
if (storedProfileEvent) {
setProfileEvent(storedProfileEvent)
setProfile(getProfileFromEvent(storedProfileEvent))
}
if (storedFollowListEvent) {
setFollowListEvent(storedFollowListEvent)
}
if (storedMuteListEvent) {
setMuteListEvent(storedMuteListEvent)
}
if (storedBookmarkListEvent) {
setBookmarkListEvent(storedBookmarkListEvent)
}
if (storedFavoriteRelaysEvent) {
setFavoriteRelaysEvent(storedFavoriteRelaysEvent)
}
if (storedUserEmojiListEvent) {
setUserEmojiListEvent(storedUserEmojiListEvent)
}
if (storedRssFeedListEvent) {
setRssFeedListEvent(storedRssFeedListEvent)
logger.debug('[NostrProvider] Loaded RSS feed list event from cache', {
eventId: storedRssFeedListEvent.id,
created_at: storedRssFeedListEvent.created_at
})
}
if (storedInterestListEvent) {
setInterestListEvent(storedInterestListEvent)
}
if (storedBlossomServerListEvent) {
void client.updateBlossomServerListEventCache(storedBlossomServerListEvent)
}
}
// Fetch RSS feed list from relays if cache is missing or stale (older than 1 hour)
const rssFeedListStale = !storedRssFeedListEvent ||
(dayjs().unix() - storedRssFeedListEvent.created_at > 3600) // 1 hour
if (rssFeedListStale) {
logger.debug('[NostrProvider] RSS feed list cache is missing or stale, fetching from relays', {
hasCache: !!storedRssFeedListEvent,
cacheAge: storedRssFeedListEvent ? dayjs().unix() - storedRssFeedListEvent.created_at : 'N/A'
})
// Fetch in background - don't block initialization
queryService.fetchEvents(FAST_WRITE_RELAY_URLS.concat(PROFILE_RELAY_URLS), {
kinds: [ExtendedKind.RSS_FEED_LIST],
authors: [account.pubkey],
limit: 1
}).then(events => {
const latestEvent = getLatestEvent(events)
if (latestEvent) {
// Only update if the fetched event is newer than cached
if (!storedRssFeedListEvent || latestEvent.created_at > storedRssFeedListEvent.created_at) {
logger.debug('[NostrProvider] Found newer RSS feed list event from relays', {
eventId: latestEvent.id,
created_at: latestEvent.created_at,
wasCached: !!storedRssFeedListEvent
})
indexedDb.putReplaceableEvent(latestEvent).then(() => {
setRssFeedListEvent(latestEvent)
logger.debug('[NostrProvider] Updated RSS feed list event in cache and state')
}).catch(err => {
logger.error('[NostrProvider] Failed to cache RSS feed list event', { error: err })
})
} else {
logger.debug('[NostrProvider] Cached RSS feed list event is up to date', {
cachedCreatedAt: storedRssFeedListEvent.created_at,
fetchedCreatedAt: latestEvent.created_at
})
}
} else if (!storedRssFeedListEvent) {
logger.debug('[NostrProvider] No RSS feed list event found on relays (user may not have created one yet)')
}
}).catch(err => {
logger.error('[NostrProvider] Failed to fetch RSS feed list from relays', { error: err })
// Don't clear cache on fetch error - use cached value
})
} else {
logger.debug('[NostrProvider] RSS feed list cache is fresh, using cached value')
}
const lastNetworkHydrateAt = storage.getAccountNetworkHydrateAt(account.pubkey)
const hasLocalRelayAndProfile = !!storedRelayListEvent && !!storedProfileEvent
const skipNetworkHydrate =
!userForcedAccountNetworkHydrate &&
hasLocalRelayAndProfile &&
typeof lastNetworkHydrateAt === 'number' &&
Date.now() - lastNetworkHydrateAt < ACCOUNT_SESSION_NETWORK_HYDRATE_MIN_INTERVAL_MS
if (!skipNetworkHydrate) {
// Fetch RSS feed list from relays if cache is missing or stale (older than 1 hour)
const rssFeedListStale =
!storedRssFeedListEvent ||
dayjs().unix() - storedRssFeedListEvent.created_at > 3600 // 1 hour
if (rssFeedListStale) {
logger.debug('[NostrProvider] RSS feed list cache is missing or stale, fetching from relays', {
hasCache: !!storedRssFeedListEvent,
cacheAge: storedRssFeedListEvent ? dayjs().unix() - storedRssFeedListEvent.created_at : 'N/A'
})
queryService
.fetchEvents(FAST_WRITE_RELAY_URLS.concat(PROFILE_RELAY_URLS), {
kinds: [ExtendedKind.RSS_FEED_LIST],
authors: [account.pubkey],
limit: 1
})
.then((events) => {
const latestEvent = getLatestEvent(events)
if (latestEvent) {
if (!storedRssFeedListEvent || latestEvent.created_at > storedRssFeedListEvent.created_at) {
logger.debug('[NostrProvider] Found newer RSS feed list event from relays', {
eventId: latestEvent.id,
created_at: latestEvent.created_at,
wasCached: !!storedRssFeedListEvent
})
indexedDb
.putReplaceableEvent(latestEvent)
.then(() => {
setRssFeedListEvent(latestEvent)
logger.debug('[NostrProvider] Updated RSS feed list event in cache and state')
})
.catch((err) => {
logger.error('[NostrProvider] Failed to cache RSS feed list event', { error: err })
})
} else {
logger.debug('[NostrProvider] Cached RSS feed list event is up to date', {
cachedCreatedAt: storedRssFeedListEvent.created_at,
fetchedCreatedAt: latestEvent.created_at
})
}
} else if (!storedRssFeedListEvent) {
logger.debug(
'[NostrProvider] No RSS feed list event found on relays (user may not have created one yet)'
)
}
})
.catch((err) => {
logger.error('[NostrProvider] Failed to fetch RSS feed list from relays', { error: err })
})
} else {
logger.debug('[NostrProvider] RSS feed list cache is fresh, using cached value')
}
const [relayListEvents, cacheRelayListEvents] = await Promise.all([
const [relayListEvents, cacheRelayListEvents] = await Promise.all([
queryService.fetchEvents(FAST_READ_RELAY_URLS, {
kinds: [kinds.RelayList],
authors: [account.pubkey]
@ -414,7 +468,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -414,7 +468,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
kinds.Contacts,
kinds.Mutelist,
kinds.BookmarkList,
10015, // Interest list
INTEREST_LIST_KIND,
ExtendedKind.FAVORITE_RELAYS,
ExtendedKind.BLOCKED_RELAYS,
ExtendedKind.BLOSSOM_SERVER_LIST,
@ -428,7 +482,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -428,7 +482,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
const followListEvent = sortedEvents.find((e) => e.kind === kinds.Contacts)
const muteListEvent = sortedEvents.find((e) => e.kind === kinds.Mutelist)
const bookmarkListEvent = sortedEvents.find((e) => e.kind === kinds.BookmarkList)
const interestListEvent = sortedEvents.find((e) => e.kind === 10015)
const interestListEvent = sortedEvents.find((e) => e.kind === INTEREST_LIST_KIND)
const favoriteRelaysEvent = sortedEvents.find((e) => e.kind === ExtendedKind.FAVORITE_RELAYS)
const blockedRelaysEvent = sortedEvents.find((e) => e.kind === ExtendedKind.BLOCKED_RELAYS)
const blossomServerListEvent = sortedEvents.find(
@ -513,13 +567,29 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -513,13 +567,29 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
}
}
void client.runSessionPrewarm({ pubkey: account.pubkey, signal: controller.signal })
logger.info('[NostrProvider] Account session hydrate: core relay/profile merge finished; client prewarm started (parallel)', {
pubkeySlice: account.pubkey.slice(0, 12)
})
storage.setAccountNetworkHydrateAt(account.pubkey, Date.now())
void client.runSessionPrewarm({ pubkey: account.pubkey, signal: controller.signal })
logger.info('[NostrProvider] Account session hydrate: core relay/profile merge finished; client prewarm started (parallel)', {
pubkeySlice: account.pubkey.slice(0, 12)
})
} else {
logger.info('[NostrProvider] Skipped network hydrate (within min interval); IndexedDB cache only', {
pubkeySlice: account.pubkey.slice(0, 12),
lastNetworkHydrateAt,
ageMs: Date.now() - (lastNetworkHydrateAt ?? 0)
})
if (storedRelayListEvent) {
client.updateRelayListCache(storedRelayListEvent)
}
}
return controller
}
const promise = init()
void promise.finally(() => {
const r = manualNetworkHydrateResolveRef.current
manualNetworkHydrateResolveRef.current = null
r?.()
})
const finishHydration = () => {
if (
hydrationGenForThisRun >= 0 &&
@ -539,7 +609,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -539,7 +609,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
})
.catch(() => {})
}
}, [account])
}, [account, accountNetworkHydrateBump])
useEffect(() => {
if (!account) return
@ -1133,6 +1203,15 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -1133,6 +1203,15 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
setRssFeedListEvent(newRssFeedListEvent)
}
const requestAccountNetworkHydrate = useCallback(() => {
if (!account) return Promise.resolve()
forceNextAccountNetworkHydrateRef.current = true
return new Promise<void>((resolve) => {
manualNetworkHydrateResolveRef.current = resolve
setAccountNetworkHydrateBump((n) => n + 1)
})
}, [account])
return (
<NostrContext.Provider
value={{
@ -1180,7 +1259,8 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -1180,7 +1259,8 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
updateInterestListEvent,
updateFavoriteRelaysEvent,
updateBlockedRelaysEvent,
updateRssFeedListEvent
updateRssFeedListEvent,
requestAccountNetworkHydrate
}}
>
{children}

5
src/providers/nostr-context.tsx

@ -59,6 +59,11 @@ export type TNostrContext = { @@ -59,6 +59,11 @@ export type TNostrContext = {
updateFavoriteRelaysEvent: (favoriteRelaysEvent: Event) => Promise<void>
updateBlockedRelaysEvent: (blockedRelaysEvent: Event) => Promise<void>
updateRssFeedListEvent: (rssFeedListEvent: Event) => Promise<void>
/**
* Re-run the full account network hydrate (relay lists + replaceable merge + prewarm), bypassing the
* 24h throttle. Resolves when the hydrate pass finishes. No-op when logged out.
*/
requestAccountNetworkHydrate: () => Promise<void>
}
export const NostrContext = createContext<TNostrContext | undefined>(undefined)

139
src/services/client.service.ts

@ -20,7 +20,7 @@ function filterForRelay(f: Filter, relaySupportsSearch: boolean): Filter { @@ -20,7 +20,7 @@ function filterForRelay(f: Filter, relaySupportsSearch: boolean): Filter {
import { shouldDropEventOnIngest } from '@/lib/event-ingest-filter'
import { getProfileFromEvent, getRelayListFromEvent } from '@/lib/event-metadata'
import logger from '@/lib/logger'
import { dispatchTombstonesUpdated } from '@/lib/tombstone-events'
import { buildDeletionRelayUrls, dispatchTombstonesUpdated } from '@/lib/tombstone-events'
import { hexPubkeysEqual, isValidPubkey, pubkeyToNpub, userIdToPubkey } from '@/lib/pubkey'
import { getPubkeysFromPTags, tagNameEquals } from '@/lib/tag'
import {
@ -52,7 +52,8 @@ import { @@ -52,7 +52,8 @@ import {
Event as NEvent,
Relay,
SimplePool,
VerifiedEvent
VerifiedEvent,
verifyEvent
} from 'nostr-tools'
import { AbstractRelay } from 'nostr-tools/abstract-relay'
import indexedDb from './indexed-db.service'
@ -63,6 +64,13 @@ import { QueryService } from './client-query.service' @@ -63,6 +64,13 @@ import { QueryService } from './client-query.service'
const SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS = 2800
const SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS = 4800
/**
* After initial timeline EOSE (incl. grace), events with `created_at` older than this many seconds
* (relative to wall clock at EOSE) are treated as backlog stragglers and merged into the feed;
* fresher timestamps go to `onNew` (live / new notes UX).
*/
const TIMELINE_STRAGGLER_MAX_AGE_SEC = 600
function summarizeFiltersForRelayLog(filters: Filter[]): Record<string, unknown> {
const f = filters[0]
if (!f) return {}
@ -1608,7 +1616,9 @@ class ClientService extends EventTarget { @@ -1608,7 +1616,9 @@ class ClientService extends EventTarget {
// eslint-disable-next-line @typescript-eslint/no-this-alias
const that = this
let events: NEvent[] = []
/** `null` until initial backlog is considered complete; then wall-clock unix at completion (for straggler vs live). */
let eosedAt: number | null = null
let eventIds = new Set<string>()
let firstResultGraceTimer: ReturnType<typeof setTimeout> | null = null
const clearFirstResultGraceTimer = () => {
@ -1670,6 +1680,7 @@ class ClientService extends EventTarget { @@ -1670,6 +1680,7 @@ class ClientService extends EventTarget {
}
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
eventIds = new Set(events.map((e) => e.id))
const tl = that.timelines[key]
if (!tl || Array.isArray(tl)) {
@ -1678,8 +1689,10 @@ class ClientService extends EventTarget { @@ -1678,8 +1689,10 @@ class ClientService extends EventTarget {
filter,
urls
}
} else if (tl.refs.length === 0) {
tl.refs = events.map((evt) => [evt.id, evt.created_at] as TTimelineRef)
} else {
const firstRefCreatedAt = tl.refs.length > 0 ? tl.refs[0][1] : dayjs().unix()
const firstRefCreatedAt = tl.refs[0]![1]
const newRefs = events
.filter((evt) => evt.created_at > firstRefCreatedAt)
.map((evt) => [evt.id, evt.created_at] as TTimelineRef)
@ -1698,44 +1711,63 @@ class ClientService extends EventTarget { @@ -1698,44 +1711,63 @@ class ClientService extends EventTarget {
that.addEventToCache(evt)
// not eosed yet, push to events
if (!eosedAt) {
if (eventIds.has(evt.id)) return
eventIds.add(evt.id)
events.push(evt)
flushStreamingSnapshot()
armFirstResultGraceAfterFirstEvent()
return
}
// new event
if (evt.created_at > eosedAt) {
onNew(evt)
if (eventIds.has(evt.id)) return
const wallClockAtEose = eosedAt
const isBacklogStraggler =
evt.created_at + TIMELINE_STRAGGLER_MAX_AGE_SEC < wallClockAtEose
if (isBacklogStraggler) {
eventIds.add(evt.id)
events.push(evt)
if (needSort) {
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
}
eventIds = new Set(events.map((e) => e.id))
onEvents([...events], false)
const timeline = that.timelines[key]
if (timeline && !Array.isArray(timeline)) {
timeline.refs = events
.map((e) => [e.id, e.created_at] as TTimelineRef)
.sort((a, b) => b[1] - a[1])
}
return
}
// Update timeline refs for pagination tracking
// This is needed for loadMoreTimeline to know what events have been loaded
eventIds.add(evt.id)
onNew(evt)
const timeline = that.timelines[key]
if (!timeline || Array.isArray(timeline)) {
return
}
// Initialize refs if empty (needed for pagination even when not using cache)
if (!timeline.refs || timeline.refs.length === 0) {
timeline.refs = []
if (timeline.refs.length === 0) {
timeline.refs = events.map((e) => [e.id, e.created_at] as TTimelineRef).sort((a, b) => b[1] - a[1])
return
}
// find the right position to insert
let idx = 0
for (const ref of timeline.refs) {
if (evt.created_at > ref[1] || (evt.created_at === ref[1] && evt.id < ref[0])) {
break
}
// the event is already in the cache
if (evt.created_at === ref[1] && evt.id === ref[0]) {
return
}
idx++
}
// the event is too old, ignore it
if (idx >= timeline.refs.length) return
// insert the event to the right position
timeline.refs.splice(idx, 0, [evt.id, evt.created_at])
},
oneose: handleTimelineEose,
@ -1760,7 +1792,11 @@ class ClientService extends EventTarget { @@ -1760,7 +1792,11 @@ class ClientService extends EventTarget {
const { filter, urls } = timeline
let events = await this.query(urls, { ...filter, until, limit })
let events = await this.query(urls, { ...filter, until, limit }, undefined, {
firstRelayResultGraceMs: false,
globalTimeout: 25_000,
eoseTimeout: 2500
})
events.forEach((evt) => {
this.addEventToCache(evt)
})
@ -2071,19 +2107,68 @@ class ClientService extends EventTarget { @@ -2071,19 +2107,68 @@ class ClientService extends EventTarget {
}
/**
* Fetch deletion events (kind 5) and update the tombstone list.
* Network sync is intentionally disabled: it queried many relays on every refresh/login and saturated
* the connection pool. Tombstones still update via {@link applyDeletionRequestToLocalCache} when the user deletes from this client.
* Fetch kind-5 deletion events for an author, merge tombstones, remove matching rows from IndexedDB,
* and notify the UI. Intended for **manual** refresh (e.g. cache settings); not run on every login.
*/
async fetchDeletionEvents(_relayUrls: string[] = [], _authorPubkey?: string): Promise<void> {
return
async fetchDeletionEvents(relayUrls: string[] = [], authorPubkey?: string): Promise<void> {
  // Require a well-formed 64-char lowercase hex pubkey; silently no-op otherwise.
  const pk = authorPubkey?.trim().toLowerCase()
  if (!pk || !/^[0-9a-f]{64}$/.test(pk)) return
  // Use caller-supplied relays when given, otherwise the default profile-fetch set.
  // Normalize each URL (falling back to the raw string when normalization fails).
  const urls = (relayUrls.length > 0 ? relayUrls : [...PROFILE_FETCH_RELAY_URLS])
    .map((u) => normalizeUrl(u) || u)
    .filter(Boolean)
  // Dedupe and cap at 16 relays to bound connection-pool usage.
  const capped = Array.from(new Set(urls)).slice(0, 16)
  if (capped.length === 0) return
  try {
    // Query kind-5 (deletion) events authored by this pubkey, with tight timeouts
    // so a manual refresh cannot hang on slow relays.
    const events = await this.queryService.fetchEvents(
      capped,
      {
        kinds: [kinds.EventDeletion],
        authors: [pk],
        limit: 500
      },
      {
        firstRelayResultGraceMs: false,
        globalTimeout: 22_000,
        eoseTimeout: 2500
      }
    )
    let any = false
    for (const e of events) {
      // Only accept genuine deletion events from the requested author.
      if (e.kind !== kinds.EventDeletion || e.pubkey.toLowerCase() !== pk) continue
      // Relays are untrusted: verify the signature before acting on the deletion.
      if (!verifyEvent(e)) continue
      if (shouldDropEventOnIngest(e)) continue
      // Merge the event's referenced ids/addresses into the tombstone list.
      await this.addTombstoneEntriesFromDeletionEvent(e)
      any = true
    }
    if (any) {
      // Purge any now-tombstoned rows from the IndexedDB cache, then notify the UI.
      const removed = await indexedDb.removeTombstonedFromCache()
      if (removed > 0) {
        logger.info('[ClientService] Removed tombstoned events from cache after deletion sync', {
          count: removed
        })
      }
      dispatchTombstonesUpdated()
    }
  } catch (e) {
    // Best-effort sync: log and continue rather than surfacing to the caller.
    logger.warn('[ClientService] fetchDeletionEvents failed', { error: e })
  }
}
/**
* @deprecated No-op; see {@link fetchDeletionEvents}.
*/
async fetchDeletionEventsForPubkey(_profilePubkey: string): Promise<void> {
return
/** Fetch deletions for a profile pubkey using that user’s NIP-65 read stack when possible. */
async fetchDeletionEventsForPubkey(profilePubkey: string): Promise<void> {
const pk = profilePubkey.trim().toLowerCase()
if (!/^[0-9a-f]{64}$/.test(pk)) return
try {
const rl = await this.fetchRelayList(pk)
const urls = buildDeletionRelayUrls(rl)
await this.fetchDeletionEvents(urls, pk)
} catch {
await this.fetchDeletionEvents(buildDeletionRelayUrls(null), pk)
}
}
async searchNpubsForMention(

24
src/services/local-storage.service.ts

@ -934,6 +934,30 @@ class LocalStorageService { @@ -934,6 +934,30 @@ class LocalStorageService {
this.panelMode = mode
this.persistSetting(StorageKey.PANE_MODE, mode)
}
// Read the last account-network-hydrate timestamp (ms) persisted for a pubkey.
// Returns undefined when the map is absent, unreadable, or holds a non-finite value.
getAccountNetworkHydrateAt(pubkey: string): number | undefined {
  try {
    const stored = window.localStorage.getItem(StorageKey.ACCOUNT_NETWORK_HYDRATE_AT_MAP)
    if (!stored) return undefined
    const parsed = JSON.parse(stored) as Record<string, unknown>
    const entry = parsed[pubkey.trim().toLowerCase()]
    if (typeof entry === 'number' && Number.isFinite(entry)) {
      return entry
    }
    return undefined
  } catch {
    // Corrupt JSON or privacy-mode storage access failure — treat as "never hydrated".
    return undefined
  }
}
/**
 * Persist the last account-network-hydrate timestamp (ms) for a pubkey.
 *
 * Hardened against corrupted storage: previously, parseable-but-non-object JSON
 * (e.g. "null", an array, a number) made the keyed assignment throw, the outer
 * catch swallowed it, and the write was silently dropped forever. Now any
 * non-object payload is discarded and the map rebuilt from scratch.
 */
setAccountNetworkHydrateAt(pubkey: string, atMs: number): void {
  try {
    const raw = window.localStorage.getItem(StorageKey.ACCOUNT_NETWORK_HYDRATE_AT_MAP)
    let map: Record<string, number> = {}
    if (raw) {
      try {
        const parsed: unknown = JSON.parse(raw)
        // Only accept a plain object; null/arrays/primitives would break `map[key] = …`.
        if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) {
          map = parsed as Record<string, number>
        }
      } catch {
        /* unparseable JSON — start over with an empty map */
      }
    }
    map[pubkey.trim().toLowerCase()] = atMs
    window.localStorage.setItem(StorageKey.ACCOUNT_NETWORK_HYDRATE_AT_MAP, JSON.stringify(map))
  } catch {
    /* ignore quota / privacy mode */
  }
}
}
const instance = new LocalStorageService()

Loading…
Cancel
Save