Browse Source

expand caching

imwald
Silberengel 1 month ago
parent
commit
bcf318c351
  1. 53
      src/components/LatestFromFollowsSection/index.tsx
  2. 92
      src/lib/search-follows-feed-cache.ts
  3. 78
      src/services/client-events.service.ts
  4. 16
      src/services/client-query.service.ts
  5. 30
      src/services/client-replaceable-events.service.ts
  6. 5
      src/services/client.service.ts

53
src/components/LatestFromFollowsSection/index.tsx

@@ -3,6 +3,15 @@ import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/component
import { Skeleton } from '@/components/ui/skeleton'
import { ExtendedKind } from '@/constants'
import { buildFollowOutboxAggregateReadUrls } from '@/lib/follow-outbox-aggregate-relays'
import {
buildSearchFollowsFeedScopeKey,
fingerprintRelaySet,
fingerprintSortedPubkeys,
postsMapToRecord,
postsRecordToMap,
readSearchFollowsFeedCache,
writeSearchFollowsFeedCache
} from '@/lib/search-follows-feed-cache'
import { shouldFilterEvent } from '@/lib/event-filtering'
import { toProfile } from '@/lib/link'
import { getPubkeysFromPTags } from '@/lib/tag'
@@ -127,6 +136,25 @@ export default function LatestFromFollowsSection({ defaultOpen = false }: { defa
const [aggregateRelayUrls, setAggregateRelayUrls] = useState<string[]>([])
const [aggregateRelaysReady, setAggregateRelaysReady] = useState(false)
const followListFingerprint = useMemo(
() => fingerprintSortedPubkeys(followPubkeys),
[followPubkeys]
)
const aggregateRelayFingerprint = useMemo(
() => fingerprintRelaySet(aggregateRelayUrls),
[aggregateRelayUrls]
)
const followsFeedScopeKey = useMemo(
() =>
buildSearchFollowsFeedScopeKey({
mode: followsLabel,
viewerPubkey: pubkey?.toLowerCase() ?? null,
followListFingerprint,
aggregateRelayFingerprint
}),
[followsLabel, pubkey, followListFingerprint, aggregateRelayFingerprint]
)
const acceptEvent = useCallback(
(e: Event) => {
if (!feedKindSet.has(e.kind)) return false
@@ -241,7 +269,18 @@ export default function LatestFromFollowsSection({ defaultOpen = false }: { defa
const run = async () => {
setBatchBusy(true)
setPostsByPubkey(new Map())
const seed = readSearchFollowsFeedCache(followsFeedScopeKey)
let working = seed ? postsRecordToMap(seed.posts) : new Map<string, NostrEvent[]>()
setPostsByPubkey(new Map(working))
const persist = () => {
writeSearchFollowsFeedCache({
v: 1,
scopeKey: followsFeedScopeKey,
posts: postsMapToRecord(working),
savedAtMs: Date.now()
})
}
for (let i = 0; i < followPubkeys.length; i += AUTHORS_PER_BATCH) {
if (cancelled || abortedRef.current) break
@@ -258,12 +297,17 @@ export default function LatestFromFollowsSection({ defaultOpen = false }: { defa
)
if (cancelled || abortedRef.current) break
const filtered = raw.filter((e) => acceptEvent(e))
setPostsByPubkey((prev) => mergeBatchPosts(prev, filtered, batch))
working = mergeBatchPosts(working, filtered, batch)
setPostsByPubkey(new Map(working))
persist()
} catch (err) {
logger.warn('[LatestFromFollows] Batch fetch failed', { err, batchSize: batch.length })
}
}
if (!cancelled) setBatchBusy(false)
if (!cancelled) {
persist()
setBatchBusy(false)
}
}
void run()
@@ -278,7 +322,8 @@ export default function LatestFromFollowsSection({ defaultOpen = false }: { defa
aggregateRelaysReady,
loadingFollowList,
isInitialized,
acceptEvent
acceptEvent,
followsFeedScopeKey
])
const sortedRowPubkeys = useMemo(() => {

92
src/lib/search-follows-feed-cache.ts

@@ -0,0 +1,92 @@
import logger from '@/lib/logger'
import { bytesToHex } from '@noble/hashes/utils'
import { sha256 } from '@noble/hashes/sha256'
import type { NostrEvent } from 'nostr-tools'
/** localStorage key for the persisted follows-feed snapshot (versioned name). */
const STORAGE_KEY = 'jumble.searchFollowsFeed.v1'
/** Stay under typical 5MB localStorage budgets */
const MAX_JSON_CHARS = 4_000_000
/** Versioned payload shape persisted to localStorage under {@link STORAGE_KEY}. */
export type SearchFollowsFeedCachePayloadV1 = {
  v: 1
  scopeKey: string
  /** Hex pubkey → recent posts (same shape as in-memory map) */
  posts: Record<string, NostrEvent[]>
  savedAtMs: number
}
/**
 * Stable fingerprint for a follow list. The pubkeys are copied and sorted
 * before hashing, so the result is order-insensitive; the empty list maps
 * to the sentinel '0' rather than a hash.
 */
export function fingerprintSortedPubkeys(pubkeys: string[]): string {
  if (!pubkeys.length) return '0'
  const joined = [...pubkeys].sort().join('\n')
  return bytesToHex(sha256(new TextEncoder().encode(joined)))
}
/**
 * Stable fingerprint for a relay *set*. The urls are copied and sorted before
 * hashing so the fingerprint is order-insensitive, matching the contract of
 * {@link fingerprintSortedPubkeys}; without the sort, the same relay set in a
 * different iteration order would produce a different fingerprint and cause
 * spurious cache-scope misses. The empty set maps to the sentinel '0'.
 */
export function fingerprintRelaySet(urls: string[]): string {
  if (urls.length === 0) return '0'
  const sorted = [...urls].sort()
  return bytesToHex(sha256(new TextEncoder().encode(sorted.join('\n'))))
}
/**
 * Compose the cache scope key: `mode|viewer|followFingerprint|relayFingerprint`.
 * The viewer pubkey is lowercased defensively; a missing viewer becomes the
 * empty segment so anonymous and logged-in scopes never collide.
 */
export function buildSearchFollowsFeedScopeKey(input: {
  mode: 'self' | 'recommended'
  viewerPubkey: string | null
  followListFingerprint: string
  aggregateRelayFingerprint: string
}): string {
  const viewer = input.viewerPubkey ? input.viewerPubkey.toLowerCase() : ''
  const segments = [input.mode, viewer, input.followListFingerprint, input.aggregateRelayFingerprint]
  return segments.join('|')
}
/**
 * Load the persisted feed snapshot for `scopeKey`.
 *
 * Returns null on any miss: storage unavailable, payload absent or larger
 * than {@link MAX_JSON_CHARS}, JSON invalid, version/scope mismatch, or a
 * malformed shape. Per-pubkey arrays are re-validated element by element
 * (object with a string `id`) and empty lists are dropped, so callers always
 * receive a clean record.
 */
export function readSearchFollowsFeedCache(
  scopeKey: string
): SearchFollowsFeedCachePayloadV1 | null {
  try {
    const raw = localStorage.getItem(STORAGE_KEY)
    if (!raw || raw.length > MAX_JSON_CHARS) return null
    const parsed: unknown = JSON.parse(raw)
    if (!parsed || typeof parsed !== 'object') return null
    const candidate = parsed as Record<string, unknown>
    if (candidate.v !== 1 || candidate.scopeKey !== scopeKey) return null
    if (typeof candidate.savedAtMs !== 'number') return null
    if (typeof candidate.posts !== 'object' || candidate.posts === null) return null
    const validated: Record<string, NostrEvent[]> = {}
    for (const [pubkey, value] of Object.entries(candidate.posts as Record<string, unknown>)) {
      if (!Array.isArray(value)) continue
      const events = value.filter(
        (item): item is NostrEvent =>
          !!item && typeof item === 'object' && typeof (item as NostrEvent).id === 'string'
      )
      if (events.length > 0) validated[pubkey] = events
    }
    return { v: 1, scopeKey, posts: validated, savedAtMs: candidate.savedAtMs }
  } catch {
    return null
  }
}
/**
 * Persist the snapshot to localStorage. Best-effort only: payloads over
 * {@link MAX_JSON_CHARS} are skipped (keeps us under browser quota) and any
 * storage failure (quota exceeded, private browsing) is swallowed after a
 * debug log — a failed cache write must never break the feed.
 */
export function writeSearchFollowsFeedCache(payload: SearchFollowsFeedCachePayloadV1): void {
  try {
    const serialized = JSON.stringify(payload)
    if (serialized.length > MAX_JSON_CHARS) {
      logger.debug('[SearchFollowsFeedCache] skip write (payload too large)', {
        chars: serialized.length
      })
      return
    }
    localStorage.setItem(STORAGE_KEY, serialized)
  } catch (e) {
    logger.debug('[SearchFollowsFeedCache] write failed', { error: e })
  }
}
/** Convert the in-memory posts map to a plain record, dropping empty lists. */
export function postsMapToRecord(m: Map<string, NostrEvent[]>): Record<string, NostrEvent[]> {
  const record: Record<string, NostrEvent[]> = {}
  m.forEach((events, pubkey) => {
    if (events.length > 0) record[pubkey] = events
  })
  return record
}
/** Rehydrate a persisted posts record into a map, skipping empty or non-array entries. */
export function postsRecordToMap(r: Record<string, NostrEvent[]>): Map<string, NostrEvent[]> {
  const map = new Map<string, NostrEvent[]>()
  Object.entries(r).forEach(([pubkey, events]) => {
    if (Array.isArray(events) && events.length > 0) map.set(pubkey, events)
  })
  return map
}

78
src/services/client-events.service.ts

@@ -1,6 +1,6 @@
import logger from '@/lib/logger'
import type { Event as NEvent, Filter } from 'nostr-tools'
import { nip19 } from 'nostr-tools'
import { kinds, nip19 } from 'nostr-tools'
import DataLoader from 'dataloader'
import { LRUCache } from 'lru-cache'
import indexedDb from './indexed-db.service'
@@ -37,7 +37,13 @@ const PREFETCH_HEX_IDS_CHUNK = 48
export class EventService {
private queryService: QueryService
private eventCacheMap = new Map<string, Promise<NEvent | undefined>>()
private sessionEventCache = new LRUCache<string, NEvent>({ max: 500, ttl: 1000 * 60 * 30 })
/**
* In-memory session cache: events seen this tab session (timelines, queries, fetches).
* Larger cap + no TTL so navigation and repeat fetches reuse data until reload.
*/
private sessionEventCache = new LRUCache<string, NEvent>({ max: 15000 })
/** Latest kind-0 per pubkey from {@link sessionEventCache} for batch profile short-circuit. */
private sessionMetadataByPubkey = new Map<string, NEvent>()
/** Callbacks waiting for an event id to appear in {@link sessionEventCache} (e.g. embed loads before timeline caches the note). */
private sessionEventWaiters = new Map<string, Set<() => void>>()
private eventDataLoader: DataLoader<string, NEvent | undefined>
@@ -329,9 +335,28 @@ export class EventService {
;(cleanEvent as NEvent).id = id
}
this.sessionEventCache.set(id, cleanEvent as NEvent)
if (cleanEvent.kind === kinds.Metadata) {
const pk = cleanEvent.pubkey.toLowerCase()
const prev = this.sessionMetadataByPubkey.get(pk)
if (!prev || cleanEvent.created_at >= prev.created_at) {
this.sessionMetadataByPubkey.set(pk, cleanEvent as NEvent)
}
}
this.notifySessionEventWaiters(id)
}
/** Kind 0 already ingested this session (e.g. from a timeline REQ). */
getSessionMetadataForPubkey(hexPubkey: string): NEvent | undefined {
const pk = hexPubkey.toLowerCase()
const e = this.sessionMetadataByPubkey.get(pk)
if (!e) return undefined
if (shouldDropEventOnIngest(e)) {
this.sessionMetadataByPubkey.delete(pk)
return undefined
}
return e
}
/**
* Get events from session cache matching search
*/
@@ -392,6 +417,7 @@ export class EventService {
clearCaches(): void {
this.eventDataLoader.clearAll()
this.sessionEventCache.clear()
this.sessionMetadataByPubkey.clear()
this.eventCacheMap.clear()
this.sessionEventWaiters.clear()
this.fetchEventFromBigRelaysDataloader.clearAll()
@@ -543,34 +569,46 @@ export class EventService {
* Uses same comprehensive list as single-event fetch (inboxes, fast read, searchable, cache).
*/
private async fetchEventsFromBigRelays(ids: readonly string[]): Promise<(NEvent | undefined)[]> {
const normalized = ids.map((id) => (/^[0-9a-f]{64}$/i.test(id) ? id.toLowerCase() : id))
const fromSession = normalized.map((k) => this.getSessionEventIfAllowed(k))
const missingIndices: number[] = []
for (let i = 0; i < normalized.length; i++) {
if (!fromSession[i]) missingIndices.push(i)
}
if (missingIndices.length === 0) {
return fromSession as NEvent[]
}
// Build comprehensive relay list (user's inboxes + defaults)
// Note: For batch fetches, we don't have author info, so we use user's inboxes + defaults
const relayUrls = await buildComprehensiveRelayListForEvents(undefined, [], [], [])
const isSingleEventFetch = ids.length === 1
const missingIds = missingIndices.map((i) => normalized[i]!)
const isSingleEventFetch = missingIds.length === 1
// For single-event fetches, always use immediateReturn to return ASAP
// This is especially important for non-replaceable events (not in 10000-19999 or 30000-39999 ranges)
const events = await this.queryService.query(relayUrls, {
ids: Array.from(new Set(ids)),
limit: ids.length
}, undefined, {
immediateReturn: isSingleEventFetch, // Return immediately when found
eoseTimeout: isSingleEventFetch ? 1500 : 500,
globalTimeout: isSingleEventFetch ? 12000 : 10000
})
const eventsMap = new Map<string, NEvent>()
const events = await this.queryService.query(
relayUrls,
{
ids: Array.from(new Set(missingIds)),
limit: missingIds.length
},
undefined,
{
immediateReturn: isSingleEventFetch,
eoseTimeout: isSingleEventFetch ? 1500 : 500,
globalTimeout: isSingleEventFetch ? 12000 : 10000
}
)
const fetchedById = new Map<string, NEvent>()
for (const event of events) {
if (shouldDropEventOnIngest(event)) continue
const key = /^[0-9a-f]{64}$/i.test(event.id) ? event.id.toLowerCase() : event.id
eventsMap.set(key, event)
// Note: We can't track which relay returned which event in batch queries,
// but events are still cached and will be found in future queries
fetchedById.set(key, event)
this.addEventToCache(event)
}
return ids.map((id) => {
const k = /^[0-9a-f]{64}$/i.test(id) ? id.toLowerCase() : id
return eventsMap.get(k)
})
return normalized.map((k, i) => fromSession[i] ?? fetchedById.get(k))
}
}

16
src/services/client-query.service.ts

@@ -60,6 +60,8 @@ export class QueryService {
private signerType?: TSignerType
private shouldSkipRelayForSession?: (normalizedUrl: string) => boolean
private onRelayConnectionFailure?: (normalizedUrl: string) => void
/** Optional: ingest every resolved `query()` result (e.g. session event LRU). */
private onQueryResultIngest?: (events: NEvent[]) => void
/** Max concurrent REQ subscriptions per relay URL */
private static readonly MAX_CONCURRENT_SUBS_PER_RELAY = MAX_CONCURRENT_RELAY_CONNECTIONS
@@ -96,6 +98,11 @@ export class QueryService {
this.onRelayConnectionFailure = relaySession?.onRelayConnectionFailure
}
/** Wire after {@link EventService} exists so all `query()` / `fetchEvents` results populate the session cache. */
setQueryResultIngest(handler: ((events: NEvent[]) => void) | undefined): void {
this.onQueryResultIngest = handler
}
setSigner(signer: ISigner | undefined, signerType: TSignerType | undefined) {
this.signer = signer
this.signerType = signerType
@@ -245,11 +252,10 @@ export class QueryService {
sub.close()
if (replaceableRace && events.length > 0) {
resolve(resolveReplaceableRaceEvents())
} else {
resolve(events)
}
const resolvedList =
replaceableRace && events.length > 0 ? resolveReplaceableRaceEvents() : events
this.onQueryResultIngest?.(resolvedList)
resolve(resolvedList)
}
const sub = this.subscribe(urls, filter, {

30
src/services/client-replaceable-events.service.ts

@@ -22,6 +22,7 @@ import type { QueryService } from './client-query.service'
import logger from '@/lib/logger'
import client from './client.service'
import { buildComprehensiveRelayList, buildExploreProfileAndUserRelayList } from '@/lib/relay-list-builder'
import { shouldDropEventOnIngest } from '@/lib/event-ingest-filter'
export class ReplaceableEventService {
/** Limits parallel Step 2/3 profile network work (relay list + wide metadata REQ). */
@@ -430,23 +431,37 @@ export class ReplaceableEventService {
})
return results
}
const networkMissing: { pubkey: string; kind: number; index: number }[] = []
for (const m of missingParams) {
if (m.kind === kinds.Metadata) {
const ev = client.eventService.getSessionMetadataForPubkey(m.pubkey)
if (ev && !shouldDropEventOnIngest(ev)) {
results[m.index] = ev
eventsMap.set(`${m.pubkey}:${m.kind}`, ev)
continue
}
}
networkMissing.push(m)
}
if (networkMissing.length > 0) {
// Only log at info level for large batches
if (missingParams.length > 50) {
if (networkMissing.length > 50) {
logger.debug('[ReplaceableEventService] Fetching missing events from network', {
missingCount: missingParams.length,
missingCount: networkMissing.length,
totalCount: params.length
})
} else {
logger.debug('[ReplaceableEventService] Fetching missing events from network', {
missingCount: missingParams.length,
missingCount: networkMissing.length,
totalCount: params.length
})
}
// Group missing params by kind for network fetch
const missingGroups = new Map<number, { pubkey: string; index: number }[]>()
missingParams.forEach(({ pubkey, kind, index }) => {
networkMissing.forEach(({ pubkey, kind, index }) => {
if (!missingGroups.has(kind)) {
missingGroups.set(kind, [])
}
@@ -621,6 +636,11 @@ export class ReplaceableEventService {
})
)
} else {
logger.debug('[ReplaceableEventService] All missing events resolved from session, skipping network fetch', {
totalCount: params.length
})
}
// Step 3: Persist hits only. Do not write negative cache rows (`value: null`) — optional kinds
// (e.g. 10432 cache relays, 10001 pins) are missing for most pubkeys and would flood IndexedDB.

5
src/services/client.service.ts

@@ -156,6 +156,11 @@ class ClientService extends EventTarget {
this.queryService,
(profileEvent) => this.addUsernameToIndex(profileEvent)
)
this.queryService.setQueryResultIngest((events) => {
for (const e of events) {
this.eventService.addEventToCache(e)
}
})
this.bookstrService = createBookstrService(this.queryService)
}

Loading…
Cancel
Save