9 changed files with 902 additions and 1263 deletions
File diff suppressed because it is too large
@@ -0,0 +1,277 @@
import {
  batchFetchPublicationSectionEvents,
  buildPublicationSectionRelayUrls,
  publicationRefKey,
  resolvePublicationEventIdToHex,
  type PublicationSectionRef
} from '@/lib/publication-section-fetch'
import { generateBech32IdFromATag } from '@/lib/tag'
import { isReplaceableEvent } from '@/lib/event'
import client, { eventService } from '@/services/client.service'
import indexedDb from '@/services/indexed-db.service'
import type { Event } from 'nostr-tools'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'

export type SectionLoadStatus = 'idle' | 'loading' | 'loaded' | 'error'

export type PublicationSectionRow = {
  ref: PublicationSectionRef
  status: SectionLoadStatus
  event?: Event
}

function refKey(ref: PublicationSectionRef): string {
  return publicationRefKey(ref)
}

async function hydrateRefsFromIndexedDb(refs: PublicationSectionRef[]): Promise<Map<string, Event>> {
  const out = new Map<string, Event>()
  for (const ref of refs) {
    const key = refKey(ref)
    if (!key) continue
    try {
      if (ref.type === 'a' && ref.coordinate) {
        const ev = await indexedDb.getPublicationEvent(ref.coordinate)
        if (ev) out.set(key, ev)
      } else if (ref.type === 'e' && ref.eventId) {
        const hex = resolvePublicationEventIdToHex(ref.eventId)
        if (!hex) continue
        let ev = await indexedDb.getEventFromPublicationStore(hex)
        if (!ev && ref.kind != null && ref.pubkey && isReplaceableEvent(ref.kind)) {
          const rep = await indexedDb.getReplaceableEvent(ref.pubkey, ref.kind)
          if (rep && rep.id === hex) ev = rep
        }
        if (ev) out.set(key, ev)
      }
    } catch {
      /* ignore per-ref */
    }
  }
  return out
}

async function fetchSingleRefFallback(ref: PublicationSectionRef): Promise<Event | undefined> {
  try {
    if (ref.type === 'a' && ref.coordinate) {
      const bech32 = generateBech32IdFromATag(['a', ref.coordinate, ref.relay || '', ''])
      if (bech32) return await eventService.fetchEvent(bech32)
    }
    if (ref.type === 'e' && ref.eventId) {
      return await eventService.fetchEvent(ref.eventId)
    }
  } catch {
    /* ignore */
  }
  return undefined
}

/**
 * Lazy publication sections: debounced batched REQ (chunked `ids` + grouped `authors`/`kinds`/`#d`),
 * IndexedDB first, capped relay list. Call {@link requestKeys} from IntersectionObserver.
 */
export function usePublicationSectionLoader(indexEvent: Event, referencesData: PublicationSectionRef[]) {
  const orderedKeys = useMemo(() => {
    const keys: string[] = []
    for (const r of referencesData) {
      const k = refKey(r)
      if (k) keys.push(k)
    }
    return keys
  }, [referencesData])

  const [rows, setRows] = useState<Map<string, PublicationSectionRow>>(() => new Map())
  const rowsRef = useRef(rows)
  rowsRef.current = rows

  useEffect(() => {
    const m = new Map<string, PublicationSectionRow>()
    for (const ref of referencesData) {
      const k = refKey(ref)
      if (!k) continue
      m.set(k, { ref, status: 'idle' })
    }
    setRows(m)
  }, [referencesData])

  const relayUrlsRef = useRef<string[]>([])
  const [relayReady, setRelayReady] = useState(false)

  useEffect(() => {
    let cancelled = false
    void (async () => {
      const urls = await buildPublicationSectionRelayUrls(indexEvent, referencesData)
      if (cancelled) return
      relayUrlsRef.current = urls
      setRelayReady(true)
    })()
    return () => {
      cancelled = true
    }
  }, [indexEvent, referencesData])

  const pendingRef = useRef(new Set<string>())
  const debounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null)
  const flushInFlightRef = useRef(false)

  const runFlush = useCallback(async () => {
    if (flushInFlightRef.current) return
    const keys = [...pendingRef.current]
    pendingRef.current.clear()
    if (keys.length === 0) return

    flushInFlightRef.current = true

    try {
      const snapshot = rowsRef.current
      const refsToLoad: PublicationSectionRef[] = []
      for (const k of keys) {
        const row = snapshot.get(k)
        if (!row) continue
        if (row.status === 'loaded' && row.event) continue
        refsToLoad.push(row.ref)
      }

      if (refsToLoad.length === 0) return

      setRows((prev) => {
        const next = new Map(prev)
        for (const ref of refsToLoad) {
          const k = refKey(ref)
          const row = next.get(k)
          if (row) next.set(k, { ...row, status: 'loading' })
        }
        return next
      })

      const urls = relayUrlsRef.current
      const resolved = new Map<string, Event>()

      if (urls.length > 0) {
        const fromDb = await hydrateRefsFromIndexedDb(refsToLoad)
        for (const [k, ev] of fromDb) {
          resolved.set(k, ev)
          client.addEventToCache(ev)
        }

        const stillNeed = refsToLoad.filter((r) => !resolved.has(refKey(r)))
        if (stillNeed.length > 0) {
          const fromNet = await batchFetchPublicationSectionEvents(stillNeed, urls)
          for (const [k, ev] of fromNet) {
            resolved.set(k, ev)
            client.addEventToCache(ev)
            if (isReplaceableEvent(ev.kind)) void indexedDb.putReplaceableEvent(ev)
          }
        }
      }

      const missing = refsToLoad.filter((r) => !resolved.has(refKey(r)))
      await Promise.all(
        missing.map(async (ref) => {
          const k = refKey(ref)
          const ev = await fetchSingleRefFallback(ref)
          if (ev) {
            resolved.set(k, ev)
            client.addEventToCache(ev)
            if (isReplaceableEvent(ev.kind)) void indexedDb.putReplaceableEvent(ev)
          }
        })
      )

      setRows((prev) => {
        const next = new Map(prev)
        for (const ref of refsToLoad) {
          const k = refKey(ref)
          const row = next.get(k)
          if (!row) continue
          const ev = resolved.get(k)
          if (ev) {
            next.set(k, { ...row, event: ev, status: 'loaded' })
          } else {
            next.set(k, { ...row, status: 'error', event: undefined })
          }
        }
        return next
      })
    } finally {
      flushInFlightRef.current = false
      // While a batch was in flight, debounced runFlush() calls may have returned early
      // (flush lock). Drain any keys that accumulated so scroll-triggered sections still load.
      if (pendingRef.current.size > 0) {
        if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current)
        debounceTimerRef.current = setTimeout(() => {
          debounceTimerRef.current = null
          void runFlush()
        }, 0)
      }
    }
  }, [])

  const requestKeys = useCallback(
    (keys: string[]) => {
      for (const k of keys) {
        if (k) pendingRef.current.add(k)
      }
      if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current)
      debounceTimerRef.current = setTimeout(() => {
        debounceTimerRef.current = null
        void runFlush()
      }, 56)
    },
    [runFlush]
  )

  useEffect(() => {
    if (!relayReady || orderedKeys.length === 0) return
    const n = Math.min(3, orderedKeys.length)
    requestKeys(orderedKeys.slice(0, n))
  }, [relayReady, orderedKeys, requestKeys])

  const failedKeys = useMemo(
    () => [...rows.entries()].filter(([, v]) => v.status === 'error').map(([k]) => k),
    [rows]
  )

  const retryKeys = useCallback(
    (keys: string[]) => {
      setRows((prev) => {
        const next = new Map(prev)
        for (const k of keys) {
          const row = next.get(k)
          if (row) next.set(k, { ...row, status: 'idle', event: undefined })
        }
        return next
      })
      requestKeys(keys)
    },
    [requestKeys]
  )

  const referencesWithEvents = useMemo(() => {
    return orderedKeys.map((k) => {
      const row = rows.get(k)
      const ref = row?.ref ?? referencesData.find((r) => refKey(r) === k)!
      return {
        type: ref.type,
        coordinate: ref.coordinate,
        eventId: ref.eventId,
        kind: ref.kind,
        pubkey: ref.pubkey,
        identifier: ref.identifier,
        relay: ref.relay,
        event: row?.event,
        loadStatus: row?.status ?? 'idle'
      }
    })
  }, [orderedKeys, rows, referencesData])

  return {
    orderedKeys,
    rows,
    relayReady,
    requestKeys,
    retryKeys,
    failedKeys,
    referencesWithEvents
  }
}
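
For context, a minimal sketch (not part of this diff) of how a consuming component might wire the hook to an IntersectionObserver, as the hook's JSDoc suggests. The component, the `data-section-key` attribute, the rootMargin, and the `SectionContent`/`SectionSkeleton` children are illustrative assumptions, not code from this change:

// Hypothetical consumer; assumes React plus the Event / PublicationSectionRef
// imports used by the hook file above, and two placeholder child components.
function PublicationSections({ indexEvent, refs }: { indexEvent: Event; refs: PublicationSectionRef[] }) {
  const { referencesWithEvents, requestKeys, retryKeys } = usePublicationSectionLoader(indexEvent, refs)
  const containerRef = useRef<HTMLDivElement>(null)

  useEffect(() => {
    const root = containerRef.current
    if (!root) return
    const io = new IntersectionObserver(
      (entries) => {
        const keys = entries
          .filter((e) => e.isIntersecting)
          .map((e) => (e.target as HTMLElement).dataset.sectionKey)
          .filter((k): k is string => !!k)
        if (keys.length > 0) requestKeys(keys) // debounced and batched by the hook
      },
      { rootMargin: '600px 0px' } // assumed margin: start loading before sections scroll in
    )
    root.querySelectorAll<HTMLElement>('[data-section-key]').forEach((el) => io.observe(el))
    return () => io.disconnect()
  }, [requestKeys, referencesWithEvents.length])

  return (
    <div ref={containerRef}>
      {referencesWithEvents.map((s) => {
        const key = s.coordinate ?? s.eventId ?? ''
        return (
          <section key={key} data-section-key={key}>
            {s.loadStatus === 'loaded' && s.event ? (
              <SectionContent event={s.event} />
            ) : s.loadStatus === 'error' ? (
              <button onClick={() => retryKeys([key])}>Retry</button>
            ) : (
              <SectionSkeleton />
            )}
          </section>
        )
      })}
    </div>
  )
}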
@@ -0,0 +1,29 @@
/** Split `kind:64-hex-pubkey:d…` (d may contain `:`). */
export function splitPublicationCoordinate(coordinate: string): {
  kind: number
  pubkey: string
  d: string
} | null {
  const trimmed = coordinate.trim()
  const i0 = trimmed.indexOf(':')
  const i1 = trimmed.indexOf(':', i0 + 1)
  if (i0 < 1 || i1 <= i0 + 1) return null
  const kind = parseInt(trimmed.slice(0, i0), 10)
  if (Number.isNaN(kind)) return null
  const pubkeyRaw = trimmed.slice(i0 + 1, i1)
  if (!/^[0-9a-fA-F]{64}$/.test(pubkeyRaw)) return null
  const pubkey = pubkeyRaw.toLowerCase()
  const d = trimmed.slice(i1 + 1)
  return { kind, pubkey, d }
}

/**
 * Coordinate strings to try when matching index `a` tags to events (NFC/NFD on `d` only).
 * Relays filter `#d` on exact bytes; we still need flexible client-side matching after REQ.
 */
export function publicationCoordinateLookupKeys(coordinate: string): string[] {
  const p = splitPublicationCoordinate(coordinate)
  if (!p) return [coordinate.trim()]
  const ds = [...new Set([p.d, p.d.normalize('NFC'), p.d.normalize('NFD')])]
  return [...new Set(ds.map((dt) => `${p.kind}:${p.pubkey}:${dt}`))]
}
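
A quick worked illustration of the two helpers above (the all-'a' pubkey and kind 30041 are dummy values chosen for the example):

// Illustrative only: pk happens to pass the 64-hex check because 'a' is a hex digit.
const pk = 'a'.repeat(64)

splitPublicationCoordinate(`30041:${pk}:chapter:1`)
// -> { kind: 30041, pubkey: pk, d: 'chapter:1' }
//    only the first two ':' split, so d keeps its inner ':'

splitPublicationCoordinate('30041:not-64-hex:x')
// -> null (pubkey must be exactly 64 hex chars)

publicationCoordinateLookupKeys(`30041:${pk}:café`)
// -> two coordinates: the raw d here is already NFC ('café' with U+00E9),
//    so the set collapses to the NFC form plus the NFD form ('cafe' + U+0301),
//    and both are tried when matching events client-side.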
@@ -0,0 +1,215 @@
import { publicationCoordinateLookupKeys } from '@/lib/publication-coordinate'
import { buildComprehensiveRelayList } from '@/lib/relay-list-builder'
import { normalizeUrl } from '@/lib/url'
import client, { queryService } from '@/services/client.service'
import type { Event, Filter } from 'nostr-tools'
import { nip19 } from 'nostr-tools'

/** Parsed a/e reference from publication index tags (same shape as PublicationIndex uses). */
export type PublicationSectionRef = {
  type: 'a' | 'e'
  coordinate?: string
  eventId?: string
  kind?: number
  pubkey?: string
  identifier?: string
  relay?: string
}

export function publicationRefKey(ref: PublicationSectionRef): string {
  return (ref.coordinate || ref.eventId || '').trim()
}

/**
 * Parse NIP-33 `a` coordinate `kind:64-hex-pubkey:d-identifier` where `d` may contain `:`.
 * Returns a canonical coordinate with lowercase pubkey for cache / REQ / matching.
 */
export function parsePublicationATagCoordinate(raw: string): {
  kind: number
  pubkey: string
  identifier: string
  coordinate: string
} | null {
  const trimmed = raw.trim()
  const i0 = trimmed.indexOf(':')
  const i1 = trimmed.indexOf(':', i0 + 1)
  if (i0 < 1 || i1 <= i0 + 1) return null
  const kindStr = trimmed.slice(0, i0)
  const pubkeyRaw = trimmed.slice(i0 + 1, i1)
  const identifier = trimmed.slice(i1 + 1)
  const kind = parseInt(kindStr, 10)
  if (Number.isNaN(kind) || !/^[0-9a-fA-F]{64}$/.test(pubkeyRaw)) return null
  const pubkey = pubkeyRaw.toLowerCase()
  return {
    kind,
    pubkey,
    identifier,
    coordinate: `${kind}:${pubkey}:${identifier}`
  }
}

export function resolvePublicationEventIdToHex(eventId: string): string | undefined {
  if (!eventId) return undefined
  const trimmed = eventId.trim()
  if (/^[0-9a-fA-F]{64}$/.test(trimmed)) return trimmed.toLowerCase()
  try {
    const decoded = nip19.decode(trimmed)
    if (decoded.type === 'note') return decoded.data
    if (decoded.type === 'nevent') return decoded.data.id
  } catch {
    /* ignore */
  }
  return undefined
}

function collectRelayHints(refs: PublicationSectionRef[]): string[] {
  const out: string[] = []
  for (const r of refs) {
    const h = r.relay?.trim()
    if (h && (h.startsWith('wss://') || h.startsWith('ws://'))) {
      const n = normalizeUrl(h) || h
      out.push(n)
    }
  }
  return out
}

/**
 * Focused relay set for publication sections: hints + author + user + profile/fast read, capped.
 * Omits the full SEARCHABLE list to avoid opening dozens of relays per publication.
 */
export async function buildPublicationSectionRelayUrls(
  indexEvent: Event,
  refs: PublicationSectionRef[],
  maxRelays = 22
): Promise<string[]> {
  const hints = collectRelayHints(refs)
  const urls = await buildComprehensiveRelayList({
    authorPubkey: indexEvent.pubkey,
    userPubkey: client.pubkey || undefined,
    relayHints: hints,
    includeUserOwnRelays: true,
    includeProfileFetchRelays: true,
    includeFastReadRelays: true,
    includeSearchableRelays: false,
    includeFavoriteRelays: true,
    includeLocalRelays: true
  })
  return urls.slice(0, maxRelays)
}

const IDS_CHUNK = 44
const D_TAGS_CHUNK = 28

function coordinateFromEvent(ev: Event): string {
  const d = ev.tags.find((t) => t[0] === 'd')?.[1] ?? ''
  return `${ev.kind}:${ev.pubkey.toLowerCase()}:${d}`
}

/**
 * One batched query: chunked `ids` filters and grouped `authors + kinds + #d` filters.
 * Caller should hydrate from IndexedDB first. Keys are {@link publicationRefKey}.
 */
export async function batchFetchPublicationSectionEvents(
  refs: PublicationSectionRef[],
  relayUrls: string[]
): Promise<Map<string, Event>> {
  const out = new Map<string, Event>()
  if (refs.length === 0 || relayUrls.length === 0) return out

  const idRefs: PublicationSectionRef[] = []
  const hexByKey = new Map<string, string>()
  for (const r of refs) {
    if (r.type !== 'e' || !r.eventId) continue
    const key = publicationRefKey(r)
    if (!key) continue
    const hex = resolvePublicationEventIdToHex(r.eventId)
    if (hex) {
      idRefs.push(r)
      hexByKey.set(key, hex)
    }
  }

  const aRefs = refs.filter((r) => r.type === 'a' && r.coordinate && r.pubkey && r.kind != null)
  const aGroups = new Map<string, { pubkey: string; kind: number; dTags: string[] }>()
  for (const r of aRefs) {
    const idf = r.identifier ?? r.coordinate!.split(':').slice(2).join(':')
    if (!idf) continue
    const gk = `${r.pubkey}:${r.kind}`
    let g = aGroups.get(gk)
    if (!g) {
      g = { pubkey: r.pubkey!, kind: r.kind!, dTags: [] }
      aGroups.set(gk, g)
    }
    g.dTags.push(idf)
  }

  const filters: Filter[] = []

  const hexList = [...new Set([...hexByKey.values()])].filter((id) => /^[0-9a-f]{64}$/.test(id))
  for (let i = 0; i < hexList.length; i += IDS_CHUNK) {
    const chunk = hexList.slice(i, i + IDS_CHUNK)
    filters.push({ ids: chunk, limit: chunk.length })
  }

  for (const g of aGroups.values()) {
    const uniqueD = [...new Set(g.dTags)]
    for (let i = 0; i < uniqueD.length; i += D_TAGS_CHUNK) {
      const dChunk = uniqueD.slice(i, i + D_TAGS_CHUNK)
      filters.push({
        authors: [g.pubkey.toLowerCase()],
        kinds: [g.kind],
        '#d': dChunk,
        limit: dChunk.length
      })
    }
  }

  if (filters.length === 0) return out

  let events: Event[] = []
  try {
    events = await queryService.fetchEvents(relayUrls, filters, {
      globalTimeout: 14_000,
      eoseTimeout: 2_500,
      /** Do not early-resolve after the first event; this query must wait for the full batch. */
      firstRelayResultGraceMs: false
    })
  } catch {
    return out
  }

  const byId = new Map<string, Event>()
  const byCoord = new Map<string, Event>()
  for (const ev of events) {
    byId.set(ev.id.toLowerCase(), ev)
    const d = ev.tags.find((t) => t[0] === 'd')?.[1]
    if (d !== undefined && d !== '') {
      const base = coordinateFromEvent(ev)
      for (const k of publicationCoordinateLookupKeys(base)) {
        if (!byCoord.has(k)) byCoord.set(k, ev)
      }
    }
  }

  for (const r of idRefs) {
    const key = publicationRefKey(r)
    const hex = hexByKey.get(key)
    if (!hex) continue
    const ev = byId.get(hex.toLowerCase())
    if (ev) out.set(key, ev)
  }

  for (const r of aRefs) {
    const key = publicationRefKey(r)
    const coord = r.coordinate!
    let ev: Event | undefined
    for (const k of publicationCoordinateLookupKeys(coord)) {
      ev = byCoord.get(k)
      if (ev) break
    }
    if (ev) out.set(key, ev)
  }

  return out
}
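
A short sketch of using the relay-list builder and batch fetcher directly, mirroring the hook's flush step; the wrapper function name and flow are illustrative, not part of this diff:

// Hypothetical standalone usage: build the capped relay set once per index
// event, then resolve every section ref in a single batched round-trip.
async function loadSections(indexEvent: Event, refs: PublicationSectionRef[]) {
  const relayUrls = await buildPublicationSectionRelayUrls(indexEvent, refs)
  // One REQ: e-refs become chunked `ids` filters; a-refs collapse into one
  // `authors + kinds + #d` filter per (pubkey, kind) group.
  const byKey = await batchFetchPublicationSectionEvents(refs, relayUrls)
  return refs.map((r) => ({ ref: r, event: byKey.get(publicationRefKey(r)) }))
}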