Browse Source

bug-fixes

imwald
Silberengel 1 month ago
parent
commit
d88d426e8c
  1. 271
      src/components/Note/PublicationIndex/PublicationIndex.tsx
  2. 531
      src/hooks/usePublicationSectionLoader.ts
  3. 366
      src/lib/publication-section-fetch.ts

271
src/components/Note/PublicationIndex/PublicationIndex.tsx

@@ -1,6 +1,6 @@
import { ExtendedKind } from '@/constants'
import { Event, kinds, nip19 } from 'nostr-tools'
import { useEffect, useMemo, useState, useCallback, useRef, type ReactNode } from 'react'
import { useEffect, useMemo, useState, useCallback } from 'react'
import { usePublicationSectionLoader } from '@/hooks/usePublicationSectionLoader'
import { parsePublicationATagCoordinate, publicationRefKey } from '@/lib/publication-section-fetch'
import { cn } from '@/lib/utils'
@@ -78,72 +78,6 @@ function publicationSectionNotesLink(ref: {
return null
}
const SECTION_IO_ROOT_MARGIN = '480px'

/**
 * IntersectionObserver with `root: null` observes against the browser viewport,
 * but note/feed layouts scroll inside `overflow-y: auto` panels, so section
 * placeholders would never intersect the viewport while scrolling. Walk up from
 * `from` and return the nearest vertically scrollable ancestor to use as the
 * observer root, or null when no scroll container exists below <html>.
 */
function findScrollPortRoot(from: HTMLElement | null): Element | null {
  if (!from) return null
  const scrollValues = ['auto', 'scroll', 'overlay']
  for (
    let node: HTMLElement | null = from.parentElement;
    node && node !== document.documentElement;
    node = node.parentElement
  ) {
    if (scrollValues.includes(window.getComputedStyle(node).overflowY)) {
      return node
    }
  }
  return null
}
/**
 * Wrapper that requests this section's payload once its placeholder nears the
 * visible scrollport. Batching/debouncing of the actual fetch happens upstream
 * in the loader hook; this component only reports "near viewport" events.
 */
function PublicationSectionBoundary({
  sectionKey,
  requestKeys,
  children
}: {
  // Stable key identifying this section's reference (empty string disables observation).
  sectionKey: string
  // Upstream loader entry point; called with this section's key on intersection.
  requestKeys: (keys: string[]) => void
  children: ReactNode
}) {
  const rootRef = useRef<HTMLDivElement>(null)
  useEffect(() => {
    if (!sectionKey) return
    const el = rootRef.current
    if (!el) return
    let io: IntersectionObserver | null = null
    // Guards against attaching after unmount (attach is also scheduled via rAF below).
    let cancelled = false
    const attach = () => {
      if (cancelled) return
      // Observe relative to the nearest overflow scroll container, not the viewport.
      const scrollRoot = findScrollPortRoot(el)
      // Drop any previous observer before creating a replacement.
      io?.disconnect()
      io = new IntersectionObserver(
        (entries) => {
          if (entries[0]?.isIntersecting) requestKeys([sectionKey])
        },
        { root: scrollRoot, rootMargin: SECTION_IO_ROOT_MARGIN, threshold: 0 }
      )
      io.observe(el)
    }
    attach()
    // Re-attach two frames later — presumably because the scroll container may
    // not exist in the layout on first render; TODO confirm still required.
    const raf = requestAnimationFrame(() => {
      requestAnimationFrame(attach)
    })
    return () => {
      cancelled = true
      cancelAnimationFrame(raf)
      io?.disconnect()
    }
  }, [sectionKey, requestKeys])
  return <div ref={rootRef}>{children}</div>
}
export default function PublicationIndex({
event,
className,
@@ -206,8 +140,7 @@ export default function PublicationIndex({
kind: parsed.kind,
pubkey: parsed.pubkey,
identifier: parsed.identifier,
relay: tag[2],
eventId: tag[3]
relay: tag[2]
})
}
} else if (tag[0] === 'e' && tag[1]) {
@@ -222,7 +155,7 @@ export default function PublicationIndex({
return refs
}, [event])
const { requestKeys, retryKeys, failedKeys, referencesWithEvents } =
const { retryKeys, failedKeys, referencesWithEvents } =
usePublicationSectionLoader(event, referencesData)
// Helper function to format bookstr titles (remove hyphens, title case)
@@ -527,13 +460,12 @@ export default function PublicationIndex({
</div>
)}
{/* Failed sections banner — batched fetch missed some payloads */}
{/* Failed sections banner */}
{!isNested && failedKeys.length > 0 && referencesWithEvents.length > 0 && (
<div className="p-4 border rounded-lg bg-yellow-50 dark:bg-yellow-900/20 border-yellow-200 dark:border-yellow-800">
<div className="flex items-center justify-between gap-4">
<div className="text-sm text-yellow-800 dark:text-yellow-200">
{failedKeys.length} section{failedKeys.length !== 1 ? 's' : ''} failed to load. Scroll near a
section or retry all.
{failedKeys.length} section{failedKeys.length !== 1 ? 's' : ''} failed to load.
</div>
<Button
variant="outline"
@@ -552,7 +484,7 @@ export default function PublicationIndex({
</div>
)}
{/* Sections: intersection-observer triggers debounced batch REQ; placeholders until loaded */}
{/* Sections */}
{referencesData.length === 0 ? (
<div className="p-6 border rounded-lg bg-muted/30 text-center text-sm text-muted-foreground">
This publication index has no linked sections.
@@ -568,61 +500,50 @@ export default function PublicationIndex({
if (!ref.event) {
if (ref.loadStatus === 'error') {
return (
<PublicationSectionBoundary
key={sectionKey || index}
sectionKey={sectionKey}
requestKeys={requestKeys}
>
<div id={sectionId} className="scroll-mt-24 p-4 border rounded-lg bg-muted/50">
<div className="flex items-center justify-between gap-2">
<div className="text-sm text-muted-foreground">
Section {index + 1}: unable to load{' '}
{notesLink ? (
<a
href={notesLink}
onClick={(e) => {
e.preventDefault()
e.stopPropagation()
push(notesLink)
}}
className="text-primary hover:underline cursor-pointer"
>
{coordinate || 'unknown'}
</a>
) : (
<span>{coordinate || 'unknown'}</span>
)}
</div>
<Button
variant="outline"
size="sm"
className="shrink-0"
onClick={() => retryKeys([sectionKey])}
>
<RefreshCw className="h-4 w-4" />
</Button>
<div key={sectionKey || index} id={sectionId} className="scroll-mt-24 p-4 border rounded-lg bg-muted/50">
<div className="flex items-center justify-between gap-2">
<div className="text-sm text-muted-foreground">
Section {index + 1}: unable to load{' '}
{notesLink ? (
<a
href={notesLink}
onClick={(e) => {
e.preventDefault()
e.stopPropagation()
push(notesLink)
}}
className="text-primary hover:underline cursor-pointer"
>
{coordinate || 'unknown'}
</a>
) : (
<span>{coordinate || 'unknown'}</span>
)}
</div>
<Button
variant="outline"
size="sm"
className="shrink-0"
onClick={() => retryKeys([sectionKey])}
>
<RefreshCw className="h-4 w-4" />
</Button>
</div>
</PublicationSectionBoundary>
</div>
)
}
return (
<PublicationSectionBoundary
<div
key={sectionKey || index}
sectionKey={sectionKey}
requestKeys={requestKeys}
id={sectionId}
className="scroll-mt-24 rounded-lg border border-dashed p-6 bg-muted/20 space-y-3"
aria-busy
>
<div
id={sectionId}
className="scroll-mt-24 rounded-lg border border-dashed p-6 bg-muted/20 space-y-3"
aria-busy
>
<Skeleton className="h-5 w-2/3 max-w-md" />
<Skeleton className="h-28 w-full" />
<Skeleton className="h-28 w-full" />
</div>
</PublicationSectionBoundary>
<Skeleton className="h-5 w-2/3 max-w-md" />
<Skeleton className="h-28 w-full" />
<Skeleton className="h-28 w-full" />
</div>
)
}
@@ -660,10 +581,11 @@ export default function PublicationIndex({
)
}
if (
const renderAsAsciidoc =
eventKind === ExtendedKind.PUBLICATION_CONTENT ||
eventKind === ExtendedKind.WIKI_ARTICLE
) {
if (renderAsAsciidoc) {
return (
<div key={sectionKey || index} id={sectionId} className="scroll-mt-24 pt-6 relative">
<div className="absolute top-0 right-0 flex items-center gap-2">
@@ -690,90 +612,29 @@ export default function PublicationIndex({
)
}
if (eventKind === ExtendedKind.WIKI_ARTICLE_MARKDOWN) {
return (
<div key={sectionKey || index} id={sectionId} className="scroll-mt-24 pt-6 relative">
<div className="absolute top-0 right-0 flex items-center gap-2">
{!isNested && (
<Button
variant="ghost"
size="sm"
className="opacity-70 hover:opacity-100"
onClick={scrollToToc}
title="Back to Table of Contents"
>
<ArrowUp className="h-4 w-4 mr-2" />
ToC
</Button>
)}
<NoteOptions event={ref.event} />
</div>
<MarkdownArticle
event={ref.event}
hideMetadata={true}
parentImageUrl={effectiveParentImageUrl}
/>
</div>
)
}
// NIP-23 long-form (30023): same markdown body path as standalone note view
if (eventKind === kinds.LongFormArticle) {
return (
<div key={sectionKey || index} id={sectionId} className="scroll-mt-24 pt-6 relative">
<div className="absolute top-0 right-0 flex items-center gap-2">
{!isNested && (
<Button
variant="ghost"
size="sm"
className="opacity-70 hover:opacity-100"
onClick={scrollToToc}
title="Back to Table of Contents"
>
<ArrowUp className="h-4 w-4 mr-2" />
ToC
</Button>
)}
<NoteOptions event={ref.event} />
</div>
<MarkdownArticle
event={ref.event}
hideMetadata={true}
parentImageUrl={effectiveParentImageUrl}
/>
</div>
)
}
// Kind 1: plain text / markdown body like {@link Note}
if (eventKind === kinds.ShortTextNote) {
return (
<div key={sectionKey || index} id={sectionId} className="scroll-mt-24 pt-6 relative">
<div className="absolute top-0 right-0 flex items-center gap-2">
{!isNested && (
<Button
variant="ghost"
size="sm"
className="opacity-70 hover:opacity-100"
onClick={scrollToToc}
title="Back to Table of Contents"
>
<ArrowUp className="h-4 w-4 mr-2" />
ToC
</Button>
)}
<NoteOptions event={ref.event} />
</div>
<MarkdownArticle event={ref.event} hideMetadata={true} />
</div>
)
}
// All non-publication, non-AsciiDoc section kinds use markdown renderer.
return (
<div key={sectionKey || index} id={sectionId} className="scroll-mt-24 p-4 border rounded-lg">
<div className="text-sm text-muted-foreground">
Section {index + 1}: unsupported kind {eventKind}
<div key={sectionKey || index} id={sectionId} className="scroll-mt-24 pt-6 relative">
<div className="absolute top-0 right-0 flex items-center gap-2">
{!isNested && (
<Button
variant="ghost"
size="sm"
className="opacity-70 hover:opacity-100"
onClick={scrollToToc}
title="Back to Table of Contents"
>
<ArrowUp className="h-4 w-4 mr-2" />
ToC
</Button>
)}
<NoteOptions event={ref.event} />
</div>
<MarkdownArticle
event={ref.event}
hideMetadata={true}
parentImageUrl={effectiveParentImageUrl}
/>
</div>
)
})}

531
src/hooks/usePublicationSectionLoader.ts

@@ -1,372 +1,295 @@
import logger from '@/lib/logger'
import {
batchFetchPublicationSectionEvents,
buildPublicationSectionRelayUrls,
parsePublicationATagCoordinate,
publicationRefKey,
resolvePublicationEventIdToHex,
type PublicationSectionRef
} from '@/lib/publication-section-fetch'
import { generateBech32IdFromATag } from '@/lib/tag'
import { isReplaceableEvent } from '@/lib/event'
import client from '@/services/client.service'
import { eventService } from '@/services/client.service'
import logger from '@/lib/logger'
import { eventService, queryService } from '@/services/client.service'
import indexedDb from '@/services/indexed-db.service'
import type { Event } from 'nostr-tools'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
const PUB_SEC_LOG = '[PublicationSection]'
const SINGLE_REF_FALLBACK_TIMEOUT_MS = 7000

/** Dev-only structured logging for the publication-section loader; no-op in production builds. */
function pubLog(message: string, data?: Record<string, unknown>) {
  if (!import.meta.env.DEV) return
  const line = `${PUB_SEC_LOG} ${message}`
  if (data) {
    logger.info(line, data)
  } else {
    logger.info(line)
  }
}
export type SectionLoadStatus = 'idle' | 'loading' | 'loaded' | 'error'
type LoadStatus = 'idle' | 'loading' | 'loaded' | 'error'
export type PublicationSectionRow = {
ref: PublicationSectionRef
status: SectionLoadStatus
type Row = PublicationSectionRef & {
key: string
event?: Event
status: LoadStatus
}
function refKey(ref: PublicationSectionRef): string {
return publicationRefKey(ref)
type CachedState = {
loaded: Map<string, Event>
failed: Set<string>
}
async function hydrateRefsFromIndexedDb(refs: PublicationSectionRef[]): Promise<Map<string, Event>> {
const out = new Map<string, Event>()
for (const ref of refs) {
const key = refKey(ref)
if (!key) continue
try {
if (ref.type === 'a' && ref.coordinate) {
const ev = await indexedDb.getPublicationEvent(ref.coordinate)
if (ev) out.set(key, ev)
} else if (ref.type === 'e' && ref.eventId) {
const hex = resolvePublicationEventIdToHex(ref.eventId)
if (!hex) continue
let ev = await indexedDb.getEventFromPublicationStore(hex)
if (!ev && ref.kind != null && ref.pubkey && isReplaceableEvent(ref.kind)) {
const rep = await indexedDb.getReplaceableEvent(ref.pubkey, ref.kind)
if (rep && rep.id === hex) ev = rep
}
if (ev) out.set(key, ev)
const indexCache = new Map<string, CachedState>()
const SINGLE_REF_TIMEOUT_MS = 6_000
function withTimeout<T>(p: Promise<T>, ms: number): Promise<T> {
return new Promise<T>((resolve, reject) => {
const timer = window.setTimeout(() => reject(new Error('timeout')), ms)
p.then(
(v) => {
clearTimeout(timer)
resolve(v)
},
(err) => {
clearTimeout(timer)
reject(err)
}
} catch {
/* ignore per-ref */
}
}
return out
)
})
}
async function fetchSingleRefFallback(ref: PublicationSectionRef): Promise<Event | undefined> {
const withTimeout = <T,>(p: Promise<T>, ms: number): Promise<T | undefined> =>
new Promise((resolve) => {
const t = setTimeout(() => resolve(undefined), ms)
p.then((v) => resolve(v)).catch(() => resolve(undefined)).finally(() => clearTimeout(t))
})
try {
if (ref.type === 'a' && ref.coordinate) {
const bech32 = generateBech32IdFromATag(['a', ref.coordinate, ref.relay || '', ''])
if (bech32) return await withTimeout(eventService.fetchEvent(bech32), SINGLE_REF_FALLBACK_TIMEOUT_MS)
}
if (ref.type === 'e' && ref.eventId) {
return await withTimeout(eventService.fetchEvent(ref.eventId), SINGLE_REF_FALLBACK_TIMEOUT_MS)
}
} catch {
/* ignore */
}
return undefined
/** Order-sensitive identity string for a ref list: per-ref keys joined with '|'. */
function signatureOfRefs(refs: PublicationSectionRef[]): string {
  const keys: string[] = []
  for (const ref of refs) {
    keys.push(publicationRefKey(ref))
  }
  return keys.join('|')
}
/**
* Lazy publication sections: debounced batched REQ (chunked `ids` + grouped `authors`/`kinds`/`#d`),
* IndexedDB first, capped relay list. Call {@link requestKeys} from IntersectionObserver.
*/
export function usePublicationSectionLoader(indexEvent: Event, referencesData: PublicationSectionRef[]) {
const orderedKeys = useMemo(() => {
const keys: string[] = []
for (const r of referencesData) {
const k = refKey(r)
if (k) keys.push(k)
}
return keys
}, [referencesData])
const orderedKeysSignature = useMemo(() => orderedKeys.join('|'), [orderedKeys])
const [rows, setRows] = useState<Map<string, PublicationSectionRow>>(() => new Map())
const rowsRef = useRef(rows)
rowsRef.current = rows
export function usePublicationSectionLoader(indexEvent: Event, refs: PublicationSectionRef[]) {
const indexId = indexEvent.id
const refsSignature = useMemo(() => signatureOfRefs(refs), [refs])
const [relayUrls, setRelayUrls] = useState<string[]>([])
const [rows, setRows] = useState<Row[]>([])
const inflightKeysRef = useRef<Set<string>>(new Set())
const autoLoadedSignatureRef = useRef<string | null>(null)
useEffect(() => {
// Preserve per-key load state across rerenders to avoid reinitializing rows to idle
// when parent components recreate reference objects.
setRows((prev) => {
const next = new Map<string, PublicationSectionRow>()
for (const ref of referencesData) {
const k = refKey(ref)
if (!k) continue
const existing = prev.get(k)
if (existing) {
next.set(k, { ...existing, ref })
} else {
next.set(k, { ref, status: 'idle' })
}
const cached = indexCache.get(indexId) ?? { loaded: new Map(), failed: new Set() }
const next: Row[] = []
for (const ref of refs) {
const key = publicationRefKey(ref)
if (!key) continue
const cachedEvent = cached.loaded.get(key)
if (cachedEvent) {
next.push({ ...ref, key, event: cachedEvent, status: 'loaded' })
continue
}
return next
})
}, [orderedKeysSignature, referencesData])
const relayUrlsRef = useRef<string[]>([])
const searchableRelayUrlsRef = useRef<string[]>([])
const [relayReady, setRelayReady] = useState(false)
if (cached.failed.has(key)) {
next.push({ ...ref, key, status: 'error' })
continue
}
next.push({ ...ref, key, status: 'idle' })
}
setRows(next)
}, [indexId, refsSignature, refs])
useEffect(() => {
let cancelled = false
void (async () => {
const [urls, searchableUrls] = await Promise.all([
buildPublicationSectionRelayUrls(indexEvent, referencesData, 22, false),
buildPublicationSectionRelayUrls(indexEvent, referencesData, 40, true)
])
;(async () => {
const primary = await buildPublicationSectionRelayUrls(indexEvent, refs, 22, false)
if (cancelled) return
if (primary.length > 0) {
setRelayUrls(primary)
return
}
const fallback = await buildPublicationSectionRelayUrls(indexEvent, refs, 30, true)
if (cancelled) return
relayUrlsRef.current = urls
searchableRelayUrlsRef.current = searchableUrls
setRelayReady(true)
})()
setRelayUrls(fallback)
})().catch((err) => {
if (import.meta.env.DEV) {
logger.warn('[PublicationSection] relay_build_failed', {
indexId,
message: err instanceof Error ? err.message : String(err)
})
}
if (!cancelled) setRelayUrls([])
})
return () => {
cancelled = true
}
}, [indexEvent.id, orderedKeysSignature])
const pendingRef = useRef(new Set<string>())
const debounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null)
const flushInFlightRef = useRef(false)
const runFlush = useCallback(async () => {
if (flushInFlightRef.current) return
const keys = [...pendingRef.current]
pendingRef.current.clear()
if (keys.length === 0) return
flushInFlightRef.current = true
try {
const snapshot = rowsRef.current
const refsToLoad: PublicationSectionRef[] = []
for (const k of keys) {
const row = snapshot.get(k)
if (!row) continue
// Auto-queue should only process idle rows.
// - loaded rows are done
// - loading rows are already in-flight
// - error rows require explicit retry via retry button
if (row.status !== 'idle') continue
refsToLoad.push(row.ref)
}, [indexId, refsSignature, indexEvent, refs])
const applyLoadedAndFailed = useCallback(
(loaded: Map<string, Event>, failedKeys: string[]) => {
const cached = indexCache.get(indexId) ?? { loaded: new Map(), failed: new Set() }
for (const [k, ev] of loaded) {
cached.loaded.set(k, ev)
cached.failed.delete(k)
}
if (refsToLoad.length === 0) return
pubLog('flush_start', {
keys: refsToLoad.map((r) => refKey(r)),
relayCount: relayUrlsRef.current.length
})
setRows((prev) => {
const next = new Map(prev)
for (const ref of refsToLoad) {
const k = refKey(ref)
const row = next.get(k)
if (row) next.set(k, { ...row, status: 'loading' })
}
return next
})
const urls = relayUrlsRef.current
const resolved = new Map<string, Event>()
// Always hydrate from IDB — do not gate on relay URLs (they resolve async after first IO batch).
const fromDb = await hydrateRefsFromIndexedDb(refsToLoad)
for (const [k, ev] of fromDb) {
resolved.set(k, ev)
client.addEventToCache(ev)
for (const k of failedKeys) {
if (!loaded.has(k)) cached.failed.add(k)
}
indexCache.set(indexId, cached)
setRows((prev) =>
prev.map((row) => {
const ev = loaded.get(row.key)
if (ev) return { ...row, event: ev, status: 'loaded' as const }
if (failedKeys.includes(row.key)) return { ...row, status: 'error' as const }
if (inflightKeysRef.current.has(row.key)) return { ...row, status: 'loading' as const }
return row
})
)
},
[indexId]
)
let stillNeed = refsToLoad.filter((r) => !resolved.has(refKey(r)))
pubLog('after_idb', {
fromDb: fromDb.size,
stillNeed: stillNeed.map((r) => ({ key: refKey(r), type: r.type }))
})
const runFetch = useCallback(
async (keys: string[]) => {
const selectedRows = rows.filter((r) => keys.includes(r.key))
if (selectedRows.length === 0) return
// No relay list yet: apply DB hits only, re-queue the rest (do not mark error).
if (urls.length === 0 && stillNeed.length > 0) {
for (const r of stillNeed) pendingRef.current.add(refKey(r))
pubLog('defer_net_until_relays', { reQueued: stillNeed.length })
setRows((prev) => {
const next = new Map(prev)
for (const ref of refsToLoad) {
const k = refKey(ref)
const row = next.get(k)
if (!row) continue
const ev = resolved.get(k)
if (ev) next.set(k, { ...row, event: ev, status: 'loaded' })
else next.set(k, { ...row, status: 'idle', event: undefined })
const byDb = new Map<string, Event>()
const stillNeed: Row[] = []
await Promise.all(
selectedRows.map(async (row) => {
try {
let ev: Event | undefined
if (row.type === 'e' && row.eventId) {
const hex = resolvePublicationEventIdToHex(row.eventId)
if (hex) ev = await indexedDb.getEventFromPublicationStore(hex)
} else if (row.coordinate) {
ev = await indexedDb.getPublicationEvent(row.coordinate)
}
if (ev) byDb.set(row.key, ev)
else stillNeed.push(row)
} catch {
stillNeed.push(row)
}
return next
})
return
}
)
if (urls.length > 0 && stillNeed.length > 0) {
const fromNet = await batchFetchPublicationSectionEvents(stillNeed, urls)
pubLog('after_batch_fetch', { fromNet: fromNet.size })
for (const [k, ev] of fromNet) {
resolved.set(k, ev)
client.addEventToCache(ev)
if (isReplaceableEvent(ev.kind)) void indexedDb.putReplaceableEvent(ev)
}
if (import.meta.env.DEV) {
logger.info('[PublicationSection] after_idb', {
fromDb: byDb.size,
stillNeed: stillNeed.map((r) => r.key)
})
}
stillNeed = refsToLoad.filter((r) => !resolved.has(refKey(r)))
if (stillNeed.length > 0) {
const searchableUrls = searchableRelayUrlsRef.current
const hasAdditionalSearchable = searchableUrls.some((u) => !urls.includes(u))
if (hasAdditionalSearchable) {
const fromSearchFallback = await batchFetchPublicationSectionEvents(stillNeed, searchableUrls)
pubLog('after_searchable_fallback', {
fromSearchFallback: fromSearchFallback.size,
stillNeedBefore: stillNeed.length,
relayCount: searchableUrls.length
})
for (const [k, ev] of fromSearchFallback) {
resolved.set(k, ev)
client.addEventToCache(ev)
if (isReplaceableEvent(ev.kind)) void indexedDb.putReplaceableEvent(ev)
}
let fromNet = new Map<string, Event>()
if (stillNeed.length > 0 && relayUrls.length > 0) {
fromNet = await batchFetchPublicationSectionEvents(stillNeed, relayUrls)
if (import.meta.env.DEV) {
logger.info('[PublicationSection] after_batch_fetch', { fromNet: fromNet.size })
}
}
const missing = refsToLoad.filter((r) => !resolved.has(refKey(r)))
pubLog('before_fallback', {
missing: missing.map((r) => refKey(r)),
relayUrlsEmpty: urls.length === 0
})
const merged = new Map<string, Event>([...byDb, ...fromNet])
const unresolved = stillNeed.filter((r) => !merged.has(r.key))
const bySingle = new Map<string, Event>()
await Promise.all(
missing.map(async (ref) => {
const k = refKey(ref)
const ev = await fetchSingleRefFallback(ref)
if (ev) {
resolved.set(k, ev)
client.addEventToCache(ev)
if (isReplaceableEvent(ev.kind)) void indexedDb.putReplaceableEvent(ev)
unresolved.map(async (row) => {
try {
if (row.type === 'e' && row.eventId) {
const ev = await withTimeout(
eventService.fetchEvent(row.eventId),
SINGLE_REF_TIMEOUT_MS
)
if (ev) bySingle.set(row.key, ev)
return
}
if (row.coordinate) {
const parsed = parsePublicationATagCoordinate(row.coordinate)
if (!parsed) return
const relaysToTry = row.relay ? [row.relay] : relayUrls
const ev = await withTimeout(
queryService
.fetchEvents(
relaysToTry,
{
authors: [parsed.pubkey],
kinds: [parsed.kind],
'#d': [parsed.identifier],
limit: 1
},
{
globalTimeout: 6_000,
eoseTimeout: 1_500
}
)
.then((arr) => arr[0]),
SINGLE_REF_TIMEOUT_MS
)
if (ev) bySingle.set(row.key, ev)
}
} catch {
// unresolved single-ref fallback
}
})
)
const failed = refsToLoad.filter((r) => !resolved.has(refKey(r)))
pubLog('flush_done', {
loaded: refsToLoad.length - failed.length,
failed: failed.map((r) => ({
key: refKey(r),
type: r.type,
coordinate: r.coordinate,
eventId: r.eventId
}))
})
for (const [k, ev] of bySingle) merged.set(k, ev)
setRows((prev) => {
const next = new Map(prev)
for (const ref of refsToLoad) {
const k = refKey(ref)
const row = next.get(k)
if (!row) continue
const ev = resolved.get(k)
if (ev) {
next.set(k, { ...row, event: ev, status: 'loaded' })
} else {
next.set(k, { ...row, status: 'error', event: undefined })
}
}
return next
})
} finally {
flushInFlightRef.current = false
// While a batch was in flight, debounced runFlush() calls may have returned early
// (flush lock). Drain any keys that accumulated so scroll-triggered sections still load.
// IMPORTANT: if relay URLs are not ready yet, do NOT spin in a tight retry loop.
// The relayReady effect will trigger requestKeys() once relays are available.
if (pendingRef.current.size > 0 && relayUrlsRef.current.length > 0) {
if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current)
debounceTimerRef.current = setTimeout(() => {
debounceTimerRef.current = null
void runFlush()
}, 0)
}
}
}, [])
const failed = selectedRows
.map((r) => r.key)
.filter((k) => !merged.has(k))
const requestKeys = useCallback(
(keys: string[]) => {
for (const k of keys) {
if (k) pendingRef.current.add(k)
}
if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current)
debounceTimerRef.current = setTimeout(() => {
debounceTimerRef.current = null
void runFlush()
}, 56)
applyLoadedAndFailed(merged, failed)
},
[runFlush]
[applyLoadedAndFailed, relayUrls, rows]
)
useEffect(() => {
if (!relayReady || orderedKeys.length === 0) return
// Full list: scroll-IO may have fired before relays were ready; those keys were re-queued idle.
requestKeys(orderedKeys)
}, [relayReady, orderedKeysSignature, requestKeys])
const requestKeys = useCallback(
(keys: string[]) => {
const unique = [...new Set(keys.filter(Boolean))]
if (unique.length === 0) return
const eligible = rows.filter((r) => unique.includes(r.key) && r.status !== 'loaded' && r.status !== 'loading')
if (eligible.length === 0) return
const failedKeys = useMemo(
() => [...rows.entries()].filter(([, v]) => v.status === 'error').map(([k]) => k),
[rows]
const keysToLoad = eligible.map((r) => r.key)
for (const k of keysToLoad) inflightKeysRef.current.add(k)
setRows((prev) => prev.map((r) => (keysToLoad.includes(r.key) ? { ...r, status: 'loading' } : r)))
void runFetch(keysToLoad).finally(() => {
for (const k of keysToLoad) inflightKeysRef.current.delete(k)
})
},
[rows, runFetch]
)
const retryKeys = useCallback(
(keys: string[]) => {
setRows((prev) => {
const next = new Map(prev)
for (const k of keys) {
const row = next.get(k)
if (row) next.set(k, { ...row, status: 'idle', event: undefined })
}
return next
})
requestKeys(keys)
const unique = [...new Set(keys.filter(Boolean))]
if (unique.length === 0) return
const cached = indexCache.get(indexId)
if (cached) {
for (const key of unique) cached.failed.delete(key)
}
setRows((prev) =>
prev.map((r) => (unique.includes(r.key) && r.status !== 'loaded' ? { ...r, status: 'idle' } : r))
)
requestKeys(unique)
},
[requestKeys]
[indexId, requestKeys]
)
const referencesWithEvents = useMemo(() => {
return orderedKeys.map((k) => {
const row = rows.get(k)
const ref = row?.ref ?? referencesData.find((r) => refKey(r) === k)!
return {
type: ref.type,
coordinate: ref.coordinate,
eventId: ref.eventId,
kind: ref.kind,
pubkey: ref.pubkey,
identifier: ref.identifier,
relay: ref.relay,
event: row?.event,
loadStatus: row?.status ?? 'idle'
useEffect(() => {
if (relayUrls.length === 0) return
const sig = `${indexId}:${refsSignature}`
if (autoLoadedSignatureRef.current === sig) return
autoLoadedSignatureRef.current = sig
const idleKeys = rows.filter((r) => r.status === 'idle').map((r) => r.key)
if (idleKeys.length > 0) {
if (import.meta.env.DEV) {
logger.info('[PublicationSection] flush_start', { keys: idleKeys, relayCount: relayUrls.length })
}
})
}, [orderedKeys, rows, referencesData])
requestKeys(idleKeys)
}
}, [indexId, refsSignature, relayUrls, rows, requestKeys])
const referencesWithEvents = useMemo(
() =>
rows.map((row) => ({
...row,
loadStatus: row.status
})),
[rows]
)
const failedKeys = useMemo(
() =>
rows
.filter((r) => r.status === 'error')
.map((r) => r.key),
[rows]
)
return {
orderedKeys,
rows,
relayReady,
requestKeys,
retryKeys,
failedKeys,

366
src/lib/publication-section-fetch.ts

@@ -1,14 +1,11 @@
import logger from '@/lib/logger'
import { publicationCoordinateLookupKeys } from '@/lib/publication-coordinate'
import { publicationCoordinateLookupKeys, splitPublicationCoordinate } from '@/lib/publication-coordinate'
import { buildComprehensiveRelayList } from '@/lib/relay-list-builder'
import { normalizeUrl } from '@/lib/url'
import client, { queryService } from '@/services/client.service'
import { ExtendedKind } from '@/constants'
import type { Event, Filter } from 'nostr-tools'
import { nip19 } from 'nostr-tools'
import { kinds } from 'nostr-tools'
/** Parsed a/e reference from publication index tags (same shape as PublicationIndex uses). */
export type PublicationSectionRef = {
type: 'a' | 'e'
coordinate?: string
@@ -23,64 +20,48 @@ export function publicationRefKey(ref: PublicationSectionRef): string {
return (ref.coordinate || ref.eventId || '').trim()
}
/**
* Parse NIP-33 `a` coordinate `kind:64-hex-pubkey:d-identifier` where `d` may contain `:`.
* Returns a canonical coordinate with lowercase pubkey for cache / REQ / matching.
*/
export function parsePublicationATagCoordinate(raw: string): {
kind: number
pubkey: string
identifier: string
coordinate: string
} | null {
const trimmed = raw.trim()
const i0 = trimmed.indexOf(':')
const i1 = trimmed.indexOf(':', i0 + 1)
if (i0 < 1 || i1 <= i0 + 1) return null
const kindStr = trimmed.slice(0, i0)
const pubkeyRaw = trimmed.slice(i0 + 1, i1)
const identifier = trimmed.slice(i1 + 1)
const kind = parseInt(kindStr, 10)
if (Number.isNaN(kind) || !/^[0-9a-fA-F]{64}$/.test(pubkeyRaw)) return null
const pubkey = pubkeyRaw.toLowerCase()
const parsed = splitPublicationCoordinate(raw)
if (!parsed) return null
return {
kind,
pubkey,
identifier,
coordinate: `${kind}:${pubkey}:${identifier}`
kind: parsed.kind,
pubkey: parsed.pubkey,
identifier: parsed.d,
coordinate: `${parsed.kind}:${parsed.pubkey}:${parsed.d}`
}
}
/**
 * Normalize an event-id reference to 64-char lowercase hex.
 * Accepts raw 64-hex ids as well as NIP-19 `note1…` / `nevent1…` strings;
 * returns undefined for anything empty or malformed.
 */
export function resolvePublicationEventIdToHex(eventId: string): string | undefined {
  if (!eventId) return undefined
  const candidate = eventId.trim()
  if (!candidate) return undefined
  if (/^[0-9a-fA-F]{64}$/.test(candidate)) {
    return candidate.toLowerCase()
  }
  try {
    const decoded = nip19.decode(candidate)
    switch (decoded.type) {
      case 'note':
        return decoded.data
      case 'nevent':
        return decoded.data.id
    }
  } catch {
    // malformed bech32 id — fall through and report unresolved
  }
  return undefined
}
function collectRelayHints(refs: PublicationSectionRef[]): string[] {
const out: string[] = []
for (const r of refs) {
const h = r.relay?.trim()
if (h && (h.startsWith('wss://') || h.startsWith('ws://'))) {
const n = normalizeUrl(h) || h
out.push(n)
}
for (const ref of refs) {
const relay = ref.relay?.trim()
if (!relay) continue
if (!relay.startsWith('wss://') && !relay.startsWith('ws://')) continue
const normalized = normalizeUrl(relay) || relay
out.push(normalized)
}
return out
return [...new Set(out)]
}
/**
* Focused relay set for publication sections: hints + author + user + profile/fast read, capped.
* Omits full SEARCHABLE list to avoid opening dozens of relays per publication.
*/
export async function buildPublicationSectionRelayUrls(
indexEvent: Event,
refs: PublicationSectionRef[],
@@ -99,28 +80,25 @@ export async function buildPublicationSectionRelayUrls(
includeFavoriteRelays: true,
includeLocalRelays: true
})
return urls.slice(0, maxRelays)
const prioritized = [...new Set([...hints, ...urls])]
return prioritized.slice(0, maxRelays)
}
const IDS_CHUNK = 44
const D_TAGS_CHUNK = 28
const SECTION_KIND_FALLBACK_CANDIDATES = [
ExtendedKind.PUBLICATION_CONTENT, // 30041
ExtendedKind.WIKI_ARTICLE, // 30818
ExtendedKind.WIKI_ARTICLE_MARKDOWN, // 30817
kinds.LongFormArticle, // 30023
kinds.ShortTextNote // 1
] as number[]
const D_CHUNK = 28
const ANY_KIND_LIMIT_PER_D = 12
/** First `d` tag value on the event, or undefined when the tag is absent or empty. */
function dTagOf(ev: Event): string | undefined {
  const dTag = ev.tags.find((tag) => tag[0] === 'd')
  const value = dTag?.[1]
  return value ? value : undefined
}
function coordinateFromEvent(ev: Event): string {
const d = ev.tags.find((t) => t[0] === 'd')?.[1] ?? ''
function coordinateOfEvent(ev: Event): string | null {
const d = dTagOf(ev)
if (!d) return null
return `${ev.kind}:${ev.pubkey.toLowerCase()}:${d}`
}
/**
* One batched query: chunk `ids` filters and grouped `authors + kinds + #d` filters.
* Caller should hydrate from IndexedDB first. Keys are {@link publicationRefKey}.
*/
export async function batchFetchPublicationSectionEvents(
refs: PublicationSectionRef[],
relayUrls: string[]
@ -128,45 +106,44 @@ export async function batchFetchPublicationSectionEvents( @@ -128,45 +106,44 @@ export async function batchFetchPublicationSectionEvents(
const out = new Map<string, Event>()
if (refs.length === 0 || relayUrls.length === 0) return out
const idRefs: PublicationSectionRef[] = []
const hexByKey = new Map<string, string>()
for (const r of refs) {
if (r.type !== 'e' || !r.eventId) continue
const key = publicationRefKey(r)
if (!key) continue
const hex = resolvePublicationEventIdToHex(r.eventId)
if (hex) {
idRefs.push(r)
hexByKey.set(key, hex)
}
}
const eRefs: PublicationSectionRef[] = []
const eHexByKey = new Map<string, string>()
const aRefs = refs.filter((r) => r.type === 'a' && r.coordinate && r.pubkey && typeof r.kind === 'number')
const aRefs = refs.filter((r) => r.type === 'a' && r.coordinate && r.pubkey && r.kind != null)
const aGroups = new Map<string, { pubkey: string; kind: number; dTags: string[] }>()
for (const r of aRefs) {
const idf = r.identifier ?? r.coordinate!.split(':').slice(2).join(':')
if (!idf) continue
const gk = `${r.pubkey}:${r.kind}`
let g = aGroups.get(gk)
if (!g) {
g = { pubkey: r.pubkey!, kind: r.kind!, dTags: [] }
aGroups.set(gk, g)
}
g.dTags.push(idf)
for (const ref of refs) {
if (ref.type !== 'e' || !ref.eventId) continue
const key = publicationRefKey(ref)
const hex = resolvePublicationEventIdToHex(ref.eventId)
if (!key || !hex) continue
eRefs.push(ref)
eHexByKey.set(key, hex)
}
const filters: Filter[] = []
const hexList = [...new Set([...hexByKey.values()])].filter((id) => /^[0-9a-f]{64}$/.test(id))
for (let i = 0; i < hexList.length; i += IDS_CHUNK) {
const chunk = hexList.slice(i, i + IDS_CHUNK)
const ids = [...new Set([...eHexByKey.values()])]
for (let i = 0; i < ids.length; i += IDS_CHUNK) {
const chunk = ids.slice(i, i + IDS_CHUNK)
filters.push({ ids: chunk, limit: chunk.length })
}
for (const g of aGroups.values()) {
const groupedA = new Map<string, { pubkey: string; kind: number; dTags: string[] }>()
for (const ref of aRefs) {
const d = ref.identifier ?? ref.coordinate!.split(':').slice(2).join(':')
if (!d) continue
const gk = `${ref.pubkey}:${ref.kind}`
let g = groupedA.get(gk)
if (!g) {
g = { pubkey: ref.pubkey!, kind: ref.kind!, dTags: [] }
groupedA.set(gk, g)
}
g.dTags.push(d)
}
for (const g of groupedA.values()) {
const uniqueD = [...new Set(g.dTags)]
for (let i = 0; i < uniqueD.length; i += D_TAGS_CHUNK) {
const dChunk = uniqueD.slice(i, i + D_TAGS_CHUNK)
for (let i = 0; i < uniqueD.length; i += D_CHUNK) {
const dChunk = uniqueD.slice(i, i + D_CHUNK)
filters.push({
authors: [g.pubkey.toLowerCase()],
kinds: [g.kind],
@ -176,87 +153,153 @@ export async function batchFetchPublicationSectionEvents( @@ -176,87 +153,153 @@ export async function batchFetchPublicationSectionEvents(
}
}
if (filters.length === 0) {
if (import.meta.env.DEV) {
logger.info('[PublicationSection] batch_fetch_skip — no filters', {
aRefCount: aRefs.length,
idRefCount: idRefs.length
})
}
return out
}
let events: Event[] = []
try {
events = await queryService.fetchEvents(relayUrls, filters, {
globalTimeout: 14_000,
eoseTimeout: 2_500,
/** Do not early-resolve after the first event; this query must wait for the full batch. */
firstRelayResultGraceMs: false
})
} catch (err) {
if (import.meta.env.DEV) {
logger.warn('[PublicationSection] batch_fetch_error', {
message: err instanceof Error ? err.message : String(err),
filterCount: filters.length,
relayCount: relayUrls.length
if (filters.length > 0) {
try {
events = await queryService.fetchEvents(relayUrls, filters, {
globalTimeout: 12_000,
eoseTimeout: 2_000,
firstRelayResultGraceMs: false
})
} catch (err) {
if (import.meta.env.DEV) {
logger.warn('[PublicationSection] batch_fetch_error', {
message: err instanceof Error ? err.message : String(err),
filterCount: filters.length,
relayCount: relayUrls.length
})
}
}
return out
}
const byId = new Map<string, Event>()
const byCoord = new Map<string, Event>()
for (const ev of events) {
byId.set(ev.id.toLowerCase(), ev)
const d = ev.tags.find((t) => t[0] === 'd')?.[1]
if (d !== undefined && d !== '') {
const base = coordinateFromEvent(ev)
for (const k of publicationCoordinateLookupKeys(base)) {
if (!byCoord.has(k)) byCoord.set(k, ev)
}
const coord = coordinateOfEvent(ev)
if (!coord) continue
for (const key of publicationCoordinateLookupKeys(coord)) {
const prev = byCoord.get(key)
if (!prev || ev.created_at > prev.created_at) byCoord.set(key, ev)
}
}
for (const r of idRefs) {
const key = publicationRefKey(r)
const hex = hexByKey.get(key)
for (const ref of eRefs) {
const key = publicationRefKey(ref)
const hex = eHexByKey.get(key)
if (!hex) continue
const ev = byId.get(hex.toLowerCase())
const ev = byId.get(hex)
if (ev) out.set(key, ev)
}
for (const ref of aRefs) {
const key = publicationRefKey(ref)
if (out.has(key)) continue
const coord = ref.coordinate!
let ev: Event | undefined
for (const k of publicationCoordinateLookupKeys(coord)) {
ev = byCoord.get(k)
if (ev) break
}
if (ev) out.set(key, ev)
}
// Fallback for mismatched/legacy kind in `a` tags:
// retry unresolved refs by author + #d across common section kinds.
const unresolvedARefs = aRefs.filter((r) => !out.has(publicationRefKey(r)))
if (unresolvedARefs.length > 0) {
const fallbackGroups = new Map<string, { pubkey: string; dTags: string[] }>()
for (const r of unresolvedARefs) {
const pubkey = r.pubkey?.toLowerCase()
const idf = r.identifier ?? r.coordinate?.split(':').slice(2).join(':')
if (!pubkey || !idf) continue
let g = fallbackGroups.get(pubkey)
// Relay-hint targeted pass for unresolved `a` refs.
const unresolvedAfterBatch = aRefs.filter((r) => !out.has(publicationRefKey(r)))
const byHintRelay = new Map<string, PublicationSectionRef[]>()
for (const ref of unresolvedAfterBatch) {
const relay = normalizeUrl(ref.relay || '') || ref.relay?.trim()
if (!relay) continue
const list = byHintRelay.get(relay)
if (list) list.push(ref)
else byHintRelay.set(relay, [ref])
}
for (const [relay, relayRefs] of byHintRelay) {
const hintFilters: Filter[] = []
const groups = new Map<string, { pubkey: string; kind: number; dTags: string[] }>()
for (const ref of relayRefs) {
const d = ref.identifier ?? ref.coordinate!.split(':').slice(2).join(':')
if (!d) continue
const gk = `${ref.pubkey}:${ref.kind}`
let g = groups.get(gk)
if (!g) {
g = { pubkey, dTags: [] }
fallbackGroups.set(pubkey, g)
g = { pubkey: ref.pubkey!.toLowerCase(), kind: ref.kind!, dTags: [] }
groups.set(gk, g)
}
g.dTags.push(d)
}
for (const g of groups.values()) {
const uniqueD = [...new Set(g.dTags)]
for (let i = 0; i < uniqueD.length; i += D_CHUNK) {
const dChunk = uniqueD.slice(i, i + D_CHUNK)
hintFilters.push({
authors: [g.pubkey],
kinds: [g.kind],
'#d': dChunk,
limit: dChunk.length
})
}
g.dTags.push(idf)
}
if (hintFilters.length === 0) continue
try {
const hintEvents = await queryService.fetchEvents([relay], hintFilters, {
globalTimeout: 8_000,
eoseTimeout: 1_500,
firstRelayResultGraceMs: false
})
const hintByCoord = new Map<string, Event>()
for (const ev of hintEvents) {
const coord = coordinateOfEvent(ev)
if (!coord) continue
for (const key of publicationCoordinateLookupKeys(coord)) {
const prev = hintByCoord.get(key)
if (!prev || ev.created_at > prev.created_at) hintByCoord.set(key, ev)
}
}
for (const ref of relayRefs) {
const key = publicationRefKey(ref)
if (out.has(key)) continue
const coord = ref.coordinate!
let ev: Event | undefined
for (const k of publicationCoordinateLookupKeys(coord)) {
ev = hintByCoord.get(k)
if (ev) break
}
if (ev) out.set(key, ev)
}
} catch {
// ignore per-relay hint failures
}
}
// Last fallback: author + #d across any kind.
const unresolvedAfterHint = aRefs.filter((r) => !out.has(publicationRefKey(r)))
if (unresolvedAfterHint.length > 0) {
const fallbackFilters: Filter[] = []
for (const g of fallbackGroups.values()) {
const groups = new Map<string, { pubkey: string; dTags: string[] }>()
for (const ref of unresolvedAfterHint) {
const d = ref.identifier ?? ref.coordinate!.split(':').slice(2).join(':')
if (!d) continue
const pk = ref.pubkey!.toLowerCase()
let g = groups.get(pk)
if (!g) {
g = { pubkey: pk, dTags: [] }
groups.set(pk, g)
}
g.dTags.push(d)
}
for (const g of groups.values()) {
const uniqueD = [...new Set(g.dTags)]
for (let i = 0; i < uniqueD.length; i += D_TAGS_CHUNK) {
const dChunk = uniqueD.slice(i, i + D_TAGS_CHUNK)
for (let i = 0; i < uniqueD.length; i += D_CHUNK) {
const dChunk = uniqueD.slice(i, i + D_CHUNK)
fallbackFilters.push({
authors: [g.pubkey],
kinds: [...SECTION_KIND_FALLBACK_CANDIDATES],
'#d': dChunk,
limit: dChunk.length * SECTION_KIND_FALLBACK_CANDIDATES.length
limit: dChunk.length * ANY_KIND_LIMIT_PER_D
})
}
}
if (fallbackFilters.length > 0) {
try {
const fallbackEvents = await queryService.fetchEvents(relayUrls, fallbackFilters, {
@ -264,49 +307,38 @@ export async function batchFetchPublicationSectionEvents( @@ -264,49 +307,38 @@ export async function batchFetchPublicationSectionEvents(
eoseTimeout: 2_000,
firstRelayResultGraceMs: false
})
const byAuthorAndD = new Map<string, Event>()
const byAuthorD = new Map<string, Event[]>()
for (const ev of fallbackEvents) {
const d = ev.tags.find((t) => t[0] === 'd')?.[1]
const d = dTagOf(ev)
if (!d) continue
const k = `${ev.pubkey.toLowerCase()}:${d}`
const prev = byAuthorAndD.get(k)
if (!prev || ev.created_at > prev.created_at) byAuthorAndD.set(k, ev)
const arr = byAuthorD.get(k)
if (arr) arr.push(ev)
else byAuthorD.set(k, [ev])
}
for (const r of unresolvedARefs) {
const key = publicationRefKey(r)
for (const ref of unresolvedAfterHint) {
const key = publicationRefKey(ref)
if (out.has(key)) continue
const pubkey = r.pubkey?.toLowerCase()
const idf = r.identifier ?? r.coordinate?.split(':').slice(2).join(':')
if (!pubkey || !idf) continue
const ev = byAuthorAndD.get(`${pubkey}:${idf}`)
if (ev) out.set(key, ev)
}
} catch (err) {
if (import.meta.env.DEV) {
logger.warn('[PublicationSection] batch_fetch_fallback_error', {
message: err instanceof Error ? err.message : String(err),
filterCount: fallbackFilters.length,
relayCount: relayUrls.length
})
const d = ref.identifier ?? ref.coordinate!.split(':').slice(2).join(':')
const candidates = byAuthorD.get(`${ref.pubkey!.toLowerCase()}:${d}`)
if (!candidates || candidates.length === 0) continue
const preferred = candidates.filter((ev) => ev.kind === ref.kind)
const src = preferred.length > 0 ? preferred : candidates
let newest = src[0]
for (let i = 1; i < src.length; i++) {
if (src[i].created_at > newest.created_at) newest = src[i]
}
out.set(key, newest)
}
} catch {
// ignore fallback errors
}
}
}
for (const r of aRefs) {
const key = publicationRefKey(r)
const coord = r.coordinate!
let ev: Event | undefined
for (const k of publicationCoordinateLookupKeys(coord)) {
ev = byCoord.get(k)
if (ev) break
}
if (ev) out.set(key, ev)
}
if (import.meta.env.DEV) {
const unmatchedA = aRefs.filter((r) => !out.has(publicationRefKey(r)))
const unmatchedE = idRefs.filter((r) => !out.has(publicationRefKey(r)))
const unmatchedE = eRefs.filter((r) => !out.has(publicationRefKey(r)))
logger.info('[PublicationSection] batch_fetch_result', {
relayCount: relayUrls.length,
filterCount: filters.length,
@ -315,11 +347,7 @@ export async function batchFetchPublicationSectionEvents( @@ -315,11 +347,7 @@ export async function batchFetchPublicationSectionEvents(
resolved: out.size,
unmatchedACount: unmatchedA.length,
unmatchedECount: unmatchedE.length,
unmatchedAKeys: unmatchedA.map((r) => publicationRefKey(r)).slice(0, 12),
sampleEventCoords: events.slice(0, 3).map((ev) => {
const d = ev.tags.find((t) => t[0] === 'd')?.[1]
return d !== undefined && d !== '' ? coordinateFromEvent(ev) : `${ev.kind}:${ev.pubkey.slice(0, 8)}`
})
unmatchedAKeys: unmatchedA.map((r) => publicationRefKey(r)).slice(0, 12)
})
}

Loading…
Cancel
Save