bug-fixes

imwald
Silberengel committed 1 month ago
parent commit 2564627985
Changed files:

  1. src/components/Note/MarkdownArticle/MarkdownArticle.tsx (22 changed lines)
  2. src/components/Note/PublicationIndex/PublicationIndex.tsx (4 changed lines)
  3. src/components/NoteOptions/useMenuActions.tsx (145 changed lines)
  4. src/hooks/usePublicationSectionLoader.ts (79 changed lines)
  5. src/lib/publication-rendered-events.ts (50 changed lines)
  6. src/lib/publication-section-fetch.ts (173 changed lines)

src/components/Note/MarkdownArticle/MarkdownArticle.tsx (22 changed lines)

@@ -3084,6 +3084,17 @@ function parseMarkdownContentMarked(
       if (/^https?:\/\/\S+$/i.test(line)) {
         const cleaned = cleanUrl(line)
         if (cleaned) {
+          if (isYouTubeUrl(cleaned)) {
+            return (
+              <div key={`${key}-line-youtube-${lineIdx}`} className="my-2">
+                <YoutubeEmbeddedPlayer
+                  url={cleaned}
+                  className="max-w-[400px]"
+                  mustLoad={false}
+                />
+              </div>
+            )
+          }
           if (isVideo(cleaned) || isAudio(cleaned)) {
             const poster = videoPosterMap?.get(cleaned)
             return (
@@ -3210,6 +3221,17 @@ function parseMarkdownContentMarked(
       if (/^https?:\/\/\S+$/i.test(paragraphText)) {
         const cleaned = cleanUrl(paragraphText)
         if (cleaned) {
+          if (isYouTubeUrl(cleaned)) {
+            return (
+              <div key={`${key}-youtube-url`} className="my-2">
+                <YoutubeEmbeddedPlayer
+                  url={cleaned}
+                  className="max-w-[400px]"
+                  mustLoad={false}
+                />
+              </div>
+            )
+          }
           if (isVideo(cleaned) || isAudio(cleaned)) {
             const poster = videoPosterMap?.get(cleaned)
             return (
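
Both hunks add the same guard: when a line or a whole paragraph consists of nothing but a URL, it is now checked with isYouTubeUrl before the existing video/audio branch, so YouTube links render an embedded player instead of falling through to the generic media handling. The helper itself is not part of this diff; a minimal sketch of what such a check plausibly looks like, assuming the usual youtube.com / youtu.be host forms:

// Hypothetical sketch only; the real isYouTubeUrl lives elsewhere in this repo.
function isYouTubeUrl(url: string): boolean {
  try {
    const host = new URL(url).hostname.replace(/^www\./, '').toLowerCase()
    // watch, shorts and share links all use one of these hosts
    return ['youtube.com', 'm.youtube.com', 'youtube-nocookie.com', 'youtu.be'].includes(host)
  } catch {
    return false // not an absolute, parseable URL
  }
}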

src/components/Note/PublicationIndex/PublicationIndex.tsx (4 changed lines)

@@ -17,6 +17,7 @@ import { extractBookMetadata } from '@/lib/bookstr-parser'
 import { dTagToTitleCase } from '@/lib/event-metadata'
 import Image from '@/components/Image'
 import NoteOptions from '@/components/NoteOptions'
+import { upsertRenderedPublicationEvents } from '@/lib/publication-rendered-events'

 interface PublicationReference {
   coordinate?: string
@@ -329,6 +330,9 @@ export default function PublicationIndex({
     const loaded = referencesWithEvents
       .filter((r) => r.event)
       .map((r) => r.event!)
+    if (loaded.length > 0) {
+      upsertRenderedPublicationEvents(event.id, loaded)
+    }
     if (loaded.length === 0) return
     const t = window.setTimeout(() => {
       void indexedDb.putPublicationWithNestedEvents(event, loaded).catch((err) => {

src/components/NoteOptions/useMenuActions.tsx (145 changed lines)

@@ -5,6 +5,12 @@ import { buildHiveTalkJoinUrl } from '@/lib/hivetalk'
 import { toAlexandria } from '@/lib/link'
 import logger from '@/lib/logger'
 import { formatPubkey, pubkeyToNpub } from '@/lib/pubkey'
+import {
+  batchFetchPublicationSectionEvents,
+  buildPublicationSectionRelayUrls,
+  parsePublicationATagCoordinate,
+  type PublicationSectionRef
+} from '@/lib/publication-section-fetch'
 import { normalizeUrl, simplifyUrl } from '@/lib/url'
 import { speakNoteReadAloud } from '@/lib/read-aloud'
 import {
@@ -456,6 +462,136 @@ export function useMenuActions({
   }, [isArticleType, event, dTag])

   const menuActions: MenuAction[] = useMemo(() => {
+    const rebroadcastEntirePublication = () => {
+      const rootPublication = event
+      closeDrawer()
+      const promise = (async () => {
+        if (rootPublication.kind !== ExtendedKind.PUBLICATION) {
+          throw new Error(t('This action is only available for publications'))
+        }
+        if (allAvailableRelayUrls.length === 0) {
+          throw new Error(t('No relays available'))
+        }
+        const MAX_NESTED_PUBLICATIONS = 128
+        const MAX_TOTAL_REBROADCAST_EVENTS = 5000
+        const collectedById = new Map<string, Event>()
+        const visitedPublicationIds = new Set<string>()
+        const queue: Event[] = [rootPublication]
+        let traversedPublications = 0
+
+        while (queue.length > 0) {
+          const currentPublication = queue.shift()!
+          if (visitedPublicationIds.has(currentPublication.id)) continue
+          visitedPublicationIds.add(currentPublication.id)
+          traversedPublications++
+          collectedById.set(currentPublication.id, currentPublication)
+
+          if (traversedPublications > MAX_NESTED_PUBLICATIONS) {
+            logger.warn('[NoteOptions] Rebroadcast publication traversal capped', {
+              rootId: rootPublication.id,
+              cap: MAX_NESTED_PUBLICATIONS
+            })
+            break
+          }
+
+          const refs: PublicationSectionRef[] = []
+          for (const tag of currentPublication.tags) {
+            if (tag[0] === 'a' && tag[1]) {
+              const parsed = parsePublicationATagCoordinate(tag[1])
+              if (!parsed) continue
+              refs.push({
+                type: 'a',
+                coordinate: parsed.coordinate,
+                kind: parsed.kind,
+                pubkey: parsed.pubkey,
+                identifier: parsed.identifier,
+                relay: tag[2]
+              })
+            } else if (tag[0] === 'e' && tag[1]) {
+              refs.push({
+                type: 'e',
+                eventId: tag[1],
+                relay: tag[2]
+              })
+            }
+          }
+          if (refs.length === 0) continue
+
+          const primaryRelays = await buildPublicationSectionRelayUrls(currentPublication, refs, 40, false)
+          const fallbackRelays = await buildPublicationSectionRelayUrls(currentPublication, refs, 80, true)
+          const relays = [...new Set([...primaryRelays, ...fallbackRelays, ...allAvailableRelayUrls])]
+          const resolved = await batchFetchPublicationSectionEvents(refs, relays)
+
+          for (const ev of resolved.values()) {
+            if (collectedById.size >= MAX_TOTAL_REBROADCAST_EVENTS) break
+            collectedById.set(ev.id, ev)
+            if (ev.kind === ExtendedKind.PUBLICATION && !visitedPublicationIds.has(ev.id)) {
+              queue.push(ev)
+            }
+          }
+          if (collectedById.size >= MAX_TOTAL_REBROADCAST_EVENTS) {
+            logger.warn('[NoteOptions] Rebroadcast event collection capped', {
+              rootId: rootPublication.id,
+              cap: MAX_TOTAL_REBROADCAST_EVENTS
+            })
+            break
+          }
+        }
+
+        const uniqueEvents = [...collectedById.values()]
+        if (uniqueEvents.length === 0) {
+          throw new Error(t('No publication events found for rebroadcast'))
+        }
+
+        const BATCH_SIZE = 6
+        let acceptedEvents = 0
+        let failedEvents = 0
+        let acceptedRelayAcks = 0
+        for (let i = 0; i < uniqueEvents.length; i += BATCH_SIZE) {
+          const batch = uniqueEvents.slice(i, i + BATCH_SIZE)
+          const batchResults = await Promise.allSettled(
+            batch.map(async (ev) => {
+              const result = await client.publishEvent(allAvailableRelayUrls, ev)
+              if (result.successCount > 0) {
+                acceptedEvents++
+                acceptedRelayAcks += result.successCount
+              } else {
+                failedEvents++
+              }
+            })
+          )
+          for (const res of batchResults) {
+            if (res.status === 'rejected') failedEvents++
+          }
+        }
+        if (acceptedEvents < 1) {
+          throw new Error(t('No publication events were accepted by any relay'))
+        }
+        return {
+          acceptedEvents,
+          failedEvents,
+          totalEvents: uniqueEvents.length,
+          acceptedRelayAcks,
+          traversedPublications
+        }
+      })()
+
+      toastPublishPromise(promise, {
+        loading: t('Rebroadcasting entire publication...'),
+        success: () => t('Rebroadcasted entire publication'),
+        error: (err) => t('Failed to rebroadcast entire publication: {{error}}', { error: err.message })
+      })
+    }
+
     // Export functions for articles
     const exportAsMarkdown = () => {
       if (!isArticleType) return
@@ -780,6 +916,15 @@ export function useMenuActions({
         })
       }
     }
+    if (event.kind === ExtendedKind.PUBLICATION) {
+      actions.push({
+        icon: SatelliteDish,
+        label: t('Rebroadcast entire publication'),
+        onClick: rebroadcastEntirePublication,
+        separator: true
+      })
+    }
   }

   const isProtected = isProtectedEvent(event)
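
The traversal above leans on parsePublicationATagCoordinate to turn a-tag values into typed PublicationSectionRef entries. Its body is not shown in this diff; a plausible sketch, assuming the standard NIP-01 kind:pubkey:identifier coordinate layout:

// Hypothetical sketch; the real parser is exported from '@/lib/publication-section-fetch'.
interface ParsedPublicationCoordinate {
  coordinate: string
  kind: number
  pubkey: string
  identifier: string
}

function parsePublicationATagCoordinate(value: string): ParsedPublicationCoordinate | null {
  const first = value.indexOf(':')
  const second = value.indexOf(':', first + 1)
  if (first <= 0 || second <= first) return null
  const kind = Number(value.slice(0, first))
  const pubkey = value.slice(first + 1, second).toLowerCase()
  const identifier = value.slice(second + 1) // a d-tag may itself contain ':'
  if (!Number.isInteger(kind) || !/^[0-9a-f]{64}$/.test(pubkey)) return null
  return { coordinate: value, kind, pubkey, identifier }
}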

src/hooks/usePublicationSectionLoader.ts (79 changed lines)

@@ -52,6 +52,7 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
   const indexId = indexEvent.id
   const refsSignature = useMemo(() => signatureOfRefs(refs), [refs])
   const [relayUrls, setRelayUrls] = useState<string[]>([])
+  const [fallbackRelayUrls, setFallbackRelayUrls] = useState<string[]>([])
   const [rows, setRows] = useState<Row[]>([])
   const inflightKeysRef = useRef<Set<string>>(new Set())
   const autoLoadedSignatureRef = useRef<string | null>(null)
@@ -79,15 +80,29 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
   useEffect(() => {
     let cancelled = false
     ;(async () => {
-      const primary = await buildPublicationSectionRelayUrls(indexEvent, refs, 22, false)
+      const primary = await buildPublicationSectionRelayUrls(indexEvent, refs, 30, false)
       if (cancelled) return
-      if (primary.length > 0) {
-        setRelayUrls(primary)
-        return
-      }
-      const fallback = await buildPublicationSectionRelayUrls(indexEvent, refs, 30, true)
+      if (import.meta.env.DEV) {
+        logger.info('[PublicationSection] relay_urls_primary', {
+          indexId,
+          count: primary.length,
+          relays: primary
+        })
+      }
+      setRelayUrls(primary)
+      const fallback = await buildPublicationSectionRelayUrls(indexEvent, refs, 60, true)
       if (cancelled) return
-      setRelayUrls(fallback)
+      if (import.meta.env.DEV) {
+        const uniqueExtra = fallback.filter((u) => !primary.includes(u))
+        logger.info('[PublicationSection] relay_urls_searchable_fallback', {
+          indexId,
+          count: fallback.length,
+          extraCount: uniqueExtra.length,
+          relays: fallback
+        })
+      }
+      setFallbackRelayUrls(fallback)
     })().catch((err) => {
       if (import.meta.env.DEV) {
         logger.warn('[PublicationSection] relay_build_failed', {
@@ -95,7 +110,10 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
           message: err instanceof Error ? err.message : String(err)
         })
       }
-      if (!cancelled) setRelayUrls([])
+      if (!cancelled) {
+        setRelayUrls([])
+        setFallbackRelayUrls([])
+      }
     })
     return () => {
       cancelled = true
@@ -131,6 +149,14 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
     async (keys: string[]) => {
       const selectedRows = rows.filter((r) => keys.includes(r.key))
       if (selectedRows.length === 0) return
+      if (import.meta.env.DEV) {
+        logger.info('[PublicationSection] run_fetch_start', {
+          indexId,
+          keyCount: selectedRows.length,
+          keys: selectedRows.map((r) => r.key),
+          relayCount: relayUrls.length
+        })
+      }

       const byDb = new Map<string, Event>()
       const stillNeed: Row[] = []
@@ -169,7 +195,33 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
       }

       const merged = new Map<string, Event>([...byDb, ...fromNet])
-      const unresolved = stillNeed.filter((r) => !merged.has(r.key))
+      let unresolved = stillNeed.filter((r) => !merged.has(r.key))
+
+      // Second pass: unresolved refs on broader searchable relay set.
+      if (unresolved.length > 0 && fallbackRelayUrls.length > 0) {
+        const fallbackOnly = fallbackRelayUrls.filter((u) => !relayUrls.includes(u))
+        const relaysForFallback = fallbackOnly.length > 0 ? fallbackRelayUrls : []
+        if (relaysForFallback.length > 0) {
+          if (import.meta.env.DEV) {
+            logger.info('[PublicationSection] searchable_fallback_start', {
+              unresolved: unresolved.map((r) => r.key),
+              relayCount: relaysForFallback.length
+            })
+          }
+          const fromSearchFallback = await batchFetchPublicationSectionEvents(
+            unresolved,
+            relaysForFallback
+          )
+          for (const [k, ev] of fromSearchFallback) merged.set(k, ev)
+          unresolved = unresolved.filter((r) => !merged.has(r.key))
+          if (import.meta.env.DEV) {
+            logger.info('[PublicationSection] searchable_fallback_done', {
+              fromSearchFallback: fromSearchFallback.size,
+              stillNeed: unresolved.map((r) => r.key)
+            })
+          }
+        }
+      }

       const bySingle = new Map<string, Event>()
       await Promise.all(
@@ -219,9 +271,18 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
         .map((r) => r.key)
         .filter((k) => !merged.has(k))

+      if (import.meta.env.DEV) {
+        logger.info('[PublicationSection] run_fetch_done', {
+          indexId,
+          loadedCount: merged.size,
+          failedCount: failed.length,
+          failedKeys: failed
+        })
+      }
       applyLoadedAndFailed(merged, failed)
     },
-    [applyLoadedAndFailed, relayUrls, rows]
+    [applyLoadedAndFailed, fallbackRelayUrls, relayUrls, rows]
   )

   const requestKeys = useCallback(
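
The loader now tracks two relay sets: a primary list (capped at 30) used for the first pass, and a broader searchable list (capped at 60) kept in fallbackRelayUrls. The fetch path retries only the still-unresolved keys against the fallback set, and only when that set actually adds relays beyond the primary list. A condensed sketch of that two-pass shape with the hook plumbing stripped away (everything except batchFetchPublicationSectionEvents is illustrative, and Row stands in for the hook's internal row type):

// Illustrative condensation of the two-pass fetch, not the hook itself.
async function fetchWithSearchableFallback(
  rows: Row[],
  primaryRelays: string[],
  fallbackRelays: string[]
): Promise<Map<string, Event>> {
  const merged = await batchFetchPublicationSectionEvents(rows, primaryRelays)
  const unresolved = rows.filter((r) => !merged.has(r.key))
  // Only pay for a second pass when the fallback set contributes new relays.
  const addsNewRelays = fallbackRelays.some((u) => !primaryRelays.includes(u))
  if (unresolved.length > 0 && addsNewRelays) {
    const second = await batchFetchPublicationSectionEvents(unresolved, fallbackRelays)
    for (const [key, ev] of second) merged.set(key, ev)
  }
  return merged
}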

src/lib/publication-rendered-events.ts (new file, 50 lines)

@@ -0,0 +1,50 @@
+import type { Event } from 'nostr-tools'
+
+const renderedByPublication = new Map<string, Map<string, Event>>()
+
+function normId(id: string): string {
+  return id.trim().toLowerCase()
+}
+
+export function upsertRenderedPublicationEvents(publicationId: string, events: Event[]): void {
+  const pubId = normId(publicationId)
+  let byId = renderedByPublication.get(pubId)
+  if (!byId) {
+    byId = new Map<string, Event>()
+    renderedByPublication.set(pubId, byId)
+  }
+  for (const ev of events) {
+    if (!ev?.id) continue
+    byId.set(normId(ev.id), ev)
+  }
+}
+
+export function getRenderedPublicationEvents(publicationId: string): Event[] {
+  const pubId = normId(publicationId)
+  return [...(renderedByPublication.get(pubId)?.values() ?? [])]
+}
+
+/**
+ * Deep collection for nested 30040 publications that were rendered in this session.
+ */
+export function getRenderedPublicationEventsDeep(publicationId: string, maxDepth = 6): Event[] {
+  const seenPublicationIds = new Set<string>()
+  const outByEventId = new Map<string, Event>()
+
+  const walk = (pubIdRaw: string, depth: number) => {
+    const pubId = normId(pubIdRaw)
+    if (depth > maxDepth || seenPublicationIds.has(pubId)) return
+    seenPublicationIds.add(pubId)
+    const direct = renderedByPublication.get(pubId)
+    if (!direct) return
+    for (const ev of direct.values()) {
+      outByEventId.set(normId(ev.id), ev)
+      if (ev.kind === 30040) {
+        walk(ev.id, depth + 1)
+      }
+    }
+  }
+
+  walk(publicationId, 0)
+  return [...outByEventId.values()]
+}
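
This new module is a session-scoped, in-memory registry: PublicationIndex feeds it as nested sections resolve, and later consumers can pull a flat event list without re-querying relays. A short usage sketch against the API exactly as defined above (the two declare placeholders are illustrative):

import type { Event } from 'nostr-tools'
import {
  upsertRenderedPublicationEvents,
  getRenderedPublicationEvents,
  getRenderedPublicationEventsDeep
} from '@/lib/publication-rendered-events'

declare const indexEvent: Event // a kind-30040 publication index (placeholder)
declare const loadedSectionEvents: Event[] // its resolved section events (placeholder)

// Record what this session has rendered for the publication:
upsertRenderedPublicationEvents(indexEvent.id, loadedSectionEvents)

// Direct sections only:
const direct = getRenderedPublicationEvents(indexEvent.id)

// Sections plus anything reachable through nested 30040 indexes (bounded depth):
const everything = getRenderedPublicationEventsDeep(indexEvent.id)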

src/lib/publication-section-fetch.ts (173 changed lines)

@@ -1,4 +1,5 @@
 import logger from '@/lib/logger'
+import { FAST_READ_RELAY_URLS } from '@/constants'
 import { publicationCoordinateLookupKeys, splitPublicationCoordinate } from '@/lib/publication-coordinate'
 import { buildComprehensiveRelayList } from '@/lib/relay-list-builder'
 import { normalizeUrl } from '@/lib/url'
@@ -69,10 +70,15 @@ export async function buildPublicationSectionRelayUrls(
   includeSearchableRelays = false
 ): Promise<string[]> {
   const hints = collectRelayHints(refs)
+  const fastReadRelays = FAST_READ_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter((u) => !!u)
+  const seenOnRelays = queryService
+    .getSeenEventRelayUrls(indexEvent.id)
+    .map((u) => normalizeUrl(u) || u)
+    .filter((u) => !!u)
   const urls = await buildComprehensiveRelayList({
     authorPubkey: indexEvent.pubkey,
     userPubkey: client.pubkey || undefined,
-    relayHints: hints,
+    relayHints: [...hints, ...seenOnRelays],
     includeUserOwnRelays: true,
     includeProfileFetchRelays: true,
     includeFastReadRelays: true,
@@ -80,16 +86,33 @@ export async function buildPublicationSectionRelayUrls(
     includeFavoriteRelays: true,
     includeLocalRelays: true
   })
-  const prioritized = [...new Set([...hints, ...urls])]
+  // Keep fast-read relays pinned at the front so slicing can never drop them.
+  const prioritized = [...new Set([...fastReadRelays, ...hints, ...seenOnRelays, ...urls])]
+  if (import.meta.env.DEV) {
+    logger.info('[PublicationSection] relay_urls_built', {
+      indexId: indexEvent.id,
+      includeSearchableRelays,
+      fastReadCount: fastReadRelays.length,
+      relayHintsCount: hints.length,
+      seenOnRelaysCount: seenOnRelays.length,
+      totalBeforeSlice: prioritized.length,
+      maxRelays,
+      hasAggr: prioritized.includes(normalizeUrl('wss://aggr.nostr.land') || 'wss://aggr.nostr.land'),
+      hasTheCitadel: prioritized.includes(
+        normalizeUrl('wss://thecitadel.nostr1.com') || 'wss://thecitadel.nostr1.com'
+      )
+    })
+  }
   return prioritized.slice(0, maxRelays)
 }

 const IDS_CHUNK = 44
 const D_CHUNK = 28
 const ANY_KIND_LIMIT_PER_D = 12
+const AUTHOR_KIND_SCAN_LIMIT = 200

 function dTagOf(ev: Event): string | undefined {
-  const d = ev.tags.find((t) => t[0] === 'd')?.[1]
+  const d = ev.tags.find((t) => (t[0] || '').trim().toLowerCase() === 'd')?.[1]
   return d && d.length > 0 ? d : undefined
 }
@@ -153,6 +176,22 @@ export async function batchFetchPublicationSectionEvents(
     }
   }

+  if (import.meta.env.DEV) {
+    logger.info('[PublicationSection] batch_filters_prepared', {
+      relayCount: relayUrls.length,
+      refCount: refs.length,
+      aRefCount: aRefs.length,
+      eRefCount: eRefs.length,
+      filterCount: filters.length,
+      filterPreview: filters.slice(0, 3).map((f) => ({
+        ids: Array.isArray(f.ids) ? f.ids.length : 0,
+        authors: Array.isArray(f.authors) ? f.authors.length : 0,
+        kinds: Array.isArray(f.kinds) ? f.kinds : [],
+        d: Array.isArray(f['#d']) ? f['#d'].slice(0, 4) : []
+      }))
+    })
+  }
+
   let events: Event[] = []
   if (filters.length > 0) {
     try {
@@ -242,6 +281,14 @@ export async function batchFetchPublicationSectionEvents(
       }
     }
     if (hintFilters.length === 0) continue
+    if (import.meta.env.DEV) {
+      logger.info('[PublicationSection] relay_hint_pass_start', {
+        relay,
+        refCount: relayRefs.length,
+        filterCount: hintFilters.length,
+        sampleKeys: relayRefs.map((r) => publicationRefKey(r)).slice(0, 6)
+      })
+    }
     try {
       const hintEvents = await queryService.fetchEvents([relay], hintFilters, {
         globalTimeout: 8_000,
@@ -268,8 +315,22 @@ export async function batchFetchPublicationSectionEvents(
         }
         if (ev) out.set(key, ev)
       }
+      if (import.meta.env.DEV) {
+        const unresolvedAfterRelay = relayRefs
+          .map((r) => publicationRefKey(r))
+          .filter((k) => !out.has(k))
+        logger.info('[PublicationSection] relay_hint_pass_done', {
+          relay,
+          eventsReturned: hintEvents.length,
+          unresolvedAfterRelayCount: unresolvedAfterRelay.length,
+          unresolvedAfterRelay: unresolvedAfterRelay.slice(0, 8)
+        })
+      }
     } catch {
       // ignore per-relay hint failures
+      if (import.meta.env.DEV) {
+        logger.warn('[PublicationSection] relay_hint_pass_error', { relay, filterCount: hintFilters.length })
+      }
     }
   }
@@ -301,6 +362,13 @@ export async function batchFetchPublicationSectionEvents(
      }
    }
    if (fallbackFilters.length > 0) {
+      if (import.meta.env.DEV) {
+        logger.info('[PublicationSection] any_kind_fallback_start', {
+          relayCount: relayUrls.length,
+          filterCount: fallbackFilters.length,
+          unresolvedBefore: unresolvedAfterHint.map((r) => publicationRefKey(r)).slice(0, 12)
+        })
+      }
      try {
        const fallbackEvents = await queryService.fetchEvents(relayUrls, fallbackFilters, {
          globalTimeout: 10_000,
@@ -330,8 +398,96 @@ export async function batchFetchPublicationSectionEvents(
          }
          out.set(key, newest)
        }
+        if (import.meta.env.DEV) {
+          const unresolvedAfterAnyKind = unresolvedAfterHint
+            .map((r) => publicationRefKey(r))
+            .filter((k) => !out.has(k))
+          logger.info('[PublicationSection] any_kind_fallback_done', {
+            eventsReturned: fallbackEvents.length,
+            unresolvedAfterAnyKindCount: unresolvedAfterAnyKind.length,
+            unresolvedAfterAnyKind: unresolvedAfterAnyKind.slice(0, 10)
+          })
+        }
      } catch {
        // ignore fallback errors
+        if (import.meta.env.DEV) {
+          logger.warn('[PublicationSection] any_kind_fallback_error', { filterCount: fallbackFilters.length })
+        }
      }
    }
  }

+  // Final robust fallback for relays that do not properly index `#d`:
+  // scan author + kind and match d-tag client-side.
+  const unresolvedAfterAll = aRefs.filter((r) => !out.has(publicationRefKey(r)))
+  if (unresolvedAfterAll.length > 0) {
+    const scanFilters: Filter[] = []
+    const scanGroups = new Map<string, { pubkey: string; kind: number }>()
+    for (const ref of unresolvedAfterAll) {
+      const key = `${ref.pubkey!.toLowerCase()}:${ref.kind!}`
+      if (!scanGroups.has(key)) {
+        scanGroups.set(key, { pubkey: ref.pubkey!.toLowerCase(), kind: ref.kind! })
+      }
+    }
+    for (const g of scanGroups.values()) {
+      scanFilters.push({
+        authors: [g.pubkey],
+        kinds: [g.kind],
+        limit: AUTHOR_KIND_SCAN_LIMIT
+      })
+    }
+    if (scanFilters.length > 0) {
+      if (import.meta.env.DEV) {
+        logger.info('[PublicationSection] author_kind_scan_start', {
+          filterCount: scanFilters.length,
+          relayCount: relayUrls.length,
+          unresolvedCount: unresolvedAfterAll.length,
+          unresolvedKeys: unresolvedAfterAll.map((r) => publicationRefKey(r)).slice(0, 10)
+        })
+      }
+      try {
+        const scanEvents = await queryService.fetchEvents(relayUrls, scanFilters, {
+          globalTimeout: 12_000,
+          eoseTimeout: 2_000,
+          firstRelayResultGraceMs: false
+        })
+        const scanByCoord = new Map<string, Event>()
+        for (const ev of scanEvents) {
+          const coord = coordinateOfEvent(ev)
+          if (!coord) continue
+          for (const k of publicationCoordinateLookupKeys(coord)) {
+            const prev = scanByCoord.get(k)
+            if (!prev || ev.created_at > prev.created_at) scanByCoord.set(k, ev)
+          }
+        }
+        for (const ref of unresolvedAfterAll) {
+          const key = publicationRefKey(ref)
+          if (out.has(key)) continue
+          const coord = ref.coordinate!
+          let ev: Event | undefined
+          for (const lk of publicationCoordinateLookupKeys(coord)) {
+            ev = scanByCoord.get(lk)
+            if (ev) break
+          }
+          if (ev) out.set(key, ev)
+        }
+        if (import.meta.env.DEV) {
+          const unresolvedAfterScan = unresolvedAfterAll
+            .map((r) => publicationRefKey(r))
+            .filter((k) => !out.has(k))
+          logger.info('[PublicationSection] author_kind_scan_done', {
+            eventsReturned: scanEvents.length,
+            resolvedTotal: out.size,
+            unresolvedAfterScanCount: unresolvedAfterScan.length,
+            unresolvedAfterScan: unresolvedAfterScan.slice(0, 10)
+          })
+        }
+      } catch {
+        if (import.meta.env.DEV) {
+          logger.warn('[PublicationSection] author_kind_scan_error', {
+            filterCount: scanFilters.length
+          })
+        }
+      }
+    }
+  }
@@ -339,6 +495,14 @@ export async function batchFetchPublicationSectionEvents(
   if (import.meta.env.DEV) {
     const unmatchedA = aRefs.filter((r) => !out.has(publicationRefKey(r)))
     const unmatchedE = eRefs.filter((r) => !out.has(publicationRefKey(r)))
+    const sampleEvents = events.slice(0, 8).map((ev) => ({
+      id: ev.id,
+      kind: ev.kind,
+      pubkey: ev.pubkey.slice(0, 12),
+      created_at: ev.created_at,
+      tagNames: ev.tags.slice(0, 8).map((t) => String(t[0] || '')),
+      dTag: dTagOf(ev)
+    }))
     logger.info('[PublicationSection] batch_fetch_result', {
       relayCount: relayUrls.length,
       filterCount: filters.length,
@@ -347,7 +511,8 @@ export async function batchFetchPublicationSectionEvents(
       resolved: out.size,
       unmatchedACount: unmatchedA.length,
       unmatchedECount: unmatchedE.length,
-      unmatchedAKeys: unmatchedA.map((r) => publicationRefKey(r)).slice(0, 12)
+      unmatchedAKeys: unmatchedA.map((r) => publicationRefKey(r)).slice(0, 12),
+      sampleEvents
     })
   }
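
Taken together, batchFetchPublicationSectionEvents now degrades through four stages: batched id and #d filters across the merged relay list, per-relay passes against each ref's own relay hint, an any-kind #d fallback for relays that mislabel kinds, and finally the new author+kind scan that matches d-tags client-side for relays that do not index #d at all. A compressed sketch of that control flow; the stage helpers are placeholders standing in for the passes shown above, not real exports:

// Condensed control flow only; each stage* name is a placeholder.
import type { Event, Filter } from 'nostr-tools'

declare function publicationRefKey(r: PublicationSectionRef): string
declare function stageBatchFilters(refs: PublicationSectionRef[], relays: string[], out: Map<string, Event>): Promise<void>
declare function stageRelayHints(refs: PublicationSectionRef[], out: Map<string, Event>): Promise<void>
declare function stageAnyKindDTag(refs: PublicationSectionRef[], relays: string[], out: Map<string, Event>): Promise<void>
declare function stageAuthorKindScan(refs: PublicationSectionRef[], relays: string[], out: Map<string, Event>): Promise<void>

async function resolveSectionEvents(refs: PublicationSectionRef[], relays: string[]) {
  const out = new Map<string, Event>()
  const pending = () => refs.filter((r) => !out.has(publicationRefKey(r)))
  await stageBatchFilters(pending(), relays, out)   // 1: ids + authors/kinds/#d in chunks
  await stageRelayHints(pending(), out)             // 2: each ref's own relay hint
  await stageAnyKindDTag(pending(), relays, out)    // 3: #d match without kind constraint
  await stageAuthorKindScan(pending(), relays, out) // 4: scan author+kind, match d client-side
  return out
}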
