From 17664c7f0200d63c6b38a813a6c82ef301458e21 Mon Sep 17 00:00:00 2001
From: Silberengel
Date: Tue, 31 Mar 2026 15:50:59 +0200
Subject: [PATCH] bug-fixes

---
 .../Note/AsciidocArticle/AsciidocArticle.tsx  | 32 +++-----
 .../Note/MarkdownArticle/MarkdownArticle.tsx  | 39 ++++-----
 .../PublicationIndex/PublicationIndex.tsx     |  2 +-
 src/hooks/usePublicationSectionLoader.ts      | 76 +++++++++++++----
 src/lib/publication-section-fetch.ts          | 81 ++++++++++++++++++-
 src/services/client-query.service.ts          | 74 +++++++++++++++--
 6 files changed, 235 insertions(+), 69 deletions(-)

diff --git a/src/components/Note/AsciidocArticle/AsciidocArticle.tsx b/src/components/Note/AsciidocArticle/AsciidocArticle.tsx
index d83b828f..a71868a8 100644
--- a/src/components/Note/AsciidocArticle/AsciidocArticle.tsx
+++ b/src/components/Note/AsciidocArticle/AsciidocArticle.tsx
@@ -927,25 +927,19 @@ export default function AsciidocArticle({
       return match
     })
 
-    // Handle plain HTTP/HTTPS URLs in text (not in tags, not YouTube, not relay) - convert to regular links
-    // NO WebPreview conversion for AsciiDoc articles
-    const httpUrlRegex = /https?:\/\/[^\s<>"']+/g
-    htmlString = htmlString.replace(httpUrlRegex, (match) => {
-      // Only replace if not already in a tag (basic check)
-      if (!match.includes('<') && !match.includes('>')) {
-        // Skip if it's a YouTube URL or relay URL (already handled)
-        if (isYouTubeUrl(match) || isWebsocketUrl(match)) {
-          return match
-        }
-        // Skip if it's an image or media URL (handled separately)
-        if (isImage(match) || isVideo(match) || isAudio(match)) {
-          return match
-        }
-        // Convert to regular link - NO WebPreview
-        const cleanedUrl = cleanUrl(match)
-        return `<a href="${cleanedUrl}">${match}</a>`
-      }
-      return match
+    // Handle plain HTTP/HTTPS URLs in text nodes only (not inside existing HTML tags/attributes).
+    // Regex-linkifying the full HTML string can corrupt existing anchor tags.
+    htmlString = htmlString.replace(/>([^<]+)</g, (_match: string, textContent: string) => {
+      const httpUrlRegex = /https?:\/\/[^\s<>"']+/g
+      const replacedText = textContent.replace(httpUrlRegex, (rawUrl: string) => {
+        // Skip URLs that are handled elsewhere.
+        if (isYouTubeUrl(rawUrl) || isWebsocketUrl(rawUrl)) return rawUrl
+        if (isImage(rawUrl) || isVideo(rawUrl) || isAudio(rawUrl)) return rawUrl
+        const cleanedUrl = cleanUrl(rawUrl)
+        if (!cleanedUrl) return rawUrl
+        return `<a href="${cleanedUrl}">${rawUrl}</a>`
+      })
+      return `>${replacedText}<`
     })
 
     setParsedHtml(htmlString)
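The rewritten pass only touches text that sits between a closing '>' and the next opening '<'. A minimal standalone sketch of the same technique; helper names here are illustrative, not from the codebase:

const URL_RE = /https?:\/\/[^\s<>"']+/g

function isSkippableUrl(url: string): boolean {
  // Stand-in for the real isYouTubeUrl/isWebsocketUrl/isImage/isVideo/isAudio checks.
  return /youtube\.com|youtu\.be/.test(url)
}

function linkifyTextNodes(html: string): string {
  // Only rewrite the text between '>' and '<', so URLs inside attributes
  // (e.g. an existing href="...") are never touched.
  return html.replace(/>([^<]+)</g, (_m, text: string) => {
    const replaced = text.replace(URL_RE, (url) =>
      isSkippableUrl(url) ? url : `<a href="${url}">${url}</a>`
    )
    return `>${replaced}<`
  })
}

// The pre-existing anchor stays intact; the bare URL becomes a link.
console.log(linkifyTextNodes('<p>see <a href="https://a.example">x</a> and https://b.example</p>'))

Note the same limitation as the patch: text before the first tag or after the last one is not scanned, which matters little for Asciidoctor output, where prose is always wrapped in elements.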
diff --git a/src/components/Note/MarkdownArticle/MarkdownArticle.tsx b/src/components/Note/MarkdownArticle/MarkdownArticle.tsx
index 1770d33b..ff660f8a 100644
--- a/src/components/Note/MarkdownArticle/MarkdownArticle.tsx
+++ b/src/components/Note/MarkdownArticle/MarkdownArticle.tsx
@@ -2943,8 +2943,10 @@ function parseMarkdownContentMarked(
       const src = String(token.href ?? '')
       const cleaned = cleanUrl(src)
       if (!cleaned) break
+      // Inline context: avoid block image/media mounts inside <p>/<li>.
+      // Standalone image paragraphs are handled separately in renderParagraph().
+      const label = String(token.text ?? src)
       if (isVideo(cleaned) || isAudio(cleaned)) {
-        // Inline context: do NOT mount block media players inside paragraph flow.
         out.push(
-          <a href={cleaned}>{src}</a>
+          <a href={cleaned}>{label}</a>
         )
         break
       }
       if (!isImage(cleaned) || !isSafeMediaUrl(cleaned)) {
-        // Non-HTTP image tokens (e.g. npub...) must not be passed to image/media components.
-        out.push(
-          <span>
-            {src}
-          </span>
-        )
+        out.push(<span>{label}</span>)
         break
       }
-      const identifier = getImageIdentifier?.(cleaned)
-      const thumbnail =
-        imageThumbnailMap?.get(cleaned) ??
-        (identifier ? imageThumbnailMap?.get(`__img_id:${identifier}`) : undefined)
-      const imageUrl = thumbnail || src
-      const imageIdx = imageIndexMap.get(cleaned)
       out.push(
-        <img
-          src={imageUrl}
-          alt={token.text}
-          onClick={(e) => {
-            e.stopPropagation()
-            if (typeof imageIdx === 'number') openLightbox(imageIdx)
-          }}
-        />
+        <a href={cleaned}>
+          {label}
+        </a>
       )
       break
     }
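The inline-token branch above now renders every link/image token as a plain anchor or text node and defers real image mounts to renderParagraph(). A reduced sketch of that decision, with stand-ins for the real cleanUrl/isImage/isSafeMediaUrl helpers (names and shapes assumed):

type InlineNode =
  | { kind: 'link'; href: string; label: string }
  | { kind: 'text'; label: string }

function renderInlineMediaToken(href: string | undefined, text: string | undefined): InlineNode {
  const src = String(href ?? '')
  const label = String(text ?? src)
  let cleaned: string | null
  try {
    cleaned = new URL(src).toString() // stand-in for the real cleanUrl()
  } catch {
    cleaned = null
  }
  // Non-HTTP tokens (e.g. npub1...) must never reach image/media components.
  if (!cleaned || !/^https?:/.test(cleaned)) return { kind: 'text', label }
  // Inline context: always a plain anchor, never a block <img> or player mount.
  return { kind: 'link', href: cleaned, label }
}

console.log(renderInlineMediaToken('https://example.com/pic.png', 'a picture')) // link
console.log(renderInlineMediaToken('npub1xyz', undefined))                      // text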
diff --git a/src/components/Note/PublicationIndex/PublicationIndex.tsx b/src/components/Note/PublicationIndex/PublicationIndex.tsx
index 02d07d4f..a8d47c9d 100644
--- a/src/components/Note/PublicationIndex/PublicationIndex.tsx
+++ b/src/components/Note/PublicationIndex/PublicationIndex.tsx
@@ -626,7 +626,7 @@ export default function PublicationIndex({
     )
   }
 
-  const eventKind = ref.kind || ref.event.kind
+  const eventKind = ref.event?.kind ?? ref.kind ?? 0
   const effectiveParentImageUrl = !isNested ? metadata.image : parentImageUrl
 
   if (eventKind === ExtendedKind.PUBLICATION) {
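The one-line fix swaps `||` for optional chaining plus `??`. That matters twice over: kind 0 (profile metadata) is falsy, and ref.event may be absent. A worked example with the types trimmed to the relevant fields:

type SectionRefLike = { kind?: number; event?: { kind: number } }

function effectiveKind(ref: SectionRefLike): number {
  return ref.event?.kind ?? ref.kind ?? 0
}

console.log(effectiveKind({ kind: 0 }))                // 0 - the old `||` skipped this
console.log(effectiveKind({ event: { kind: 30041 } })) // 30041
console.log(effectiveKind({}))                         // 0 - the old code crashed here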
diff --git a/src/hooks/usePublicationSectionLoader.ts b/src/hooks/usePublicationSectionLoader.ts
index 5e88e923..45630e4b 100644
--- a/src/hooks/usePublicationSectionLoader.ts
+++ b/src/hooks/usePublicationSectionLoader.ts
@@ -15,6 +15,7 @@ import type { Event } from 'nostr-tools'
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
 
 const PUB_SEC_LOG = '[PublicationSection]'
+const SINGLE_REF_FALLBACK_TIMEOUT_MS = 7000
 function pubLog(message: string, data?: Record<string, unknown>) {
   if (!import.meta.env.DEV) return
   if (data) logger.info(`${PUB_SEC_LOG} ${message}`, data)
@@ -60,13 +61,18 @@ async function hydrateRefsFromIndexedDb(refs: PublicationSectionRef[]): Promise<void> {
 }
 
 async function fetchSingleRefFallback(ref: PublicationSectionRef): Promise<Event | undefined> {
+  const withTimeout = <T>(p: Promise<T>, ms: number): Promise<T | undefined> =>
+    new Promise((resolve) => {
+      const t = setTimeout(() => resolve(undefined), ms)
+      p.then((v) => resolve(v)).catch(() => resolve(undefined)).finally(() => clearTimeout(t))
+    })
   try {
     if (ref.type === 'a' && ref.coordinate) {
       const bech32 = generateBech32IdFromATag(['a', ref.coordinate, ref.relay || '', ''])
-      if (bech32) return await eventService.fetchEvent(bech32)
+      if (bech32) return await withTimeout(eventService.fetchEvent(bech32), SINGLE_REF_FALLBACK_TIMEOUT_MS)
     }
     if (ref.type === 'e' && ref.eventId) {
-      return await eventService.fetchEvent(ref.eventId)
+      return await withTimeout(eventService.fetchEvent(ref.eventId), SINGLE_REF_FALLBACK_TIMEOUT_MS)
     }
   } catch {
     /* ignore */
@@ -87,36 +93,51 @@ export function usePublicationSectionLoader(indexEvent: Event, referencesData: PublicationSectionRef[]) {
     }
     return keys
   }, [referencesData])
+  const orderedKeysSignature = useMemo(() => orderedKeys.join('|'), [orderedKeys])
 
   const [rows, setRows] = useState<Map<string, SectionRow>>(() => new Map())
   const rowsRef = useRef(rows)
   rowsRef.current = rows
 
   useEffect(() => {
-    const m = new Map()
-    for (const ref of referencesData) {
-      const k = refKey(ref)
-      if (!k) continue
-      m.set(k, { ref, status: 'idle' })
-    }
-    setRows(m)
-  }, [referencesData])
+    // Preserve per-key load state across rerenders to avoid reinitializing rows to idle
+    // when parent components recreate reference objects.
+    setRows((prev) => {
+      const next = new Map()
+      for (const ref of referencesData) {
+        const k = refKey(ref)
+        if (!k) continue
+        const existing = prev.get(k)
+        if (existing) {
+          next.set(k, { ...existing, ref })
+        } else {
+          next.set(k, { ref, status: 'idle' })
+        }
+      }
+      return next
+    })
+  }, [orderedKeysSignature, referencesData])
 
   const relayUrlsRef = useRef<string[]>([])
+  const searchableRelayUrlsRef = useRef<string[]>([])
   const [relayReady, setRelayReady] = useState(false)
   useEffect(() => {
     let cancelled = false
     void (async () => {
-      const urls = await buildPublicationSectionRelayUrls(indexEvent, referencesData)
+      const [urls, searchableUrls] = await Promise.all([
+        buildPublicationSectionRelayUrls(indexEvent, referencesData, 22, false),
+        buildPublicationSectionRelayUrls(indexEvent, referencesData, 40, true)
+      ])
       if (cancelled) return
       relayUrlsRef.current = urls
+      searchableRelayUrlsRef.current = searchableUrls
      setRelayReady(true)
     })()
     return () => {
       cancelled = true
     }
-  }, [indexEvent, referencesData])
+  }, [indexEvent.id, orderedKeysSignature])
 
   const pendingRef = useRef(new Set<string>())
   const debounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null)
@@ -136,7 +157,11 @@
     for (const k of keys) {
       const row = snapshot.get(k)
       if (!row) continue
-      if (row.status === 'loaded' && row.event) continue
+      // Auto-queue should only process idle rows.
+      // - loaded rows are done
+      // - loading rows are already in-flight
+      // - error rows require explicit retry via retry button
+      if (row.status !== 'idle') continue
       refsToLoad.push(row.ref)
     }
@@ -202,6 +227,25 @@
       }
     }
 
+    stillNeed = refsToLoad.filter((r) => !resolved.has(refKey(r)))
+    if (stillNeed.length > 0) {
+      const searchableUrls = searchableRelayUrlsRef.current
+      const hasAdditionalSearchable = searchableUrls.some((u) => !urls.includes(u))
+      if (hasAdditionalSearchable) {
+        const fromSearchFallback = await batchFetchPublicationSectionEvents(stillNeed, searchableUrls)
+        pubLog('after_searchable_fallback', {
+          fromSearchFallback: fromSearchFallback.size,
+          stillNeedBefore: stillNeed.length,
+          relayCount: searchableUrls.length
+        })
+        for (const [k, ev] of fromSearchFallback) {
+          resolved.set(k, ev)
+          client.addEventToCache(ev)
+          if (isReplaceableEvent(ev.kind)) void indexedDb.putReplaceableEvent(ev)
+        }
+      }
+    }
+
     const missing = refsToLoad.filter((r) => !resolved.has(refKey(r)))
     pubLog('before_fallback', {
       missing: missing.map((r) => refKey(r)),
@@ -249,7 +293,9 @@
       flushInFlightRef.current = false
       // While a batch was in flight, debounced runFlush() calls may have returned early
       // (flush lock). Drain any keys that accumulated so scroll-triggered sections still load.
-      if (pendingRef.current.size > 0) {
+      // IMPORTANT: if relay URLs are not ready yet, do NOT spin in a tight retry loop.
+      // The relayReady effect will trigger requestKeys() once relays are available.
+      if (pendingRef.current.size > 0 && relayUrlsRef.current.length > 0) {
         if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current)
         debounceTimerRef.current = setTimeout(() => {
           debounceTimerRef.current = null
@@ -277,7 +323,7 @@
     if (!relayReady || orderedKeys.length === 0) return
     // Full list: scroll-IO may have fired before relays were ready; those keys were re-queued idle.
     requestKeys(orderedKeys)
-  }, [relayReady, orderedKeys, requestKeys])
+  }, [relayReady, orderedKeysSignature, requestKeys])
 
   const failedKeys = useMemo(
     () => [...rows.entries()].filter(([, v]) => v.status === 'error').map(([k]) => k),
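Two related patterns carry this hook: a joined-string signature as an effect dependency, and a functional setRows update that carries prior state forward. The signature trick in isolation (useStableKeysEffect is an illustrative name, not part of the codebase):

import { useEffect, useMemo } from 'react'

function useStableKeysEffect(keys: string[], onChange: (keys: string[]) => void) {
  // referencesData-style arrays are often recreated every render; joining the
  // stable keys yields a primitive dependency that only changes with content.
  const signature = useMemo(() => keys.join('|'), [keys])
  useEffect(() => {
    // Re-runs only when the *content* of keys changes, not their array identity.
    onChange(keys)
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [signature])
}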
diff --git a/src/lib/publication-section-fetch.ts b/src/lib/publication-section-fetch.ts
index e8739492..b25fa16e 100644
--- a/src/lib/publication-section-fetch.ts
+++ b/src/lib/publication-section-fetch.ts
@@ -3,8 +3,10 @@ import { publicationCoordinateLookupKeys } from '@/lib/publication-coordinate'
 import { buildComprehensiveRelayList } from '@/lib/relay-list-builder'
 import { normalizeUrl } from '@/lib/url'
 import client, { queryService } from '@/services/client.service'
+import { ExtendedKind } from '@/constants'
 import type { Event, Filter } from 'nostr-tools'
 import { nip19 } from 'nostr-tools'
+import { kinds } from 'nostr-tools'
 
 /** Parsed a/e reference from publication index tags (same shape as PublicationIndex uses). */
 export type PublicationSectionRef = {
@@ -82,7 +84,8 @@ function collectRelayHints(refs: PublicationSectionRef[]): string[] {
 export async function buildPublicationSectionRelayUrls(
   indexEvent: Event,
   refs: PublicationSectionRef[],
-  maxRelays = 22
+  maxRelays = 22,
+  includeSearchableRelays = false
 ): Promise<string[]> {
   const hints = collectRelayHints(refs)
   const urls = await buildComprehensiveRelayList({
@@ -92,7 +95,7 @@
     includeUserOwnRelays: true,
     includeProfileFetchRelays: true,
     includeFastReadRelays: true,
-    includeSearchableRelays: false,
+    includeSearchableRelays,
     includeFavoriteRelays: true,
     includeLocalRelays: true
   })

 const IDS_CHUNK = 44
 const D_TAGS_CHUNK = 28
+const SECTION_KIND_FALLBACK_CANDIDATES = [
+  ExtendedKind.PUBLICATION_CONTENT, // 30041
+  ExtendedKind.WIKI_ARTICLE, // 30818
+  ExtendedKind.WIKI_ARTICLE_MARKDOWN, // 30817
+  kinds.LongFormArticle, // 30023
+  kinds.ShortTextNote // 1
+] as number[]
 
 function coordinateFromEvent(ev: Event): string {
   const d = ev.tags.find((t) => t[0] === 'd')?.[1] ?? ''
@@ -216,6 +226,73 @@ export async function batchFetchPublicationSectionEvents(
     if (ev) out.set(key, ev)
   }
 
+  // Fallback for mismatched/legacy kind in `a` tags:
+  // retry unresolved refs by author + #d across common section kinds.
+  const unresolvedARefs = aRefs.filter((r) => !out.has(publicationRefKey(r)))
+  if (unresolvedARefs.length > 0) {
+    const fallbackGroups = new Map<string, { pubkey: string; dTags: string[] }>()
+    for (const r of unresolvedARefs) {
+      const pubkey = r.pubkey?.toLowerCase()
+      const idf = r.identifier ?? r.coordinate?.split(':').slice(2).join(':')
+      if (!pubkey || !idf) continue
+      let g = fallbackGroups.get(pubkey)
+      if (!g) {
+        g = { pubkey, dTags: [] }
+        fallbackGroups.set(pubkey, g)
+      }
+      g.dTags.push(idf)
+    }
+
+    const fallbackFilters: Filter[] = []
+    for (const g of fallbackGroups.values()) {
+      const uniqueD = [...new Set(g.dTags)]
+      for (let i = 0; i < uniqueD.length; i += D_TAGS_CHUNK) {
+        const dChunk = uniqueD.slice(i, i + D_TAGS_CHUNK)
+        fallbackFilters.push({
+          authors: [g.pubkey],
+          kinds: [...SECTION_KIND_FALLBACK_CANDIDATES],
+          '#d': dChunk,
+          limit: dChunk.length * SECTION_KIND_FALLBACK_CANDIDATES.length
+        })
+      }
+    }
+
+    if (fallbackFilters.length > 0) {
+      try {
+        const fallbackEvents = await queryService.fetchEvents(relayUrls, fallbackFilters, {
+          globalTimeout: 10_000,
+          eoseTimeout: 2_000,
+          firstRelayResultGraceMs: false
+        })
+        const byAuthorAndD = new Map<string, Event>()
+        for (const ev of fallbackEvents) {
+          const d = ev.tags.find((t) => t[0] === 'd')?.[1]
+          if (!d) continue
+          const k = `${ev.pubkey.toLowerCase()}:${d}`
+          const prev = byAuthorAndD.get(k)
+          if (!prev || ev.created_at > prev.created_at) byAuthorAndD.set(k, ev)
+        }
+        for (const r of unresolvedARefs) {
+          const key = publicationRefKey(r)
+          if (out.has(key)) continue
+          const pubkey = r.pubkey?.toLowerCase()
+          const idf = r.identifier ?? r.coordinate?.split(':').slice(2).join(':')
+          if (!pubkey || !idf) continue
+          const ev = byAuthorAndD.get(`${pubkey}:${idf}`)
+          if (ev) out.set(key, ev)
+        }
+      } catch (err) {
+        if (import.meta.env.DEV) {
+          logger.warn('[PublicationSection] batch_fetch_fallback_error', {
+            message: err instanceof Error ? err.message : String(err),
+            filterCount: fallbackFilters.length,
+            relayCount: relayUrls.length
+          })
+        }
+      }
+    }
+  }
+
   for (const r of aRefs) {
     const key = publicationRefKey(r)
     const coord = r.coordinate!
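The new fallback re-queries by author and #d across candidate section kinds when the kind baked into the `a` tag is stale. Roughly what one generated filter looks like (the coordinate values are made up for illustration):

import type { Filter } from 'nostr-tools'

// Hypothetical ref whose `a` tag says kind 30040, but whose section was
// actually published under another kind: the coordinate lookup misses,
// and this author+#d filter recovers it.
const pubkey = 'author-pubkey-hex-placeholder' // 64-char hex in the real flow
const fallbackFilter: Filter = {
  authors: [pubkey],
  kinds: [30041, 30818, 30817, 30023, 1], // SECTION_KIND_FALLBACK_CANDIDATES
  '#d': ['my-book-chapter-1'],            // identifier part of the coordinate
  limit: 5                                // dChunk.length * candidate-kind count
}

Among multiple hits for the same author+d pair, the newest created_at wins, mirroring replaceable-event resolution.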
diff --git a/src/services/client-query.service.ts b/src/services/client-query.service.ts
index 93bd7820..eaabb59b 100644
--- a/src/services/client-query.service.ts
+++ b/src/services/client-query.service.ts
@@ -28,7 +28,7 @@ import { isHttpRelayUrl, normalizeHttpRelayUrl, normalizeUrl } from '@/lib/url'
 import { RelaySubscribeOpBatch } from '@/services/relay-operation-log.service'
 import { patchRelayNoticeForFetchFailures } from '@/services/relay-notice-strike'
 import type { Filter, Event as NEvent } from 'nostr-tools'
-import { SimplePool, EventTemplate, VerifiedEvent } from 'nostr-tools'
+import { SimplePool, EventTemplate, VerifiedEvent, nip19 } from 'nostr-tools'
 import type { AbstractRelay } from 'nostr-tools/abstract-relay'
 import nip66Service from './nip66.service'
 import type { ISigner, TSignerType } from '@/types'
@@ -40,6 +40,58 @@ function filterForRelay(f: Filter, relaySupportsSearch: boolean): Filter {
   return rest as Filter
 }
 
+const HEX_EVENT_ID_RE = /^[0-9a-f]{64}$/i
+
+function decodeEventRefForETagFilter(raw: string): string | null {
+  const trimmed = raw.trim()
+  if (!trimmed) return null
+  const withoutPrefix = trimmed.toLowerCase().startsWith('nostr:') ? trimmed.slice(6).trim() : trimmed
+  if (HEX_EVENT_ID_RE.test(withoutPrefix)) return withoutPrefix.toLowerCase()
+  try {
+    const decoded = nip19.decode(withoutPrefix)
+    if (decoded.type === 'note') return decoded.data
+    if (decoded.type === 'nevent') return decoded.data.id
+  } catch {
+    /* ignore */
+  }
+  return null
+}
+
+function sanitizeETagFilter(filter: Filter): Filter | null {
+  const f = { ...filter } as Filter & { '#e'?: string[]; '#E'?: string[] }
+  const rawLower = Array.isArray(f['#e']) ? f['#e'] : []
+  const rawUpper = Array.isArray(f['#E']) ? f['#E'] : []
+  if (rawLower.length === 0 && rawUpper.length === 0) return f
+  const rawAll = [...rawLower, ...rawUpper]
+  const decoded = [
+    ...new Set(
+      rawAll
+        .map((v) => decodeEventRefForETagFilter(String(v)))
+        .filter((v): v is string => !!v)
+    )
+  ]
+  if (import.meta.env.DEV && decoded.length !== rawAll.length) {
+    const invalidSample = rawAll
+      .map((v) => String(v))
+      .filter((v) => !decodeEventRefForETagFilter(v))
+      .slice(0, 3)
+    logger.info('[QueryService] sanitized invalid #e/#E refs before REQ', {
+      inputCount: rawAll.length,
+      decodedCount: decoded.length,
+      invalidSample
+    })
+  }
+  if (decoded.length === 0) return null
+  f['#e'] = decoded
+  delete f['#E']
+  return f
+}
+
+function sanitizeFiltersBeforeReq(filter: Filter | Filter[]): Filter[] {
+  const asArray = Array.isArray(filter) ? filter : [filter]
+  return asArray.map(sanitizeETagFilter).filter((f): f is Filter => !!f)
+}
+
 export interface QueryOptions {
   eoseTimeout?: number
   globalTimeout?: number
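The sanitizer's contract spelled out on data; a sketch, not a test from the repo:

import type { Filter } from 'nostr-tools'

const hexId = 'f'.repeat(64)
const dirty: Filter & { '#E'?: string[] } = {
  kinds: [1],
  '#e': [hexId.toUpperCase(), 'nostr:' + hexId, 'not-an-event-id'],
  '#E': [hexId]
}
// sanitizeETagFilter(dirty) yields { kinds: [1], '#e': [hexId] }:
// uppercase hex is lowercased, the nostr: prefix is stripped, junk is dropped,
// '#E' is folded into '#e' and deleted, and the Set collapses duplicates.
// A filter whose refs are *all* junk comes back as null and is removed entirely.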
@@ -198,6 +250,10 @@
     onevent?: (evt: NEvent) => void,
     options?: QueryOptions
   ): Promise<NEvent[]> {
+    const sanitizedFilters = sanitizeFiltersBeforeReq(filter)
+    if (sanitizedFilters.length === 0) return []
+    const effectiveFilter: Filter | Filter[] =
+      Array.isArray(filter) ? sanitizedFilters : sanitizedFilters[0]
     const eoseTimeout = options?.eoseTimeout ?? 500
     const globalTimeout = options?.globalTimeout ?? 10000
     const replaceableRace = options?.replaceableRace ?? false
@@ -213,11 +269,11 @@
       globalTimeout,
       replaceableRace,
       immediateReturn,
-      filter: Array.isArray(filter) ? filter : [filter]
+      filter: sanitizedFilters
     })
   }
 
-  const filtersForGrace = Array.isArray(filter) ? filter : [filter]
+  const filtersForGrace = sanitizedFilters
   const maxLimitForGrace = Math.max(...filtersForGrace.map((f) => (f.limit ?? 0) as number), 0)
   const isSingleEventFetchForGrace = maxLimitForGrace === 1
   const useImplicitFeedFirstRelayGrace =
@@ -261,7 +317,7 @@
       : Promise.allSettled(
           httpRelayBases.map(async (base) => {
             try {
-              const evts = await queryIndexRelay(base, filter, {
+              const evts = await queryIndexRelay(base, effectiveFilter, {
                 signal: abortHttp.signal,
                 onHardFailure: () => this.onRelayConnectionFailure?.(base)
               })
@@ -339,7 +395,7 @@
     const wsSub = this.subscribe(
       wsQueryUrls,
-      filter,
+      effectiveFilter,
       {
         onevent: (evt) => {
           eventCount++
@@ -355,7 +411,7 @@
           firstResultTime = Date.now()
         }
 
-        const filters = Array.isArray(filter) ? filter : [filter]
+        const filters = sanitizedFilters
         const maxLimit = Math.max(...filters.map((f) => (f.limit ?? 0) as number), 0)
         const isSingleEventFetch = maxLimit === 1
         const hasIdFilter = filters.some(f => f.ids && f.ids.length > 0)
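fetchEvents() now resolves locally when sanitization drops every filter, and otherwise re-wraps the sanitized list to match the caller's original shape. That shape-preserving step as a standalone helper (illustrative name; downstream helpers accept Filter | Filter[]):

import type { Filter } from 'nostr-tools'

function effectiveFilterFor(original: Filter | Filter[], sanitized: Filter[]): Filter | Filter[] {
  // Callers that passed a single Filter get a single Filter back.
  return Array.isArray(original) ? sanitized : sanitized[0]
}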
@@ -453,9 +509,13 @@
     callbacks: SubscribeCallbacks,
     relayOpMeta?: { source: string; logLevel?: 'info' | 'debug' }
   ): { close: () => void } {
+    const filters = sanitizeFiltersBeforeReq(filter)
+    if (filters.length === 0) {
+      queueMicrotask(() => callbacks.oneose?.(true))
+      return { close: () => {} }
+    }
     const originalDedupedRelays = Array.from(new Set(urls))
     let relays = originalDedupedRelays
-    const filters = Array.isArray(filter) ? filter : [filter]
 
     const stripSocialBlockedRelays =
       SOCIAL_KIND_BLOCKED_RELAY_URLS.length > 0 &&
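Caller-visible behavior of the subscribe() guard, assuming the call shape used elsewhere in this file (the relay URL is a placeholder):

import { queryService } from '@/services/client.service'

// An all-invalid '#e' filter never reaches the wire: subscribe() drops it,
// schedules oneose(true) on a microtask, and hands back a no-op close().
const sub = queryService.subscribe(
  ['wss://relay.example.com'], // placeholder relay
  { '#e': ['not-an-event-id'] },
  {
    onevent: () => {},
    oneose: () => {
      // Fires immediately on a microtask; no REQ was sent to any relay.
    }
  }
)
sub.close() // safe no-op for the empty subscription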