Browse Source

bug-fixes

imwald
Silberengel 1 month ago
parent
commit
17664c7f02
  1. 32
      src/components/Note/AsciidocArticle/AsciidocArticle.tsx
  2. 39
      src/components/Note/MarkdownArticle/MarkdownArticle.tsx
  3. 2
      src/components/Note/PublicationIndex/PublicationIndex.tsx
  4. 76
      src/hooks/usePublicationSectionLoader.ts
  5. 81
      src/lib/publication-section-fetch.ts
  6. 74
      src/services/client-query.service.ts

32
src/components/Note/AsciidocArticle/AsciidocArticle.tsx

@@ -927,25 +927,19 @@ export default function AsciidocArticle({
return match
})
// Handle plain HTTP/HTTPS URLs in text (not in <a> tags, not YouTube, not relay) - convert to regular links
// NO WebPreview conversion for AsciiDoc articles
const httpUrlRegex = /https?:\/\/[^\s<>"']+/g
htmlString = htmlString.replace(httpUrlRegex, (match) => {
// Only replace if not already in a tag (basic check)
if (!match.includes('<') && !match.includes('>')) {
// Skip if it's a YouTube URL or relay URL (already handled)
if (isYouTubeUrl(match) || isWebsocketUrl(match)) {
return match
}
// Skip if it's an image or media URL (handled separately)
if (isImage(match) || isVideo(match) || isAudio(match)) {
return match
}
// Convert to regular link - NO WebPreview
const cleanedUrl = cleanUrl(match)
return `<a href="${cleanedUrl}" class="inline text-green-600 dark:text-green-400 hover:text-green-700 dark:hover:text-green-300 hover:underline break-words" target="_blank" rel="noopener noreferrer">${match}</a>`
}
return match
// Handle plain HTTP/HTTPS URLs in text nodes only (not inside existing HTML tags/attributes).
// Regex-linkifying the full HTML string can corrupt existing anchor tags.
htmlString = htmlString.replace(/>([^<]+)</g, (_fullMatch, textContent) => {
const httpUrlRegex = /https?:\/\/[^\s<>"']+/g
const replacedText = textContent.replace(httpUrlRegex, (rawUrl: string) => {
// Skip URLs that are handled elsewhere.
if (isYouTubeUrl(rawUrl) || isWebsocketUrl(rawUrl)) return rawUrl
if (isImage(rawUrl) || isVideo(rawUrl) || isAudio(rawUrl)) return rawUrl
const cleanedUrl = cleanUrl(rawUrl)
if (!cleanedUrl) return rawUrl
return `<a href="${cleanedUrl}" class="inline text-green-600 dark:text-green-400 hover:text-green-700 dark:hover:text-green-300 hover:underline break-words" target="_blank" rel="noopener noreferrer">${rawUrl}</a>`
})
return `>${replacedText}<`
})
setParsedHtml(htmlString)

39
src/components/Note/MarkdownArticle/MarkdownArticle.tsx

@@ -2943,8 +2943,10 @@ function parseMarkdownContentMarked(
const src = String(token.href ?? '')
const cleaned = cleanUrl(src)
if (!cleaned) break
// Inline context: avoid block image/media mounts inside <p>/<li>/<th>/<td>.
// Standalone image paragraphs are handled separately in renderParagraph().
const label = String(token.text ?? src)
if (isVideo(cleaned) || isAudio(cleaned)) {
// Inline context: do NOT mount block media players inside paragraph flow.
out.push(
<a
key={`${key}-media-link`}
@@ -2953,38 +2955,25 @@ function parseMarkdownContentMarked(
rel="noopener noreferrer"
className="text-green-600 dark:text-green-400 hover:text-green-700 dark:hover:text-green-300 hover:underline break-words"
>
{src}
{label}
</a>
)
break
}
if (!isImage(cleaned) || !isSafeMediaUrl(cleaned)) {
// Non-HTTP image tokens (e.g. npub...) must not be passed to image/media components.
out.push(
<span key={`${key}-img-fallback`} className="break-words">
{src}
</span>
)
out.push(<span key={`${key}-img-fallback`} className="break-words">{label}</span>)
break
}
const identifier = getImageIdentifier?.(cleaned)
const thumbnail =
imageThumbnailMap?.get(cleaned) ??
(identifier ? imageThumbnailMap?.get(`__img_id:${identifier}`) : undefined)
const imageUrl = thumbnail || src
const imageIdx = imageIndexMap.get(cleaned)
out.push(
<Image
key={`${key}-img`}
image={{ url: imageUrl, pubkey: eventPubkey }}
alt={token.text || 'image'}
className="w-full rounded-lg cursor-zoom-in my-0"
classNames={{ wrapper: 'my-2 block max-w-[400px] mx-auto' }}
onClick={(e: React.MouseEvent) => {
e.stopPropagation()
if (typeof imageIdx === 'number') openLightbox(imageIdx)
}}
/>
<a
key={`${key}-img-link`}
href={src}
target="_blank"
rel="noopener noreferrer"
className="text-green-600 dark:text-green-400 hover:text-green-700 dark:hover:text-green-300 hover:underline break-words"
>
{label}
</a>
)
break
}

2
src/components/Note/PublicationIndex/PublicationIndex.tsx

@@ -626,7 +626,7 @@ export default function PublicationIndex({
)
}
const eventKind = ref.kind || ref.event.kind
const eventKind = ref.event?.kind ?? ref.kind ?? 0
const effectiveParentImageUrl = !isNested ? metadata.image : parentImageUrl
if (eventKind === ExtendedKind.PUBLICATION) {

76
src/hooks/usePublicationSectionLoader.ts

@@ -15,6 +15,7 @@ import type { Event } from 'nostr-tools'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
const PUB_SEC_LOG = '[PublicationSection]'
const SINGLE_REF_FALLBACK_TIMEOUT_MS = 7000
function pubLog(message: string, data?: Record<string, unknown>) {
if (!import.meta.env.DEV) return
if (data) logger.info(`${PUB_SEC_LOG} ${message}`, data)
@@ -60,13 +61,18 @@ async function hydrateRefsFromIndexedDb(refs: PublicationSectionRef[]): Promise<
}
async function fetchSingleRefFallback(ref: PublicationSectionRef): Promise<Event | undefined> {
const withTimeout = <T,>(p: Promise<T>, ms: number): Promise<T | undefined> =>
new Promise((resolve) => {
const t = setTimeout(() => resolve(undefined), ms)
p.then((v) => resolve(v)).catch(() => resolve(undefined)).finally(() => clearTimeout(t))
})
try {
if (ref.type === 'a' && ref.coordinate) {
const bech32 = generateBech32IdFromATag(['a', ref.coordinate, ref.relay || '', ''])
if (bech32) return await eventService.fetchEvent(bech32)
if (bech32) return await withTimeout(eventService.fetchEvent(bech32), SINGLE_REF_FALLBACK_TIMEOUT_MS)
}
if (ref.type === 'e' && ref.eventId) {
return await eventService.fetchEvent(ref.eventId)
return await withTimeout(eventService.fetchEvent(ref.eventId), SINGLE_REF_FALLBACK_TIMEOUT_MS)
}
} catch {
/* ignore */
@@ -87,36 +93,51 @@ export function usePublicationSectionLoader(indexEvent: Event, referencesData: P
}
return keys
}, [referencesData])
const orderedKeysSignature = useMemo(() => orderedKeys.join('|'), [orderedKeys])
const [rows, setRows] = useState<Map<string, PublicationSectionRow>>(() => new Map())
const rowsRef = useRef(rows)
rowsRef.current = rows
useEffect(() => {
const m = new Map<string, PublicationSectionRow>()
for (const ref of referencesData) {
const k = refKey(ref)
if (!k) continue
m.set(k, { ref, status: 'idle' })
}
setRows(m)
}, [referencesData])
// Preserve per-key load state across rerenders to avoid reinitializing rows to idle
// when parent components recreate reference objects.
setRows((prev) => {
const next = new Map<string, PublicationSectionRow>()
for (const ref of referencesData) {
const k = refKey(ref)
if (!k) continue
const existing = prev.get(k)
if (existing) {
next.set(k, { ...existing, ref })
} else {
next.set(k, { ref, status: 'idle' })
}
}
return next
})
}, [orderedKeysSignature, referencesData])
const relayUrlsRef = useRef<string[]>([])
const searchableRelayUrlsRef = useRef<string[]>([])
const [relayReady, setRelayReady] = useState(false)
useEffect(() => {
let cancelled = false
void (async () => {
const urls = await buildPublicationSectionRelayUrls(indexEvent, referencesData)
const [urls, searchableUrls] = await Promise.all([
buildPublicationSectionRelayUrls(indexEvent, referencesData, 22, false),
buildPublicationSectionRelayUrls(indexEvent, referencesData, 40, true)
])
if (cancelled) return
relayUrlsRef.current = urls
searchableRelayUrlsRef.current = searchableUrls
setRelayReady(true)
})()
return () => {
cancelled = true
}
}, [indexEvent, referencesData])
}, [indexEvent.id, orderedKeysSignature])
const pendingRef = useRef(new Set<string>())
const debounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null)
@@ -136,7 +157,11 @@ export function usePublicationSectionLoader(indexEvent: Event, referencesData: P
for (const k of keys) {
const row = snapshot.get(k)
if (!row) continue
if (row.status === 'loaded' && row.event) continue
// Auto-queue should only process idle rows.
// - loaded rows are done
// - loading rows are already in-flight
// - error rows require explicit retry via retry button
if (row.status !== 'idle') continue
refsToLoad.push(row.ref)
}
@@ -202,6 +227,25 @@ export function usePublicationSectionLoader(indexEvent: Event, referencesData: P
}
}
stillNeed = refsToLoad.filter((r) => !resolved.has(refKey(r)))
if (stillNeed.length > 0) {
const searchableUrls = searchableRelayUrlsRef.current
const hasAdditionalSearchable = searchableUrls.some((u) => !urls.includes(u))
if (hasAdditionalSearchable) {
const fromSearchFallback = await batchFetchPublicationSectionEvents(stillNeed, searchableUrls)
pubLog('after_searchable_fallback', {
fromSearchFallback: fromSearchFallback.size,
stillNeedBefore: stillNeed.length,
relayCount: searchableUrls.length
})
for (const [k, ev] of fromSearchFallback) {
resolved.set(k, ev)
client.addEventToCache(ev)
if (isReplaceableEvent(ev.kind)) void indexedDb.putReplaceableEvent(ev)
}
}
}
const missing = refsToLoad.filter((r) => !resolved.has(refKey(r)))
pubLog('before_fallback', {
missing: missing.map((r) => refKey(r)),
@@ -249,7 +293,9 @@ export function usePublicationSectionLoader(indexEvent: Event, referencesData: P
flushInFlightRef.current = false
// While a batch was in flight, debounced runFlush() calls may have returned early
// (flush lock). Drain any keys that accumulated so scroll-triggered sections still load.
if (pendingRef.current.size > 0) {
// IMPORTANT: if relay URLs are not ready yet, do NOT spin in a tight retry loop.
// The relayReady effect will trigger requestKeys() once relays are available.
if (pendingRef.current.size > 0 && relayUrlsRef.current.length > 0) {
if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current)
debounceTimerRef.current = setTimeout(() => {
debounceTimerRef.current = null
@@ -277,7 +323,7 @@ export function usePublicationSectionLoader(indexEvent: Event, referencesData: P
if (!relayReady || orderedKeys.length === 0) return
// Full list: scroll-IO may have fired before relays were ready; those keys were re-queued idle.
requestKeys(orderedKeys)
}, [relayReady, orderedKeys, requestKeys])
}, [relayReady, orderedKeysSignature, requestKeys])
const failedKeys = useMemo(
() => [...rows.entries()].filter(([, v]) => v.status === 'error').map(([k]) => k),

81
src/lib/publication-section-fetch.ts

@@ -3,8 +3,10 @@ import { publicationCoordinateLookupKeys } from '@/lib/publication-coordinate'
import { buildComprehensiveRelayList } from '@/lib/relay-list-builder'
import { normalizeUrl } from '@/lib/url'
import client, { queryService } from '@/services/client.service'
import { ExtendedKind } from '@/constants'
import type { Event, Filter } from 'nostr-tools'
import { nip19 } from 'nostr-tools'
import { kinds } from 'nostr-tools'
/** Parsed a/e reference from publication index tags (same shape as PublicationIndex uses). */
export type PublicationSectionRef = {
@@ -82,7 +84,8 @@ function collectRelayHints(refs: PublicationSectionRef[]): string[] {
export async function buildPublicationSectionRelayUrls(
indexEvent: Event,
refs: PublicationSectionRef[],
maxRelays = 22
maxRelays = 22,
includeSearchableRelays = false
): Promise<string[]> {
const hints = collectRelayHints(refs)
const urls = await buildComprehensiveRelayList({
@@ -92,7 +95,7 @@ export async function buildPublicationSectionRelayUrls(
includeUserOwnRelays: true,
includeProfileFetchRelays: true,
includeFastReadRelays: true,
includeSearchableRelays: false,
includeSearchableRelays,
includeFavoriteRelays: true,
includeLocalRelays: true
})
@@ -101,6 +104,13 @@ export async function buildPublicationSectionRelayUrls(
const IDS_CHUNK = 44
const D_TAGS_CHUNK = 28
// Event kinds probed together (single filter, see the authors+#d fallback below)
// when an `a` tag's declared kind does not match the actual section event —
// covers legacy/mismatched publication references.
const SECTION_KIND_FALLBACK_CANDIDATES = [
ExtendedKind.PUBLICATION_CONTENT, // 30041
ExtendedKind.WIKI_ARTICLE, // 30818
ExtendedKind.WIKI_ARTICLE_MARKDOWN, // 30817
kinds.LongFormArticle, // 30023
kinds.ShortTextNote // 1
] as number[]
function coordinateFromEvent(ev: Event): string {
const d = ev.tags.find((t) => t[0] === 'd')?.[1] ?? ''
@@ -216,6 +226,73 @@ export async function batchFetchPublicationSectionEvents(
if (ev) out.set(key, ev)
}
// Fallback for mismatched/legacy kind in `a` tags:
// retry unresolved refs by author + #d across common section kinds.
const unresolvedARefs = aRefs.filter((r) => !out.has(publicationRefKey(r)))
if (unresolvedARefs.length > 0) {
const fallbackGroups = new Map<string, { pubkey: string; dTags: string[] }>()
for (const r of unresolvedARefs) {
const pubkey = r.pubkey?.toLowerCase()
const idf = r.identifier ?? r.coordinate?.split(':').slice(2).join(':')
if (!pubkey || !idf) continue
let g = fallbackGroups.get(pubkey)
if (!g) {
g = { pubkey, dTags: [] }
fallbackGroups.set(pubkey, g)
}
g.dTags.push(idf)
}
const fallbackFilters: Filter[] = []
for (const g of fallbackGroups.values()) {
const uniqueD = [...new Set(g.dTags)]
for (let i = 0; i < uniqueD.length; i += D_TAGS_CHUNK) {
const dChunk = uniqueD.slice(i, i + D_TAGS_CHUNK)
fallbackFilters.push({
authors: [g.pubkey],
kinds: [...SECTION_KIND_FALLBACK_CANDIDATES],
'#d': dChunk,
limit: dChunk.length * SECTION_KIND_FALLBACK_CANDIDATES.length
})
}
}
if (fallbackFilters.length > 0) {
try {
const fallbackEvents = await queryService.fetchEvents(relayUrls, fallbackFilters, {
globalTimeout: 10_000,
eoseTimeout: 2_000,
firstRelayResultGraceMs: false
})
const byAuthorAndD = new Map<string, Event>()
for (const ev of fallbackEvents) {
const d = ev.tags.find((t) => t[0] === 'd')?.[1]
if (!d) continue
const k = `${ev.pubkey.toLowerCase()}:${d}`
const prev = byAuthorAndD.get(k)
if (!prev || ev.created_at > prev.created_at) byAuthorAndD.set(k, ev)
}
for (const r of unresolvedARefs) {
const key = publicationRefKey(r)
if (out.has(key)) continue
const pubkey = r.pubkey?.toLowerCase()
const idf = r.identifier ?? r.coordinate?.split(':').slice(2).join(':')
if (!pubkey || !idf) continue
const ev = byAuthorAndD.get(`${pubkey}:${idf}`)
if (ev) out.set(key, ev)
}
} catch (err) {
if (import.meta.env.DEV) {
logger.warn('[PublicationSection] batch_fetch_fallback_error', {
message: err instanceof Error ? err.message : String(err),
filterCount: fallbackFilters.length,
relayCount: relayUrls.length
})
}
}
}
}
for (const r of aRefs) {
const key = publicationRefKey(r)
const coord = r.coordinate!

74
src/services/client-query.service.ts

@@ -28,7 +28,7 @@ import { isHttpRelayUrl, normalizeHttpRelayUrl, normalizeUrl } from '@/lib/url'
import { RelaySubscribeOpBatch } from '@/services/relay-operation-log.service'
import { patchRelayNoticeForFetchFailures } from '@/services/relay-notice-strike'
import type { Filter, Event as NEvent } from 'nostr-tools'
import { SimplePool, EventTemplate, VerifiedEvent } from 'nostr-tools'
import { SimplePool, EventTemplate, VerifiedEvent, nip19 } from 'nostr-tools'
import type { AbstractRelay } from 'nostr-tools/abstract-relay'
import nip66Service from './nip66.service'
import type { ISigner, TSignerType } from '@/types'
@@ -40,6 +40,58 @@ function filterForRelay(f: Filter, relaySupportsSearch: boolean): Filter {
return rest as Filter
}
const HEX_EVENT_ID_RE = /^[0-9a-f]{64}$/i
/**
 * Best-effort normalization of an event reference into a 64-char lowercase hex
 * event id. Accepts raw hex ids, `nostr:`-prefixed values, and bech32 `note` /
 * `nevent` encodings. Returns null for anything that cannot be decoded.
 */
function decodeEventRefForETagFilter(raw: string): string | null {
  let candidate = raw.trim()
  if (candidate.length === 0) return null
  // Strip an optional, case-insensitive `nostr:` URI prefix.
  if (candidate.toLowerCase().startsWith('nostr:')) {
    candidate = candidate.slice('nostr:'.length).trim()
  }
  if (HEX_EVENT_ID_RE.test(candidate)) return candidate.toLowerCase()
  try {
    const decoded = nip19.decode(candidate)
    switch (decoded.type) {
      case 'note':
        return decoded.data
      case 'nevent':
        return decoded.data.id
      default:
        return null
    }
  } catch {
    // Not valid bech32 (or an unsupported entity) — treat as undecodable.
    return null
  }
}
/**
 * Normalizes the `#e` / `#E` tag constraints of a REQ filter before it is sent
 * to relays: decodes every ref to a 64-char hex id (hex / nostr: / note / nevent),
 * dedupes them, folds `#E` into `#e`, and returns null when no ref survives so
 * the caller can drop the filter entirely.
 *
 * Fix vs. previous version: the DEV log now fires only when genuinely invalid
 * refs were removed (the old length comparison also fired on mere duplicates),
 * and each ref is decoded exactly once instead of a second pass just to build
 * the `invalidSample` for logging.
 */
function sanitizeETagFilter(filter: Filter): Filter | null {
  const f = { ...filter } as Filter & { '#e'?: string[]; '#E'?: string[] }
  const rawLower = Array.isArray(f['#e']) ? f['#e'] : []
  const rawUpper = Array.isArray(f['#E']) ? f['#E'] : []
  // Nothing to sanitize — return the shallow copy unchanged.
  if (rawLower.length === 0 && rawUpper.length === 0) return f
  const rawAll = [...rawLower, ...rawUpper]
  // Single pass: decode once, dedupe while preserving first-seen order,
  // and collect undecodable inputs for the DEV diagnostics.
  const decoded: string[] = []
  const seen = new Set<string>()
  const invalid: string[] = []
  for (const value of rawAll) {
    const id = decodeEventRefForETagFilter(String(value))
    if (!id) {
      invalid.push(String(value))
      continue
    }
    if (!seen.has(id)) {
      seen.add(id)
      decoded.push(id)
    }
  }
  if (import.meta.env.DEV && invalid.length > 0) {
    logger.info('[QueryService] sanitized invalid #e/#E refs before REQ', {
      inputCount: rawAll.length,
      decodedCount: decoded.length,
      invalidSample: invalid.slice(0, 3)
    })
  }
  // Every ref was invalid: signal the caller to skip this filter.
  if (decoded.length === 0) return null
  f['#e'] = decoded
  delete f['#E']
  return f
}
/**
 * Applies sanitizeETagFilter to a single filter or a filter list, dropping any
 * filter that became unusable (null). Always returns an array, possibly empty.
 */
function sanitizeFiltersBeforeReq(filter: Filter | Filter[]): Filter[] {
  const candidates = Array.isArray(filter) ? filter : [filter]
  const kept: Filter[] = []
  for (const candidate of candidates) {
    const sanitized = sanitizeETagFilter(candidate)
    if (sanitized) kept.push(sanitized)
  }
  return kept
}
export interface QueryOptions {
eoseTimeout?: number
globalTimeout?: number
@@ -198,6 +250,10 @@ export class QueryService {
onevent?: (evt: NEvent) => void,
options?: QueryOptions
): Promise<NEvent[]> {
const sanitizedFilters = sanitizeFiltersBeforeReq(filter)
if (sanitizedFilters.length === 0) return []
const effectiveFilter: Filter | Filter[] =
Array.isArray(filter) ? sanitizedFilters : sanitizedFilters[0]
const eoseTimeout = options?.eoseTimeout ?? 500
const globalTimeout = options?.globalTimeout ?? 10000
const replaceableRace = options?.replaceableRace ?? false
@@ -213,11 +269,11 @@ export class QueryService {
globalTimeout,
replaceableRace,
immediateReturn,
filter: Array.isArray(filter) ? filter : [filter]
filter: sanitizedFilters
})
}
const filtersForGrace = Array.isArray(filter) ? filter : [filter]
const filtersForGrace = sanitizedFilters
const maxLimitForGrace = Math.max(...filtersForGrace.map((f) => (f.limit ?? 0) as number), 0)
const isSingleEventFetchForGrace = maxLimitForGrace === 1
const useImplicitFeedFirstRelayGrace =
@@ -261,7 +317,7 @@ export class QueryService {
: Promise.allSettled(
httpRelayBases.map(async (base) => {
try {
const evts = await queryIndexRelay(base, filter, {
const evts = await queryIndexRelay(base, effectiveFilter, {
signal: abortHttp.signal,
onHardFailure: () => this.onRelayConnectionFailure?.(base)
})
@@ -339,7 +395,7 @@ export class QueryService {
const wsSub = this.subscribe(
wsQueryUrls,
filter,
effectiveFilter,
{
onevent: (evt) => {
eventCount++
@@ -355,7 +411,7 @@ export class QueryService {
firstResultTime = Date.now()
}
const filters = Array.isArray(filter) ? filter : [filter]
const filters = sanitizedFilters
const maxLimit = Math.max(...filters.map((f) => (f.limit ?? 0) as number), 0)
const isSingleEventFetch = maxLimit === 1
const hasIdFilter = filters.some(f => f.ids && f.ids.length > 0)
@@ -453,9 +509,13 @@ export class QueryService {
callbacks: SubscribeCallbacks,
relayOpMeta?: { source: string; logLevel?: 'info' | 'debug' }
): { close: () => void } {
const filters = sanitizeFiltersBeforeReq(filter)
if (filters.length === 0) {
queueMicrotask(() => callbacks.oneose?.(true))
return { close: () => {} }
}
const originalDedupedRelays = Array.from(new Set(urls))
let relays = originalDedupedRelays
const filters = Array.isArray(filter) ? filter : [filter]
const stripSocialBlockedRelays =
SOCIAL_KIND_BLOCKED_RELAY_URLS.length > 0 &&

Loading…
Cancel
Save