Browse Source

bug-fixes

imwald
Silberengel 1 month ago
parent
commit
70e6ffd3f0
  1. 64
      src/components/Embedded/EmbeddedNote.tsx
  2. 198
      src/components/NoteList/index.tsx
  3. 21
      src/components/ReplyNoteList/index.tsx
  4. 14
      src/hooks/useFetchEvent.tsx
  5. 45
      src/lib/event.ts
  6. 1
      src/lib/relay-list-builder.ts
  7. 88
      src/pages/secondary/NotePage/NotFound.tsx
  8. 192
      src/services/client-events.service.ts
  9. 5
      src/services/client.service.ts
  10. 2
      src/services/note-stats.service.ts
  11. 1
      src/services/spell.service.ts
  12. 223
      src/services/web.service.ts
  13. 10
      vite.config.ts

64
src/components/Embedded/EmbeddedNote.tsx

@ -5,6 +5,7 @@ import { useFetchEvent } from '@/hooks' @@ -5,6 +5,7 @@ import { useFetchEvent } from '@/hooks'
import { normalizeUrl } from '@/lib/url'
import { cn } from '@/lib/utils'
import client from '@/services/client.service'
import indexedDb from '@/services/indexed-db.service'
import { useTranslation } from 'react-i18next'
import { useEffect, useMemo, useState } from 'react'
import { Event, nip19 } from 'nostr-tools'
@ -273,21 +274,11 @@ function EmbeddedNoteNotFound({ @@ -273,21 +274,11 @@ function EmbeddedNoteNotFound({
null | 'unparseable' | 'no_relays' | 'searched'
>(null)
// Calculate which external relays would be tried when user clicks "Try external relays".
// IMPORTANT: For embedded events, we should search:
// 1. Containing event author's relays (outboxes + inboxes)
// 2. Relay hints from containing event (e, a, q tags - 3rd position)
// 3. Bech32 hints + embedded event author's relays
// 4. Relays where embedded event was seen
// 5. SEARCHABLE_RELAY_URLS
// Relays for "Try external relays": hints + searchable + FAST_READ.
// Initial embed fetch uses short per-relay timeouts; this pass uses longer timeouts (see fetchEventWithExternalRelays).
// We intentionally include FAST_READ again so slow/default relays get a second chance.
useEffect(() => {
const getExternalRelays = async () => {
const alreadyTriedRelaysSet = new Set<string>()
;[...FAST_READ_RELAY_URLS].forEach(url => {
const normalized = normalizeUrl(url)
if (normalized) alreadyTriedRelaysSet.add(normalized)
})
let hintRelays: string[] = []
let extractedHexEventId: string | null = null
@ -347,30 +338,27 @@ function EmbeddedNoteNotFound({ @@ -347,30 +338,27 @@ function EmbeddedNoteNotFound({
.map(url => normalizeUrl(url))
.filter((url): url is string => Boolean(url))
// Combine hints with SEARCHABLE_RELAY_URLS (always include as fallback)
// Normalize SEARCHABLE_RELAY_URLS for comparison
const normalizedSearchableRelays = SEARCHABLE_RELAY_URLS
.map(url => normalizeUrl(url))
.filter((url): url is string => Boolean(url))
// Combine all potential relays (hints + searchable)
const allPotentialRelays = new Set([...normalizedHints, ...normalizedSearchableRelays])
const normalizedFastRead = FAST_READ_RELAY_URLS
.map(url => normalizeUrl(url))
.filter((url): url is string => Boolean(url))
// Filter out relays that were already tried
const externalRelays = Array.from(allPotentialRelays).filter(
relay => !alreadyTriedRelaysSet.has(relay)
const externalRelays = Array.from(
new Set([...normalizedHints, ...normalizedSearchableRelays, ...normalizedFastRead])
)
// Deduplicate final relay list
setExternalRelays(externalRelays)
logger.debug('External relays calculated', {
noteId,
hintRelaysCount: normalizedHints.length,
searchableRelaysCount: normalizedSearchableRelays.length,
alreadyTriedCount: alreadyTriedRelaysSet.size,
fastReadRelaysCount: normalizedFastRead.length,
externalRelaysCount: externalRelays.length,
externalRelays: externalRelays.slice(0, 10) // Log first 10
externalRelays: externalRelays.slice(0, 10)
})
}
@ -399,7 +387,28 @@ function EmbeddedNoteNotFound({ @@ -399,7 +387,28 @@ function EmbeddedNoteNotFound({
setExternalSearchDetail(null)
let found: Event | undefined
try {
const idLog = hexEventId ?? hexEventIdFromNoteId(noteId) ?? noteId.slice(0, 16)
const idHex = hexEventId ?? hexEventIdFromNoteId(noteId)
if (idHex) {
const fromDb = await indexedDb.getEventFromPublicationStore(idHex)
if (fromDb) {
client.addEventToCache(fromDb)
found = fromDb
onEventFound?.(fromDb)
logger.info('Event found in IndexedDB (try-harder)', { noteId })
}
}
if (!found) {
const retried = await client.fetchEventForceRetry(noteId)
if (retried) {
found = retried
onEventFound?.(retried)
logger.info('Event found after fetchEventForceRetry', { noteId })
}
}
if (!found) {
const idLog = idHex ?? noteId.slice(0, 16)
logger.info('Searching external relays', {
noteId,
hexOrHint: idLog,
@ -408,12 +417,15 @@ function EmbeddedNoteNotFound({ @@ -408,12 +417,15 @@ function EmbeddedNoteNotFound({
})
const event = await client.fetchEventWithExternalRelays(noteId, externalRelays)
if (event) {
logger.info('Event found on external relay', { noteId })
found = event
client.addEventToCache(event)
onEventFound?.(event)
} else {
}
}
if (!found) {
logger.info('Event not found on external relays', {
noteId,
relayCount: externalRelays.length

198
src/components/NoteList/index.tsx

@ -2,7 +2,7 @@ import NewNotesButton from '@/components/NewNotesButton' @@ -2,7 +2,7 @@ import NewNotesButton from '@/components/NewNotesButton'
import { Button } from '@/components/ui/button'
import { ExtendedKind, FIRST_RELAY_RESULT_GRACE_MS } from '@/constants'
import {
getEmbeddedNoteBech32Ids,
collectEmbeddedEventPrefetchTargets,
getReplaceableCoordinateFromEvent,
isMentioningMutedUsers,
isReplaceableEvent,
@ -566,18 +566,22 @@ const NoteList = forwardRef( @@ -566,18 +566,22 @@ const NoteList = forwardRef(
const evs = lastEventsForTimelinePrefetchRef.current
if (evs.length === 0) return
const initialEmbeddedEventIds = new Set<string>()
evs.slice(0, 50).forEach((ev: Event) => {
extractEmbeddedEventIds(ev).forEach((id: string) => initialEmbeddedEventIds.add(id))
})
const eventIdsToFetch = Array.from(initialEmbeddedEventIds).filter(
(id) => !prefetchedEventIdsRef.current.has(id)
)
if (eventIdsToFetch.length > 0) {
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
})
const { hexIds, nip19Pointers } = mergePrefetchTargetsFromEvents(evs.slice(0, 50))
const hexIdsToFetch = hexIds.filter((id) => !prefetchedEventIdsRef.current.has(id))
const nip19ToFetch = nip19Pointers.filter((p) => !prefetchedEventIdsRef.current.has(p))
if (hexIdsToFetch.length > 0 || nip19ToFetch.length > 0) {
hexIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
nip19ToFetch.forEach((p) => prefetchedEventIdsRef.current.add(p))
const run = async () => {
try {
await client.prefetchHexEventIds(hexIdsToFetch)
await Promise.all(nip19ToFetch.map((p) => client.fetchEvent(p)))
} catch {
hexIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
nip19ToFetch.forEach((p) => prefetchedEventIdsRef.current.delete(p))
}
}
void run()
}
}, 450)
} else if (eosed) {
@ -874,25 +878,22 @@ const NoteList = forwardRef( @@ -874,25 +878,22 @@ const NoteList = forwardRef(
}
schedulePrefetch(() => {
// CRITICAL: Prefetch embedded events for newly loaded events (throttled)
const newEmbeddedEventIds = new Set<string>()
// Only prefetch for first 30 events to reduce load
newEvents.slice(0, 30).forEach((ev) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id) => newEmbeddedEventIds.add(id))
})
const eventIdsToFetch = Array.from(newEmbeddedEventIds).filter(
(id) => !prefetchedEventIdsRef.current.has(id)
)
if (eventIdsToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
// Batch fetch embedded events in background (non-blocking)
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
// On error, remove from prefetched set so we can retry later
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
})
const { hexIds, nip19Pointers } = mergePrefetchTargetsFromEvents(newEvents.slice(0, 30))
const hexIdsToFetch = hexIds.filter((id) => !prefetchedEventIdsRef.current.has(id))
const nip19ToFetch = nip19Pointers.filter((p) => !prefetchedEventIdsRef.current.has(p))
if (hexIdsToFetch.length === 0 && nip19ToFetch.length === 0) return
hexIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
nip19ToFetch.forEach((p) => prefetchedEventIdsRef.current.add(p))
const run = async () => {
try {
await client.prefetchHexEventIds(hexIdsToFetch)
await Promise.all(nip19ToFetch.map((p) => client.fetchEvent(p)))
} catch {
hexIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
nip19ToFetch.forEach((p) => prefetchedEventIdsRef.current.delete(p))
}
}
void run()
})
}
} catch (_error) {
@ -942,41 +943,15 @@ const NoteList = forwardRef( @@ -942,41 +943,15 @@ const NoteList = forwardRef(
const prefetchedEventIdsRef = useRef<Set<string>>(new Set())
const prefetchEmbeddedEventsTimeoutRef = useRef<NodeJS.Timeout | null>(null)
// Helper function to extract all embedded event IDs from an event
const extractEmbeddedEventIds = useCallback((evt: Event): string[] => {
const eventIds: string[] = []
// 1. Extract from 'e' tags (event references)
evt.tags
.filter((tag) => tag[0] === 'e' && tag[1] && tag[1].length === 64)
.forEach((tag) => {
const eventId = tag[1]
if (eventId && /^[0-9a-f]{64}$/.test(eventId)) {
eventIds.push(eventId)
}
})
// 2. Extract from 'a' tags (addressable events) - get event ID if present
evt.tags
.filter((tag) => tag[0] === 'a' && tag[3]) // tag[3] is the event ID for version tracking
.forEach((tag) => {
const eventId = tag[3]
if (eventId && /^[0-9a-f]{64}$/.test(eventId)) {
eventIds.push(eventId)
const mergePrefetchTargetsFromEvents = useCallback((evts: Event[]) => {
const hex = new Set<string>()
const nip19 = new Set<string>()
for (const e of evts) {
const t = collectEmbeddedEventPrefetchTargets(e)
t.hexIds.forEach((id) => hex.add(id))
t.nip19Pointers.forEach((p) => nip19.add(p))
}
})
// 3. Extract from content (nostr: links)
// Note: getEmbeddedNoteBech32Ids returns hex IDs (despite the name)
const embeddedNoteIds = getEmbeddedNoteBech32Ids(evt)
embeddedNoteIds.forEach((id) => {
// The function already returns hex IDs, so use them directly
if (id && /^[0-9a-f]{64}$/.test(id)) {
eventIds.push(id)
}
})
return Array.from(new Set(eventIds)) // Deduplicate
return { hexIds: Array.from(hex), nip19Pointers: Array.from(nip19) }
}, [])
// CRITICAL: Prefetch embedded events for visible events
@ -989,39 +964,22 @@ const NoteList = forwardRef( @@ -989,39 +964,22 @@ const NoteList = forwardRef(
// Debounce embedded event prefetching by 400ms to reduce frequency during rapid scrolling
prefetchEmbeddedEventsTimeoutRef.current = setTimeout(() => {
// Extract embedded event IDs from visible events (first 40, reduced to reduce load)
const visibleEmbeddedEventIds = new Set<string>()
filteredEvents.slice(0, 40).forEach((ev) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id) => visibleEmbeddedEventIds.add(id))
})
// Also extract from upcoming events (next 80, reduced to reduce load)
const upcomingEmbeddedEventIds = new Set<string>()
events.slice(0, 80).forEach((ev) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id) => upcomingEmbeddedEventIds.add(id))
})
// Combine visible and upcoming
const allEmbeddedEventIds = Array.from(
new Set([...visibleEmbeddedEventIds, ...upcomingEmbeddedEventIds])
const visibleTargets = mergePrefetchTargetsFromEvents(filteredEvents.slice(0, 40))
const upcomingTargets = mergePrefetchTargetsFromEvents(events.slice(0, 80))
const hexIds = Array.from(
new Set([...visibleTargets.hexIds, ...upcomingTargets.hexIds])
)
if (allEmbeddedEventIds.length === 0) return
// Filter out already prefetched event IDs
const eventIdsToFetch = allEmbeddedEventIds.filter(
(id) => !prefetchedEventIdsRef.current.has(id)
const nip19Pointers = Array.from(
new Set([...visibleTargets.nip19Pointers, ...upcomingTargets.nip19Pointers])
)
if (eventIdsToFetch.length === 0) return
const hexIdsToFetch = hexIds.filter((id) => !prefetchedEventIdsRef.current.has(id))
const nip19ToFetch = nip19Pointers.filter((p) => !prefetchedEventIdsRef.current.has(p))
if (hexIdsToFetch.length === 0 && nip19ToFetch.length === 0) return
// Mark as prefetched immediately to prevent duplicate requests
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
hexIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
nip19ToFetch.forEach((p) => prefetchedEventIdsRef.current.add(p))
// Batch fetch embedded events in background (non-blocking)
// Use requestIdleCallback if available to avoid blocking scroll
const scheduleFetch = (callback: () => void) => {
if (typeof requestIdleCallback !== 'undefined') {
requestIdleCallback(callback, { timeout: 500 })
@ -1031,10 +989,16 @@ const NoteList = forwardRef( @@ -1031,10 +989,16 @@ const NoteList = forwardRef(
}
scheduleFetch(() => {
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
// On error, remove from prefetched set so we can retry later
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
})
const run = async () => {
try {
await client.prefetchHexEventIds(hexIdsToFetch)
await Promise.all(nip19ToFetch.map((p) => client.fetchEvent(p)))
} catch {
hexIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
nip19ToFetch.forEach((p) => prefetchedEventIdsRef.current.delete(p))
}
}
void run()
})
}, 400) // Debounce by 400ms to reduce frequency during rapid scrolling
@ -1044,7 +1008,7 @@ const NoteList = forwardRef( @@ -1044,7 +1008,7 @@ const NoteList = forwardRef(
prefetchEmbeddedEventsTimeoutRef.current = null
}
}
}, [filteredEvents, events, extractEmbeddedEventIds])
}, [filteredEvents, events, mergePrefetchTargetsFromEvents])
// Also prefetch when loading more events (scrolling down)
// Throttled to reduce frequency during rapid scrolling
@ -1059,19 +1023,16 @@ const NoteList = forwardRef( @@ -1059,19 +1023,16 @@ const NoteList = forwardRef(
// Debounce embedded-event prefetch for newly revealed rows (profiles use NoteFeed batcher above)
prefetchNewEventsTimeoutRef.current = setTimeout(() => {
// CRITICAL: Prefetch embedded events for newly loaded events (reduced scope)
const newlyLoadedEmbeddedEventIds = new Set<string>()
events.slice(showCount, showCount + 50).forEach((ev) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id) => newlyLoadedEmbeddedEventIds.add(id))
})
const eventIdsToFetch = Array.from(newlyLoadedEmbeddedEventIds).filter(
(id) => !prefetchedEventIdsRef.current.has(id)
const { hexIds, nip19Pointers } = mergePrefetchTargetsFromEvents(
events.slice(showCount, showCount + 50)
)
if (eventIdsToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
// Batch fetch embedded events in background (non-blocking) using requestIdleCallback
const hexIdsToFetch = hexIds.filter((id) => !prefetchedEventIdsRef.current.has(id))
const nip19ToFetch = nip19Pointers.filter((p) => !prefetchedEventIdsRef.current.has(p))
if (hexIdsToFetch.length === 0 && nip19ToFetch.length === 0) return
hexIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
nip19ToFetch.forEach((p) => prefetchedEventIdsRef.current.add(p))
const scheduleFetch = (callback: () => void) => {
if (typeof requestIdleCallback !== 'undefined') {
requestIdleCallback(callback, { timeout: 500 })
@ -1081,12 +1042,17 @@ const NoteList = forwardRef( @@ -1081,12 +1042,17 @@ const NoteList = forwardRef(
}
scheduleFetch(() => {
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
// On error, remove from prefetched set so we can retry later
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
})
})
const run = async () => {
try {
await client.prefetchHexEventIds(hexIdsToFetch)
await Promise.all(nip19ToFetch.map((p) => client.fetchEvent(p)))
} catch {
hexIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
nip19ToFetch.forEach((p) => prefetchedEventIdsRef.current.delete(p))
}
}
void run()
})
}, 400) // Debounce by 400ms to reduce frequency during rapid scrolling
return () => {
@ -1095,7 +1061,7 @@ const NoteList = forwardRef( @@ -1095,7 +1061,7 @@ const NoteList = forwardRef(
prefetchNewEventsTimeoutRef.current = null
}
}
}, [events.length, showCount, loading, hasMore])
}, [events.length, showCount, loading, hasMore, mergePrefetchTargetsFromEvents])
const showNewEvents = () => {
setEvents((oldEvents) => [...newEvents, ...oldEvents])

21
src/components/ReplyNoteList/index.tsx

@ -237,20 +237,35 @@ function ReplyNoteList({ @@ -237,20 +237,35 @@ function ReplyNoteList({
relay: client.getEventHint(event.id)
}
} else {
root = { type: 'E', id: event.id, pubkey: event.pubkey }
const eid = event.id
root = {
type: 'E',
id: /^[0-9a-f]{64}$/i.test(eid) ? eid.toLowerCase() : eid,
pubkey: event.pubkey
}
}
const rootETag = getRootETag(event)
if (rootETag) {
const [, rootEventHexId, , , rootEventPubkey] = rootETag
if (rootEventHexId && rootEventPubkey) {
root = { type: 'E', id: rootEventHexId, pubkey: rootEventPubkey }
const hid = rootEventHexId
root = {
type: 'E',
id: /^[0-9a-f]{64}$/i.test(hid) ? hid.toLowerCase() : hid,
pubkey: rootEventPubkey
}
} else {
const rootEventId = generateBech32IdFromETag(rootETag)
if (rootEventId) {
const rootEvent = await eventService.fetchEvent(rootEventId)
if (rootEvent) {
root = { type: 'E', id: rootEvent.id, pubkey: rootEvent.pubkey }
const rid = rootEvent.id
root = {
type: 'E',
id: /^[0-9a-f]{64}$/i.test(rid) ? rid.toLowerCase() : rid,
pubkey: rootEvent.pubkey
}
}
}
}

14
src/hooks/useFetchEvent.tsx

@ -55,8 +55,12 @@ export function useFetchEvent(eventId?: string, initialEvent?: Event) { @@ -55,8 +55,12 @@ export function useFetchEvent(eventId?: string, initialEvent?: Event) {
const fetchEvent = async () => {
try {
// fetchEvent uses DataLoader which handles caching automatically
const fetchedEvent = await eventService.fetchEvent(eventId)
// First load: DataLoader dedupes. Refetches (incl. session-waiter) clear a prior undefined so
// timeline-cached events resolve after the embed mounted first.
const fetchedEvent =
skipShortcuts
? await eventService.fetchEventForceRetry(eventId)
: await eventService.fetchEvent(eventId)
if (fetchedEvent && !isEventDeleted(fetchedEvent)) {
setEvent(fetchedEvent)
addReplies([fetchedEvent])
@ -77,5 +81,11 @@ export function useFetchEvent(eventId?: string, initialEvent?: Event) { @@ -77,5 +81,11 @@ export function useFetchEvent(eventId?: string, initialEvent?: Event) {
}
}, [isEventDeleted, event])
// Parent notes often render before the embedded event arrives from the same timeline; refetch when it hits session cache.
useEffect(() => {
if (!eventId || event !== undefined) return undefined
return eventService.subscribeWhenSessionHasEvent(eventId, refetch)
}, [eventId, event, refetch])
return { isFetching, error, event, refetch }
}

45
src/lib/event.ts

@ -1,5 +1,5 @@ @@ -1,5 +1,5 @@
import { CALENDAR_EVENT_KINDS, ExtendedKind } from '@/constants'
import { EMBEDDED_MENTION_REGEX, NOSTR_EMBEDDED_NOTE_REGEX } from '@/lib/content-patterns'
import { EMBEDDED_EVENT_REGEX, EMBEDDED_MENTION_REGEX, NOSTR_EMBEDDED_NOTE_REGEX } from '@/lib/content-patterns'
import { cleanUrl } from '@/lib/url'
import client from '@/services/client.service'
import { TImetaInfo } from '@/types'
@ -251,6 +251,49 @@ export function getEmbeddedNoteBech32Ids(event: Event) { @@ -251,6 +251,49 @@ export function getEmbeddedNoteBech32Ids(event: Event) {
return embeddedNoteBech32Ids
}
/**
 * Collect targets to prefetch so embedded notes (and reply roots) resolve into session cache.
 * - `hexIds`: lowercase event ids (e tags, a-tag snapshot, nostr:note1 / nevent1 in content).
 * - `nip19Pointers`: bech32 strings (e.g. naddr) for per-pointer fetches not batchable as a single `ids` filter.
 */
export function collectEmbeddedEventPrefetchTargets(event: Event): {
  hexIds: string[]
  nip19Pointers: string[]
} {
  const hexIds = new Set<string>()
  const pointers = new Set<string>()

  // Accept only canonical 64-char lowercase hex; silently drop anything else.
  const pushHex = (raw: string | undefined) => {
    if (!raw) return
    const normalized = raw.trim().toLowerCase()
    if (/^[0-9a-f]{64}$/.test(normalized)) hexIds.add(normalized)
  }

  // Tag references: 'e' carries the id directly; 'a' may carry a snapshot id in position 3.
  event.tags.forEach((tag) => {
    if (tag[0] === 'e') pushHex(tag[1])
    if (tag[0] === 'a') pushHex(tag[3])
  })

  // Inline nostr: mentions in the content body.
  const mentions = event.content.match(EMBEDDED_EVENT_REGEX) ?? []
  for (const mention of mentions) {
    const separator = mention.indexOf(':')
    if (separator < 0) continue
    const bech32 = mention.slice(separator + 1)
    try {
      const decoded = nip19.decode(bech32)
      if (decoded.type === 'note') {
        pushHex(decoded.data)
      } else if (decoded.type === 'nevent') {
        pushHex(decoded.data.id)
      } else if (decoded.type === 'naddr') {
        pointers.add(bech32)
      }
    } catch {
      /* ignore undecodable pointers */
    }
  }

  return {
    hexIds: [...hexIds],
    nip19Pointers: [...pointers]
  }
}
export function getEmbeddedPubkeys(event: Event) {
const cache = EVENT_EMBEDDED_PUBKEYS_CACHE.get(event.id)
if (cache) return cache

1
src/lib/relay-list-builder.ts

@ -306,6 +306,7 @@ export async function buildReplyReadRelayList( @@ -306,6 +306,7 @@ export async function buildReplyReadRelayList(
userPubkey,
relayHints: threadRelayHints,
includeFastReadRelays: true,
includeSearchableRelays: true,
includeLocalRelays: true,
blockedRelays
})

88
src/pages/secondary/NotePage/NotFound.tsx

@ -3,6 +3,7 @@ import { Button } from '@/components/ui/button' @@ -3,6 +3,7 @@ import { Button } from '@/components/ui/button'
import { FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants'
import { normalizeUrl } from '@/lib/url'
import client from '@/services/client.service'
import indexedDb from '@/services/indexed-db.service'
import { AlertCircle, Search } from 'lucide-react'
import { nip19 } from 'nostr-tools'
import { useEffect, useState } from 'react'
@ -22,26 +23,18 @@ export default function NotFound({ @@ -22,26 +23,18 @@ export default function NotFound({
const [externalRelays, setExternalRelays] = useState<string[]>([])
const [hexEventId, setHexEventId] = useState<string | null>(null)
// Calculate which external relays would be tried (excluding already-tried relays)
// Hints + seen + searchable + FAST_READ (second pass uses longer timeouts; include defaults again)
useEffect(() => {
if (!bech32Id) return
const getExternalRelays = async () => {
try {
// Get all relays that have already been tried (FAST_READ_RELAY_URLS)
// These are the relays used in the initial fetch
const alreadyTriedRelaysSet = new Set<string>()
;[...FAST_READ_RELAY_URLS].forEach(url => {
const normalized = normalizeUrl(url)
if (normalized) alreadyTriedRelaysSet.add(normalized)
})
let bech32HintRelays: string[] = [] // Relay hints from bech32 (highest priority)
let extractedHexEventId: string | null = null
// CRITICAL: Parse relay hints from bech32 ID FIRST (highest priority)
// These are explicit hints from the bech32 address and should always be used
if (!/^[0-9a-f]{64}$/.test(bech32Id)) {
if (!/^[0-9a-f]{64}$/i.test(bech32Id)) {
try {
const { type, data } = nip19.decode(bech32Id)
@ -76,7 +69,7 @@ export default function NotFound({ @@ -76,7 +69,7 @@ export default function NotFound({
logger.error('Failed to parse bech32 ID for relay hints', { error: err, bech32Id })
}
} else {
extractedHexEventId = bech32Id
extractedHexEventId = bech32Id.toLowerCase()
}
setHexEventId(extractedHexEventId)
@ -94,19 +87,22 @@ export default function NotFound({ @@ -94,19 +87,22 @@ export default function NotFound({
.map(url => normalizeUrl(url))
.filter((url): url is string => Boolean(url))
// Normalize SEARCHABLE_RELAY_URLS (fallback)
const normalizedSearchableRelays = SEARCHABLE_RELAY_URLS
.map(url => normalizeUrl(url))
.filter((url): url is string => Boolean(url))
// CRITICAL: Preserve order - bech32 hints first, then seen, then searchable
// This ensures relay hints from bech32 are shown first in the UI
// Order matters: bech32 hints (explicit) > seen relays > searchable (fallback)
const orderedExternalRelays = [
...normalizedBech32Hints.filter(r => !alreadyTriedRelaysSet.has(r)),
...normalizedSeenRelays.filter(r => !alreadyTriedRelaysSet.has(r) && !normalizedBech32Hints.includes(r)),
...normalizedSearchableRelays.filter(r => !alreadyTriedRelaysSet.has(r) && !normalizedBech32Hints.includes(r) && !normalizedSeenRelays.includes(r))
]
const normalizedFastRead = FAST_READ_RELAY_URLS
.map(url => normalizeUrl(url))
.filter((url): url is string => Boolean(url))
const orderedExternalRelays = Array.from(
new Set([
...normalizedBech32Hints,
...normalizedSeenRelays,
...normalizedSearchableRelays,
...normalizedFastRead
])
)
setExternalRelays(orderedExternalRelays)
@ -115,10 +111,10 @@ export default function NotFound({ @@ -115,10 +111,10 @@ export default function NotFound({
bech32HintCount: normalizedBech32Hints.length,
seenRelayCount: normalizedSeenRelays.length,
searchableRelaysCount: normalizedSearchableRelays.length,
alreadyTriedCount: alreadyTriedRelaysSet.size,
fastReadRelaysCount: normalizedFastRead.length,
externalRelaysCount: orderedExternalRelays.length,
bech32Hints: normalizedBech32Hints,
externalRelays: orderedExternalRelays.slice(0, 10) // Log first 10
externalRelays: orderedExternalRelays.slice(0, 10)
})
} catch (error) {
logger.error('Error calculating external relays (NotFound)', {
@ -144,21 +140,56 @@ export default function NotFound({ @@ -144,21 +140,56 @@ export default function NotFound({
}
setIsSearchingExternal(true)
let found = false
try {
const idHex =
hexEventId ??
(/^[0-9a-f]{64}$/i.test(bech32Id) ? bech32Id.toLowerCase() : null) ??
(() => {
try {
const { type, data } = nip19.decode(bech32Id)
if (type === 'note') return data as string
if (type === 'nevent') return data.id
} catch {
/* ignore */
}
return null
})()
if (idHex) {
const fromDb = await indexedDb.getEventFromPublicationStore(idHex)
if (fromDb) {
client.addEventToCache(fromDb)
onEventFound?.(fromDb)
found = true
logger.info('Event found in IndexedDB (NotFound try-harder)', { bech32Id })
}
}
if (!found) {
const retried = await client.fetchEventForceRetry(bech32Id)
if (retried) {
onEventFound?.(retried)
found = true
logger.info('Event found after fetchEventForceRetry (NotFound)', { bech32Id })
}
}
if (!found) {
logger.info('Searching external relays (NotFound)', {
bech32Id,
hexEventId,
hexEventId: idHex ?? hexEventId,
relayCount: externalRelays.length,
relays: externalRelays.slice(0, 5) // Log first 5 relays
relays: externalRelays.slice(0, 5)
})
const event = await client.fetchEventWithExternalRelays(bech32Id, externalRelays)
if (event) {
logger.info('Event found on external relay (NotFound)', { bech32Id, hexEventId })
if (onEventFound) {
onEventFound(event)
}
client.addEventToCache(event)
onEventFound?.(event)
found = true
} else {
logger.info('Event not found on external relays (NotFound)', {
bech32Id,
@ -166,13 +197,16 @@ export default function NotFound({ @@ -166,13 +197,16 @@ export default function NotFound({
relayCount: externalRelays.length
})
}
}
} catch (error) {
logger.error('External relay fetch failed (NotFound)', { error, bech32Id, hexEventId, externalRelays })
} finally {
setIsSearchingExternal(false)
if (!found) {
setTriedExternal(true)
}
}
}
const hasExternalRelays = externalRelays.length > 0

192
src/services/client-events.service.ts

@ -31,10 +31,14 @@ async function buildComprehensiveRelayListForEvents( @@ -31,10 +31,14 @@ async function buildComprehensiveRelayListForEvents(
})
}
// Chunk size for batched `ids` REQ filters in prefetchHexEventIds (one relay query per chunk).
const PREFETCH_HEX_IDS_CHUNK = 48
export class EventService {
private queryService: QueryService
private eventCacheMap = new Map<string, Promise<NEvent | undefined>>()
private sessionEventCache = new LRUCache<string, NEvent>({ max: 500, ttl: 1000 * 60 * 30 })
/** Callbacks waiting for an event id to appear in {@link sessionEventCache} (e.g. embed loads before timeline caches the note). */
private sessionEventWaiters = new Map<string, Set<() => void>>()
private eventDataLoader: DataLoader<string, NEvent | undefined>
private fetchEventFromBigRelaysDataloader: DataLoader<string, NEvent | undefined>
@ -50,15 +54,71 @@ export class EventService { @@ -50,15 +54,71 @@ export class EventService {
)
}
/**
 * Canonicalize an event id into a lowercase hex waiter key.
 * Accepts raw 64-char hex, note1, or nevent1; returns `null` for naddr or anything undecodable.
 */
private resolveHexWaiterKey(id: string): string | null {
  const candidate = id.trim()
  if (/^[0-9a-f]{64}$/i.test(candidate)) return candidate.toLowerCase()
  try {
    const decoded = nip19.decode(candidate)
    switch (decoded.type) {
      case 'note':
        return decoded.data
      case 'nevent':
        return decoded.data.id
      default:
        return null
    }
  } catch {
    return null
  }
}
/** Invoke every callback registered for `hexId`; iterate a snapshot so callbacks may unsubscribe mid-loop. */
private notifySessionEventWaiters(hexId: string): void {
  const waiters = this.sessionEventWaiters.get(hexId)
  if (!waiters || waiters.size === 0) return
  const snapshot = Array.from(waiters)
  for (const waiter of snapshot) {
    try {
      waiter()
    } catch (e) {
      // A misbehaving waiter must not block the rest.
      logger.warn('[EventService] sessionEventWaiter failed', { hexId: hexId.slice(0, 8), e })
    }
  }
}
/**
 * Register `callback` to fire when the event with this id lands in the session cache.
 * If it is already cached, the callback also fires once on a microtask; the subscription
 * stays registered either way. Supports hex, note1, and nevent1 ids (naddr is a no-op).
 * @returns an unsubscribe function (no-op for unsupported ids).
 */
subscribeWhenSessionHasEvent(eventId: string, callback: () => void): () => void {
  const hexKey = this.resolveHexWaiterKey(eventId)
  if (!hexKey) return () => {}
  if (this.sessionEventCache.has(hexKey)) {
    queueMicrotask(() => callback())
  }
  const waiters = this.sessionEventWaiters.get(hexKey) ?? new Set<() => void>()
  this.sessionEventWaiters.set(hexKey, waiters)
  waiters.add(callback)
  return () => {
    waiters.delete(callback)
    // Drop the empty bucket so the map does not grow unbounded.
    if (waiters.size === 0) {
      this.sessionEventWaiters.delete(hexKey)
    }
  }
}
/**
* Fetch single event by ID (hex, note1, nevent1, naddr1)
*/
async fetchEvent(id: string): Promise<NEvent | undefined> {
const trimmed = id.trim()
let hexId: string | undefined
if (/^[0-9a-f]{64}$/.test(id)) {
hexId = id
if (/^[0-9a-f]{64}$/i.test(trimmed)) {
hexId = trimmed.toLowerCase()
} else {
const { type, data } = nip19.decode(id)
try {
const { type, data } = nip19.decode(trimmed)
switch (type) {
case 'note':
hexId = data
@ -69,21 +129,102 @@ export class EventService { @@ -69,21 +129,102 @@ export class EventService {
case 'naddr':
break
}
} catch {
return undefined
}
}
if (hexId) {
const fromSession = this.sessionEventCache.get(hexId)
if (fromSession) return fromSession
const cachedPromise = this.eventCacheMap.get(hexId)
if (cachedPromise) return cachedPromise
if (cachedPromise) {
const resolved = await cachedPromise
if (resolved) return resolved
const fromSessionAfterMiss = this.sessionEventCache.get(hexId)
if (fromSessionAfterMiss) return fromSessionAfterMiss
const fromDb = await indexedDb.getEventFromPublicationStore(hexId)
if (fromDb) {
this.addEventToCache(fromDb)
return fromDb
}
// Prior load() finished with undefined but left the promise in cacheMap — never retrying.
this.eventDataLoader.clear(hexId)
}
}
const loaded = await this.eventDataLoader.load(hexId ?? trimmed)
if (hexId) {
const fromSessionAfter = this.sessionEventCache.get(hexId)
if (fromSessionAfter) return fromSessionAfter
}
return loaded
}
/**
 * Invalidate the DataLoader cache entry for this id so the next fetch hits IndexedDB/relays
 * again. (Otherwise a prior `undefined` result stays cached forever.)
 * Raw hex is lowercased; note1/nevent1 are decoded to their hex id; any other bech32 id
 * is cleared under its raw key; undecodable ids are ignored.
 */
private clearDataloaderCacheForFetchId(id: string): void {
  const trimmed = id.trim()
  if (/^[0-9a-f]{64}$/i.test(trimmed)) {
    this.eventDataLoader.clear(trimmed.toLowerCase())
    return
  }
  try {
    const { type, data } = nip19.decode(trimmed)
    if (type === 'note') {
      this.eventDataLoader.clear(data)
    } else if (type === 'nevent') {
      this.eventDataLoader.clear(data.id)
    } else {
      this.eventDataLoader.clear(trimmed)
    }
  } catch {
    /* ignore undecodable ids — nothing cached under a canonical key */
  }
}
/**
 * Force a retry of {@link fetchEvent}: first invalidate any cached (possibly `undefined`)
 * DataLoader result for this id, then fetch again so IndexedDB/relays are consulted anew.
 * Note: the stale early `return await this.fetchEvent(eventId)` previously made the
 * cache-clear unreachable, so no retry ever happened.
 */
async fetchEventForceRetry(eventId: string): Promise<NEvent | undefined> {
  this.clearDataloaderCacheForFetchId(eventId)
  return this.fetchEvent(eventId)
}
/**
* Batch-prefetch events by hex id into session cache (single REQ per chunk).
* Used by feeds so embedded notes resolve without N parallel fetches.
*/
async prefetchHexEventIds(rawIds: readonly string[]): Promise<void> {
const hexIds = [
...new Set(
rawIds
.map((id) => id.trim().toLowerCase())
.filter((id) => /^[0-9a-f]{64}$/.test(id))
)
]
const toFetch = hexIds.filter((id) => !this.sessionEventCache.has(id))
if (toFetch.length === 0) return
const relayUrls = await buildComprehensiveRelayListForEvents(undefined, [], [], [])
if (!relayUrls.length) return
for (let i = 0; i < toFetch.length; i += PREFETCH_HEX_IDS_CHUNK) {
const chunk = toFetch.slice(i, i + PREFETCH_HEX_IDS_CHUNK)
const events = await this.queryService.query(
relayUrls,
{ ids: chunk, limit: chunk.length },
undefined,
{
immediateReturn: false,
eoseTimeout: 2500,
globalTimeout: 12000
}
)
for (const ev of events) {
this.addEventToCache(ev)
}
}
}
/**
@ -164,7 +305,15 @@ export class EventService { @@ -164,7 +305,15 @@ export class EventService {
addEventToCache(event: NEvent): void {
const cleanEvent = { ...event }
delete (cleanEvent as any).relayStatuses
this.sessionEventCache.set(event.id, cleanEvent)
// REQ filters and nip19 decode use lowercase hex; some relays/clients emit uppercase ids.
// Session lookups and waiters must use the same canonical key or embeds miss events already on the timeline.
const id =
/^[0-9a-f]{64}$/i.test(cleanEvent.id) ? cleanEvent.id.toLowerCase() : cleanEvent.id
if (id !== cleanEvent.id) {
;(cleanEvent as NEvent).id = id
}
this.sessionEventCache.set(id, cleanEvent as NEvent)
this.notifySessionEventWaiters(id)
}
/**
@ -227,6 +376,7 @@ export class EventService { @@ -227,6 +376,7 @@ export class EventService {
this.eventDataLoader.clearAll()
this.sessionEventCache.clear()
this.eventCacheMap.clear()
this.sessionEventWaiters.clear()
this.fetchEventFromBigRelaysDataloader.clearAll()
logger.info('[EventService] In-memory caches cleared')
}
@ -238,8 +388,8 @@ export class EventService { @@ -238,8 +388,8 @@ export class EventService {
let filter: Filter | undefined
let relays: string[] = []
if (/^[0-9a-f]{64}$/.test(id)) {
filter = { ids: [id], limit: 1 }
if (/^[0-9a-f]{64}$/i.test(id)) {
filter = { ids: [id.toLowerCase()], limit: 1 }
} else {
const { type, data } = nip19.decode(id)
switch (type) {
@ -301,6 +451,14 @@ export class EventService { @@ -301,6 +451,14 @@ export class EventService {
return event
}
// Another code path (e.g. feed prefetch) may have populated session while we were in-flight.
if (filter.ids?.length === 1) {
const raw = filter.ids[0]
const key = /^[0-9a-f]{64}$/i.test(raw) ? raw.toLowerCase() : raw
const sess = this.sessionEventCache.get(key)
if (sess) return sess
}
return undefined
}
@ -343,8 +501,8 @@ export class EventService { @@ -343,8 +501,8 @@ export class EventService {
// This is especially important for non-replaceable events (not in 10000-19999 or 30000-39999 ranges)
const events = await this.queryService.query(relayUrls, filter, undefined, {
immediateReturn: isSingleEventById, // Return immediately when found
eoseTimeout: isSingleEventById ? 100 : 500,
globalTimeout: isSingleEventById ? 3000 : 10000
eoseTimeout: isSingleEventById ? 1500 : 500,
globalTimeout: isSingleEventById ? 12000 : 10000
})
const event = events.sort((a, b) => b.created_at - a.created_at)[0]
@ -378,17 +536,21 @@ export class EventService { @@ -378,17 +536,21 @@ export class EventService {
limit: ids.length
}, undefined, {
immediateReturn: isSingleEventFetch, // Return immediately when found
eoseTimeout: isSingleEventFetch ? 100 : 500,
globalTimeout: isSingleEventFetch ? 3000 : 10000
eoseTimeout: isSingleEventFetch ? 1500 : 500,
globalTimeout: isSingleEventFetch ? 12000 : 10000
})
const eventsMap = new Map<string, NEvent>()
for (const event of events) {
eventsMap.set(event.id, event)
const key = /^[0-9a-f]{64}$/i.test(event.id) ? event.id.toLowerCase() : event.id
eventsMap.set(key, event)
// Note: We can't track which relay returned which event in batch queries,
// but events are still cached and will be found in future queries
}
return ids.map((id) => eventsMap.get(id))
return ids.map((id) => {
const k = /^[0-9a-f]{64}$/i.test(id) ? id.toLowerCase() : id
return eventsMap.get(k)
})
}
}

5
src/services/client.service.ts

@ -1843,6 +1843,11 @@ class ClientService extends EventTarget { @@ -1843,6 +1843,11 @@ class ClientService extends EventTarget {
return this.eventService.fetchEventForceRetry(eventId)
}
/** Batch-prefetch by hex id into session cache (feed embeds). */
async prefetchHexEventIds(hexIds: readonly string[]): Promise<void> {
return this.eventService.prefetchHexEventIds(hexIds)
}
async fetchEventWithExternalRelays(eventId: string, externalRelays: string[]): Promise<NEvent | undefined> {
return this.eventService.fetchEventWithExternalRelays(eventId, externalRelays)
}

2
src/services/note-stats.service.ts

@ -324,7 +324,7 @@ class NoteStatsService { @@ -324,7 +324,7 @@ class NoteStatsService {
}
}
if (emoji.startsWith(':') && emoji.endsWith(':')) {
if (typeof emoji === 'string' && emoji.startsWith(':') && emoji.endsWith(':')) {
const emojiInfos = getEmojiInfosFromEmojiTags(evt.tags)
const shortcode = emoji.split(':')[1]
const emojiInfo = emojiInfos.find((info) => info.shortcode === shortcode)

1
src/services/spell.service.ts

@ -6,7 +6,6 @@ import { ExtendedKind, FAST_WRITE_RELAY_URLS } from '@/constants' @@ -6,7 +6,6 @@ import { ExtendedKind, FAST_WRITE_RELAY_URLS } from '@/constants'
import { getRelayUrlsWithFavoritesFastReadAndInbox } from '@/lib/favorites-feed-relays'
import { tagNameEquals } from '@/lib/tag'
import logger from '@/lib/logger'
import type { TRelayList } from '@/types'
import { normalizeUrl } from '@/lib/url'
import type { Event } from 'nostr-tools'
import type { Filter } from 'nostr-tools'

223
src/services/web.service.ts

@ -2,109 +2,100 @@ import { TWebMetadata } from '@/types' @@ -2,109 +2,100 @@ import { TWebMetadata } from '@/types'
import DataLoader from 'dataloader'
import logger from '@/lib/logger'
class WebService {
static instance: WebService
private webMetadataDataLoader = new DataLoader<string, TWebMetadata>(
async (urls) => {
return await Promise.all(
urls.map(async (url) => {
// Check if we should use proxy server to avoid CORS issues
// Uses the same proxy as wikistr (configured via VITE_PROXY_SERVER build arg)
// Since jumble and wikistr run on the same server, they share the same proxy endpoint
// Default to relative path /sites/ if VITE_PROXY_SERVER is not set (like wikistr does)
const proxyServer = import.meta.env.VITE_PROXY_SERVER
const proxyBase = proxyServer?.trim() || '/sites/'
const isProxyUrl = url.includes('/sites/') || url.includes('/sites/?url=')
// Build proxy URL - handle both full URLs and relative paths
let fetchUrl = url
if (!isProxyUrl) {
if (proxyBase.startsWith('http://') || proxyBase.startsWith('https://')) {
// Full URL - ensure it ends with / for query param usage
const proxyUrl = proxyBase.endsWith('/') ? proxyBase : `${proxyBase}/`
fetchUrl = `${proxyUrl}sites/?url=${encodeURIComponent(url)}`
} else {
// Relative path - ensure it ends with / for query param usage
const basePath = proxyBase.endsWith('/') ? proxyBase : (proxyBase || '/sites/')
fetchUrl = `${basePath}?url=${encodeURIComponent(url)}`
}
logger.info('[WebService] Using proxy for OG fetch', { originalUrl: url, proxyUrl: fetchUrl, proxyBase })
} else {
logger.info('[WebService] URL already proxied, using as-is', { url, fetchUrl })
}
/** True when HTML is the Vite/React dev shell or another SPA stub, not the target page. */
function htmlLooksLikeLocalDevAppShell(html: string): boolean {
const head = html.slice(0, 8000)
return (
head.includes('injectIntoGlobalHook') ||
head.includes('/@vite/') ||
head.includes('@vite/client') ||
head.includes('@react-refresh')
)
}
try {
const HTML_FETCH_HEADERS = {
Accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'User-Agent': 'Mozilla/5.0 (compatible; Jumble/1.0; +https://jumble.imwald.eu)'
}
// Add timeout and better error handling
// Use 35 second timeout (proxy has 30s, add buffer for network latency)
// This matches wikistr's timeout and allows Puppeteer to execute JavaScript
async function tryFetchHtml(fetchUrl: string, timeoutMs: number): Promise<string | null> {
const controller = new AbortController()
const timeoutId = setTimeout(() => controller.abort(), 35000) // 35 second timeout for proxy
// Fetch with appropriate headers
// Note: credentials: 'omit' prevents sending cookies, which avoids SameSite warnings
const timeoutId = setTimeout(() => controller.abort(), timeoutMs)
try {
const res = await fetch(fetchUrl, {
signal: controller.signal,
mode: 'cors',
credentials: 'omit',
headers: {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'User-Agent': 'Mozilla/5.0 (compatible; Jumble/1.0; +https://jumble.imwald.eu)'
}
headers: HTML_FETCH_HEADERS
})
if (!res.ok) return null
const html = await res.text()
if (html.length < 50) return null
if (htmlLooksLikeLocalDevAppShell(html)) return null
return html
} catch {
return null
} finally {
clearTimeout(timeoutId)
}
}
if (!res.ok) {
logger.warn('[WebService] Fetch failed with non-OK status', { url, fetchUrl, status: res.status, statusText: res.statusText })
return {}
function buildOgProxyFetchUrl(originalUrl: string, proxyServer: string): string {
if (proxyServer.startsWith('http://') || proxyServer.startsWith('https://')) {
const base = proxyServer.endsWith('/') ? proxyServer : `${proxyServer}/`
return `${base}sites/?url=${encodeURIComponent(originalUrl)}`
}
const basePath = proxyServer.endsWith('/') ? proxyServer : `${proxyServer}/`
return `${basePath}?url=${encodeURIComponent(originalUrl)}`
}
const html = await res.text()
/**
* OG HTML: always use `VITE_PROXY_SERVER` first when set; if that fails or is unset, fetch the page directly.
*/
async function fetchHtmlForOpenGraph(originalUrl: string): Promise<{ html: string; via: string } | null> {
const isAlreadyProxyRequest =
originalUrl.includes('/sites/') || originalUrl.includes('/sites/?url=')
// Check if we got a valid HTML response (not an error page or redirect)
if (html.length < 100) {
logger.warn('[WebService] Received suspiciously short HTML response', { url, fetchUrl, htmlLength: html.length })
if (isAlreadyProxyRequest) {
const html = await tryFetchHtml(originalUrl, 35_000)
return html ? { html, via: originalUrl } : null
}
// Log a snippet of the HTML to debug (first 500 chars)
logger.info('[WebService] Received HTML response', {
url,
fetchUrl,
htmlLength: html.length,
htmlSnippet: html.substring(0, 200)
const proxyServer = import.meta.env.VITE_PROXY_SERVER?.trim()
if (proxyServer) {
const proxyFetchUrl = buildOgProxyFetchUrl(originalUrl, proxyServer)
logger.debug('[WebService] OG fetch via VITE_PROXY_SERVER', { originalUrl, proxyFetchUrl })
let html = await tryFetchHtml(proxyFetchUrl, 35_000)
if (html) {
return { html, via: proxyFetchUrl }
}
logger.debug('[WebService] OG proxy unavailable or bad response; trying direct fetch', {
originalUrl
})
html = await tryFetchHtml(originalUrl, 15_000)
return html ? { html, via: 'direct' } : null
}
const html = await tryFetchHtml(originalUrl, 15_000)
return html ? { html, via: 'direct' } : null
}
function parseOpenGraphFromHtml(html: string, pageUrl: string): TWebMetadata {
const parser = new DOMParser()
const doc = parser.parseFromString(html, 'text/html')
// Check for OG tags
const ogTitleMeta = doc.querySelector('meta[property="og:title"]')
const ogDescMeta = doc.querySelector('meta[property="og:description"]')
const ogImageMeta = doc.querySelector('meta[property="og:image"]')
const titleTag = doc.querySelector('title')
logger.info('[WebService] Found meta tags', {
url,
hasOgTitle: !!ogTitleMeta,
hasOgDesc: !!ogDescMeta,
hasOgImage: !!ogImageMeta,
hasTitleTag: !!titleTag,
ogTitleContent: ogTitleMeta?.getAttribute('content')?.substring(0, 100),
titleTagContent: titleTag?.textContent?.substring(0, 100)
})
let title =
ogTitleMeta?.getAttribute('content') ||
titleTag?.textContent
// Filter out common redirect/loading titles (including variations with ellipsis)
let title = ogTitleMeta?.getAttribute('content') || titleTag?.textContent
if (title) {
const trimmedTitle = title.trim()
if (/^(Redirecting|Loading|Please wait|Redirect)(\.\.\.|…)?$/i.test(trimmedTitle) ||
if (
/^(Redirecting|Loading|Please wait|Redirect)(\.\.\.|…)?$/i.test(trimmedTitle) ||
trimmedTitle === '...' ||
trimmedTitle === '…') {
trimmedTitle === '…'
) {
title = undefined
}
}
@ -112,64 +103,82 @@ class WebService { @@ -112,64 +103,82 @@ class WebService {
const description =
doc.querySelector('meta[property="og:description"]')?.getAttribute('content') ||
(doc.querySelector('meta[name="description"]') as HTMLMetaElement | null)?.content
let image = (doc.querySelector('meta[property="og:image"]') as HTMLMetaElement | null)
?.content
// Convert relative image URLs to absolute URLs by prepending the domain
let image = (doc.querySelector('meta[property="og:image"]') as HTMLMetaElement | null)?.content
if (image) {
try {
const urlObj = new URL(url)
// Check if image is a relative URL (starts with / or doesn't have a protocol)
const urlObj = new URL(pageUrl)
if (image.startsWith('/')) {
// Absolute path on same domain
image = `${urlObj.protocol}//${urlObj.host}${image}`
} else if (!image.match(/^https?:\/\//)) {
// Relative path (e.g., "images/og.jpg")
// Resolve relative to the URL's path
const basePath = urlObj.pathname.substring(0, urlObj.pathname.lastIndexOf('/') + 1)
image = `${urlObj.protocol}//${urlObj.host}${basePath}${image}`
}
// Filter out favicon URLs - we want OG images, not favicons
const imageLower = image.toLowerCase()
if (imageLower.includes('/favicon') || imageLower.endsWith('/favicon.ico') || imageLower.endsWith('/favicon.svg')) {
logger.warn('[WebService] Filtered out favicon URL from OG image', { url, image })
if (
imageLower.includes('/favicon') ||
imageLower.endsWith('/favicon.ico') ||
imageLower.endsWith('/favicon.svg')
) {
logger.warn('[WebService] Filtered out favicon URL from OG image', { url: pageUrl, image })
image = undefined
} else {
logger.info('[WebService] Converted relative image URL to absolute', { originalImage: (doc.querySelector('meta[property="og:image"]') as HTMLMetaElement | null)?.content, absoluteImage: image })
}
} catch (error) {
logger.warn('[WebService] Failed to convert relative image URL', { image, url, error })
// Keep original image URL if conversion fails
logger.warn('[WebService] Failed to convert relative image URL', { image, url: pageUrl, error })
}
}
logger.info('[WebService] Extracted OG metadata', { url, title: title?.substring(0, 100), description: description?.substring(0, 100), hasImage: !!image })
// Filter out Jumble's default OG tags if we're fetching a different domain
// This prevents showing Jumble branding for other sites
try {
const urlObj = new URL(url)
const isJumbleDomain = urlObj.hostname === 'jumble.imwald.eu' || urlObj.hostname.includes('jumble')
const isJumbleDefaultTitle = title?.includes('Jumble - Imwald Edition') || title?.includes('Jumble Imwald Edition')
const isJumbleDefaultDesc = description?.includes('A user-friendly Nostr client focused on relay feed browsing')
// If we're fetching a non-jumble domain but got jumble's default OG tags, treat as no OG data
const urlObj = new URL(pageUrl)
const isJumbleDomain =
urlObj.hostname === 'jumble.imwald.eu' || urlObj.hostname.includes('jumble')
const isJumbleDefaultTitle =
title?.includes('Jumble - Imwald Edition') || title?.includes('Jumble Imwald Edition')
const isJumbleDefaultDesc = description?.includes(
'A user-friendly Nostr client focused on relay feed browsing'
)
if (!isJumbleDomain && (isJumbleDefaultTitle || isJumbleDefaultDesc)) {
logger.warn('[WebService] Filtered out Jumble default OG tags for external domain - proxy may be returning wrong page', { url, hostname: urlObj.hostname, title, description: description?.substring(0, 100) })
logger.debug('[WebService] Filtered out Jumble default OG tags for external domain', {
url: pageUrl,
hostname: urlObj.hostname
})
return {}
}
} catch {
// If URL parsing fails, continue with what we have
/* ignore */
}
return { title, description, image }
}
class WebService {
static instance: WebService
private webMetadataDataLoader = new DataLoader<string, TWebMetadata>(
async (urls) => {
return await Promise.all(
urls.map(async (url) => {
try {
const loaded = await fetchHtmlForOpenGraph(url)
if (!loaded) {
logger.debug('[WebService] No HTML for OG metadata', { url })
return {}
}
logger.debug('[WebService] Received HTML for OG', {
url,
via: loaded.via,
htmlLength: loaded.html.length
})
return parseOpenGraphFromHtml(loaded.html, url)
} catch (error) {
// Log errors for debugging
if (error instanceof DOMException && error.name === 'AbortError') {
logger.warn('[WebService] Fetch aborted (timeout)', { url, fetchUrl })
logger.warn('[WebService] Fetch aborted (timeout)', { url })
} else {
logger.error('[WebService] Failed to fetch OG metadata', { url, fetchUrl, error })
logger.error('[WebService] Failed to fetch OG metadata', { url, error })
}
return {}
}

10
vite.config.ts

@ -56,6 +56,16 @@ export default defineConfig({ @@ -56,6 +56,16 @@ export default defineConfig({
'@': path.resolve(__dirname, './src')
}
},
server: {
// OG/link preview uses `/sites/?url=…`. Without this, Vite serves `index.html` and WebService parses the app shell.
// Run the scraper on 8090 per PROXY_SETUP.md, or rely on allorigins fallback in dev (web.service.ts).
proxy: {
'/sites': {
target: 'http://127.0.0.1:8090',
changeOrigin: true
}
}
},
build: {
rollupOptions: {
output: {

Loading…
Cancel
Save