Browse Source

bug-fixes

imwald
Silberengel 1 month ago
parent
commit
d3e02ed233
  1. 7
      src/components/Note/PublicationIndex/PublicationIndex.tsx
  2. 17
      src/constants.ts
  3. 35
      src/hooks/usePublicationSectionLoader.ts
  4. 62
      src/hooks/useQuoteEvents.tsx
  5. 85
      src/lib/publication-section-fetch.ts
  6. 57
      src/services/client.service.ts

7
src/components/Note/PublicationIndex/PublicationIndex.tsx

@ -21,6 +21,10 @@ import { upsertRenderedPublicationEvents } from '@/lib/publication-rendered-even
interface PublicationReference { interface PublicationReference {
coordinate?: string coordinate?: string
/**
* Optional historical snapshot id (`a` tag field 4) or direct `e` tag id.
* For `a` references this is metadata only and MUST NOT drive section fetches.
*/
eventId?: string eventId?: string
event?: Event event?: Event
kind?: number kind?: number
@ -138,6 +142,9 @@ export default function PublicationIndex({
refs.push({ refs.push({
type: 'a', type: 'a',
coordinate: parsed.coordinate, coordinate: parsed.coordinate,
// `a[3]` is historization metadata for this coordinate revision only.
// Keep it for diagnostics/UI context; fetches resolve by coordinate, not by this id.
eventId: tag[3],
kind: parsed.kind, kind: parsed.kind,
pubkey: parsed.pubkey, pubkey: parsed.pubkey,
identifier: parsed.identifier, identifier: parsed.identifier,

17
src/constants.ts

@ -477,10 +477,23 @@ export function isSocialKindBlockedKind(kind: number): boolean {
return SOCIAL_KIND_BLOCKED_KIND_SET.has(kind) return SOCIAL_KIND_BLOCKED_KIND_SET.has(kind)
} }
/** True when the filter is unrestricted by kind or includes any {@link SOCIAL_KIND_BLOCKED_KINDS}. */ /**
* True when a filter should avoid relays that do not carry social-note surface.
*
* Important: kindless lookup filters (e.g. `ids`, `authors + #d`) are often used for
* publication / replaceable resolution and must keep relays like thecitadel in scope.
*/
export function relayFilterIncludesSocialKindBlockedKind(filter: Filter): boolean { export function relayFilterIncludesSocialKindBlockedKind(filter: Filter): boolean {
const k = filter.kinds const k = filter.kinds
if (k === undefined) return true if (k === undefined) {
const ids = Array.isArray(filter.ids) ? filter.ids.length : 0
const dTags = Array.isArray((filter as Record<string, unknown>)['#d'])
? ((filter as Record<string, unknown>)['#d'] as unknown[]).length
: 0
// Scoped lookups are not "broad social feed" queries.
if (ids > 0 || dTags > 0) return false
return true
}
const arr = Array.isArray(k) ? k : [k] const arr = Array.isArray(k) ? k : [k]
return arr.some((kind) => SOCIAL_KIND_BLOCKED_KIND_SET.has(kind)) return arr.some((kind) => SOCIAL_KIND_BLOCKED_KIND_SET.has(kind))
} }

35
src/hooks/usePublicationSectionLoader.ts

@ -48,6 +48,18 @@ function signatureOfRefs(refs: PublicationSectionRef[]): string {
return refs.map((r) => publicationRefKey(r)).join('|') return refs.map((r) => publicationRefKey(r)).join('|')
} }
/**
 * Return the relay URLs with blanks removed and duplicates dropped,
 * preserving first-seen order. Entries are trimmed before comparison.
 */
function dedupeRelayUrls(urls: string[]): string[] {
  const seen = new Set<string>()
  const result: string[] = []
  for (const raw of urls) {
    const trimmed = (raw || '').trim()
    if (trimmed.length === 0) continue
    if (seen.has(trimmed)) continue
    seen.add(trimmed)
    result.push(trimmed)
  }
  return result
}
export function usePublicationSectionLoader(indexEvent: Event, refs: PublicationSectionRef[]) { export function usePublicationSectionLoader(indexEvent: Event, refs: PublicationSectionRef[]) {
const indexId = indexEvent.id const indexId = indexEvent.id
const refsSignature = useMemo(() => signatureOfRefs(refs), [refs]) const refsSignature = useMemo(() => signatureOfRefs(refs), [refs])
@ -168,7 +180,8 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
if (row.type === 'e' && row.eventId) { if (row.type === 'e' && row.eventId) {
const hex = resolvePublicationEventIdToHex(row.eventId) const hex = resolvePublicationEventIdToHex(row.eventId)
if (hex) ev = await indexedDb.getEventFromPublicationStore(hex) if (hex) ev = await indexedDb.getEventFromPublicationStore(hex)
} else if (row.coordinate) { }
if (!ev && row.coordinate) {
ev = await indexedDb.getPublicationEvent(row.coordinate) ev = await indexedDb.getPublicationEvent(row.coordinate)
} }
if (ev) byDb.set(row.key, ev) if (ev) byDb.set(row.key, ev)
@ -227,6 +240,7 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
await Promise.all( await Promise.all(
unresolved.map(async (row) => { unresolved.map(async (row) => {
try { try {
// Only `e` refs are fetched by event id; `a` refs resolve by coordinate.
if (row.type === 'e' && row.eventId) { if (row.type === 'e' && row.eventId) {
const ev = await withTimeout( const ev = await withTimeout(
eventService.fetchEvent(row.eventId), eventService.fetchEvent(row.eventId),
@ -238,7 +252,13 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
if (row.coordinate) { if (row.coordinate) {
const parsed = parsePublicationATagCoordinate(row.coordinate) const parsed = parsePublicationATagCoordinate(row.coordinate)
if (!parsed) return if (!parsed) return
const relaysToTry = row.relay ? [row.relay] : relayUrls // Relay hints in `a` tags are often stale. Keep the hint first, but also try
// current section relay sets so one dead hinted relay cannot force a false miss.
const relaysToTry = dedupeRelayUrls(
row.relay
? [row.relay, ...relayUrls, ...fallbackRelayUrls]
: [...relayUrls, ...fallbackRelayUrls]
)
const ev = await withTimeout( const ev = await withTimeout(
queryService queryService
.fetchEvents( .fetchEvents(
@ -323,14 +343,13 @@ export function usePublicationSectionLoader(indexEvent: Event, refs: Publication
if (relayUrls.length === 0) return if (relayUrls.length === 0) return
const sig = `${indexId}:${refsSignature}` const sig = `${indexId}:${refsSignature}`
if (autoLoadedSignatureRef.current === sig) return if (autoLoadedSignatureRef.current === sig) return
autoLoadedSignatureRef.current = sig
const idleKeys = rows.filter((r) => r.status === 'idle').map((r) => r.key) const idleKeys = rows.filter((r) => r.status === 'idle').map((r) => r.key)
if (idleKeys.length > 0) { if (idleKeys.length === 0) return
if (import.meta.env.DEV) { autoLoadedSignatureRef.current = sig
logger.info('[PublicationSection] flush_start', { keys: idleKeys, relayCount: relayUrls.length }) if (import.meta.env.DEV) {
} logger.info('[PublicationSection] flush_start', { keys: idleKeys, relayCount: relayUrls.length })
requestKeys(idleKeys)
} }
requestKeys(idleKeys)
}, [indexId, refsSignature, relayUrls, rows, requestKeys]) }, [indexId, refsSignature, relayUrls, rows, requestKeys])
const referencesWithEvents = useMemo( const referencesWithEvents = useMemo(

62
src/hooks/useQuoteEvents.tsx

@ -91,6 +91,7 @@ export function useQuoteEvents(event: Event | null, enabled: boolean) {
: ev.id : ev.id
const qeIdForTagFilter = const qeIdForTagFilter =
/^[0-9a-f]{64}$/i.test(filterQeId) ? filterQeId.toLowerCase() : filterQeId /^[0-9a-f]{64}$/i.test(filterQeId) ? filterQeId.toLowerCase() : filterQeId
const qeIdIsHexEventId = /^[0-9a-f]{64}$/i.test(qeIdForTagFilter)
const eventCoordinate = isReplaceableEvent(ev.kind) const eventCoordinate = isReplaceableEvent(ev.kind)
? getReplaceableCoordinateFromEvent(ev) ? getReplaceableCoordinateFromEvent(ev)
: `${ev.kind}:${ev.pubkey}:${ev.id}` : `${ev.kind}:${ev.pubkey}:${ev.id}`
@ -98,16 +99,35 @@ export function useQuoteEvents(event: Event | null, enabled: boolean) {
const highlightKinds = [kinds.Highlights] as const const highlightKinds = [kinds.Highlights] as const
const otherBacklinkKinds = [...THREAD_BACKLINK_STREAM_KINDS_WITHOUT_HIGHLIGHT] const otherBacklinkKinds = [...THREAD_BACKLINK_STREAM_KINDS_WITHOUT_HIGHLIGHT]
const { closer, timelineKey } = await client.subscribeTimeline( const subRequests: { urls: string[]; filter: Filter }[] = [
[ {
{ urls: finalRelayUrls,
urls: finalRelayUrls, filter: { '#q': [qeIdForTagFilter], kinds: [kinds.ShortTextNote], limit: LIMIT }
filter: { '#q': [qeIdForTagFilter], kinds: [kinds.ShortTextNote], limit: LIMIT } },
}, {
{ urls: finalRelayUrls,
urls: finalRelayUrls, filter: { '#q': [qeIdForTagFilter], kinds: [...highlightKinds], limit: LIMIT }
filter: { '#q': [qeIdForTagFilter], kinds: [...highlightKinds], limit: LIMIT } },
}, {
urls: finalRelayUrls,
filter: {
'#a': [eventCoordinate],
kinds: [...highlightKinds],
limit: LIMIT
}
},
{
urls: finalRelayUrls,
filter: {
'#a': [eventCoordinate],
kinds: otherBacklinkKinds,
limit: LIMIT
}
}
]
// `#e` tag filters must use 64-hex event ids. For replaceable roots we use `#a`/`#q` only.
if (qeIdIsHexEventId) {
subRequests.push(
{ {
urls: finalRelayUrls, urls: finalRelayUrls,
filter: { filter: {
@ -123,24 +143,12 @@ export function useQuoteEvents(event: Event | null, enabled: boolean) {
kinds: otherBacklinkKinds, kinds: otherBacklinkKinds,
limit: LIMIT limit: LIMIT
} }
},
{
urls: finalRelayUrls,
filter: {
'#a': [eventCoordinate],
kinds: [...highlightKinds],
limit: LIMIT
}
},
{
urls: finalRelayUrls,
filter: {
'#a': [eventCoordinate],
kinds: otherBacklinkKinds,
limit: LIMIT
}
} }
], )
}
const { closer, timelineKey } = await client.subscribeTimeline(
subRequests,
{ {
onEvents: (batch, eosed) => { onEvents: (batch, eosed) => {
if (cancelled) return if (cancelled) return

85
src/lib/publication-section-fetch.ts

@ -110,6 +110,7 @@ const IDS_CHUNK = 44
const D_CHUNK = 28 const D_CHUNK = 28
const ANY_KIND_LIMIT_PER_D = 12 const ANY_KIND_LIMIT_PER_D = 12
const AUTHOR_KIND_SCAN_LIMIT = 200 const AUTHOR_KIND_SCAN_LIMIT = 200
const HINT_RELAY_AUTHOR_KIND_SCAN_LIMIT = 1200
function dTagOf(ev: Event): string | undefined { function dTagOf(ev: Event): string | undefined {
const d = ev.tags.find((t) => (t[0] || '').trim().toLowerCase() === 'd')?.[1] const d = ev.tags.find((t) => (t[0] || '').trim().toLowerCase() === 'd')?.[1]
@ -134,6 +135,7 @@ export async function batchFetchPublicationSectionEvents(
const aRefs = refs.filter((r) => r.type === 'a' && r.coordinate && r.pubkey && typeof r.kind === 'number') const aRefs = refs.filter((r) => r.type === 'a' && r.coordinate && r.pubkey && typeof r.kind === 'number')
for (const ref of refs) { for (const ref of refs) {
// Only explicit `e` refs are resolved by id. For `a` refs, tag[3] is historization metadata only.
if (ref.type !== 'e' || !ref.eventId) continue if (ref.type !== 'e' || !ref.eventId) continue
const key = publicationRefKey(ref) const key = publicationRefKey(ref)
const hex = resolvePublicationEventIdToHex(ref.eventId) const hex = resolvePublicationEventIdToHex(ref.eventId)
@ -334,6 +336,89 @@ export async function batchFetchPublicationSectionEvents(
} }
} }
// Secondary hint pass: some relays do not index `#d` reliably for 30040/30041.
// For unresolved refs with an explicit relay hint, scan that same relay by author+kind
// and resolve `d` client-side before doing broader multi-relay fallbacks.
const unresolvedAfterHintPass = aRefs.filter((r) => !out.has(publicationRefKey(r)))
const byHintRelayForScan = new Map<string, PublicationSectionRef[]>()
for (const ref of unresolvedAfterHintPass) {
const relay = normalizeUrl(ref.relay || '') || ref.relay?.trim()
if (!relay) continue
const list = byHintRelayForScan.get(relay)
if (list) list.push(ref)
else byHintRelayForScan.set(relay, [ref])
}
for (const [relay, relayRefs] of byHintRelayForScan) {
const groups = new Map<string, { pubkey: string; kind: number }>()
for (const ref of relayRefs) {
const key = `${ref.pubkey!.toLowerCase()}:${ref.kind!}`
if (!groups.has(key)) {
groups.set(key, { pubkey: ref.pubkey!.toLowerCase(), kind: ref.kind! })
}
}
const scanFilters: Filter[] = []
for (const g of groups.values()) {
scanFilters.push({
authors: [g.pubkey],
kinds: [g.kind],
limit: HINT_RELAY_AUTHOR_KIND_SCAN_LIMIT
})
}
if (scanFilters.length === 0) continue
if (import.meta.env.DEV) {
logger.info('[PublicationSection] relay_hint_author_kind_scan_start', {
relay,
refCount: relayRefs.length,
filterCount: scanFilters.length
})
}
try {
const scanEvents = await queryService.fetchEvents([relay], scanFilters, {
globalTimeout: 10_000,
eoseTimeout: 1_500,
firstRelayResultGraceMs: false
})
const scanByCoord = new Map<string, Event>()
for (const ev of scanEvents) {
const coord = coordinateOfEvent(ev)
if (!coord) continue
for (const k of publicationCoordinateLookupKeys(coord)) {
const prev = scanByCoord.get(k)
if (!prev || ev.created_at > prev.created_at) scanByCoord.set(k, ev)
}
}
for (const ref of relayRefs) {
const key = publicationRefKey(ref)
if (out.has(key)) continue
const coord = ref.coordinate!
let ev: Event | undefined
for (const lk of publicationCoordinateLookupKeys(coord)) {
ev = scanByCoord.get(lk)
if (ev) break
}
if (ev) out.set(key, ev)
}
if (import.meta.env.DEV) {
logger.info('[PublicationSection] relay_hint_author_kind_scan_done', {
relay,
eventsReturned: scanEvents.length,
unresolvedAfterScan: relayRefs
.map((r) => publicationRefKey(r))
.filter((k) => !out.has(k))
.slice(0, 8)
})
}
} catch {
if (import.meta.env.DEV) {
logger.warn('[PublicationSection] relay_hint_author_kind_scan_error', {
relay,
filterCount: scanFilters.length
})
}
}
}
// Last fallback: author + #d across any kind. // Last fallback: author + #d across any kind.
const unresolvedAfterHint = aRefs.filter((r) => !out.has(publicationRefKey(r))) const unresolvedAfterHint = aRefs.filter((r) => !out.has(publicationRefKey(r)))
if (unresolvedAfterHint.length > 0) { if (unresolvedAfterHint.length > 0) {

57
src/services/client.service.ts

@ -26,6 +26,47 @@ function filterForRelay(f: Filter, relaySupportsSearch: boolean): Filter {
return rest as Filter return rest as Filter
} }
const HEX_EVENT_ID_RE = /^[0-9a-f]{64}$/i

/**
 * Normalize an event reference (bare 64-hex id, `nostr:`-prefixed value,
 * `note1…` or `nevent1…` bech32) to a lowercase 64-hex event id.
 * Returns null when the reference cannot be resolved to an event id.
 */
function decodeEventRefForETagFilter(raw: string): string | null {
  const candidate = raw.trim()
  if (candidate === '') return null
  // Prefix check is case-insensitive, but the slice keeps the original casing.
  const hasNostrScheme = candidate.toLowerCase().startsWith('nostr:')
  const ref = hasNostrScheme ? candidate.slice(6).trim() : candidate
  if (HEX_EVENT_ID_RE.test(ref)) return ref.toLowerCase()
  try {
    const parsed = nip19.decode(ref)
    if (parsed.type === 'note') return parsed.data
    if (parsed.type === 'nevent') return parsed.data.id
  } catch {
    // Malformed bech32 or unsupported type: treat as unresolvable.
  }
  return null
}
/**
 * Rewrite a subscribe filter so `#e`/`#E` entries are canonical 64-hex event
 * ids (merged into `#e`, `#E` dropped). A filter with no `#e`/`#E` values is
 * returned as-is (shallow copy). Returns null when event refs were present
 * but none decoded — such a filter would match nothing on relays.
 */
function sanitizeETagFilterForSubscribe(filter: Filter): Filter | null {
  const next = { ...filter } as Filter & { '#e'?: string[]; '#E'?: string[] }
  const lower = Array.isArray(next['#e']) ? next['#e'] : []
  const upper = Array.isArray(next['#E']) ? next['#E'] : []
  if (lower.length === 0 && upper.length === 0) return next
  // Set keeps first-seen order while deduplicating decoded ids.
  const canonical = new Set<string>()
  for (const value of [...lower, ...upper]) {
    const id = decodeEventRefForETagFilter(String(value))
    if (id) canonical.add(id)
  }
  if (canonical.size === 0) return null
  next['#e'] = [...canonical]
  delete next['#E']
  return next
}
/**
 * Normalize a single filter or a filter list into a sanitized filter array,
 * dropping any filter whose event-id tags could not be decoded at all.
 */
function sanitizeSubscribeFiltersBeforeReq(filter: Filter | Filter[]): Filter[] {
  const inputs: Filter[] = Array.isArray(filter) ? filter : [filter]
  const sanitized: Filter[] = []
  for (const candidate of inputs) {
    const cleaned = sanitizeETagFilterForSubscribe(candidate)
    if (cleaned !== null) sanitized.push(cleaned)
  }
  return sanitized
}
/** Single key for `pool.seenOn` / query seen-on maps (hex ids are case-insensitive). */ /** Single key for `pool.seenOn` / query seen-on maps (hex ids are case-insensitive). */
function canonicalSeenOnEventId(eventId: string): string { function canonicalSeenOnEventId(eventId: string): string {
const t = eventId.trim() const t = eventId.trim()
@ -82,6 +123,7 @@ import {
Filter, Filter,
kinds, kinds,
matchFilters, matchFilters,
nip19,
Event as NEvent, Event as NEvent,
Relay, Relay,
SimplePool, SimplePool,
@ -1823,7 +1865,20 @@ class ClientService extends EventTarget {
) { ) {
const originalDedupedRelays = Array.from(new Set(urls)) const originalDedupedRelays = Array.from(new Set(urls))
let relays = originalDedupedRelays let relays = originalDedupedRelays
const filters = Array.isArray(filter) ? filter : [filter] const filters = sanitizeSubscribeFiltersBeforeReq(filter)
if (filters.length === 0) {
logger.debug('[relay-req] batch_skip', {
reason: 'no_filters_after_sanitize',
filterSummary: summarizeFiltersForRelayLog(Array.isArray(filter) ? filter : [filter])
})
queueMicrotask(() => {
oneose?.(true)
relayReqLog?.onBatchEnd?.([])
})
return {
close: () => {}
}
}
const stripSocialBlockedRelays = const stripSocialBlockedRelays =
SOCIAL_KIND_BLOCKED_RELAY_URLS.length > 0 && SOCIAL_KIND_BLOCKED_RELAY_URLS.length > 0 &&

Loading…
Cancel
Save