Browse Source

improve publication handling

imwald
Silberengel 1 month ago
parent
commit
507b13d632
  1. 21
      src/components/ContentPreview/index.tsx
  2. 1570
      src/components/Note/PublicationIndex/PublicationIndex.tsx
  3. 16
      src/components/NoteOptions/useMenuActions.tsx
  4. 1
      src/components/ParentNotePreview/index.tsx
  5. 14
      src/components/UniversalContent/Wikilink.tsx
  6. 277
      src/hooks/usePublicationSectionLoader.ts
  7. 29
      src/lib/publication-coordinate.ts
  8. 215
      src/lib/publication-section-fetch.ts
  9. 22
      src/services/indexed-db.service.ts

21
src/components/ContentPreview/index.tsx

@ -46,6 +46,14 @@ const CONTENT_PREVIEW_HOOK_PLACEHOLDER = { @@ -46,6 +46,14 @@ const CONTENT_PREVIEW_HOOK_PLACEHOLDER = {
sig: ''
} as Event
const PARENT_REPLY_POLL_BLURB_MAX = 150

/**
 * Short single-line snippet of a poll's question for the reply-to-parent strip.
 * Collapses runs of whitespace to single spaces and hard-truncates to
 * PARENT_REPLY_POLL_BLURB_MAX characters (no ellipsis is appended).
 *
 * @param content Raw poll event content.
 * @returns Normalized snippet, possibly empty.
 */
function parentReplyPollQuestionBlurb(content: string): string {
  const normalized = content.trim().replace(/\s+/g, ' ')
  if (normalized.length <= PARENT_REPLY_POLL_BLURB_MAX) return normalized
  // slice() already yields a string; the original template-literal wrapper was a no-op.
  return normalized.slice(0, PARENT_REPLY_POLL_BLURB_MAX)
}
/** Keep spacing/margins on the outer wrapper; put line-clamp on the preview body so it still clamps text. */
function splitPreviewLayoutClasses(className?: string) {
if (!className?.trim()) return { outer: undefined, body: undefined }
@ -66,11 +74,14 @@ export default function ContentPreview({ @@ -66,11 +74,14 @@ export default function ContentPreview({
event,
className,
/** Inline parent lines (e.g. reply thread): zap receipts match compact thread styling. */
previewDensity
previewDensity,
/** Reply-to-parent strip: polls show a short question snippet instead of full poll UI. */
forParentReplyBlurb = false
}: {
event?: Event
className?: string
previewDensity?: 'default' | 'compact'
forParentReplyBlurb?: boolean
}) {
const { t } = useTranslation()
const reactionDisplay = useNotificationReactionDisplay(event ?? CONTENT_PREVIEW_HOOK_PLACEHOLDER)
@ -146,6 +157,14 @@ export default function ContentPreview({ @@ -146,6 +157,14 @@ export default function ContentPreview({
}
if (event.kind === ExtendedKind.POLL) {
if (forParentReplyBlurb) {
const snippet = parentReplyPollQuestionBlurb(event.content ?? '')
return (
<div className={cn('pointer-events-none min-w-0 text-muted-foreground', previewOuter)}>
<div className={cn('min-w-0 truncate', previewBody)}>{snippet || t('Poll')}</div>
</div>
)
}
return withKindRow(<PollPreview event={event} />)
}

1570
src/components/Note/PublicationIndex/PublicationIndex.tsx

File diff suppressed because it is too large Load Diff

16
src/components/NoteOptions/useMenuActions.tsx

@ -423,7 +423,6 @@ export function useMenuActions({ @@ -423,7 +423,6 @@ export function useMenuActions({
return getLongFormArticleMetadataFromEvent(event)
}, [isArticleType, event])
// Extract d-tag for Wikistr URL
const dTag = useMemo(() => {
if (!isArticleType) return ''
return event.tags.find(tag => tag[0] === 'd')?.[1] || ''
@ -548,12 +547,6 @@ export function useMenuActions({ @@ -548,12 +547,6 @@ export function useMenuActions({
}
// View on external sites functions
const handleViewOnWikistr = () => {
if (!dTag) return
closeDrawer()
window.open(`https://wikistr.imwald.eu/${dTag}*${event.pubkey}`, '_blank', 'noopener,noreferrer')
}
const handleViewOnAlexandria = () => {
if (!naddr) return
closeDrawer()
@ -770,7 +763,7 @@ export function useMenuActions({ @@ -770,7 +763,7 @@ export function useMenuActions({
event.kind === ExtendedKind.WIKI_ARTICLE ||
event.kind === ExtendedKind.WIKI_ARTICLE_MARKDOWN
) {
// For 30041, 30040, 30818, 30817: Alexandria and Wikistr
// For 30041, 30040, 30818, 30817: Alexandria
if (naddr) {
actions.push({
icon: BookOpen,
@ -778,13 +771,6 @@ export function useMenuActions({ @@ -778,13 +771,6 @@ export function useMenuActions({
onClick: handleViewOnAlexandria
})
}
if (dTag) {
actions.push({
icon: Globe,
label: t('View on Wikistr'),
onClick: handleViewOnWikistr
})
}
}
}

1
src/components/ParentNotePreview/index.tsx

@ -114,6 +114,7 @@ export default function ParentNotePreview({ @@ -114,6 +114,7 @@ export default function ParentNotePreview({
className="pointer-events-none"
event={finalEvent}
previewDensity={appearance === 'subtle' ? 'compact' : 'default'}
forParentReplyBlurb
/>
</div>
</div>

14
src/components/UniversalContent/Wikilink.tsx

@ -12,11 +12,6 @@ interface WikilinkProps { @@ -12,11 +12,6 @@ interface WikilinkProps {
export default function Wikilink({ dTag, displayText, className }: WikilinkProps) {
const [isOpen, setIsOpen] = useState(false)
const handleWikistrClick = () => {
const url = `https://wikistr.imwald.eu/${dTag}`
window.open(url, '_blank', 'noopener,noreferrer')
}
const handleAlexandriaClick = () => {
const url = `https://next-alexandria.gitcitadel.eu/events?d=${dTag}`
window.open(url, '_blank', 'noopener,noreferrer')
@ -35,15 +30,6 @@ export default function Wikilink({ dTag, displayText, className }: WikilinkProps @@ -35,15 +30,6 @@ export default function Wikilink({ dTag, displayText, className }: WikilinkProps
</CollapsibleTrigger>
<CollapsibleContent className="mt-1">
<div className="bg-muted/30 rounded-md p-2 text-xs space-y-1">
<Button
variant="ghost"
size="sm"
className="w-full justify-start text-xs h-6"
onClick={handleWikistrClick}
>
<ExternalLink className="h-3 w-3 mr-1" />
View on Wikistr
</Button>
<Button
variant="ghost"
size="sm"

277
src/hooks/usePublicationSectionLoader.ts

@ -0,0 +1,277 @@ @@ -0,0 +1,277 @@
import {
batchFetchPublicationSectionEvents,
buildPublicationSectionRelayUrls,
publicationRefKey,
resolvePublicationEventIdToHex,
type PublicationSectionRef
} from '@/lib/publication-section-fetch'
import { generateBech32IdFromATag } from '@/lib/tag'
import { isReplaceableEvent } from '@/lib/event'
import client from '@/services/client.service'
import { eventService } from '@/services/client.service'
import indexedDb from '@/services/indexed-db.service'
import type { Event } from 'nostr-tools'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
/** Lifecycle of one publication section row: idle → loading → loaded | error. */
export type SectionLoadStatus = 'idle' | 'loading' | 'loaded' | 'error'

/** One section of a publication index: the parsed a/e ref, its load status, and the event once resolved. */
export type PublicationSectionRow = {
  ref: PublicationSectionRef
  status: SectionLoadStatus
  // Present only when status is 'loaded' (cleared again on retry).
  event?: Event
}

// Local alias for the shared key helper so call sites inside this hook stay short.
function refKey(ref: PublicationSectionRef): string {
  return publicationRefKey(ref)
}
/**
 * Resolve as many refs as possible from the IndexedDB cache before touching relays.
 * Lookups are independent of each other, so they run concurrently via Promise.all
 * instead of awaiting one ref at a time (the original loop serialized every lookup).
 * Per-ref failures are swallowed on purpose: a cache miss/error just means the ref
 * falls through to the relay batch fetch later.
 *
 * @param refs Section refs to look up.
 * @returns Map of refKey → cached event for every ref found in IndexedDB.
 */
async function hydrateRefsFromIndexedDb(refs: PublicationSectionRef[]): Promise<Map<string, Event>> {
  const out = new Map<string, Event>()
  await Promise.all(
    refs.map(async (ref) => {
      const key = refKey(ref)
      if (!key) return
      try {
        if (ref.type === 'a' && ref.coordinate) {
          const ev = await indexedDb.getPublicationEvent(ref.coordinate)
          if (ev) out.set(key, ev)
        } else if (ref.type === 'e' && ref.eventId) {
          const hex = resolvePublicationEventIdToHex(ref.eventId)
          if (!hex) return
          let ev = await indexedDb.getEventFromPublicationStore(hex)
          if (!ev && ref.kind != null && ref.pubkey && isReplaceableEvent(ref.kind)) {
            // Replaceable events may be cached under (pubkey, kind) rather than by id;
            // only accept the cached copy if it is the exact event this ref points at.
            const rep = await indexedDb.getReplaceableEvent(ref.pubkey, ref.kind)
            if (rep && rep.id === hex) ev = rep
          }
          if (ev) out.set(key, ev)
        }
      } catch {
        /* ignore per-ref cache errors */
      }
    })
  )
  return out
}
/**
 * Last-resort single-event fetch through eventService for a ref the batched
 * REQ did not return. Any failure is swallowed and reported as undefined.
 */
async function fetchSingleRefFallback(ref: PublicationSectionRef): Promise<Event | undefined> {
  try {
    switch (ref.type) {
      case 'a': {
        if (!ref.coordinate) break
        const bech32 = generateBech32IdFromATag(['a', ref.coordinate, ref.relay || '', ''])
        return bech32 ? await eventService.fetchEvent(bech32) : undefined
      }
      case 'e': {
        if (!ref.eventId) break
        return await eventService.fetchEvent(ref.eventId)
      }
    }
  } catch {
    /* ignore */
  }
  return undefined
}
/**
 * Lazy publication sections: debounced batched REQ (chunked `ids` + grouped `authors`/`kinds`/`#d`),
 * IndexedDB first, capped relay list. Call {@link requestKeys} from IntersectionObserver.
 *
 * @param indexEvent     Publication index event; its author is used when building the relay list.
 * @param referencesData Parsed a/e section refs in display order.
 * @returns Ordered keys, the row map, relayReady flag, requestKeys/retryKeys triggers,
 *          failedKeys, and referencesWithEvents (refs merged with load status/event for rendering).
 */
export function usePublicationSectionLoader(indexEvent: Event, referencesData: PublicationSectionRef[]) {
  // Display-ordered keys for all refs; refs without a usable key are dropped.
  const orderedKeys = useMemo(() => {
    const keys: string[] = []
    for (const r of referencesData) {
      const k = refKey(r)
      if (k) keys.push(k)
    }
    return keys
  }, [referencesData])
  // rows: key → { ref, status, event }. rowsRef mirrors the latest value so the
  // stable runFlush callback can read it without being re-created on every render.
  const [rows, setRows] = useState<Map<string, PublicationSectionRow>>(() => new Map())
  const rowsRef = useRef(rows)
  rowsRef.current = rows
  // Rebuild the row map (all 'idle') whenever the reference list changes.
  useEffect(() => {
    const m = new Map<string, PublicationSectionRow>()
    for (const ref of referencesData) {
      const k = refKey(ref)
      if (!k) continue
      m.set(k, { ref, status: 'idle' })
    }
    setRows(m)
  }, [referencesData])
  // Capped relay list is built asynchronously once per index/refs; relayReady
  // gates the initial prefetch below. `cancelled` guards against setting state
  // after the effect has been cleaned up.
  const relayUrlsRef = useRef<string[]>([])
  const [relayReady, setRelayReady] = useState(false)
  useEffect(() => {
    let cancelled = false
    void (async () => {
      const urls = await buildPublicationSectionRelayUrls(indexEvent, referencesData)
      if (cancelled) return
      relayUrlsRef.current = urls
      setRelayReady(true)
    })()
    return () => {
      cancelled = true
    }
  }, [indexEvent, referencesData])
  // Pending keys accumulate between debounced flushes; flushInFlightRef is a
  // simple lock so only one batch runs at a time.
  const pendingRef = useRef(new Set<string>())
  const debounceTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null)
  const flushInFlightRef = useRef(false)
  const runFlush = useCallback(async () => {
    if (flushInFlightRef.current) return
    const keys = [...pendingRef.current]
    pendingRef.current.clear()
    if (keys.length === 0) return
    flushInFlightRef.current = true
    try {
      // Skip rows that are already loaded with an event; everything else is (re)fetched.
      const snapshot = rowsRef.current
      const refsToLoad: PublicationSectionRef[] = []
      for (const k of keys) {
        const row = snapshot.get(k)
        if (!row) continue
        if (row.status === 'loaded' && row.event) continue
        refsToLoad.push(row.ref)
      }
      if (refsToLoad.length === 0) return
      // Mark everything in this batch as 'loading' before any async work.
      setRows((prev) => {
        const next = new Map(prev)
        for (const ref of refsToLoad) {
          const k = refKey(ref)
          const row = next.get(k)
          if (row) next.set(k, { ...row, status: 'loading' })
        }
        return next
      })
      const urls = relayUrlsRef.current
      const resolved = new Map<string, Event>()
      // NOTE(review): IndexedDB hydration is also skipped when no relay URLs resolved,
      // so with an empty relay list everything goes through the single-fetch fallback —
      // presumably intentional, but worth confirming.
      if (urls.length > 0) {
        // 1) Cache first.
        const fromDb = await hydrateRefsFromIndexedDb(refsToLoad)
        for (const [k, ev] of fromDb) {
          resolved.set(k, ev)
          client.addEventToCache(ev)
        }
        // 2) One batched REQ for whatever the cache missed.
        const stillNeed = refsToLoad.filter((r) => !resolved.has(refKey(r)))
        if (stillNeed.length > 0) {
          const fromNet = await batchFetchPublicationSectionEvents(stillNeed, urls)
          for (const [k, ev] of fromNet) {
            resolved.set(k, ev)
            client.addEventToCache(ev)
            if (isReplaceableEvent(ev.kind)) void indexedDb.putReplaceableEvent(ev)
          }
        }
      }
      // 3) Per-ref fallback fetches for anything still unresolved, in parallel.
      const missing = refsToLoad.filter((r) => !resolved.has(refKey(r)))
      await Promise.all(
        missing.map(async (ref) => {
          const k = refKey(ref)
          const ev = await fetchSingleRefFallback(ref)
          if (ev) {
            resolved.set(k, ev)
            client.addEventToCache(ev)
            if (isReplaceableEvent(ev.kind)) void indexedDb.putReplaceableEvent(ev)
          }
        })
      )
      // Commit results: resolved rows become 'loaded', the rest 'error' (retryable).
      setRows((prev) => {
        const next = new Map(prev)
        for (const ref of refsToLoad) {
          const k = refKey(ref)
          const row = next.get(k)
          if (!row) continue
          const ev = resolved.get(k)
          if (ev) {
            next.set(k, { ...row, event: ev, status: 'loaded' })
          } else {
            next.set(k, { ...row, status: 'error', event: undefined })
          }
        }
        return next
      })
    } finally {
      flushInFlightRef.current = false
      // While a batch was in flight, debounced runFlush() calls may have returned early
      // (flush lock). Drain any keys that accumulated so scroll-triggered sections still load.
      if (pendingRef.current.size > 0) {
        if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current)
        debounceTimerRef.current = setTimeout(() => {
          debounceTimerRef.current = null
          void runFlush()
        }, 0)
      }
    }
  }, [])
  // Public trigger: queue keys and (re)arm a 56ms debounce so rapid
  // IntersectionObserver callbacks coalesce into one batch.
  const requestKeys = useCallback(
    (keys: string[]) => {
      for (const k of keys) {
        if (k) pendingRef.current.add(k)
      }
      if (debounceTimerRef.current) clearTimeout(debounceTimerRef.current)
      debounceTimerRef.current = setTimeout(() => {
        debounceTimerRef.current = null
        void runFlush()
      }, 56)
    },
    [runFlush]
  )
  // Once relays are known, prefetch the first few sections so the top of the
  // publication renders without waiting for scroll.
  useEffect(() => {
    if (!relayReady || orderedKeys.length === 0) return
    const n = Math.min(3, orderedKeys.length)
    requestKeys(orderedKeys.slice(0, n))
  }, [relayReady, orderedKeys, requestKeys])
  // Keys whose last load attempt ended in 'error' (feed these to retryKeys).
  const failedKeys = useMemo(
    () => [...rows.entries()].filter(([, v]) => v.status === 'error').map(([k]) => k),
    [rows]
  )
  // Reset the given rows to 'idle' (dropping any stale event) and re-queue them.
  const retryKeys = useCallback(
    (keys: string[]) => {
      setRows((prev) => {
        const next = new Map(prev)
        for (const k of keys) {
          const row = next.get(k)
          if (row) next.set(k, { ...row, status: 'idle', event: undefined })
        }
        return next
      })
      requestKeys(keys)
    },
    [requestKeys]
  )
  // Render-friendly projection: each ref flattened together with its load status/event.
  const referencesWithEvents = useMemo(() => {
    return orderedKeys.map((k) => {
      const row = rows.get(k)
      // Row should always exist; the find() is a defensive fallback for the
      // window between referencesData changing and the rows-reset effect running.
      const ref = row?.ref ?? referencesData.find((r) => refKey(r) === k)!
      return {
        type: ref.type,
        coordinate: ref.coordinate,
        eventId: ref.eventId,
        kind: ref.kind,
        pubkey: ref.pubkey,
        identifier: ref.identifier,
        relay: ref.relay,
        event: row?.event,
        loadStatus: row?.status ?? 'idle'
      }
    })
  }, [orderedKeys, rows, referencesData])
  return {
    orderedKeys,
    rows,
    relayReady,
    requestKeys,
    retryKeys,
    failedKeys,
    referencesWithEvents
  }
}

29
src/lib/publication-coordinate.ts

@ -0,0 +1,29 @@ @@ -0,0 +1,29 @@
/**
 * Split `kind:64-hex-pubkey:d…` (d may contain `:`).
 *
 * @param coordinate NIP-33 style coordinate string (leading/trailing whitespace allowed).
 * @returns Parsed parts with lowercased pubkey, or null if the string is malformed.
 */
export function splitPublicationCoordinate(coordinate: string): {
  kind: number
  pubkey: string
  d: string
} | null {
  const trimmed = coordinate.trim()
  const i0 = trimmed.indexOf(':')
  const i1 = trimmed.indexOf(':', i0 + 1)
  if (i0 < 1 || i1 <= i0 + 1) return null
  const kindRaw = trimmed.slice(0, i0)
  // parseInt alone would accept garbage like '30x23' (parsing it as 30); require pure digits.
  if (!/^\d+$/.test(kindRaw)) return null
  const kind = parseInt(kindRaw, 10)
  const pubkeyRaw = trimmed.slice(i0 + 1, i1)
  if (!/^[0-9a-fA-F]{64}$/.test(pubkeyRaw)) return null
  const pubkey = pubkeyRaw.toLowerCase()
  // Everything after the second ':' is the d identifier, colons included.
  const d = trimmed.slice(i1 + 1)
  return { kind, pubkey, d }
}
/**
 * Coordinate strings to try when matching index `a` tags to events (NFC/NFD on `d` only).
 * Relays filter `#d` on exact bytes; we still need flexible client-side matching after REQ.
 */
export function publicationCoordinateLookupKeys(coordinate: string): string[] {
  const parsed = splitPublicationCoordinate(coordinate)
  // Unparseable coordinates still get one literal (trimmed) lookup key.
  if (parsed === null) return [coordinate.trim()]
  const { kind, pubkey, d } = parsed
  const keys = new Set<string>()
  for (const variant of [d, d.normalize('NFC'), d.normalize('NFD')]) {
    keys.add(`${kind}:${pubkey}:${variant}`)
  }
  return [...keys]
}

215
src/lib/publication-section-fetch.ts

@ -0,0 +1,215 @@ @@ -0,0 +1,215 @@
import { publicationCoordinateLookupKeys } from '@/lib/publication-coordinate'
import { buildComprehensiveRelayList } from '@/lib/relay-list-builder'
import { normalizeUrl } from '@/lib/url'
import client, { queryService } from '@/services/client.service'
import type { Event, Filter } from 'nostr-tools'
import { nip19 } from 'nostr-tools'
/** Parsed a/e reference from publication index tags (same shape as PublicationIndex uses). */
export type PublicationSectionRef = {
  type: 'a' | 'e'
  coordinate?: string
  eventId?: string
  kind?: number
  pubkey?: string
  identifier?: string
  relay?: string
}

/** Stable map key for a ref: its coordinate if present, otherwise its event id, '' if neither. */
export function publicationRefKey(ref: PublicationSectionRef): string {
  let raw = ref.coordinate
  if (!raw) raw = ref.eventId
  if (!raw) raw = ''
  return raw.trim()
}
/**
 * Parse NIP-33 `a` coordinate `kind:64-hex-pubkey:d-identifier` where `d` may contain `:`.
 * Returns a canonical coordinate with lowercase pubkey for cache / REQ / matching.
 *
 * @param raw Raw coordinate from an `a` tag (surrounding whitespace tolerated).
 * @returns Parsed parts plus the canonical coordinate string, or null if malformed.
 */
export function parsePublicationATagCoordinate(raw: string): {
  kind: number
  pubkey: string
  identifier: string
  coordinate: string
} | null {
  const trimmed = raw.trim()
  const i0 = trimmed.indexOf(':')
  const i1 = trimmed.indexOf(':', i0 + 1)
  if (i0 < 1 || i1 <= i0 + 1) return null
  const kindStr = trimmed.slice(0, i0)
  const pubkeyRaw = trimmed.slice(i0 + 1, i1)
  // Everything after the second ':' belongs to the d identifier (colons allowed).
  const identifier = trimmed.slice(i1 + 1)
  // parseInt alone would accept garbage kinds like '30x41'; require pure digits,
  // consistent with splitPublicationCoordinate in publication-coordinate.ts.
  if (!/^\d+$/.test(kindStr) || !/^[0-9a-fA-F]{64}$/.test(pubkeyRaw)) return null
  const kind = parseInt(kindStr, 10)
  const pubkey = pubkeyRaw.toLowerCase()
  return {
    kind,
    pubkey,
    identifier,
    coordinate: `${kind}:${pubkey}:${identifier}`
  }
}
/**
 * Resolve an `e`-ref event id to a lowercase 64-char hex id.
 * Accepts raw hex ids directly and decodes `note`/`nevent` bech32 forms via nip19.
 * Returns undefined for anything else (including decode failures).
 */
export function resolvePublicationEventIdToHex(eventId: string): string | undefined {
  if (!eventId) return undefined
  const candidate = eventId.trim()
  if (/^[0-9a-fA-F]{64}$/.test(candidate)) return candidate.toLowerCase()
  try {
    const decoded = nip19.decode(candidate)
    switch (decoded.type) {
      case 'note':
        return decoded.data
      case 'nevent':
        return decoded.data.id
      default:
        return undefined
    }
  } catch {
    /* not bech32 / unknown prefix */
  }
  return undefined
}
/**
 * Gather normalized ws/wss relay hints carried on the refs themselves.
 * Order is preserved and duplicates are kept (dedupe happens downstream).
 */
function collectRelayHints(refs: PublicationSectionRef[]): string[] {
  const hints: string[] = []
  for (const ref of refs) {
    const raw = ref.relay?.trim()
    if (!raw) continue
    const isWebsocketUrl = raw.startsWith('wss://') || raw.startsWith('ws://')
    if (!isWebsocketUrl) continue
    // Fall back to the raw hint if normalization rejects it.
    hints.push(normalizeUrl(raw) || raw)
  }
  return hints
}
/**
 * Focused relay set for publication sections: ref hints + author + user + profile/fast read, capped.
 * Omits the full SEARCHABLE list to avoid opening dozens of relays per publication.
 *
 * @param indexEvent Publication index event; its author feeds the relay-list builder.
 * @param refs       Section refs whose relay hints are folded in.
 * @param maxRelays  Hard cap on the returned list (default 22).
 */
export async function buildPublicationSectionRelayUrls(
  indexEvent: Event,
  refs: PublicationSectionRef[],
  maxRelays = 22
): Promise<string[]> {
  const candidates = await buildComprehensiveRelayList({
    authorPubkey: indexEvent.pubkey,
    userPubkey: client.pubkey || undefined,
    relayHints: collectRelayHints(refs),
    includeUserOwnRelays: true,
    includeProfileFetchRelays: true,
    includeFastReadRelays: true,
    includeSearchableRelays: false,
    includeFavoriteRelays: true,
    includeLocalRelays: true
  })
  return candidates.slice(0, maxRelays)
}
// Chunk sizes for the batched REQ built in batchFetchPublicationSectionEvents:
// max event ids per `ids` filter, and max d-tags per grouped `authors+kinds+#d` filter.
const IDS_CHUNK = 44
const D_TAGS_CHUNK = 28
/** Canonical `kind:lowercase-pubkey:d` coordinate for an event ('' d if the tag is absent). */
function coordinateFromEvent(ev: Event): string {
  const dTag = ev.tags.find((tag) => tag[0] === 'd')
  const identifier = dTag?.[1] ?? ''
  return [ev.kind, ev.pubkey.toLowerCase(), identifier].join(':')
}
/**
 * One batched query: chunk `ids` filters and grouped `authors + kinds + #d` filters.
 * Caller should hydrate from IndexedDB first. Keys are {@link publicationRefKey}.
 *
 * @param refs      Section refs still missing after cache hydration.
 * @param relayUrls Relays to query (empty list short-circuits to an empty result).
 * @returns Map of ref key → matched event; refs with no match are simply absent.
 */
export async function batchFetchPublicationSectionEvents(
  refs: PublicationSectionRef[],
  relayUrls: string[]
): Promise<Map<string, Event>> {
  const out = new Map<string, Event>()
  if (refs.length === 0 || relayUrls.length === 0) return out
  // Phase 1a: collect `e` refs whose id resolves to hex (bech32 forms decoded).
  const idRefs: PublicationSectionRef[] = []
  const hexByKey = new Map<string, string>()
  for (const r of refs) {
    if (r.type !== 'e' || !r.eventId) continue
    const key = publicationRefKey(r)
    if (!key) continue
    const hex = resolvePublicationEventIdToHex(r.eventId)
    if (hex) {
      idRefs.push(r)
      hexByKey.set(key, hex)
    }
  }
  // Phase 1b: group `a` refs by (pubkey, kind) so each group becomes one #d filter.
  const aRefs = refs.filter((r) => r.type === 'a' && r.coordinate && r.pubkey && r.kind != null)
  const aGroups = new Map<string, { pubkey: string; kind: number; dTags: string[] }>()
  for (const r of aRefs) {
    // d identifier may itself contain ':' — rejoin everything after the second segment.
    const idf = r.identifier ?? r.coordinate!.split(':').slice(2).join(':')
    if (!idf) continue
    const gk = `${r.pubkey}:${r.kind}`
    let g = aGroups.get(gk)
    if (!g) {
      g = { pubkey: r.pubkey!, kind: r.kind!, dTags: [] }
      aGroups.set(gk, g)
    }
    g.dTags.push(idf)
  }
  // Phase 2: build chunked filters — dedupe ids, keep only lowercase hex, cap chunk sizes.
  const filters: Filter[] = []
  const hexList = [...new Set([...hexByKey.values()])].filter((id) => /^[0-9a-f]{64}$/.test(id))
  for (let i = 0; i < hexList.length; i += IDS_CHUNK) {
    const chunk = hexList.slice(i, i + IDS_CHUNK)
    filters.push({ ids: chunk, limit: chunk.length })
  }
  for (const g of aGroups.values()) {
    const uniqueD = [...new Set(g.dTags)]
    for (let i = 0; i < uniqueD.length; i += D_TAGS_CHUNK) {
      const dChunk = uniqueD.slice(i, i + D_TAGS_CHUNK)
      filters.push({
        authors: [g.pubkey.toLowerCase()],
        kinds: [g.kind],
        '#d': dChunk,
        limit: dChunk.length
      })
    }
  }
  if (filters.length === 0) return out
  // Phase 3: one fetch over all filters; any transport failure yields an empty result.
  let events: Event[] = []
  try {
    events = await queryService.fetchEvents(relayUrls, filters, {
      globalTimeout: 14_000,
      eoseTimeout: 2_500,
      /** Do not early-resolve after the first event; this query must wait for the full batch. */
      firstRelayResultGraceMs: false
    })
  } catch {
    return out
  }
  // Phase 4: index results by id and by coordinate (NFC/NFD d variants, first event wins per key).
  const byId = new Map<string, Event>()
  const byCoord = new Map<string, Event>()
  for (const ev of events) {
    byId.set(ev.id.toLowerCase(), ev)
    const d = ev.tags.find((t) => t[0] === 'd')?.[1]
    if (d !== undefined && d !== '') {
      const base = coordinateFromEvent(ev)
      for (const k of publicationCoordinateLookupKeys(base)) {
        if (!byCoord.has(k)) byCoord.set(k, ev)
      }
    }
  }
  // Phase 5: map matched events back onto the original ref keys.
  for (const r of idRefs) {
    const key = publicationRefKey(r)
    const hex = hexByKey.get(key)
    if (!hex) continue
    const ev = byId.get(hex.toLowerCase())
    if (ev) out.set(key, ev)
  }
  for (const r of aRefs) {
    const key = publicationRefKey(r)
    const coord = r.coordinate!
    let ev: Event | undefined
    // Try each normalization variant of the requested coordinate until one matches.
    for (const k of publicationCoordinateLookupKeys(coord)) {
      ev = byCoord.get(k)
      if (ev) break
    }
    if (ev) out.set(key, ev)
  }
  return out
}

22
src/services/indexed-db.service.ts

@ -1,4 +1,8 @@ @@ -1,4 +1,8 @@
import { ExtendedKind } from '@/constants'
import {
publicationCoordinateLookupKeys,
splitPublicationCoordinate
} from '@/lib/publication-coordinate'
import { tagNameEquals } from '@/lib/tag'
import { TNip66RelayDiscovery, TRelayInfo } from '@/types'
import type { Event } from 'nostr-tools'
@ -1009,18 +1013,14 @@ class IndexedDbService { @@ -1009,18 +1013,14 @@ class IndexedDbService {
}
async getPublicationEvent(coordinate: string): Promise<Event | undefined> {
// Parse coordinate (format: kind:pubkey:d-tag)
const coordinateParts = coordinate.split(':')
if (coordinateParts.length >= 2) {
const kind = parseInt(coordinateParts[0])
if (!isNaN(kind)) {
const pubkey = coordinateParts[1]
const d = coordinateParts[2] || undefined
const event = await this.getReplaceableEvent(pubkey, kind, d)
return event || undefined
}
// kind:64-hex-pubkey:d (d may contain ':'); try NFC/NFD d variants for cache hits.
for (const fullCoord of publicationCoordinateLookupKeys(coordinate.trim())) {
const p = splitPublicationCoordinate(fullCoord)
if (!p) continue
const event = await this.getReplaceableEvent(p.pubkey, p.kind, p.d)
if (event) return event
}
return Promise.resolve(undefined)
return undefined
}
async getEventFromPublicationStore(eventId: string): Promise<Event | undefined> {

Loading…
Cancel
Save