Browse Source

bug-fixes

imwald
Silberengel 1 month ago
parent
commit
a1f053fcf0
  1. 18
      src/components/Content/index.tsx
  2. 12
      src/components/NormalFeed/index.tsx
  3. 15
      src/components/Note/Highlight/index.tsx
  4. 23
      src/components/Note/MarkdownArticle/MarkdownArticle.tsx
  5. 120
      src/components/NoteList/index.tsx
  6. 5
      src/hooks/useSearchProfiles.tsx
  7. 7
      src/lib/nostr-parser.tsx
  8. 8
      src/lib/url.ts
  9. 2
      src/pages/primary/NoteListPage/RelaysFeed.tsx
  10. 28
      src/pages/primary/SpellsPage/index.tsx
  11. 30
      src/services/client-events.service.ts
  12. 154
      src/services/client-replaceable-events.service.ts
  13. 119
      src/services/client.service.ts
  14. 4
      src/services/content-parser.service.ts
  15. 37
      src/services/relay-operation-log.service.ts
  16. 2
      src/types/index.d.ts

18
src/components/Content/index.tsx

@ -7,7 +7,7 @@ import { emojis, shortcodeToEmoji } from '@tiptap/extension-emoji'
import { getEmojiInfosFromEmojiTags } from '@/lib/tag' import { getEmojiInfosFromEmojiTags } from '@/lib/tag'
import { cn } from '@/lib/utils' import { cn } from '@/lib/utils'
import { getHttpUrlFromITags } from '@/lib/event' import { getHttpUrlFromITags } from '@/lib/event'
import { cleanUrl, isImage, isMedia, isAudio, isVideo } from '@/lib/url' import { cleanUrl, isImage, isMedia, isAudio, isVideo, isPseudoNostrHttpsUrl } from '@/lib/url'
import { TImetaInfo } from '@/types' import { TImetaInfo } from '@/types'
import { Event } from 'nostr-tools' import { Event } from 'nostr-tools'
import { useMemo } from 'react' import { useMemo } from 'react'
@ -113,7 +113,13 @@ export default function Content({
nodes.forEach((node) => { nodes.forEach((node) => {
if (node.type === 'url') { if (node.type === 'url') {
const url = node.data const url = node.data
if ((url.startsWith('http://') || url.startsWith('https://')) && !isImage(url) && !isMedia(url) && !isYouTubeUrl(url)) { if (
(url.startsWith('http://') || url.startsWith('https://')) &&
!isPseudoNostrHttpsUrl(url) &&
!isImage(url) &&
!isMedia(url) &&
!isYouTubeUrl(url)
) {
const cleaned = cleanUrl(url) const cleaned = cleanUrl(url)
if (cleaned && !seenUrls.has(cleaned) && !(iArticleCleaned && cleaned === iArticleCleaned)) { if (cleaned && !seenUrls.has(cleaned) && !(iArticleCleaned && cleaned === iArticleCleaned)) {
links.push(cleaned) links.push(cleaned)
@ -165,7 +171,13 @@ export default function Content({
.filter(tag => tag[0] === 'r' && tag[1]) .filter(tag => tag[0] === 'r' && tag[1])
.forEach(tag => { .forEach(tag => {
const url = tag[1] const url = tag[1]
if ((url.startsWith('http://') || url.startsWith('https://')) && !isImage(url) && !isMedia(url) && !isYouTubeUrl(url)) { if (
(url.startsWith('http://') || url.startsWith('https://')) &&
!isPseudoNostrHttpsUrl(url) &&
!isImage(url) &&
!isMedia(url) &&
!isYouTubeUrl(url)
) {
const cleaned = cleanUrl(url) const cleaned = cleanUrl(url)
// Only include if not already in content links and not already seen in tags // Only include if not already in content links and not already seen in tags
if (cleaned && !contentLinkUrls.has(cleaned) && !seenUrls.has(cleaned)) { if (cleaned && !contentLinkUrls.has(cleaned) && !seenUrls.has(cleaned)) {

12
src/components/NormalFeed/index.tsx

@ -18,6 +18,12 @@ const NormalFeed = forwardRef<TNoteListRef, {
setSubHeader?: (node: React.ReactNode) => void setSubHeader?: (node: React.ReactNode) => void
/** Shown in the subHeader row to the left of the kind filter (mobile primary feed). */ /** Shown in the subHeader row to the left of the kind filter (mobile primary feed). */
onSubHeaderRefresh?: () => void onSubHeaderRefresh?: () => void
/**
* When true with {@link mergeTimelineWhenSubRequestFiltersMatch}, relay URL list can change (e.g. favorites
* hydrate after load) without clearing rows same REQ shape, merge new stream into existing events.
*/
preserveTimelineOnSubRequestsChange?: boolean
mergeTimelineWhenSubRequestFiltersMatch?: boolean
}>(function NormalFeed( }>(function NormalFeed(
{ {
subRequests, subRequests,
@ -25,7 +31,9 @@ const NormalFeed = forwardRef<TNoteListRef, {
relayCapabilityReady = true, relayCapabilityReady = true,
isMainFeed = false, isMainFeed = false,
setSubHeader, setSubHeader,
onSubHeaderRefresh onSubHeaderRefresh,
preserveTimelineOnSubRequestsChange = false,
mergeTimelineWhenSubRequestFiltersMatch = false
}, },
ref ref
) { ) {
@ -109,6 +117,8 @@ const NormalFeed = forwardRef<TNoteListRef, {
hideUntrustedNotes={hideUntrustedNotes} hideUntrustedNotes={hideUntrustedNotes}
areAlgoRelays={areAlgoRelays} areAlgoRelays={areAlgoRelays}
relayCapabilityReady={relayCapabilityReady} relayCapabilityReady={relayCapabilityReady}
preserveTimelineOnSubRequestsChange={preserveTimelineOnSubRequestsChange}
mergeTimelineWhenSubRequestFiltersMatch={mergeTimelineWhenSubRequestFiltersMatch}
/> />
</div> </div>
</> </>

15
src/components/Note/Highlight/index.tsx

@ -7,6 +7,7 @@ import UserAvatar from '@/components/UserAvatar'
import Username from '@/components/Username' import Username from '@/components/Username'
import { useSmartNoteNavigationOptional } from '@/PageManager' import { useSmartNoteNavigationOptional } from '@/PageManager'
import { toNote } from '@/lib/link' import { toNote } from '@/lib/link'
import { isPseudoNostrHttpsUrl } from '@/lib/url'
import { useFetchEvent } from '@/hooks' import { useFetchEvent } from '@/hooks'
import { useEffect, useState, useMemo } from 'react' import { useEffect, useState, useMemo } from 'react'
import { ExtendedKind } from '@/constants' import { ExtendedKind } from '@/constants'
@ -143,8 +144,9 @@ export default function Highlight({
continue continue
} }
// Give 'r' tags lowest priority // Give 'r' tags lowest priority (skip fake `https://nostr:…` r-tags — not web URLs)
if (tag[0] === 'r' && (!sourceTag || sourceTag[0] === 'r')) { if (tag[0] === 'r' && (!sourceTag || sourceTag[0] === 'r')) {
if (tag[1] && isPseudoNostrHttpsUrl(tag[1])) continue
sourceTag = tag sourceTag = tag
continue continue
} }
@ -181,8 +183,10 @@ export default function Highlight({
tempSourceEventId = bech32 // Store bech32 for fetching the event tempSourceEventId = bech32 // Store bech32 for fetching the event
tempSourceBech32 = bech32 // Store bech32 for navigation tempSourceBech32 = bech32 // Store bech32 for navigation
} else if (sourceTag[0] === 'r') { } else if (sourceTag[0] === 'r') {
// Check if the r-tag value is a URL or Nostr address // Ignore fake `https://nostr:…` (invalid https; breaks WebPreview)
if (sourceTag[1] && isUrlOrNostrAddress(sourceTag[1])) { if (sourceTag[1] && isPseudoNostrHttpsUrl(sourceTag[1])) {
// no source / no quote card for this tag
} else if (sourceTag[1] && isUrlOrNostrAddress(sourceTag[1])) {
// Try to decode as Nostr address to extract author // Try to decode as Nostr address to extract author
try { try {
const decoded = nip19.decode(sourceTag[1]) const decoded = nip19.decode(sourceTag[1])
@ -249,7 +253,10 @@ export default function Highlight({
const hasSpecialCard = useMemo(() => { const hasSpecialCard = useMemo(() => {
// For r-tags that are regular URLs (http/https), they have OpenGraph cards - always use those // For r-tags that are regular URLs (http/https), they have OpenGraph cards - always use those
if (sourceTag && sourceTag[0] === 'r' && sourceTag[1]) { if (sourceTag && sourceTag[0] === 'r' && sourceTag[1]) {
if (sourceTag[1].startsWith('http://') || sourceTag[1].startsWith('https://')) { if (
(sourceTag[1].startsWith('http://') || sourceTag[1].startsWith('https://')) &&
!isPseudoNostrHttpsUrl(sourceTag[1])
) {
return true // URLs have OpenGraph cards - use full preview return true // URLs have OpenGraph cards - use full preview
} }
} }

23
src/components/Note/MarkdownArticle/MarkdownArticle.tsx

@ -8,7 +8,15 @@ import YoutubeEmbeddedPlayer from '@/components/YoutubeEmbeddedPlayer'
import { getLongFormArticleMetadataFromEvent } from '@/lib/event-metadata' import { getLongFormArticleMetadataFromEvent } from '@/lib/event-metadata'
import { toNoteList } from '@/lib/link' import { toNoteList } from '@/lib/link'
import { useMediaExtraction } from '@/hooks' import { useMediaExtraction } from '@/hooks'
import { cleanUrl, isImage, isMedia, isVideo, isAudio, isWebsocketUrl } from '@/lib/url' import {
cleanUrl,
isImage,
isMedia,
isVideo,
isAudio,
isWebsocketUrl,
isPseudoNostrHttpsUrl
} from '@/lib/url'
import { getHttpUrlFromITags, getImetaInfosFromEvent } from '@/lib/event' import { getHttpUrlFromITags, getImetaInfosFromEvent } from '@/lib/event'
import { canonicalizeRssArticleUrl } from '@/lib/rss-article' import { canonicalizeRssArticleUrl } from '@/lib/rss-article'
import { Event, kinds } from 'nostr-tools' import { Event, kinds } from 'nostr-tools'
@ -1848,6 +1856,18 @@ function parseMarkdownContent(
{url} {url}
</a> </a>
) )
} else if (isPseudoNostrHttpsUrl(url)) {
parts.push(
<a
key={`link-${patternIdx}`}
href={url}
className="inline text-green-600 dark:text-green-400 hover:text-green-700 dark:hover:text-green-300 hover:underline break-words"
target="_blank"
rel="noopener noreferrer"
>
{url}
</a>
)
} else { } else {
parts.push( parts.push(
<div key={`webpreview-${patternIdx}`} className="my-2"> <div key={`webpreview-${patternIdx}`} className="my-2">
@ -3350,6 +3370,7 @@ export default function MarkdownArticle({
.forEach(tag => { .forEach(tag => {
const url = tag[1] const url = tag[1]
if (!url.startsWith('http://') && !url.startsWith('https://')) return if (!url.startsWith('http://') && !url.startsWith('https://')) return
if (isPseudoNostrHttpsUrl(url)) return
if (isImage(url) || isMedia(url)) return if (isImage(url) || isMedia(url)) return
if (isYouTubeUrl(url)) return // Exclude YouTube URLs if (isYouTubeUrl(url)) return // Exclude YouTube URLs

120
src/components/NoteList/index.tsx

@ -218,6 +218,13 @@ const NoteList = forwardRef(
* That stacks subscriptions on strict relays (e.g. 10 subs) and triggers rejections / rate limits. * That stacks subscriptions on strict relays (e.g. 10 subs) and triggers rejections / rate limits.
*/ */
const timelineEstablishedCloserRef = useRef<(() => void) | null>(null) const timelineEstablishedCloserRef = useRef<(() => void) | null>(null)
/** Session snapshot was written to state; log once after commit (see feed-paint layout effect). */
const feedPaintSessionPendingRef = useRef(false)
/** Relay / one-shot data was written to state; log once after commit. */
const feedPaintRelayPendingRef = useRef(false)
const feedPaintRelayMetaRef = useRef<Record<string, unknown> | null>(null)
/** First live `onEvents` paint per timeline init (rows or terminal EOSE). */
const feedPaintLiveRelayDoneRef = useRef(false)
const [feedProfileBatch, setFeedProfileBatch] = useState<{ const [feedProfileBatch, setFeedProfileBatch] = useState<{
profiles: Map<string, TProfile> profiles: Map<string, TProfile>
@ -298,7 +305,11 @@ const NoteList = forwardRef(
return JSON.stringify([...showKinds].sort((a, b) => a - b)) return JSON.stringify([...showKinds].sort((a, b) => a - b))
}, [showKinds]) }, [showKinds])
/** Session snapshot identity: same feed + kind / reply UI toggles so restore matches filtering. */ /**
* Session snapshot identity: feed + kind UI toggles that affect **REQ** / merged rows.
* Do **not** include {@link hideReplies}: Notes vs Replies only changes client-side filtering; the same
* raw timeline should restore for both tabs (otherwise Replies can show cache while Notes looks empty).
*/
const sessionSnapshotIdentityKey = useMemo( const sessionSnapshotIdentityKey = useMemo(
() => () =>
JSON.stringify({ JSON.stringify({
@ -306,10 +317,9 @@ const NoteList = forwardRef(
kinds: showKindsKey, kinds: showKindsKey,
op: showKind1OPs, op: showKind1OPs,
rep: showKind1Replies, rep: showKind1Replies,
c1111: showKind1111, c1111: showKind1111
hr: hideReplies
}), }),
[timelineSubscriptionKey, showKindsKey, showKind1OPs, showKind1Replies, showKind1111, hideReplies] [timelineSubscriptionKey, showKindsKey, showKind1OPs, showKind1Replies, showKind1111]
) )
const showKindsRef = useRef(showKinds) const showKindsRef = useRef(showKinds)
@ -402,6 +412,45 @@ const NoteList = forwardRef(
}) })
}, [events, showCount, shouldHideEvent, showKinds, showKind1OPs, showKind1Replies, showKind1111]) }, [events, showCount, shouldHideEvent, showKinds, showKind1OPs, showKind1Replies, showKind1111])
useLayoutEffect(() => {
if (!feedPaintSessionPendingRef.current && !feedPaintRelayPendingRef.current) return
const shorten = (s: string, max: number) =>
s.length > max ? `${s.slice(0, max)}` : s
const feedKeyShort = shorten(timelineSubscriptionKey, 200)
const snapshotKeyShort = shorten(sessionSnapshotIdentityKey, 160)
if (feedPaintSessionPendingRef.current) {
feedPaintSessionPendingRef.current = false
logger.info('[FeedPaint] Session cache committed (DOM)', {
feedKey: feedKeyShort,
snapshotKey: snapshotKeyShort,
eventCount: events.length,
filteredVisibleRows: filteredEvents.length,
pubkeySlice: pubkey ? `${pubkey.slice(0, 12)}` : undefined
})
}
if (feedPaintRelayPendingRef.current) {
feedPaintRelayPendingRef.current = false
const meta = feedPaintRelayMetaRef.current
feedPaintRelayMetaRef.current = null
logger.info('[FeedPaint] Relay/network results committed (DOM)', {
feedKey: feedKeyShort,
snapshotKey: snapshotKeyShort,
committedEventCount: events.length,
filteredVisibleRows: filteredEvents.length,
pubkeySlice: pubkey ? `${pubkey.slice(0, 12)}` : undefined,
...meta
})
}
}, [
events,
filteredEvents.length,
timelineSubscriptionKey,
sessionSnapshotIdentityKey,
pubkey
])
const filteredNewEvents = useMemo(() => { const filteredNewEvents = useMemo(() => {
const idSet = new Set<string>() const idSet = new Set<string>()
@ -576,6 +625,11 @@ const NoteList = forwardRef(
let effectActive = true let effectActive = true
async function init() { async function init() {
feedPaintSessionPendingRef.current = false
feedPaintRelayPendingRef.current = false
feedPaintRelayMetaRef.current = null
feedPaintLiveRelayDoneRef.current = false
// Re-subscribe with rows visible (e.g. relay URL expansion): don't flash global loading / skeleton. // Re-subscribe with rows visible (e.g. relay URL expansion): don't flash global loading / skeleton.
const keepRowsVisible = const keepRowsVisible =
preserveTimelineOnSubRequestsChange && preserveTimelineOnSubRequestsChange &&
@ -588,6 +642,7 @@ const NoteList = forwardRef(
if (!keepExistingTimelineEvents) { if (!keepExistingTimelineEvents) {
if (restoredFromSession && sessionSnap) { if (restoredFromSession && sessionSnap) {
feedPaintSessionPendingRef.current = true
setEvents(sessionSnap) setEvents(sessionSnap)
lastEventsForTimelinePrefetchRef.current = sessionSnap lastEventsForTimelinePrefetchRef.current = sessionSnap
setNewEvents([]) setNewEvents([])
@ -716,11 +771,25 @@ const NoteList = forwardRef(
} }
setEvents(merged) setEvents(merged)
lastEventsForTimelinePrefetchRef.current = merged lastEventsForTimelinePrefetchRef.current = merged
feedPaintRelayPendingRef.current = true
feedPaintRelayMetaRef.current = {
variant: 'one_shot_fetch',
mergedCount: merged.length,
mergedWithPriorSession: !!(sessionSnap?.length && !userPulledRefresh)
}
} catch (err) { } catch (err) {
if (oneShotDebugLabel) { if (oneShotDebugLabel) {
logger.warn(`[${oneShotDebugLabel}] one-shot fetch threw`, err) logger.warn(`[${oneShotDebugLabel}] one-shot fetch threw`, err)
} }
if (effectActive) setEvents([]) if (effectActive) {
feedPaintRelayPendingRef.current = true
feedPaintRelayMetaRef.current = {
variant: 'one_shot_fetch',
mergedCount: 0,
fetchThrew: true
}
setEvents([])
}
} finally { } finally {
if (effectActive) { if (effectActive) {
setLoading(false) setLoading(false)
@ -761,6 +830,28 @@ const NoteList = forwardRef(
onEvents: (batch: Event[], eosed: boolean) => { onEvents: (batch: Event[], eosed: boolean) => {
if (!effectActive) return if (!effectActive) return
const narrowed = narrowLiveBatch(batch) const narrowed = narrowLiveBatch(batch)
if (!feedPaintLiveRelayDoneRef.current) {
if (narrowed.length > 0) {
feedPaintLiveRelayDoneRef.current = true
feedPaintRelayPendingRef.current = true
feedPaintRelayMetaRef.current = {
variant: 'live_subscription',
mode: 'rows',
narrowedInBatch: narrowed.length,
batchIncoming: batch.length,
eosed
}
} else if (eosed) {
feedPaintLiveRelayDoneRef.current = true
feedPaintRelayPendingRef.current = true
feedPaintRelayMetaRef.current = {
variant: 'live_subscription',
mode: 'eose_no_visible_rows',
batchIncoming: batch.length,
eosed
}
}
}
if (batch.length > 0) { if (batch.length > 0) {
if (narrowed.length > 0) { if (narrowed.length > 0) {
if (preserveTimelineOnSubRequestsChange) { if (preserveTimelineOnSubRequestsChange) {
@ -874,6 +965,10 @@ const NoteList = forwardRef(
timelineEstablishedCloserRef.current = closer timelineEstablishedCloserRef.current = closer
timelineKey = result.timelineKey timelineKey = result.timelineKey
setTimelineKey(timelineKey) setTimelineKey(timelineKey)
// subscribeTimeline resolves once shards are wired; EOSE / merge callbacks can be delayed or
// skipped on edge paths (all relays fail, strict NOTICE closes, etc.). Do not keep the global
// skeleton until the first onEvents(..., eosed) — that can freeze the feed indefinitely.
setLoading(false)
return closer return closer
} catch (_error) { } catch (_error) {
setLoading(false) setLoading(false)
@ -1393,9 +1488,18 @@ const NoteList = forwardRef(
<NoteCardLoadingSkeleton key={i} /> <NoteCardLoadingSkeleton key={i} />
))} ))}
</div> </div>
) : events.length > 0 && (hasMore || loading) ? ( ) : events.length > 0 && hasMore ? (
<div ref={bottomRef}> <div
<NoteCardLoadingSkeleton /> ref={bottomRef}
className={
filteredEvents.length === 0 && !loading
? 'min-h-[35vh] py-4'
: loading
? 'min-h-8'
: 'min-h-4'
}
>
{loading ? <NoteCardLoadingSkeleton /> : null}
</div> </div>
) : events.length > 0 ? ( ) : events.length > 0 ? (
<div className="text-center text-sm text-muted-foreground mt-2">{t('no more notes')}</div> <div className="text-center text-sm text-muted-foreground mt-2">{t('no more notes')}</div>

5
src/hooks/useSearchProfiles.tsx

@ -4,14 +4,15 @@ import { TProfile } from '@/types'
import { useEffect, useState } from 'react' import { useEffect, useState } from 'react'
export function useSearchProfiles(search: string, limit: number) { export function useSearchProfiles(search: string, limit: number) {
const [isFetching, setIsFetching] = useState(true) const [isFetching, setIsFetching] = useState(false)
const [error, setError] = useState<Error | null>(null) const [error, setError] = useState<Error | null>(null)
const [profiles, setProfiles] = useState<TProfile[]>([]) const [profiles, setProfiles] = useState<TProfile[]>([])
useEffect(() => { useEffect(() => {
const fetchProfiles = async () => { const fetchProfiles = async () => {
if (!search) { if (!search.trim()) {
setProfiles([]) setProfiles([])
setIsFetching(false)
return return
} }

7
src/lib/nostr-parser.tsx

@ -7,7 +7,7 @@ import { EmbeddedMention, EmbeddedNote } from '@/components/Embedded'
import ImageGallery from '@/components/ImageGallery' import ImageGallery from '@/components/ImageGallery'
import WebPreview from '@/components/WebPreview' import WebPreview from '@/components/WebPreview'
import { BookstrContent } from '@/components/Bookstr/BookstrContent' import { BookstrContent } from '@/components/Bookstr/BookstrContent'
import { cleanUrl, isImage, isMedia } from '@/lib/url' import { cleanUrl, isImage, isMedia, isPseudoNostrHttpsUrl } from '@/lib/url'
import { getImetaInfosFromEvent } from '@/lib/event' import { getImetaInfosFromEvent } from '@/lib/event'
import { parsePaytoUri } from '@/lib/payto' import { parsePaytoUri } from '@/lib/payto'
import PaytoLink from '@/components/PaytoLink' import PaytoLink from '@/components/PaytoLink'
@ -155,7 +155,10 @@ export function parseNostrContent(content: string, event?: Event): ParsedNostrCo
} }
const cleanedUrl = cleanUrl(url) const cleanedUrl = cleanUrl(url)
if (isPseudoNostrHttpsUrl(url)) {
continue
}
// Check if it's an image // Check if it's an image
if (isImage(cleanedUrl)) { if (isImage(cleanedUrl)) {
allMatches.push({ allMatches.push({

8
src/lib/url.ts

@ -103,6 +103,14 @@ export function simplifyUrl(url: string): string {
.replace(/\/$/, '') .replace(/\/$/, '')
} }
/**
* Some events use r-tags like `https://nostr:nevent1…` not a real http(s) URL (the `nostr:` NIP-21
* scheme is pasted after `https://`). Those strings pass a naive `https://` check and break WebPreview.
*/
export function isPseudoNostrHttpsUrl(url: string): boolean {
return /^https?:\/\/nostr:/i.test(url.trim())
}
export function isLocalNetworkUrl(urlString: string): boolean { export function isLocalNetworkUrl(urlString: string): boolean {
try { try {
const url = new URL(urlString) const url = new URL(urlString)

2
src/pages/primary/NoteListPage/RelaysFeed.tsx

@ -107,6 +107,8 @@ const RelaysFeed = forwardRef<
isMainFeed isMainFeed
setSubHeader={setSubHeader} setSubHeader={setSubHeader}
onSubHeaderRefresh={onSubHeaderRefresh} onSubHeaderRefresh={onSubHeaderRefresh}
preserveTimelineOnSubRequestsChange
mergeTimelineWhenSubRequestFiltersMatch
/> />
) )
}) })

28
src/pages/primary/SpellsPage/index.tsx

@ -904,6 +904,10 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const pickSpell = useCallback( const pickSpell = useCallback(
(spell: Event | null) => { (spell: Event | null) => {
setSpellPickerOpen(false)
if (spell && selectedSpell?.id === spell.id && !selectedFauxSpell) {
return
}
if (spell) { if (spell) {
logSpellFeedPickerSelection(`kind777:${getSpellName(spell)}`, { logSpellFeedPickerSelection(`kind777:${getSpellName(spell)}`, {
spellId: spell.id, spellId: spell.id,
@ -913,10 +917,9 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
} }
setSelectedSpell(spell) setSelectedSpell(spell)
setSelectedFauxSpell(null) setSelectedFauxSpell(null)
setSpellPickerOpen(false)
navigatePrimary('spells') navigatePrimary('spells')
}, },
[logSpellFeedPickerSelection, navigatePrimary] [logSpellFeedPickerSelection, navigatePrimary, selectedSpell?.id, selectedFauxSpell]
) )
const clearSpellSelection = useCallback(() => { const clearSpellSelection = useCallback(() => {
@ -929,20 +932,27 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const pickFauxSpell = useCallback( const pickFauxSpell = useCallback(
(name: FauxSpellName | null) => { (name: FauxSpellName | null) => {
setSpellPickerOpen(false)
if (name) { if (name) {
// Re-selecting the same built-in feed from the picker should not clear + resubscribe (toggle used to call
// pickFauxSpell(null) and wipe the timeline when the row was already selected).
if (selectedFauxSpell === name && selectedSpell === null) {
return
}
logSpellFeedPickerSelection(`faux:${name}`, { fauxSpell: name }) logSpellFeedPickerSelection(`faux:${name}`, { fauxSpell: name })
fauxSpellUrlSyncFromPickerRef.current = name fauxSpellUrlSyncFromPickerRef.current = name
setSelectedFauxSpell(name)
setSelectedSpell(null)
navigatePrimary('spells', { spell: name })
} else { } else {
logSpellFeedPickerSelection('(cleared faux)', { clearedFaux: true }) logSpellFeedPickerSelection('(cleared faux)', { clearedFaux: true })
fauxSpellUrlSyncFromPickerRef.current = null fauxSpellUrlSyncFromPickerRef.current = null
setSelectedFauxSpell(null)
setSelectedSpell(null)
navigatePrimary('spells')
} }
setSelectedFauxSpell(name)
setSelectedSpell(null)
setSpellPickerOpen(false)
if (name) navigatePrimary('spells', { spell: name })
else navigatePrimary('spells')
}, },
[logSpellFeedPickerSelection, navigatePrimary] [logSpellFeedPickerSelection, navigatePrimary, selectedFauxSpell, selectedSpell]
) )
const selectedSpellIsOwn = !!(pubkey && selectedSpell && selectedSpell.pubkey === pubkey) const selectedSpellIsOwn = !!(pubkey && selectedSpell && selectedSpell.pubkey === pubkey)
@ -1004,7 +1014,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
'hover:bg-accent focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring', 'hover:bg-accent focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring',
selected && 'bg-accent/50' selected && 'bg-accent/50'
)} )}
onClick={() => pickFauxSpell(selected ? null : name)} onClick={() => pickFauxSpell(name)}
> >
<span className="flex size-4 shrink-0 items-center justify-center"> <span className="flex size-4 shrink-0 items-center justify-center">
{selected ? <Check className="size-4" aria-hidden /> : null} {selected ? <Check className="size-4" aria-hidden /> : null}

30
src/services/client-events.service.ts

@ -357,6 +357,36 @@ export class EventService {
return e return e
} }
/**
* Pubkeys whose session-cached kind 0 matches a name / display_name / nip-05 substring (for search without IDB).
*/
searchSessionProfilePubkeys(query: string, limit: number): string[] {
const q = query.trim().toLowerCase()
if (!q || limit <= 0) return []
const out: string[] = []
for (const ev of this.sessionMetadataByPubkey.values()) {
if (shouldDropEventOnIngest(ev)) continue
if (out.length >= limit) break
try {
const o = JSON.parse(ev.content) as Record<string, unknown>
const blob = [
o.display_name,
o.name,
typeof o.nip05 === 'string' ? o.nip05 : ''
]
.map((x) => (typeof x === 'string' ? x : ''))
.join(' ')
.toLowerCase()
if (blob.includes(q)) {
out.push(ev.pubkey.toLowerCase())
}
} catch {
/* invalid JSON */
}
}
return out
}
/** /**
* Get events from session cache matching search * Get events from session cache matching search
*/ */

154
src/services/client-replaceable-events.service.ts

@ -149,6 +149,17 @@ export class ReplaceableEventService {
}) })
try { try {
if (kind === kinds.Metadata && !d) {
const sessionEv = client.eventService.getSessionMetadataForPubkey(pubkey)
if (sessionEv && !shouldDropEventOnIngest(sessionEv)) {
this.replaceableEventFromBigRelaysDataloader.prime(
{ pubkey, kind },
Promise.resolve(sessionEv)
)
return sessionEv
}
}
// If we have containing event relays and this is a profile, we need to use a custom relay list // If we have containing event relays and this is a profile, we need to use a custom relay list
// Otherwise, use DataLoader (which batches IndexedDB checks and network fetches) // Otherwise, use DataLoader (which batches IndexedDB checks and network fetches)
let event: NEvent | undefined let event: NEvent | undefined
@ -292,28 +303,39 @@ export class ReplaceableEventService {
* Checks IndexedDB first, then network * Checks IndexedDB first, then network
*/ */
async fetchReplaceableEventsFromProfileFetchRelays(pubkeys: string[], kind: number): Promise<(NEvent | undefined)[]> { async fetchReplaceableEventsFromProfileFetchRelays(pubkeys: string[], kind: number): Promise<(NEvent | undefined)[]> {
const results: (NEvent | undefined)[] = [] const results: (NEvent | undefined)[] = new Array(pubkeys.length)
const misses: { pubkey: string; index: number }[] = [] const needsIndexedDb: { pubkey: string; index: number }[] = []
// Check IndexedDB in parallel for (let index = 0; index < pubkeys.length; index++) {
const indexedDbPromises = pubkeys.map(async (pubkey, index) => { const pubkey = pubkeys[index]
try { if (kind === kinds.Metadata) {
const event = await indexedDb.getReplaceableEvent(pubkey, kind) const sessionEv = client.eventService.getSessionMetadataForPubkey(pubkey)
if (event) { if (sessionEv && !shouldDropEventOnIngest(sessionEv)) {
results[index] = event results[index] = sessionEv
return { index, event } this.replaceableEventFromBigRelaysDataloader.prime(
{ pubkey, kind },
Promise.resolve(sessionEv)
)
continue
} }
} catch {
// Ignore errors
} }
misses.push({ pubkey, index }) needsIndexedDb.push({ pubkey, index })
return null }
})
await Promise.allSettled(
await Promise.allSettled(indexedDbPromises) needsIndexedDb.map(async ({ pubkey, index }) => {
try {
// Find what's still missing and fetch from network const event = await indexedDb.getReplaceableEvent(pubkey, kind)
const stillMissing = misses.filter(({ index }) => results[index] === undefined) if (event) {
results[index] = event
}
} catch {
/* ignore */
}
})
)
const stillMissing = needsIndexedDb.filter(({ index }) => results[index] === undefined)
if (stillMissing.length > 0) { if (stillMissing.length > 0) {
const newEvents = await this.replaceableEventFromBigRelaysDataloader.loadMany( const newEvents = await this.replaceableEventFromBigRelaysDataloader.loadMany(
stillMissing.map(({ pubkey }) => ({ pubkey, kind })) stillMissing.map(({ pubkey }) => ({ pubkey, kind }))
@ -327,7 +349,7 @@ export class ReplaceableEventService {
} }
}) })
} }
return results return results
} }
@ -367,45 +389,53 @@ export class ReplaceableEventService {
}) })
} }
// Step 1: Batch check IndexedDB for all requested events
const groups = new Map<number, string[]>()
params.forEach(({ pubkey, kind }) => {
if (!groups.has(kind)) {
groups.set(kind, [])
}
groups.get(kind)!.push(pubkey)
})
const results: (NEvent | null)[] = new Array(params.length).fill(null) const results: (NEvent | null)[] = new Array(params.length).fill(null)
const eventsMap = new Map<string, NEvent>() const eventsMap = new Map<string, NEvent>()
for (let i = 0; i < params.length; i++) {
const { pubkey, kind } = params[i]
if (kind !== kinds.Metadata) continue
const sessionEv = client.eventService.getSessionMetadataForPubkey(pubkey)
if (sessionEv && !shouldDropEventOnIngest(sessionEv)) {
results[i] = sessionEv
eventsMap.set(`${pubkey}:${kind}`, sessionEv)
this.replaceableEventFromBigRelaysDataloader.prime(
{ pubkey, kind },
Promise.resolve(sessionEv)
)
}
}
const idbByKind = new Map<number, { pubkey: string; index: number }[]>()
params.forEach(({ pubkey, kind }, index) => {
if (results[index] != null) return
if (!idbByKind.has(kind)) {
idbByKind.set(kind, [])
}
idbByKind.get(kind)!.push({ pubkey, index })
})
const missingParams: { pubkey: string; kind: number; index: number }[] = [] const missingParams: { pubkey: string; kind: number; index: number }[] = []
// Batch IndexedDB checks by kind
await Promise.allSettled( await Promise.allSettled(
Array.from(groups.entries()).map(async ([kind, pubkeys]) => { Array.from(idbByKind.entries()).map(async ([kind, items]) => {
const pubkeys = items.map((x) => x.pubkey)
try { try {
// Use batched IndexedDB query
const indexedDbEvents = await indexedDb.getManyReplaceableEvents(pubkeys, kind) const indexedDbEvents = await indexedDb.getManyReplaceableEvents(pubkeys, kind)
// Only log at debug level to reduce noise during rapid scrolling
logger.debug('[ReplaceableEventService] IndexedDB batch query completed', { logger.debug('[ReplaceableEventService] IndexedDB batch query completed', {
kind, kind,
pubkeyCount: pubkeys.length, pubkeyCount: pubkeys.length,
foundCount: indexedDbEvents.filter(e => e !== null && e !== undefined).length foundCount: indexedDbEvents.filter((e) => e !== null && e !== undefined).length
}) })
// Map IndexedDB results back to params items.forEach(({ pubkey, index }, idx) => {
pubkeys.forEach((pubkey, idx) => { const event = indexedDbEvents[idx]
const paramIndex = params.findIndex(p => p.pubkey === pubkey && p.kind === kind) if (event && event !== null) {
if (paramIndex >= 0) { results[index] = event
const event = indexedDbEvents[idx] eventsMap.set(`${pubkey}:${kind}`, event)
if (event && event !== null) { this.refreshInBackground(pubkey, kind).catch(() => {})
results[paramIndex] = event } else {
eventsMap.set(`${pubkey}:${kind}`, event) missingParams.push({ pubkey, kind, index })
// Refresh in background
this.refreshInBackground(pubkey, kind).catch(() => {})
} else {
missingParams.push({ pubkey, kind, index: paramIndex })
}
} }
}) })
} catch (error) { } catch (error) {
@ -413,20 +443,16 @@ export class ReplaceableEventService {
kind, kind,
error: error instanceof Error ? error.message : String(error) error: error instanceof Error ? error.message : String(error)
}) })
// If IndexedDB fails, mark all as missing for (const { pubkey, index } of items) {
pubkeys.forEach((pubkey) => { missingParams.push({ pubkey, kind, index })
const paramIndex = params.findIndex(p => p.pubkey === pubkey && p.kind === kind) }
if (paramIndex >= 0) {
missingParams.push({ pubkey, kind, index: paramIndex })
}
})
} }
}) })
) )
// Step 2: Only fetch missing events from network // Step 2: Only fetch missing events from network
if (missingParams.length === 0) { if (missingParams.length === 0) {
logger.debug('[ReplaceableEventService] All events found in IndexedDB, skipping network fetch', { logger.debug('[ReplaceableEventService] All events resolved (session + IndexedDB), skipping network fetch', {
totalCount: params.length totalCount: params.length
}) })
return results return results
@ -794,6 +820,18 @@ export class ReplaceableEventService {
logger.error('[ReplaceableEventService] Invalid id - no pubkey extracted', { id }) logger.error('[ReplaceableEventService] Invalid id - no pubkey extracted', { id })
throw new Error('Invalid id') throw new Error('Invalid id')
} }
if (!_skipCache) {
const sessionEv = client.eventService.getSessionMetadataForPubkey(pubkey)
if (sessionEv && !shouldDropEventOnIngest(sessionEv)) {
this.replaceableEventFromBigRelaysDataloader.prime(
{ pubkey, kind: kinds.Metadata },
Promise.resolve(sessionEv)
)
await this.indexProfile(sessionEv)
return sessionEv
}
}
// CRITICAL: Always use relay hints from bech32 addresses (nprofile, naddr, nevent) when available // CRITICAL: Always use relay hints from bech32 addresses (nprofile, naddr, nevent) when available
// Relay hints should have highest priority and always be included // Relay hints should have highest priority and always be included

119
src/services/client.service.ts

@ -89,6 +89,18 @@ function summarizeFiltersForRelayLog(filters: Filter[]): Record<string, unknown>
if (f['#t']?.length) out.tTagCount = f['#t'].length if (f['#t']?.length) out.tTagCount = f['#t'].length
return out return out
} }
/** Hostname (+ path when not "/") for readable publish / retry console lines. */
function relayHostForUserLog(url: string): string {
const n = normalizeUrl(url) || url
try {
const u = new URL(n.replace(/^wss:/i, 'https:').replace(/^ws:/i, 'http:'))
const path = u.pathname && u.pathname !== '/' ? u.pathname.replace(/\/$/, '') : ''
return path ? `${u.host}${path}` : u.host
} catch {
return n
}
}
import { EventService } from './client-events.service' import { EventService } from './client-events.service'
import { ReplaceableEventService } from './client-replaceable-events.service' import { ReplaceableEventService } from './client-replaceable-events.service'
import { MacroService, createBookstrService } from './client-macro.service' import { MacroService, createBookstrService } from './client-macro.service'
@ -344,14 +356,50 @@ class ClientService extends EventTarget {
} }
const failedOutboxes = userOutboxUrls.filter((u) => !hadSuccess.has(norm(u))) const failedOutboxes = userOutboxUrls.filter((u) => !hadSuccess.has(norm(u)))
if (failedOutboxes.length === 0) return if (failedOutboxes.length === 0) return
logger.info('[PublishEvent] Outbox relay(s) failed; retrying once after delay', {
eventId: event.id?.slice(0, 8), const statusHint = (url: string): string => {
kind: event.kind, const n = norm(url)
failedCount: failedOutboxes.length, const forUrl = relayStatuses.filter((r) => norm(r.url) === n)
delayMs: OUTBOX_PUBLISH_RETRY_DELAY_MS const failMsgs = forUrl.filter((r) => !r.success).map((r) => r.error).filter(Boolean)
}) if (failMsgs.length) return failMsgs[failMsgs.length - 1]!
return 'No OK from this relay (timeout, connection failed, or still pending when the first wave ended)'
}
const okOutboxes = userOutboxUrls.filter((u) => hadSuccess.has(norm(u)))
const eventHint =
event.id && /^[0-9a-f]{64}$/i.test(event.id)
? `note id ${event.id.slice(0, 12)}… (kind ${event.kind})`
: `kind ${event.kind} note`
const failedSummary = failedOutboxes
.map((u) => `${relayHostForUserLog(u)}${statusHint(u)}`)
.join('\n')
const okSummary =
okOutboxes.length > 0
? okOutboxes.map((u) => `${relayHostForUserLog(u)}`).join('\n')
: ' (none)'
logger.info(
`[Publish] NIP-65 write relays (your outboxes): ${failedOutboxes.length} did not confirm ${eventHint}. ` +
`Retrying only those in ${OUTBOX_PUBLISH_RETRY_DELAY_MS / 1000}s (one more try each).\n` +
`Not OK:\n${failedSummary}\n` +
`Confirmed on first publish wave:\n${okSummary}`,
{
delaySeconds: OUTBOX_PUBLISH_RETRY_DELAY_MS / 1000,
failed: failedOutboxes.map((url) => ({
url,
host: relayHostForUserLog(url),
reason: statusHint(url)
})),
confirmed: okOutboxes.map((url) => ({ url, host: relayHostForUserLog(url) }))
}
)
await new Promise<void>((r) => setTimeout(r, OUTBOX_PUBLISH_RETRY_DELAY_MS)) await new Promise<void>((r) => setTimeout(r, OUTBOX_PUBLISH_RETRY_DELAY_MS))
await this.publishEvent(failedOutboxes, event, { skipOutboxRetry: true }) await this.publishEvent(failedOutboxes, event, {
skipOutboxRetry: true,
publishBatchLabel: 'NIP-65 outbox retry — 2nd attempt (failed write relays only)'
})
} }
private async prioritizePublishUrlList( private async prioritizePublishUrlList(
@ -717,8 +765,14 @@ class ClientService extends EventTarget {
/** One failed publish or subscribe connection per normalized URL (accumulates until {@link SESSION_RELAY_FAILURE_STRIKE_THRESHOLD}). */ /** One failed publish or subscribe connection per normalized URL (accumulates until {@link SESSION_RELAY_FAILURE_STRIKE_THRESHOLD}). */
/** NOTICE "failed to fetch events" (relay DB/backend) — same session strike as a failed connection. */ /** NOTICE "failed to fetch events" (relay DB/backend) — same session strike as a failed connection. */
private recordRelayNoticeFetchFailure(url: string, noticeMessage: string) { private recordRelayNoticeFetchFailure(url: string, noticeMessage: string) {
const n = normalizeUrl(url) || url
if (!n) return
const prev = this.publishStrikeCount.get(n) ?? 0
if (prev >= ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD) {
return
}
logger.info('[Relay] NOTICE failed-fetch → session strike', { logger.info('[Relay] NOTICE failed-fetch → session strike', {
url, url: n,
noticeSnippet: noticeMessage.slice(0, 220) noticeSnippet: noticeMessage.slice(0, 220)
}) })
this.recordSessionRelayFailure(url) this.recordSessionRelayFailure(url)
@ -922,9 +976,21 @@ class ClientService extends EventTarget {
} else { } else {
logger.debug('[PublishEvent] Unique relays', { count: uniqueRelayUrls.length, relays: uniqueRelayUrls.slice(0, 5) }) logger.debug('[PublishEvent] Unique relays', { count: uniqueRelayUrls.length, relays: uniqueRelayUrls.slice(0, 5) })
} }
const publishBatchSource = publishExtras?.publishBatchLabel
? `publish — ${publishExtras.publishBatchLabel}`
: 'ClientService.publishEvent'
if (publishExtras?.publishBatchLabel) {
const idBit =
event.id && /^[0-9a-f]{64}$/i.test(event.id) ? `${event.id.slice(0, 12)}` : '(unsigned or no id)'
logger.info(`[Publish] ${publishExtras.publishBatchLabel}`, {
readable: `Kind ${event.kind} note ${idBit}${uniqueRelayUrls.length} relay(s): ${uniqueRelayUrls.map(relayHostForUserLog).join(', ')}`,
targets: uniqueRelayUrls.map((url) => ({ where: relayHostForUserLog(url), url }))
})
}
const relayStatuses: { url: string; success: boolean; error?: string }[] = [] const relayStatuses: { url: string; success: boolean; error?: string }[] = []
const publishOpBatch = new RelayPublishOpBatch('ClientService.publishEvent', event.id, uniqueRelayUrls) const publishOpBatch = new RelayPublishOpBatch(publishBatchSource, event.id, uniqueRelayUrls)
publishOpBatch.logBegin() publishOpBatch.logBegin()
// eslint-disable-next-line @typescript-eslint/no-this-alias // eslint-disable-next-line @typescript-eslint/no-this-alias
@ -1166,9 +1232,14 @@ class ClientService extends EventTarget {
void client void client
.retryFailedOutboxPublishesOnce(event, userOutboxUrls, relayStatuses) .retryFailedOutboxPublishesOnce(event, userOutboxUrls, relayStatuses)
.catch((err) => .catch((err) =>
logger.warn('[PublishEvent] Outbox retry pass failed', { logger.warn(
error: err instanceof Error ? err.message : String(err) '[Publish] NIP-65 outbox retry (2nd attempt) failed — check the network or relay logs above',
}) {
error: err instanceof Error ? err.message : String(err),
eventKind: event.kind,
eventId: event.id && /^[0-9a-f]{64}$/i.test(event.id) ? `${event.id.slice(0, 16)}` : event.id
}
)
) )
}) })
} }
@ -2223,8 +2294,26 @@ class ClientService extends EventTarget {
} }
async searchNpubsFromLocal(query: string, limit: number = 100) { async searchNpubsFromLocal(query: string, limit: number = 100) {
const result = await this.userIndex.searchAsync(query, { limit }) const seen = new Set<string>()
return result.map((pubkey) => pubkeyToNpub(pubkey as string)).filter(Boolean) as string[] const out: string[] = []
const pushNpub = (npub: string) => {
if (!npub || seen.has(npub) || out.length >= limit) return
seen.add(npub)
out.push(npub)
}
for (const pk of this.eventService.searchSessionProfilePubkeys(query, limit)) {
const npub = pubkeyToNpub(pk)
if (npub) pushNpub(npub)
}
if (out.length >= limit) return out
const remaining = limit - out.length
const result = await this.userIndex.searchAsync(query, { limit: remaining * 4 })
for (const pubkey of result) {
const npub = pubkeyToNpub(pubkey as string)
if (npub) pushNpub(npub)
if (out.length >= limit) break
}
return out
} }
/** /**

4
src/services/content-parser.service.ts

@ -9,6 +9,7 @@ import { getImetaInfosFromEvent } from '@/lib/event'
import { URL_REGEX, ExtendedKind } from '@/constants' import { URL_REGEX, ExtendedKind } from '@/constants'
import { TImetaInfo } from '@/types' import { TImetaInfo } from '@/types'
import logger from '@/lib/logger' import logger from '@/lib/logger'
import { isPseudoNostrHttpsUrl } from '@/lib/url'
export interface ParsedContent { export interface ParsedContent {
html: string html: string
@ -982,6 +983,7 @@ class ContentParserService {
// Give 'r' tags lowest priority // Give 'r' tags lowest priority
if (tag[0] === 'r' && (!sourceTag || sourceTag[0] === 'r')) { if (tag[0] === 'r' && (!sourceTag || sourceTag[0] === 'r')) {
if (tag[1] && isPseudoNostrHttpsUrl(tag[1])) continue
sourceTag = tag sourceTag = tag
continue continue
} }
@ -1009,7 +1011,7 @@ class ContentParserService {
relays: relay ? [relay] : [] relays: relay ? [relay] : []
}) })
}) })
} else if (sourceTag[0] === 'r') { } else if (sourceTag[0] === 'r' && sourceTag[1] && !isPseudoNostrHttpsUrl(sourceTag[1])) {
sources.push({ sources.push({
type: 'url', type: 'url',
value: sourceTag[1], value: sourceTag[1],

37
src/services/relay-operation-log.service.ts

@ -1,8 +1,20 @@
import logger from '@/lib/logger' import logger from '@/lib/logger'
import { normalizeUrl } from '@/lib/url'
import type { Filter } from 'nostr-tools' import type { Filter } from 'nostr-tools'
let batchSeq = 0 let batchSeq = 0
/**
 * Readable relay label (host, plus the path when it is not just "/") used in
 * publish-batch log summaries. Unparseable input is returned as-is after
 * normalization.
 *
 * NOTE(review): byte-identical twin of `relayHostForUserLog` in
 * client.service.ts — consider extracting a shared helper.
 */
function relayHostForPublishLog(url: string): string {
  const target = normalizeUrl(url) || url
  try {
    // Swap ws(s): for http(s): so the URL constructor accepts it everywhere.
    const asHttp = target.replace(/^wss:/i, 'https:').replace(/^ws:/i, 'http:')
    const { host, pathname } = new URL(asHttp)
    if (!pathname || pathname === '/') return host
    return `${host}${pathname.replace(/\/$/, '')}`
  } catch {
    return target
  }
}
function nextBatchId(prefix: string): string { function nextBatchId(prefix: string): string {
return `${prefix}-${Date.now().toString(36)}-${(++batchSeq).toString(36)}` return `${prefix}-${Date.now().toString(36)}-${(++batchSeq).toString(36)}`
} }
@ -214,6 +226,20 @@ export class RelayPublishOpBatch {
) )
const ok = this.results.filter((r) => r.ok) const ok = this.results.filter((r) => r.ok)
const fail = this.results.filter((r) => !r.ok) const fail = this.results.filter((r) => !r.ok)
const sorted = this.results.sort((a, b) => a.cmdIndex - b.cmdIndex)
const readableSummary =
fail.length === 0
? `All ${ok.length} relay(s) accepted the publish.`
: [
`${fail.length} relay(s) failed:`,
...fail.map(
(r) =>
`${relayHostForPublishLog(r.relayUrl)}${(r.error && String(r.error).trim()) || 'rejected or error'}`
),
ok.length > 0 ? `${ok.length} relay(s) OK: ${ok.map((r) => relayHostForPublishLog(r.relayUrl)).join(', ')}` : ''
]
.filter(Boolean)
.join('\n')
logger.info('[RelayOp] publish_batch_end', { logger.info('[RelayOp] publish_batch_end', {
batchId: this.batchId, batchId: this.batchId,
source: this.source, source: this.source,
@ -222,16 +248,23 @@ export class RelayPublishOpBatch {
elapsedMs, elapsedMs,
okCount: ok.length, okCount: ok.length,
failCount: fail.length, failCount: fail.length,
readableSummary,
byState: { byState: {
ok: { count: ok.length, relays: ok.map((r) => r.relayUrl), cmdIndices: ok.map((r) => r.cmdIndex) }, ok: {
count: ok.length,
relays: ok.map((r) => r.relayUrl),
hosts: ok.map((r) => relayHostForPublishLog(r.relayUrl)),
cmdIndices: ok.map((r) => r.cmdIndex)
},
fail: { fail: {
count: fail.length, count: fail.length,
relays: fail.map((r) => r.relayUrl), relays: fail.map((r) => r.relayUrl),
hosts: fail.map((r) => relayHostForPublishLog(r.relayUrl)),
cmdIndices: fail.map((r) => r.cmdIndex), cmdIndices: fail.map((r) => r.cmdIndex),
errors: fail.map((r) => r.error ?? '') errors: fail.map((r) => r.error ?? '')
} }
}, },
results: this.results.sort((a, b) => a.cmdIndex - b.cmdIndex) results: sorted
}) })
} }
} }

2
src/types/index.d.ts vendored

@ -186,6 +186,8 @@ export type TPublishEventExtras = {
favoriteRelayUrls?: string[] favoriteRelayUrls?: string[]
/** When true (internal): only publish to the given URLs; do not merge outboxes or schedule outbox retry. */ /** When true (internal): only publish to the given URLs; do not merge outboxes or schedule outbox retry. */
skipOutboxRetry?: boolean skipOutboxRetry?: boolean
/** Shown in relay batch logs and an info line (e.g. "NIP-65 outbox retry — 2nd attempt"). */
publishBatchLabel?: string
} }
export type TNoteListMode = 'posts' | 'postsAndReplies' | 'you' | 'bookmarksAndHashtags' export type TNoteListMode = 'posts' | 'postsAndReplies' | 'you' | 'bookmarksAndHashtags'

Loading…
Cancel
Save