Browse Source

bug-fixes

imwald
Silberengel 1 month ago
parent
commit
a1f053fcf0
  1. 18
      src/components/Content/index.tsx
  2. 12
      src/components/NormalFeed/index.tsx
  3. 15
      src/components/Note/Highlight/index.tsx
  4. 23
      src/components/Note/MarkdownArticle/MarkdownArticle.tsx
  5. 120
      src/components/NoteList/index.tsx
  6. 5
      src/hooks/useSearchProfiles.tsx
  7. 7
      src/lib/nostr-parser.tsx
  8. 8
      src/lib/url.ts
  9. 2
      src/pages/primary/NoteListPage/RelaysFeed.tsx
  10. 28
      src/pages/primary/SpellsPage/index.tsx
  11. 30
      src/services/client-events.service.ts
  12. 154
      src/services/client-replaceable-events.service.ts
  13. 119
      src/services/client.service.ts
  14. 4
      src/services/content-parser.service.ts
  15. 37
      src/services/relay-operation-log.service.ts
  16. 2
      src/types/index.d.ts

18
src/components/Content/index.tsx

@@ -7,7 +7,7 @@ import { emojis, shortcodeToEmoji } from '@tiptap/extension-emoji'
import { getEmojiInfosFromEmojiTags } from '@/lib/tag'
import { cn } from '@/lib/utils'
import { getHttpUrlFromITags } from '@/lib/event'
import { cleanUrl, isImage, isMedia, isAudio, isVideo } from '@/lib/url'
import { cleanUrl, isImage, isMedia, isAudio, isVideo, isPseudoNostrHttpsUrl } from '@/lib/url'
import { TImetaInfo } from '@/types'
import { Event } from 'nostr-tools'
import { useMemo } from 'react'
@ -113,7 +113,13 @@ export default function Content({ @@ -113,7 +113,13 @@ export default function Content({
nodes.forEach((node) => {
if (node.type === 'url') {
const url = node.data
if ((url.startsWith('http://') || url.startsWith('https://')) && !isImage(url) && !isMedia(url) && !isYouTubeUrl(url)) {
if (
(url.startsWith('http://') || url.startsWith('https://')) &&
!isPseudoNostrHttpsUrl(url) &&
!isImage(url) &&
!isMedia(url) &&
!isYouTubeUrl(url)
) {
const cleaned = cleanUrl(url)
if (cleaned && !seenUrls.has(cleaned) && !(iArticleCleaned && cleaned === iArticleCleaned)) {
links.push(cleaned)
@ -165,7 +171,13 @@ export default function Content({ @@ -165,7 +171,13 @@ export default function Content({
.filter(tag => tag[0] === 'r' && tag[1])
.forEach(tag => {
const url = tag[1]
if ((url.startsWith('http://') || url.startsWith('https://')) && !isImage(url) && !isMedia(url) && !isYouTubeUrl(url)) {
if (
(url.startsWith('http://') || url.startsWith('https://')) &&
!isPseudoNostrHttpsUrl(url) &&
!isImage(url) &&
!isMedia(url) &&
!isYouTubeUrl(url)
) {
const cleaned = cleanUrl(url)
// Only include if not already in content links and not already seen in tags
if (cleaned && !contentLinkUrls.has(cleaned) && !seenUrls.has(cleaned)) {

12
src/components/NormalFeed/index.tsx

@ -18,6 +18,12 @@ const NormalFeed = forwardRef<TNoteListRef, { @@ -18,6 +18,12 @@ const NormalFeed = forwardRef<TNoteListRef, {
setSubHeader?: (node: React.ReactNode) => void
/** Shown in the subHeader row to the left of the kind filter (mobile primary feed). */
onSubHeaderRefresh?: () => void
/**
* When true with {@link mergeTimelineWhenSubRequestFiltersMatch}, relay URL list can change (e.g. favorites
* hydrate after load) without clearing rows — same REQ shape; merge the new stream into existing events.
*/
preserveTimelineOnSubRequestsChange?: boolean
mergeTimelineWhenSubRequestFiltersMatch?: boolean
}>(function NormalFeed(
{
subRequests,
@ -25,7 +31,9 @@ const NormalFeed = forwardRef<TNoteListRef, { @@ -25,7 +31,9 @@ const NormalFeed = forwardRef<TNoteListRef, {
relayCapabilityReady = true,
isMainFeed = false,
setSubHeader,
onSubHeaderRefresh
onSubHeaderRefresh,
preserveTimelineOnSubRequestsChange = false,
mergeTimelineWhenSubRequestFiltersMatch = false
},
ref
) {
@ -109,6 +117,8 @@ const NormalFeed = forwardRef<TNoteListRef, { @@ -109,6 +117,8 @@ const NormalFeed = forwardRef<TNoteListRef, {
hideUntrustedNotes={hideUntrustedNotes}
areAlgoRelays={areAlgoRelays}
relayCapabilityReady={relayCapabilityReady}
preserveTimelineOnSubRequestsChange={preserveTimelineOnSubRequestsChange}
mergeTimelineWhenSubRequestFiltersMatch={mergeTimelineWhenSubRequestFiltersMatch}
/>
</div>
</>

15
src/components/Note/Highlight/index.tsx

@ -7,6 +7,7 @@ import UserAvatar from '@/components/UserAvatar' @@ -7,6 +7,7 @@ import UserAvatar from '@/components/UserAvatar'
import Username from '@/components/Username'
import { useSmartNoteNavigationOptional } from '@/PageManager'
import { toNote } from '@/lib/link'
import { isPseudoNostrHttpsUrl } from '@/lib/url'
import { useFetchEvent } from '@/hooks'
import { useEffect, useState, useMemo } from 'react'
import { ExtendedKind } from '@/constants'
@ -143,8 +144,9 @@ export default function Highlight({ @@ -143,8 +144,9 @@ export default function Highlight({
continue
}
// Give 'r' tags lowest priority
// Give 'r' tags lowest priority (skip fake `https://nostr:…` r-tags — not web URLs)
if (tag[0] === 'r' && (!sourceTag || sourceTag[0] === 'r')) {
if (tag[1] && isPseudoNostrHttpsUrl(tag[1])) continue
sourceTag = tag
continue
}
@ -181,8 +183,10 @@ export default function Highlight({ @@ -181,8 +183,10 @@ export default function Highlight({
tempSourceEventId = bech32 // Store bech32 for fetching the event
tempSourceBech32 = bech32 // Store bech32 for navigation
} else if (sourceTag[0] === 'r') {
// Check if the r-tag value is a URL or Nostr address
if (sourceTag[1] && isUrlOrNostrAddress(sourceTag[1])) {
// Ignore fake `https://nostr:…` (invalid https; breaks WebPreview)
if (sourceTag[1] && isPseudoNostrHttpsUrl(sourceTag[1])) {
// no source / no quote card for this tag
} else if (sourceTag[1] && isUrlOrNostrAddress(sourceTag[1])) {
// Try to decode as Nostr address to extract author
try {
const decoded = nip19.decode(sourceTag[1])
@ -249,7 +253,10 @@ export default function Highlight({ @@ -249,7 +253,10 @@ export default function Highlight({
const hasSpecialCard = useMemo(() => {
// For r-tags that are regular URLs (http/https), they have OpenGraph cards - always use those
if (sourceTag && sourceTag[0] === 'r' && sourceTag[1]) {
if (sourceTag[1].startsWith('http://') || sourceTag[1].startsWith('https://')) {
if (
(sourceTag[1].startsWith('http://') || sourceTag[1].startsWith('https://')) &&
!isPseudoNostrHttpsUrl(sourceTag[1])
) {
return true // URLs have OpenGraph cards - use full preview
}
}

23
src/components/Note/MarkdownArticle/MarkdownArticle.tsx

@ -8,7 +8,15 @@ import YoutubeEmbeddedPlayer from '@/components/YoutubeEmbeddedPlayer' @@ -8,7 +8,15 @@ import YoutubeEmbeddedPlayer from '@/components/YoutubeEmbeddedPlayer'
import { getLongFormArticleMetadataFromEvent } from '@/lib/event-metadata'
import { toNoteList } from '@/lib/link'
import { useMediaExtraction } from '@/hooks'
import { cleanUrl, isImage, isMedia, isVideo, isAudio, isWebsocketUrl } from '@/lib/url'
import {
cleanUrl,
isImage,
isMedia,
isVideo,
isAudio,
isWebsocketUrl,
isPseudoNostrHttpsUrl
} from '@/lib/url'
import { getHttpUrlFromITags, getImetaInfosFromEvent } from '@/lib/event'
import { canonicalizeRssArticleUrl } from '@/lib/rss-article'
import { Event, kinds } from 'nostr-tools'
@ -1848,6 +1856,18 @@ function parseMarkdownContent( @@ -1848,6 +1856,18 @@ function parseMarkdownContent(
{url}
</a>
)
} else if (isPseudoNostrHttpsUrl(url)) {
parts.push(
<a
key={`link-${patternIdx}`}
href={url}
className="inline text-green-600 dark:text-green-400 hover:text-green-700 dark:hover:text-green-300 hover:underline break-words"
target="_blank"
rel="noopener noreferrer"
>
{url}
</a>
)
} else {
parts.push(
<div key={`webpreview-${patternIdx}`} className="my-2">
@ -3350,6 +3370,7 @@ export default function MarkdownArticle({ @@ -3350,6 +3370,7 @@ export default function MarkdownArticle({
.forEach(tag => {
const url = tag[1]
if (!url.startsWith('http://') && !url.startsWith('https://')) return
if (isPseudoNostrHttpsUrl(url)) return
if (isImage(url) || isMedia(url)) return
if (isYouTubeUrl(url)) return // Exclude YouTube URLs

120
src/components/NoteList/index.tsx

@ -218,6 +218,13 @@ const NoteList = forwardRef( @@ -218,6 +218,13 @@ const NoteList = forwardRef(
* That stacks subscriptions on strict relays (e.g. 10 subs) and triggers rejections / rate limits.
*/
const timelineEstablishedCloserRef = useRef<(() => void) | null>(null)
/** Session snapshot was written to state; log once after commit (see feed-paint layout effect). */
const feedPaintSessionPendingRef = useRef(false)
/** Relay / one-shot data was written to state; log once after commit. */
const feedPaintRelayPendingRef = useRef(false)
const feedPaintRelayMetaRef = useRef<Record<string, unknown> | null>(null)
/** First live `onEvents` paint per timeline init (rows or terminal EOSE). */
const feedPaintLiveRelayDoneRef = useRef(false)
const [feedProfileBatch, setFeedProfileBatch] = useState<{
profiles: Map<string, TProfile>
@ -298,7 +305,11 @@ const NoteList = forwardRef( @@ -298,7 +305,11 @@ const NoteList = forwardRef(
return JSON.stringify([...showKinds].sort((a, b) => a - b))
}, [showKinds])
/** Session snapshot identity: same feed + kind / reply UI toggles so restore matches filtering. */
/**
* Session snapshot identity: feed + kind UI toggles that affect **REQ** / merged rows.
* Do **not** include {@link hideReplies}: Notes vs Replies only changes client-side filtering; the same
* raw timeline should restore for both tabs (otherwise Replies can show cache while Notes looks empty).
*/
const sessionSnapshotIdentityKey = useMemo(
() =>
JSON.stringify({
@ -306,10 +317,9 @@ const NoteList = forwardRef( @@ -306,10 +317,9 @@ const NoteList = forwardRef(
kinds: showKindsKey,
op: showKind1OPs,
rep: showKind1Replies,
c1111: showKind1111,
hr: hideReplies
c1111: showKind1111
}),
[timelineSubscriptionKey, showKindsKey, showKind1OPs, showKind1Replies, showKind1111, hideReplies]
[timelineSubscriptionKey, showKindsKey, showKind1OPs, showKind1Replies, showKind1111]
)
const showKindsRef = useRef(showKinds)
@ -402,6 +412,45 @@ const NoteList = forwardRef( @@ -402,6 +412,45 @@ const NoteList = forwardRef(
})
}, [events, showCount, shouldHideEvent, showKinds, showKind1OPs, showKind1Replies, showKind1111])
useLayoutEffect(() => {
if (!feedPaintSessionPendingRef.current && !feedPaintRelayPendingRef.current) return
const shorten = (s: string, max: number) =>
s.length > max ? `${s.slice(0, max)}` : s
const feedKeyShort = shorten(timelineSubscriptionKey, 200)
const snapshotKeyShort = shorten(sessionSnapshotIdentityKey, 160)
if (feedPaintSessionPendingRef.current) {
feedPaintSessionPendingRef.current = false
logger.info('[FeedPaint] Session cache committed (DOM)', {
feedKey: feedKeyShort,
snapshotKey: snapshotKeyShort,
eventCount: events.length,
filteredVisibleRows: filteredEvents.length,
pubkeySlice: pubkey ? `${pubkey.slice(0, 12)}` : undefined
})
}
if (feedPaintRelayPendingRef.current) {
feedPaintRelayPendingRef.current = false
const meta = feedPaintRelayMetaRef.current
feedPaintRelayMetaRef.current = null
logger.info('[FeedPaint] Relay/network results committed (DOM)', {
feedKey: feedKeyShort,
snapshotKey: snapshotKeyShort,
committedEventCount: events.length,
filteredVisibleRows: filteredEvents.length,
pubkeySlice: pubkey ? `${pubkey.slice(0, 12)}` : undefined,
...meta
})
}
}, [
events,
filteredEvents.length,
timelineSubscriptionKey,
sessionSnapshotIdentityKey,
pubkey
])
const filteredNewEvents = useMemo(() => {
const idSet = new Set<string>()
@ -576,6 +625,11 @@ const NoteList = forwardRef( @@ -576,6 +625,11 @@ const NoteList = forwardRef(
let effectActive = true
async function init() {
feedPaintSessionPendingRef.current = false
feedPaintRelayPendingRef.current = false
feedPaintRelayMetaRef.current = null
feedPaintLiveRelayDoneRef.current = false
// Re-subscribe with rows visible (e.g. relay URL expansion): don't flash global loading / skeleton.
const keepRowsVisible =
preserveTimelineOnSubRequestsChange &&
@ -588,6 +642,7 @@ const NoteList = forwardRef( @@ -588,6 +642,7 @@ const NoteList = forwardRef(
if (!keepExistingTimelineEvents) {
if (restoredFromSession && sessionSnap) {
feedPaintSessionPendingRef.current = true
setEvents(sessionSnap)
lastEventsForTimelinePrefetchRef.current = sessionSnap
setNewEvents([])
@ -716,11 +771,25 @@ const NoteList = forwardRef( @@ -716,11 +771,25 @@ const NoteList = forwardRef(
}
setEvents(merged)
lastEventsForTimelinePrefetchRef.current = merged
feedPaintRelayPendingRef.current = true
feedPaintRelayMetaRef.current = {
variant: 'one_shot_fetch',
mergedCount: merged.length,
mergedWithPriorSession: !!(sessionSnap?.length && !userPulledRefresh)
}
} catch (err) {
if (oneShotDebugLabel) {
logger.warn(`[${oneShotDebugLabel}] one-shot fetch threw`, err)
}
if (effectActive) setEvents([])
if (effectActive) {
feedPaintRelayPendingRef.current = true
feedPaintRelayMetaRef.current = {
variant: 'one_shot_fetch',
mergedCount: 0,
fetchThrew: true
}
setEvents([])
}
} finally {
if (effectActive) {
setLoading(false)
@ -761,6 +830,28 @@ const NoteList = forwardRef( @@ -761,6 +830,28 @@ const NoteList = forwardRef(
onEvents: (batch: Event[], eosed: boolean) => {
if (!effectActive) return
const narrowed = narrowLiveBatch(batch)
if (!feedPaintLiveRelayDoneRef.current) {
if (narrowed.length > 0) {
feedPaintLiveRelayDoneRef.current = true
feedPaintRelayPendingRef.current = true
feedPaintRelayMetaRef.current = {
variant: 'live_subscription',
mode: 'rows',
narrowedInBatch: narrowed.length,
batchIncoming: batch.length,
eosed
}
} else if (eosed) {
feedPaintLiveRelayDoneRef.current = true
feedPaintRelayPendingRef.current = true
feedPaintRelayMetaRef.current = {
variant: 'live_subscription',
mode: 'eose_no_visible_rows',
batchIncoming: batch.length,
eosed
}
}
}
if (batch.length > 0) {
if (narrowed.length > 0) {
if (preserveTimelineOnSubRequestsChange) {
@ -874,6 +965,10 @@ const NoteList = forwardRef( @@ -874,6 +965,10 @@ const NoteList = forwardRef(
timelineEstablishedCloserRef.current = closer
timelineKey = result.timelineKey
setTimelineKey(timelineKey)
// subscribeTimeline resolves once shards are wired; EOSE / merge callbacks can be delayed or
// skipped on edge paths (all relays fail, strict NOTICE closes, etc.). Do not keep the global
// skeleton until the first onEvents(..., eosed) — that can freeze the feed indefinitely.
setLoading(false)
return closer
} catch (_error) {
setLoading(false)
@ -1393,9 +1488,18 @@ const NoteList = forwardRef( @@ -1393,9 +1488,18 @@ const NoteList = forwardRef(
<NoteCardLoadingSkeleton key={i} />
))}
</div>
) : events.length > 0 && (hasMore || loading) ? (
<div ref={bottomRef}>
<NoteCardLoadingSkeleton />
) : events.length > 0 && hasMore ? (
<div
ref={bottomRef}
className={
filteredEvents.length === 0 && !loading
? 'min-h-[35vh] py-4'
: loading
? 'min-h-8'
: 'min-h-4'
}
>
{loading ? <NoteCardLoadingSkeleton /> : null}
</div>
) : events.length > 0 ? (
<div className="text-center text-sm text-muted-foreground mt-2">{t('no more notes')}</div>

5
src/hooks/useSearchProfiles.tsx

@ -4,14 +4,15 @@ import { TProfile } from '@/types' @@ -4,14 +4,15 @@ import { TProfile } from '@/types'
import { useEffect, useState } from 'react'
export function useSearchProfiles(search: string, limit: number) {
const [isFetching, setIsFetching] = useState(true)
const [isFetching, setIsFetching] = useState(false)
const [error, setError] = useState<Error | null>(null)
const [profiles, setProfiles] = useState<TProfile[]>([])
useEffect(() => {
const fetchProfiles = async () => {
if (!search) {
if (!search.trim()) {
setProfiles([])
setIsFetching(false)
return
}

7
src/lib/nostr-parser.tsx

@ -7,7 +7,7 @@ import { EmbeddedMention, EmbeddedNote } from '@/components/Embedded' @@ -7,7 +7,7 @@ import { EmbeddedMention, EmbeddedNote } from '@/components/Embedded'
import ImageGallery from '@/components/ImageGallery'
import WebPreview from '@/components/WebPreview'
import { BookstrContent } from '@/components/Bookstr/BookstrContent'
import { cleanUrl, isImage, isMedia } from '@/lib/url'
import { cleanUrl, isImage, isMedia, isPseudoNostrHttpsUrl } from '@/lib/url'
import { getImetaInfosFromEvent } from '@/lib/event'
import { parsePaytoUri } from '@/lib/payto'
import PaytoLink from '@/components/PaytoLink'
@ -155,7 +155,10 @@ export function parseNostrContent(content: string, event?: Event): ParsedNostrCo @@ -155,7 +155,10 @@ export function parseNostrContent(content: string, event?: Event): ParsedNostrCo
}
const cleanedUrl = cleanUrl(url)
if (isPseudoNostrHttpsUrl(url)) {
continue
}
// Check if it's an image
if (isImage(cleanedUrl)) {
allMatches.push({

8
src/lib/url.ts

@ -103,6 +103,14 @@ export function simplifyUrl(url: string): string { @@ -103,6 +103,14 @@ export function simplifyUrl(url: string): string {
.replace(/\/$/, '')
}
/**
 * Detects the broken pattern where a NIP-21 `nostr:` URI was pasted directly after an
 * `https://` (or `http://`) prefix, e.g. `https://nostr:nevent1…`. Such strings satisfy a
 * naive scheme-prefix check but are not fetchable web URLs and would break WebPreview.
 *
 * @param url - candidate URL string; leading/trailing whitespace is ignored and the match
 *   is case-insensitive (mirrors the `/^https?:\/\/nostr:/i` behavior)
 * @returns true when the string starts with `http(s)://nostr:`
 */
export function isPseudoNostrHttpsUrl(url: string): boolean {
  const candidate = url.trim().toLowerCase()
  return candidate.startsWith('http://nostr:') || candidate.startsWith('https://nostr:')
}
export function isLocalNetworkUrl(urlString: string): boolean {
try {
const url = new URL(urlString)

2
src/pages/primary/NoteListPage/RelaysFeed.tsx

@ -107,6 +107,8 @@ const RelaysFeed = forwardRef< @@ -107,6 +107,8 @@ const RelaysFeed = forwardRef<
isMainFeed
setSubHeader={setSubHeader}
onSubHeaderRefresh={onSubHeaderRefresh}
preserveTimelineOnSubRequestsChange
mergeTimelineWhenSubRequestFiltersMatch
/>
)
})

28
src/pages/primary/SpellsPage/index.tsx

@ -904,6 +904,10 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -904,6 +904,10 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const pickSpell = useCallback(
(spell: Event | null) => {
setSpellPickerOpen(false)
if (spell && selectedSpell?.id === spell.id && !selectedFauxSpell) {
return
}
if (spell) {
logSpellFeedPickerSelection(`kind777:${getSpellName(spell)}`, {
spellId: spell.id,
@ -913,10 +917,9 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -913,10 +917,9 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
}
setSelectedSpell(spell)
setSelectedFauxSpell(null)
setSpellPickerOpen(false)
navigatePrimary('spells')
},
[logSpellFeedPickerSelection, navigatePrimary]
[logSpellFeedPickerSelection, navigatePrimary, selectedSpell?.id, selectedFauxSpell]
)
const clearSpellSelection = useCallback(() => {
@ -929,20 +932,27 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -929,20 +932,27 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const pickFauxSpell = useCallback(
(name: FauxSpellName | null) => {
setSpellPickerOpen(false)
if (name) {
// Re-selecting the same built-in feed from the picker should not clear + resubscribe (toggle used to call
// pickFauxSpell(null) and wipe the timeline when the row was already selected).
if (selectedFauxSpell === name && selectedSpell === null) {
return
}
logSpellFeedPickerSelection(`faux:${name}`, { fauxSpell: name })
fauxSpellUrlSyncFromPickerRef.current = name
setSelectedFauxSpell(name)
setSelectedSpell(null)
navigatePrimary('spells', { spell: name })
} else {
logSpellFeedPickerSelection('(cleared faux)', { clearedFaux: true })
fauxSpellUrlSyncFromPickerRef.current = null
setSelectedFauxSpell(null)
setSelectedSpell(null)
navigatePrimary('spells')
}
setSelectedFauxSpell(name)
setSelectedSpell(null)
setSpellPickerOpen(false)
if (name) navigatePrimary('spells', { spell: name })
else navigatePrimary('spells')
},
[logSpellFeedPickerSelection, navigatePrimary]
[logSpellFeedPickerSelection, navigatePrimary, selectedFauxSpell, selectedSpell]
)
const selectedSpellIsOwn = !!(pubkey && selectedSpell && selectedSpell.pubkey === pubkey)
@ -1004,7 +1014,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -1004,7 +1014,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
'hover:bg-accent focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring',
selected && 'bg-accent/50'
)}
onClick={() => pickFauxSpell(selected ? null : name)}
onClick={() => pickFauxSpell(name)}
>
<span className="flex size-4 shrink-0 items-center justify-center">
{selected ? <Check className="size-4" aria-hidden /> : null}

30
src/services/client-events.service.ts

@ -357,6 +357,36 @@ export class EventService { @@ -357,6 +357,36 @@ export class EventService {
return e
}
/**
 * Searches the in-memory session cache of kind 0 (profile metadata) events and returns the
 * pubkeys whose `display_name`, `name`, or `nip05` contains the query as a case-insensitive
 * substring — lets profile search work without touching IndexedDB.
 *
 * @param query - substring to match (trimmed and lowercased before comparison); a blank
 *   query matches nothing
 * @param limit - maximum number of pubkeys to return; values <= 0 yield an empty array
 * @returns matching pubkeys, lowercased, in cache iteration order
 */
searchSessionProfilePubkeys(query: string, limit: number): string[] {
  const needle = query.trim().toLowerCase()
  const matches: string[] = []
  if (!needle || limit <= 0) return matches
  for (const ev of this.sessionMetadataByPubkey.values()) {
    // Skip events the ingest filter would reject, then stop once the cap is reached.
    if (shouldDropEventOnIngest(ev)) continue
    if (matches.length >= limit) break
    try {
      const meta = JSON.parse(ev.content) as Record<string, unknown>
      // Fold the three searchable profile fields into one lowercased haystack.
      const haystack = [meta.display_name, meta.name, typeof meta.nip05 === 'string' ? meta.nip05 : '']
        .map((field) => (typeof field === 'string' ? field : ''))
        .join(' ')
        .toLowerCase()
      if (haystack.includes(needle)) {
        matches.push(ev.pubkey.toLowerCase())
      }
    } catch {
      /* kind 0 content was not valid JSON — ignore this event */
    }
  }
  return matches
}
/**
* Get events from session cache matching search
*/

154
src/services/client-replaceable-events.service.ts

@ -149,6 +149,17 @@ export class ReplaceableEventService { @@ -149,6 +149,17 @@ export class ReplaceableEventService {
})
try {
if (kind === kinds.Metadata && !d) {
const sessionEv = client.eventService.getSessionMetadataForPubkey(pubkey)
if (sessionEv && !shouldDropEventOnIngest(sessionEv)) {
this.replaceableEventFromBigRelaysDataloader.prime(
{ pubkey, kind },
Promise.resolve(sessionEv)
)
return sessionEv
}
}
// If we have containing event relays and this is a profile, we need to use a custom relay list
// Otherwise, use DataLoader (which batches IndexedDB checks and network fetches)
let event: NEvent | undefined
@ -292,28 +303,39 @@ export class ReplaceableEventService { @@ -292,28 +303,39 @@ export class ReplaceableEventService {
* Checks IndexedDB first, then network
*/
async fetchReplaceableEventsFromProfileFetchRelays(pubkeys: string[], kind: number): Promise<(NEvent | undefined)[]> {
const results: (NEvent | undefined)[] = []
const misses: { pubkey: string; index: number }[] = []
// Check IndexedDB in parallel
const indexedDbPromises = pubkeys.map(async (pubkey, index) => {
try {
const event = await indexedDb.getReplaceableEvent(pubkey, kind)
if (event) {
results[index] = event
return { index, event }
const results: (NEvent | undefined)[] = new Array(pubkeys.length)
const needsIndexedDb: { pubkey: string; index: number }[] = []
for (let index = 0; index < pubkeys.length; index++) {
const pubkey = pubkeys[index]
if (kind === kinds.Metadata) {
const sessionEv = client.eventService.getSessionMetadataForPubkey(pubkey)
if (sessionEv && !shouldDropEventOnIngest(sessionEv)) {
results[index] = sessionEv
this.replaceableEventFromBigRelaysDataloader.prime(
{ pubkey, kind },
Promise.resolve(sessionEv)
)
continue
}
} catch {
// Ignore errors
}
misses.push({ pubkey, index })
return null
})
await Promise.allSettled(indexedDbPromises)
// Find what's still missing and fetch from network
const stillMissing = misses.filter(({ index }) => results[index] === undefined)
needsIndexedDb.push({ pubkey, index })
}
await Promise.allSettled(
needsIndexedDb.map(async ({ pubkey, index }) => {
try {
const event = await indexedDb.getReplaceableEvent(pubkey, kind)
if (event) {
results[index] = event
}
} catch {
/* ignore */
}
})
)
const stillMissing = needsIndexedDb.filter(({ index }) => results[index] === undefined)
if (stillMissing.length > 0) {
const newEvents = await this.replaceableEventFromBigRelaysDataloader.loadMany(
stillMissing.map(({ pubkey }) => ({ pubkey, kind }))
@ -327,7 +349,7 @@ export class ReplaceableEventService { @@ -327,7 +349,7 @@ export class ReplaceableEventService {
}
})
}
return results
}
@ -367,45 +389,53 @@ export class ReplaceableEventService { @@ -367,45 +389,53 @@ export class ReplaceableEventService {
})
}
// Step 1: Batch check IndexedDB for all requested events
const groups = new Map<number, string[]>()
params.forEach(({ pubkey, kind }) => {
if (!groups.has(kind)) {
groups.set(kind, [])
}
groups.get(kind)!.push(pubkey)
})
const results: (NEvent | null)[] = new Array(params.length).fill(null)
const eventsMap = new Map<string, NEvent>()
for (let i = 0; i < params.length; i++) {
const { pubkey, kind } = params[i]
if (kind !== kinds.Metadata) continue
const sessionEv = client.eventService.getSessionMetadataForPubkey(pubkey)
if (sessionEv && !shouldDropEventOnIngest(sessionEv)) {
results[i] = sessionEv
eventsMap.set(`${pubkey}:${kind}`, sessionEv)
this.replaceableEventFromBigRelaysDataloader.prime(
{ pubkey, kind },
Promise.resolve(sessionEv)
)
}
}
const idbByKind = new Map<number, { pubkey: string; index: number }[]>()
params.forEach(({ pubkey, kind }, index) => {
if (results[index] != null) return
if (!idbByKind.has(kind)) {
idbByKind.set(kind, [])
}
idbByKind.get(kind)!.push({ pubkey, index })
})
const missingParams: { pubkey: string; kind: number; index: number }[] = []
// Batch IndexedDB checks by kind
await Promise.allSettled(
Array.from(groups.entries()).map(async ([kind, pubkeys]) => {
Array.from(idbByKind.entries()).map(async ([kind, items]) => {
const pubkeys = items.map((x) => x.pubkey)
try {
// Use batched IndexedDB query
const indexedDbEvents = await indexedDb.getManyReplaceableEvents(pubkeys, kind)
// Only log at debug level to reduce noise during rapid scrolling
logger.debug('[ReplaceableEventService] IndexedDB batch query completed', {
kind,
pubkeyCount: pubkeys.length,
foundCount: indexedDbEvents.filter(e => e !== null && e !== undefined).length
foundCount: indexedDbEvents.filter((e) => e !== null && e !== undefined).length
})
// Map IndexedDB results back to params
pubkeys.forEach((pubkey, idx) => {
const paramIndex = params.findIndex(p => p.pubkey === pubkey && p.kind === kind)
if (paramIndex >= 0) {
const event = indexedDbEvents[idx]
if (event && event !== null) {
results[paramIndex] = event
eventsMap.set(`${pubkey}:${kind}`, event)
// Refresh in background
this.refreshInBackground(pubkey, kind).catch(() => {})
} else {
missingParams.push({ pubkey, kind, index: paramIndex })
}
items.forEach(({ pubkey, index }, idx) => {
const event = indexedDbEvents[idx]
if (event && event !== null) {
results[index] = event
eventsMap.set(`${pubkey}:${kind}`, event)
this.refreshInBackground(pubkey, kind).catch(() => {})
} else {
missingParams.push({ pubkey, kind, index })
}
})
} catch (error) {
@ -413,20 +443,16 @@ export class ReplaceableEventService { @@ -413,20 +443,16 @@ export class ReplaceableEventService {
kind,
error: error instanceof Error ? error.message : String(error)
})
// If IndexedDB fails, mark all as missing
pubkeys.forEach((pubkey) => {
const paramIndex = params.findIndex(p => p.pubkey === pubkey && p.kind === kind)
if (paramIndex >= 0) {
missingParams.push({ pubkey, kind, index: paramIndex })
}
})
for (const { pubkey, index } of items) {
missingParams.push({ pubkey, kind, index })
}
}
})
)
// Step 2: Only fetch missing events from network
if (missingParams.length === 0) {
logger.debug('[ReplaceableEventService] All events found in IndexedDB, skipping network fetch', {
logger.debug('[ReplaceableEventService] All events resolved (session + IndexedDB), skipping network fetch', {
totalCount: params.length
})
return results
@ -794,6 +820,18 @@ export class ReplaceableEventService { @@ -794,6 +820,18 @@ export class ReplaceableEventService {
logger.error('[ReplaceableEventService] Invalid id - no pubkey extracted', { id })
throw new Error('Invalid id')
}
if (!_skipCache) {
const sessionEv = client.eventService.getSessionMetadataForPubkey(pubkey)
if (sessionEv && !shouldDropEventOnIngest(sessionEv)) {
this.replaceableEventFromBigRelaysDataloader.prime(
{ pubkey, kind: kinds.Metadata },
Promise.resolve(sessionEv)
)
await this.indexProfile(sessionEv)
return sessionEv
}
}
// CRITICAL: Always use relay hints from bech32 addresses (nprofile, naddr, nevent) when available
// Relay hints should have highest priority and always be included

119
src/services/client.service.ts

@ -89,6 +89,18 @@ function summarizeFiltersForRelayLog(filters: Filter[]): Record<string, unknown> @@ -89,6 +89,18 @@ function summarizeFiltersForRelayLog(filters: Filter[]): Record<string, unknown>
if (f['#t']?.length) out.tTagCount = f['#t'].length
return out
}
/** Hostname (+ path when not "/") for readable publish / retry console lines. */
function relayHostForUserLog(url: string): string {
const n = normalizeUrl(url) || url
try {
const u = new URL(n.replace(/^wss:/i, 'https:').replace(/^ws:/i, 'http:'))
const path = u.pathname && u.pathname !== '/' ? u.pathname.replace(/\/$/, '') : ''
return path ? `${u.host}${path}` : u.host
} catch {
return n
}
}
import { EventService } from './client-events.service'
import { ReplaceableEventService } from './client-replaceable-events.service'
import { MacroService, createBookstrService } from './client-macro.service'
@ -344,14 +356,50 @@ class ClientService extends EventTarget { @@ -344,14 +356,50 @@ class ClientService extends EventTarget {
}
const failedOutboxes = userOutboxUrls.filter((u) => !hadSuccess.has(norm(u)))
if (failedOutboxes.length === 0) return
logger.info('[PublishEvent] Outbox relay(s) failed; retrying once after delay', {
eventId: event.id?.slice(0, 8),
kind: event.kind,
failedCount: failedOutboxes.length,
delayMs: OUTBOX_PUBLISH_RETRY_DELAY_MS
})
const statusHint = (url: string): string => {
const n = norm(url)
const forUrl = relayStatuses.filter((r) => norm(r.url) === n)
const failMsgs = forUrl.filter((r) => !r.success).map((r) => r.error).filter(Boolean)
if (failMsgs.length) return failMsgs[failMsgs.length - 1]!
return 'No OK from this relay (timeout, connection failed, or still pending when the first wave ended)'
}
const okOutboxes = userOutboxUrls.filter((u) => hadSuccess.has(norm(u)))
const eventHint =
event.id && /^[0-9a-f]{64}$/i.test(event.id)
? `note id ${event.id.slice(0, 12)}… (kind ${event.kind})`
: `kind ${event.kind} note`
const failedSummary = failedOutboxes
.map((u) => `${relayHostForUserLog(u)}${statusHint(u)}`)
.join('\n')
const okSummary =
okOutboxes.length > 0
? okOutboxes.map((u) => `${relayHostForUserLog(u)}`).join('\n')
: ' (none)'
logger.info(
`[Publish] NIP-65 write relays (your outboxes): ${failedOutboxes.length} did not confirm ${eventHint}. ` +
`Retrying only those in ${OUTBOX_PUBLISH_RETRY_DELAY_MS / 1000}s (one more try each).\n` +
`Not OK:\n${failedSummary}\n` +
`Confirmed on first publish wave:\n${okSummary}`,
{
delaySeconds: OUTBOX_PUBLISH_RETRY_DELAY_MS / 1000,
failed: failedOutboxes.map((url) => ({
url,
host: relayHostForUserLog(url),
reason: statusHint(url)
})),
confirmed: okOutboxes.map((url) => ({ url, host: relayHostForUserLog(url) }))
}
)
await new Promise<void>((r) => setTimeout(r, OUTBOX_PUBLISH_RETRY_DELAY_MS))
await this.publishEvent(failedOutboxes, event, { skipOutboxRetry: true })
await this.publishEvent(failedOutboxes, event, {
skipOutboxRetry: true,
publishBatchLabel: 'NIP-65 outbox retry — 2nd attempt (failed write relays only)'
})
}
private async prioritizePublishUrlList(
@ -717,8 +765,14 @@ class ClientService extends EventTarget { @@ -717,8 +765,14 @@ class ClientService extends EventTarget {
/** One failed publish or subscribe connection per normalized URL (accumulates until {@link SESSION_RELAY_FAILURE_STRIKE_THRESHOLD}). */
/** NOTICE "failed to fetch events" (relay DB/backend) — same session strike as a failed connection. */
private recordRelayNoticeFetchFailure(url: string, noticeMessage: string) {
const n = normalizeUrl(url) || url
if (!n) return
const prev = this.publishStrikeCount.get(n) ?? 0
if (prev >= ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD) {
return
}
logger.info('[Relay] NOTICE failed-fetch → session strike', {
url,
url: n,
noticeSnippet: noticeMessage.slice(0, 220)
})
this.recordSessionRelayFailure(url)
@ -922,9 +976,21 @@ class ClientService extends EventTarget { @@ -922,9 +976,21 @@ class ClientService extends EventTarget {
} else {
logger.debug('[PublishEvent] Unique relays', { count: uniqueRelayUrls.length, relays: uniqueRelayUrls.slice(0, 5) })
}
const publishBatchSource = publishExtras?.publishBatchLabel
? `publish — ${publishExtras.publishBatchLabel}`
: 'ClientService.publishEvent'
if (publishExtras?.publishBatchLabel) {
const idBit =
event.id && /^[0-9a-f]{64}$/i.test(event.id) ? `${event.id.slice(0, 12)}` : '(unsigned or no id)'
logger.info(`[Publish] ${publishExtras.publishBatchLabel}`, {
readable: `Kind ${event.kind} note ${idBit}${uniqueRelayUrls.length} relay(s): ${uniqueRelayUrls.map(relayHostForUserLog).join(', ')}`,
targets: uniqueRelayUrls.map((url) => ({ where: relayHostForUserLog(url), url }))
})
}
const relayStatuses: { url: string; success: boolean; error?: string }[] = []
const publishOpBatch = new RelayPublishOpBatch('ClientService.publishEvent', event.id, uniqueRelayUrls)
const publishOpBatch = new RelayPublishOpBatch(publishBatchSource, event.id, uniqueRelayUrls)
publishOpBatch.logBegin()
// eslint-disable-next-line @typescript-eslint/no-this-alias
@ -1166,9 +1232,14 @@ class ClientService extends EventTarget { @@ -1166,9 +1232,14 @@ class ClientService extends EventTarget {
void client
.retryFailedOutboxPublishesOnce(event, userOutboxUrls, relayStatuses)
.catch((err) =>
logger.warn('[PublishEvent] Outbox retry pass failed', {
error: err instanceof Error ? err.message : String(err)
})
logger.warn(
'[Publish] NIP-65 outbox retry (2nd attempt) failed — check the network or relay logs above',
{
error: err instanceof Error ? err.message : String(err),
eventKind: event.kind,
eventId: event.id && /^[0-9a-f]{64}$/i.test(event.id) ? `${event.id.slice(0, 16)}` : event.id
}
)
)
})
}
@ -2223,8 +2294,26 @@ class ClientService extends EventTarget { @@ -2223,8 +2294,26 @@ class ClientService extends EventTarget {
}
async searchNpubsFromLocal(query: string, limit: number = 100) {
const result = await this.userIndex.searchAsync(query, { limit })
return result.map((pubkey) => pubkeyToNpub(pubkey as string)).filter(Boolean) as string[]
const seen = new Set<string>()
const out: string[] = []
const pushNpub = (npub: string) => {
if (!npub || seen.has(npub) || out.length >= limit) return
seen.add(npub)
out.push(npub)
}
for (const pk of this.eventService.searchSessionProfilePubkeys(query, limit)) {
const npub = pubkeyToNpub(pk)
if (npub) pushNpub(npub)
}
if (out.length >= limit) return out
const remaining = limit - out.length
const result = await this.userIndex.searchAsync(query, { limit: remaining * 4 })
for (const pubkey of result) {
const npub = pubkeyToNpub(pubkey as string)
if (npub) pushNpub(npub)
if (out.length >= limit) break
}
return out
}
/**

4
src/services/content-parser.service.ts

@ -9,6 +9,7 @@ import { getImetaInfosFromEvent } from '@/lib/event' @@ -9,6 +9,7 @@ import { getImetaInfosFromEvent } from '@/lib/event'
import { URL_REGEX, ExtendedKind } from '@/constants'
import { TImetaInfo } from '@/types'
import logger from '@/lib/logger'
import { isPseudoNostrHttpsUrl } from '@/lib/url'
export interface ParsedContent {
html: string
@ -982,6 +983,7 @@ class ContentParserService { @@ -982,6 +983,7 @@ class ContentParserService {
// Give 'r' tags lowest priority
if (tag[0] === 'r' && (!sourceTag || sourceTag[0] === 'r')) {
if (tag[1] && isPseudoNostrHttpsUrl(tag[1])) continue
sourceTag = tag
continue
}
@ -1009,7 +1011,7 @@ class ContentParserService { @@ -1009,7 +1011,7 @@ class ContentParserService {
relays: relay ? [relay] : []
})
})
} else if (sourceTag[0] === 'r') {
} else if (sourceTag[0] === 'r' && sourceTag[1] && !isPseudoNostrHttpsUrl(sourceTag[1])) {
sources.push({
type: 'url',
value: sourceTag[1],

37
src/services/relay-operation-log.service.ts

@ -1,8 +1,20 @@ @@ -1,8 +1,20 @@
import logger from '@/lib/logger'
import { normalizeUrl } from '@/lib/url'
import type { Filter } from 'nostr-tools'
let batchSeq = 0
function relayHostForPublishLog(url: string): string {
const n = normalizeUrl(url) || url
try {
const u = new URL(n.replace(/^wss:/i, 'https:').replace(/^ws:/i, 'http:'))
const path = u.pathname && u.pathname !== '/' ? u.pathname.replace(/\/$/, '') : ''
return path ? `${u.host}${path}` : u.host
} catch {
return n
}
}
function nextBatchId(prefix: string): string {
return `${prefix}-${Date.now().toString(36)}-${(++batchSeq).toString(36)}`
}
@ -214,6 +226,20 @@ export class RelayPublishOpBatch { @@ -214,6 +226,20 @@ export class RelayPublishOpBatch {
)
const ok = this.results.filter((r) => r.ok)
const fail = this.results.filter((r) => !r.ok)
const sorted = this.results.sort((a, b) => a.cmdIndex - b.cmdIndex)
const readableSummary =
fail.length === 0
? `All ${ok.length} relay(s) accepted the publish.`
: [
`${fail.length} relay(s) failed:`,
...fail.map(
(r) =>
`${relayHostForPublishLog(r.relayUrl)}${(r.error && String(r.error).trim()) || 'rejected or error'}`
),
ok.length > 0 ? `${ok.length} relay(s) OK: ${ok.map((r) => relayHostForPublishLog(r.relayUrl)).join(', ')}` : ''
]
.filter(Boolean)
.join('\n')
logger.info('[RelayOp] publish_batch_end', {
batchId: this.batchId,
source: this.source,
@ -222,16 +248,23 @@ export class RelayPublishOpBatch { @@ -222,16 +248,23 @@ export class RelayPublishOpBatch {
elapsedMs,
okCount: ok.length,
failCount: fail.length,
readableSummary,
byState: {
ok: { count: ok.length, relays: ok.map((r) => r.relayUrl), cmdIndices: ok.map((r) => r.cmdIndex) },
ok: {
count: ok.length,
relays: ok.map((r) => r.relayUrl),
hosts: ok.map((r) => relayHostForPublishLog(r.relayUrl)),
cmdIndices: ok.map((r) => r.cmdIndex)
},
fail: {
count: fail.length,
relays: fail.map((r) => r.relayUrl),
hosts: fail.map((r) => relayHostForPublishLog(r.relayUrl)),
cmdIndices: fail.map((r) => r.cmdIndex),
errors: fail.map((r) => r.error ?? '')
}
},
results: this.results.sort((a, b) => a.cmdIndex - b.cmdIndex)
results: sorted
})
}
}

2
src/types/index.d.ts vendored

@ -186,6 +186,8 @@ export type TPublishEventExtras = { @@ -186,6 +186,8 @@ export type TPublishEventExtras = {
favoriteRelayUrls?: string[]
/** When true (internal): only publish to the given URLs; do not merge outboxes or schedule outbox retry. */
skipOutboxRetry?: boolean
/** Shown in relay batch logs and an info line (e.g. "NIP-65 outbox retry — 2nd attempt"). */
publishBatchLabel?: string
}
export type TNoteListMode = 'posts' | 'postsAndReplies' | 'you' | 'bookmarksAndHashtags'

Loading…
Cancel
Save