Browse Source

more bug-fixing

imwald
Silberengel 1 month ago
parent
commit
4729ecb5c1
  1. 156
      src/components/NoteList/index.tsx
  2. 6
      src/i18n/locales/de.ts
  3. 6
      src/i18n/locales/en.ts
  4. 18
      src/pages/primary/SpellsPage/fauxSpellFeeds.ts
  5. 11
      src/pages/primary/SpellsPage/index.tsx
  6. 26
      src/services/client-query.service.ts
  7. 68
      src/services/client.service.ts

156
src/components/NoteList/index.tsx

@ -24,9 +24,9 @@ import { useNostr } from '@/providers/NostrProvider' @@ -24,9 +24,9 @@ import { useNostr } from '@/providers/NostrProvider'
import { useUserTrust } from '@/contexts/user-trust-context'
import { useZap } from '@/providers/ZapProvider'
import client from '@/services/client.service'
import { TFeedSubRequest } from '@/types'
import type { TFeedSubRequest, TSubRequestFilter } from '@/types'
import dayjs from 'dayjs'
import { Event, kinds } from 'nostr-tools'
import { type Event, type Filter, kinds } from 'nostr-tools'
import { decode } from 'nostr-tools/nip19'
import {
forwardRef,
@ -66,6 +66,16 @@ function mergeEventBatchesById(prev: Event[], incoming: Event[], cap: number): E @@ -66,6 +66,16 @@ function mergeEventBatchesById(prev: Event[], incoming: Event[], cap: number): E
.slice(0, cap)
}
/** When omitting `kinds` from a live REQ, require another scope so we never subscribe to a whole relay. */
function timelineFilterHasNonKindScope(f: Filter): boolean {
  // Any one non-empty scoping array (authors, ids, `#p`, `#e`) is enough to bound the subscription.
  const scopeKeys = ['authors', 'ids', '#p', '#e'] as const
  return scopeKeys.some((key) => {
    const values = (f as Record<string, unknown>)[key]
    return Array.isArray(values) && values.length > 0
  })
}
const NoteList = forwardRef(
(
{
@ -103,6 +113,16 @@ const NoteList = forwardRef( @@ -103,6 +113,16 @@ const NoteList = forwardRef(
spellFeedInstrumentToken,
/** Spells page: fired once when the filtered list first has rows after a picker change. */
onSpellFeedFirstPaint,
/**
* After this many ms with no forced completion, loading is cleared so empty state can show (default 15s).
* Use a larger value for slow feeds (e.g. notifications `#p` across many relays).
*/
timelineLoadingSafetyTimeoutMs,
/**
* With {@link useFilterAsIs}: omit relay `kinds` when the subrequest filter has none, and narrow
* incoming events to {@link showKinds} before merging (so caps are not filled by unrelated kinds).
*/
clientSideKindFilter = false,
/**
* When true, load events with parallel {@link client.fetchEvents} per subRequest instead of
* {@link client.subscribeTimeline}. No live stream or `loadMore` timeline pagination; use for faux spells
@ -146,6 +166,8 @@ const NoteList = forwardRef( @@ -146,6 +166,8 @@ const NoteList = forwardRef(
spellFetchTimeoutMs?: number
spellFeedInstrumentToken?: number
onSpellFeedFirstPaint?: (detail: { eventCount: number; firstEventId: string }) => void
timelineLoadingSafetyTimeoutMs?: number
clientSideKindFilter?: boolean
oneShotFetch?: boolean
oneShotMergedCap?: number
revealBatchSize?: number
@ -260,6 +282,13 @@ const NoteList = forwardRef( @@ -260,6 +282,13 @@ const NoteList = forwardRef(
return JSON.stringify([...showKinds].sort((a, b) => a - b))
}, [showKinds])
const showKindsRef = useRef(showKinds)
showKindsRef.current = showKinds
const useFilterAsIsRef = useRef(useFilterAsIs)
useFilterAsIsRef.current = useFilterAsIs
const clientSideKindFilterRef = useRef(clientSideKindFilter)
clientSideKindFilterRef.current = clientSideKindFilter
const shouldHideEvent = useCallback(
(evt: Event) => {
const pinnedEventHexIdSet = new Set()
@ -525,35 +554,43 @@ const NoteList = forwardRef( @@ -525,35 +554,43 @@ const NoteList = forwardRef(
setHasMore(true)
consecutiveEmptyRef.current = 0 // Reset counter on refresh
const mappedSubRequests = subRequestsRef.current.map(({ urls, filter }) => {
// CRITICAL: Always ensure filter has kinds - relays require this to return events
const defaultKinds = showKinds.length > 0 ? showKinds : [kinds.ShortTextNote]
const finalFilter = useFilterAsIs
? {
...filter,
// If filter doesn't have kinds, add them (required for relay queries)
kinds: filter.kinds && filter.kinds.length > 0 ? filter.kinds : defaultKinds,
limit: filter.limit ?? (areAlgoRelays ? ALGO_LIMIT : LIMIT)
const mappedSubRequests = subRequestsRef.current.map(({ urls, filter }) => {
const baseLimit = filter.limit ?? (areAlgoRelays ? ALGO_LIMIT : LIMIT)
if (useFilterAsIs) {
const finalFilter: Filter = { ...filter, limit: baseLimit }
const hasKindsInRequest = Array.isArray(filter.kinds) && filter.kinds.length > 0
if (clientSideKindFilter) {
if (hasKindsInRequest) {
finalFilter.kinds = filter.kinds
} else {
delete finalFilter.kinds
}
} else if (hasKindsInRequest) {
finalFilter.kinds = filter.kinds
} else {
finalFilter.kinds = defaultKinds
}
: {
return { urls, filter: finalFilter }
}
return {
urls,
filter: {
...filter,
// If showKinds is empty, default to kind 1 (ShortTextNote) only
kinds: defaultKinds,
limit: areAlgoRelays ? ALGO_LIMIT : LIMIT
}
// CRITICAL: Validate filter has kinds before subscribing
if (!finalFilter.kinds || finalFilter.kinds.length === 0) {
finalFilter.kinds = [kinds.ShortTextNote]
}
return { urls, filter: finalFilter }
})
// CRITICAL: Validate all filters have kinds before subscribing
const invalidFilters = mappedSubRequests.filter(({ filter }) => !filter.kinds || filter.kinds.length === 0)
const filterMissingKinds = (f: Filter) => !f.kinds || f.kinds.length === 0
const invalidFilters = mappedSubRequests.filter(({ filter: f }) => {
if (!filterMissingKinds(f)) return false
if (useFilterAsIs && clientSideKindFilter && timelineFilterHasNonKindScope(f)) return false
return true
})
if (invalidFilters.length > 0) {
// Don't subscribe with invalid filters - this would return no events
if (oneShotDebugLabel) {
logger.warn(`[${oneShotDebugLabel}] abort: filter missing kinds`, {
subRequestsKey: timelineSubscriptionKey
@ -561,10 +598,14 @@ const NoteList = forwardRef( @@ -561,10 +598,14 @@ const NoteList = forwardRef(
}
setLoading(false)
setEvents([])
// Return a no-op closer function to satisfy the cleanup function
return () => {}
}
/** Live batches: when client-side kind filtering is active, keep only events whose kind is in `showKinds`. */
const narrowLiveBatch = (evs: Event[]) => {
  const filteringActive = useFilterAsIs && clientSideKindFilter
  return filteringActive ? evs.filter((evt) => showKinds.includes(evt.kind)) : evs
}
if (oneShotFetch) {
if (!keepExistingTimelineEvents) {
setEvents([])
@ -595,9 +636,12 @@ const NoteList = forwardRef( @@ -595,9 +636,12 @@ const NoteList = forwardRef(
}
}
const cap = oneShotMergedCap ?? ONE_SHOT_MERGED_CAP
const merged = [...byId.values()]
let merged = [...byId.values()]
.sort((a, b) => b.created_at - a.created_at)
.slice(0, cap)
if (useFilterAsIs && clientSideKindFilter) {
merged = merged.filter((e) => showKinds.includes(e.kind))
}
if (oneShotDebugLabel) {
const f0 = mappedSubRequests[0]?.filter
const batchEventCounts = batches.map((b) => b.length)
@ -662,20 +706,22 @@ const NoteList = forwardRef( @@ -662,20 +706,22 @@ const NoteList = forwardRef(
const eventCap = areAlgoRelays ? ALGO_LIMIT : LIMIT
timelineSubscribePromise = client.subscribeTimeline(
mappedSubRequests,
mappedSubRequests as Array<{ urls: string[]; filter: TSubRequestFilter }>,
{
onEvents: (batch: Event[], eosed: boolean) => {
if (!effectActive) return
const narrowed = narrowLiveBatch(batch)
if (batch.length > 0) {
if (narrowed.length > 0) {
if (preserveTimelineOnSubRequestsChange) {
setEvents((prev) => {
const next = mergeEventBatchesById(prev, batch, eventCap)
const next = mergeEventBatchesById(prev, narrowed, eventCap)
lastEventsForTimelinePrefetchRef.current = next
return next
})
} else {
setEvents(batch)
lastEventsForTimelinePrefetchRef.current = batch
setEvents(narrowed)
lastEventsForTimelinePrefetchRef.current = narrowed
}
// Do not wait for full EOSE across many relays — otherwise loading/skeleton stays up for 10–30s+
setLoading(false)
@ -715,6 +761,12 @@ const NoteList = forwardRef( @@ -715,6 +761,12 @@ const NoteList = forwardRef(
}
setLoading(false)
}
} else if (eosed) {
if (!preserveTimelineOnSubRequestsChange) {
setEvents([])
}
setLoading(false)
}
if (areAlgoRelays) {
// Algorithm feeds typically return all results at once
@ -738,6 +790,7 @@ const NoteList = forwardRef( @@ -738,6 +790,7 @@ const NoteList = forwardRef(
onNew: (event: Event) => {
if (!effectActive) return
if (!useFilterAsIs && !showKinds.includes(event.kind)) return
if (clientSideKindFilter && useFilterAsIs && !showKinds.includes(event.kind)) return
if (event.kind === kinds.ShortTextNote) {
const isReply = isReplyNoteEvent(event)
if (isReply && !showKind1Replies) return
@ -815,7 +868,8 @@ const NoteList = forwardRef( @@ -815,7 +868,8 @@ const NoteList = forwardRef(
oneShotDebugLabel,
oneShotGlobalTimeoutMs,
oneShotEoseTimeoutMs,
oneShotFirstRelayGraceMs
oneShotFirstRelayGraceMs,
clientSideKindFilter
])
const oneShotDebugPrevLoadingRef = useRef(false)
@ -855,6 +909,8 @@ const NoteList = forwardRef( @@ -855,6 +909,8 @@ const NoteList = forwardRef(
eventsRef.current = events
}, [events])
const loadingSafetyMs = timelineLoadingSafetyTimeoutMs ?? 15_000
useEffect(() => {
if (!subRequestsRef.current.length) return
let cancelled = false
@ -866,12 +922,12 @@ const NoteList = forwardRef( @@ -866,12 +922,12 @@ const NoteList = forwardRef(
if (eventsRef.current.length === 0) {
setHasMore(false)
}
}, 15_000)
}, loadingSafetyMs)
return () => {
cancelled = true
clearTimeout(timer)
}
}, [timelineSubscriptionKey, refreshCount])
}, [timelineSubscriptionKey, refreshCount, loadingSafetyMs])
// Use refs to avoid dependency issues and ensure latest values in async callbacks
const showCountRef = useRef(showCount)
@ -995,10 +1051,42 @@ const NoteList = forwardRef( @@ -995,10 +1051,42 @@ const NoteList = forwardRef(
return
}
// Reset consecutive empty counter on success
let fetchBatch = newEvents
let toAppend =
useFilterAsIsRef.current && clientSideKindFilterRef.current
? fetchBatch.filter((e) => showKindsRef.current.includes(e.kind))
: fetchBatch
if (
useFilterAsIsRef.current &&
clientSideKindFilterRef.current &&
toAppend.length === 0 &&
fetchBatch.length > 0
) {
let skipUntil = Math.min(...fetchBatch.map((e) => e.created_at)) - 1
for (let depth = 0; depth < 8 && toAppend.length === 0; depth++) {
fetchBatch = await client.loadMoreTimeline(latestTimelineKey, skipUntil, LIMIT)
if (fetchBatch.length === 0) break
toAppend = fetchBatch.filter((e) => showKindsRef.current.includes(e.kind))
if (toAppend.length > 0) break
skipUntil = Math.min(...fetchBatch.map((e) => e.created_at)) - 1
}
}
if (toAppend.length === 0) {
consecutiveEmptyRef.current += 1
const eventCount = latestEvents.length
const shouldStop = consecutiveEmptyRef.current >= (eventCount < 50 ? 30 : 15)
if (shouldStop) {
setHasMore(false)
}
setLoading(false)
return
}
consecutiveEmptyRef.current = 0
setEvents((oldEvents) => [...oldEvents, ...newEvents])
setEvents((oldEvents) => [...oldEvents, ...toAppend])
// After appending, the bottom sentinel may have moved below the fold. Re-check after
// paint: if it's still in/near view, trigger loadMore again so user doesn't have to scroll.
@ -1018,7 +1106,7 @@ const NoteList = forwardRef( @@ -1018,7 +1106,7 @@ const NoteList = forwardRef(
// CRITICAL: Prefetch profiles for newly loaded events (optimized to reduce stuttering)
// Only prefetch if we're not currently loading to avoid blocking scroll
if (newEvents.length > 0 && !loadingRef.current) {
if (toAppend.length > 0 && !loadingRef.current) {
// Use requestIdleCallback if available, otherwise setTimeout with longer delay
const schedulePrefetch = (callback: () => void) => {
if (typeof requestIdleCallback !== 'undefined') {
@ -1029,7 +1117,7 @@ const NoteList = forwardRef( @@ -1029,7 +1117,7 @@ const NoteList = forwardRef(
}
schedulePrefetch(() => {
const { hexIds, nip19Pointers } = mergePrefetchTargetsFromEvents(newEvents.slice(0, 30))
const { hexIds, nip19Pointers } = mergePrefetchTargetsFromEvents(toAppend.slice(0, 30))
const hexIdsToFetch = hexIds.filter((id) => !prefetchedEventIdsRef.current.has(id))
const nip19ToFetch = nip19Pointers.filter((p) => !prefetchedEventIdsRef.current.has(p))
if (hexIdsToFetch.length === 0 && nip19ToFetch.length === 0) return

6
src/i18n/locales/de.ts

@ -492,13 +492,13 @@ export default { @@ -492,13 +492,13 @@ export default {
relayType_randomly_selected: 'Zufällig (optional)',
'Session relays': 'Session-Relays',
'Session relays tab description':
'Relay-Logik für diese Session: funktionierende und gestrichene Preset-Relays sowie bewertete Zufallsrelays (bevorzugt schnellere, bewährte Relays beim Hinzufügen von Zufallsrelays).',
'Relay-Logik für diese Session: funktionierende und gestrichene Preset-Relays sowie bewertete Zufallsrelays. Gestrichene Relays werden für Lesen und Schreiben bis zum Neuladen der App übersprungen.',
'Session relays preset working': 'Funktionierende Preset-Relays',
'Session relays preset working hint':
'Preset-Relays (App-Standard), die in dieser Session keine 3 Publish-Fehler erreicht haben.',
'Preset-Relays (App-Standard), die die Session-Fehlerschwelle (2 Fehler) noch nicht erreicht haben.',
'Session relays preset striked': 'Gestrichene Preset-Relays',
'Session relays preset striked hint':
'Preset-Relays mit 3 Publish-Fehlern in dieser Session; werden für den Rest der Session übersprungen.',
'Preset-Relays mit 2 Verbindungs- oder Publish-Fehlern in dieser Session; werden für Lesen und Schreiben bis zum Neuladen übersprungen.',
'Session relays scored random': 'Bewertete Zufallsrelays',
'Session relays scored random hint':
'Relays, die in dieser Session mindestens ein Publish angenommen haben; werden beim Auswählen von Zufallsrelays bevorzugt. Sortiert nach durchschnittlicher Latenz.',

6
src/i18n/locales/en.ts

@ -483,13 +483,13 @@ export default { @@ -483,13 +483,13 @@ export default {
relayType_randomly_selected: 'Random (optional)',
'Session relays': 'Session relays',
'Session relays tab description':
'Relay logic for this session: working and striked preset relays, and scored random relays (used to prefer faster, proven relays when adding random relays to publish).',
'Relay logic for this session: working and striked preset relays, and scored random relays (used to prefer faster, proven relays when adding random relays to publish). Striked relays are skipped for reads and publishes until reload.',
'Session relays preset working': 'Working preset relays',
'Session relays preset working hint':
'Preset relays (from app defaults) that have not reached 3 publish failures this session.',
'Preset relays (from app defaults) that have not reached the session failure threshold (2 failures).',
'Session relays preset striked': 'Striked preset relays',
'Session relays preset striked hint':
'Preset relays that have reached 3 publish failures this session and are skipped for the rest of the session.',
'Preset relays that have reached 2 connection or publish failures this session and are skipped for reads and writes until you reload the app.',
'Session relays scored random': 'Scored random relays',
'Session relays scored random hint':
'Relays that have accepted at least one publish this session; used to prefer faster relays when picking random relays. Sorted by average latency.',

18
src/pages/primary/SpellsPage/fauxSpellFeeds.ts

@ -49,6 +49,9 @@ export function applyFauxSpellCapsToSubRequests(requests: TFeedSubRequest[]): TF @@ -49,6 +49,9 @@ export function applyFauxSpellCapsToSubRequests(requests: TFeedSubRequest[]): TF
/**
* Mention/notification-shaped kinds only (aligned with global notification-shaped kinds, plus zap receipts).
* Not full {@link PROFILE_FEED_KINDS} that asked relays for huge multi-kind slices per `#p`.
*
* Live notifications spell: REQ uses `#p` only (no relay `kinds`); {@link NOTIFICATION_SPELL_KINDS} is applied
* in NoteList via `clientSideKindFilter` so the timeline buffer is not filled by other kinds that mention you.
*/
export const NOTIFICATION_SPELL_KINDS = [
kinds.ShortTextNote,
@ -63,6 +66,9 @@ export const NOTIFICATION_SPELL_KINDS = [ @@ -63,6 +66,9 @@ export const NOTIFICATION_SPELL_KINDS = [
ExtendedKind.ZAP_RECEIPT
] as const
/** Live notifications spell: longer than NoteList’s default 15s before empty state (slow `#p` on some relays). */
export const NOTIFICATION_SPELL_LOADING_SAFETY_MS = 90_000
/**
* Max distinct `t` tag values in one filter (very long `#t` arrays can hit relay limits).
*/
@ -105,9 +111,13 @@ export const MEDIA_SPELL_KINDS = [ @@ -105,9 +111,13 @@ export const MEDIA_SPELL_KINDS = [
*/
export const PROFILE_MEDIA_TAB_KINDS = [...MEDIA_SPELL_KINDS] as const
/**
 * Normalize a mention pubkey for `#p` filters: trim whitespace, and lowercase it when
 * it is a 64-char hex key. Non-hex input is returned trimmed as-is.
 *
 * Fix: the original called `pubkey.trim()` up to three times; compute it once.
 */
function normalizeMentionPubkey(pubkey: string): string {
  const trimmed = pubkey.trim()
  return /^[0-9a-f]{64}$/i.test(trimmed) ? trimmed.toLowerCase() : trimmed
}
/** Notifications faux spell: `#p` = you, narrow kinds — see module docstring. */
export function buildMentionsSpellFilter(pubkey: string): Filter {
const pk = /^[0-9a-f]{64}$/i.test(pubkey.trim()) ? pubkey.trim().toLowerCase() : pubkey.trim()
const pk = normalizeMentionPubkey(pubkey)
return {
kinds: [...NOTIFICATION_SPELL_KINDS],
limit: FAUX_SPELL_EVENT_LIMIT,
@ -115,6 +125,12 @@ export function buildMentionsSpellFilter(pubkey: string): Filter { @@ -115,6 +125,12 @@ export function buildMentionsSpellFilter(pubkey: string): Filter {
}
}
/** Live timeline: one REQ per relay set, any kind with `#p` = you; kinds narrowed in the client. */
export function buildNotificationsSpellSubRequests(urls: string[], pubkey: string): TFeedSubRequest[] {
  const filter = { limit: FAUX_SPELL_EVENT_LIMIT, '#p': [normalizeMentionPubkey(pubkey)] }
  return [{ urls, filter }]
}
export function buildDiscussionFilter(): Filter {
return {
kinds: [ExtendedKind.DISCUSSION],

11
src/pages/primary/SpellsPage/index.tsx

@ -93,7 +93,8 @@ import { @@ -93,7 +93,8 @@ import {
buildDiscussionFilter,
buildInterestsSubRequests,
buildMediaSpellFilter,
buildMentionsSpellFilter,
buildNotificationsSpellSubRequests,
NOTIFICATION_SPELL_LOADING_SAFETY_MS,
FAUX_SPELL_EVENT_LIMIT,
MEDIA_SPELL_KINDS,
NOTIFICATION_SPELL_KINDS
@ -680,7 +681,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -680,7 +681,7 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
if (selectedFauxSpell === 'notifications') {
if (!pubkey || !feedUrls.length) return []
return [{ urls: feedUrls, filter: buildMentionsSpellFilter(pubkey) }]
return buildNotificationsSpellSubRequests(feedUrls, pubkey)
}
if (selectedFauxSpell === 'discussions') {
// Read-only prepended in appendCuratedReadOnlyRelays so FAUX_SPELL_MAX_RELAYS still includes aggr.
@ -1359,6 +1360,12 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -1359,6 +1360,12 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
spellFetchTimeoutMs={1}
spellFeedInstrumentToken={spellFeedInstrumentToken}
onSpellFeedFirstPaint={handleSpellFeedFirstPaint}
timelineLoadingSafetyTimeoutMs={
selectedFauxSpell === 'notifications'
? NOTIFICATION_SPELL_LOADING_SAFETY_MS
: undefined
}
clientSideKindFilter={selectedFauxSpell === 'notifications'}
useFilterAsIs={fauxNoteListUseFilterAsIs}
oneShotFetch={false}
showKind1OPs={selectedFauxSpell === 'following' ? showKind1OPs : true}

26
src/services/client-query.service.ts

@ -47,10 +47,19 @@ export interface SubscribeCallbacks { @@ -47,10 +47,19 @@ export interface SubscribeCallbacks {
onAllClose?: (reasons: string[]) => void
}
/** Session-scoped relay hooks injected by the owner of the pool (e.g. ClientService) to share strike state. */
export type QueryServiceRelaySessionOptions = {
/** Skip opening REQ/publish paths to this normalized URL for the rest of the page session. */
shouldSkipRelayForSession?: (normalizedUrl: string) => boolean
/** After failed `ensureRelay` (timeout / connection error), increment client session strike counter. */
onRelayConnectionFailure?: (normalizedUrl: string) => void
}
export class QueryService {
private pool: SimplePool
private signer?: ISigner
private signerType?: TSignerType
private shouldSkipRelayForSession?: (normalizedUrl: string) => boolean
private onRelayConnectionFailure?: (normalizedUrl: string) => void
/** Max concurrent REQ subscriptions per relay URL */
private static readonly MAX_CONCURRENT_SUBS_PER_RELAY = MAX_CONCURRENT_RELAY_CONNECTIONS
@ -81,8 +90,10 @@ export class QueryService { @@ -81,8 +90,10 @@ export class QueryService {
if (next) next()
}
constructor(pool: SimplePool) {
constructor(pool: SimplePool, relaySession?: QueryServiceRelaySessionOptions) {
this.pool = pool
this.shouldSkipRelayForSession = relaySession?.shouldSkipRelayForSession
this.onRelayConnectionFailure = relaySession?.onRelayConnectionFailure
}
setSigner(signer: ISigner | undefined, signerType: TSignerType | undefined) {
@ -347,6 +358,17 @@ export class QueryService { @@ -347,6 +358,17 @@ export class QueryService {
const kind1BlockedSet = new Set(KIND_1_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u))
relays = relays.filter((url) => !kind1BlockedSet.has(normalizeUrl(url) || url))
}
if (this.shouldSkipRelayForSession) {
relays = relays.filter((url) => {
const n = normalizeUrl(url) || url
return !this.shouldSkipRelayForSession!(n)
})
}
if (relays.length === 0) {
queueMicrotask(() => callbacks.oneose?.(true))
return { close: () => {} }
}
const _knownIds = new Set<string>()
const grouped = new Map<string, Filter[]>()
@ -412,6 +434,7 @@ export class QueryService { @@ -412,6 +434,7 @@ export class QueryService {
try {
relay = await this.pool.ensureRelay(url, { connectionTimeout: 5000 })
} catch (err) {
this.onRelayConnectionFailure?.(relayKey)
this.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
@ -446,6 +469,7 @@ export class QueryService { @@ -446,6 +469,7 @@ export class QueryService {
try {
liveRelay = await this.pool.ensureRelay(url, { connectionTimeout: 5000 })
} catch (err) {
this.onRelayConnectionFailure?.(relayKey)
this.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return

68
src/services/client.service.ts

@ -123,9 +123,12 @@ class ClientService extends EventTarget { @@ -123,9 +123,12 @@ class ClientService extends EventTarget {
})
/** Session-only: relay URL -> publish failure count; after 3 strikes we skip that relay for the rest of the session. */
/**
* Session-only: connection/publish failures per normalized relay URL. After
* {@link ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD} strikes we skip that relay for reads and publishes until reload.
*/
private publishStrikeCount = new Map<string, number>()
private static readonly PUBLISH_STRIKES_THRESHOLD = 3
private static readonly SESSION_RELAY_FAILURE_STRIKE_THRESHOLD = 2
/** Session-only: relay URL -> { successCount, sumLatencyMs } for preferring faster, proven relays when picking "random" relays. */
private sessionRelayPublishStats = new Map<string, { successCount: number; sumLatencyMs: number }>()
@ -142,7 +145,12 @@ class ClientService extends EventTarget { @@ -142,7 +145,12 @@ class ClientService extends EventTarget {
this.pool.trackRelays = true
// Initialize sub-services
this.queryService = new QueryService(this.pool)
this.queryService = new QueryService(this.pool, {
shouldSkipRelayForSession: (normalizedUrl) =>
(this.publishStrikeCount.get(normalizedUrl) ?? 0) >=
ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD,
onRelayConnectionFailure: (normalizedUrl) => this.recordSessionRelayFailure(normalizedUrl)
})
this.eventService = new EventService(this.queryService)
this.replaceableEventService = new ReplaceableEventService(
this.queryService,
@ -660,15 +668,24 @@ class ClientService extends EventTarget { @@ -660,15 +668,24 @@ class ClientService extends EventTarget {
return relays
}
/** Record publish failures for 3-strikes session policy (skip relay for rest of session after 3 rejections). */
private recordPublishFailures(relayStatuses: { url: string; success: boolean; error?: string }[]) {
relayStatuses.filter((s) => !s.success).forEach((s) => {
const n = normalizeUrl(s.url) || s.url
/** One failed publish or subscribe connection per normalized URL (accumulates until {@link SESSION_RELAY_FAILURE_STRIKE_THRESHOLD}). */
private recordSessionRelayFailure(url: string) {
const n = normalizeUrl(url) || url
if (!n) return
const count = (this.publishStrikeCount.get(n) ?? 0) + 1
this.publishStrikeCount.set(n, count)
if (count >= ClientService.PUBLISH_STRIKES_THRESHOLD) {
logger.debug('[PublishEvent] Relay reached 3 strikes, skipping for session', { url: n })
if (count >= ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD) {
logger.info('[Relay] Session strike threshold — relay skipped for reads/publishes until reload', {
url: n,
strikes: count
})
}
}
/** Drop relays whose session strike count has reached the skip threshold. */
private filterSessionStrikedRelays(urls: string[]): string[] {
  const threshold = ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD
  return urls.filter((url) => {
    const normalized = normalizeUrl(url) || url
    const strikes = this.publishStrikeCount.get(normalized) ?? 0
    return strikes < threshold
  })
}
@ -695,7 +712,7 @@ class ClientService extends EventTarget { @@ -695,7 +712,7 @@ class ClientService extends EventTarget {
if (stats.successCount < 1) continue
const n = normalizeUrl(url) || url
if (!n || readOnlySet.has(n)) continue
if ((this.publishStrikeCount.get(n) ?? 0) >= ClientService.PUBLISH_STRIKES_THRESHOLD) continue
if ((this.publishStrikeCount.get(n) ?? 0) >= ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD) continue
out.push(n)
}
out.sort((a, b) => {
@ -709,6 +726,7 @@ class ClientService extends EventTarget { @@ -709,6 +726,7 @@ class ClientService extends EventTarget {
/**
* Session-only debug info for the Session Relays settings tab: working/striked preset relays and scored random relays.
* Strikes accrue from failed publishes and failed subscribe/query connections (same counter).
*/
getSessionRelayDebug(): {
strikedUrls: string[]
@ -723,10 +741,14 @@ class ClientService extends EventTarget { @@ -723,10 +741,14 @@ class ClientService extends EventTarget {
}
const preset = Array.from(presetSet)
const strikedUrls = Array.from(this.publishStrikeCount.entries())
.filter(([, count]) => count >= ClientService.PUBLISH_STRIKES_THRESHOLD)
.filter(([, count]) => count >= ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD)
.map(([url]) => url)
const presetStriked = preset.filter((url) => (this.publishStrikeCount.get(url) ?? 0) >= ClientService.PUBLISH_STRIKES_THRESHOLD)
const presetWorking = preset.filter((url) => (this.publishStrikeCount.get(url) ?? 0) < ClientService.PUBLISH_STRIKES_THRESHOLD)
const presetStriked = preset.filter(
(url) => (this.publishStrikeCount.get(url) ?? 0) >= ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD
)
const presetWorking = preset.filter(
(url) => (this.publishStrikeCount.get(url) ?? 0) < ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD
)
const scoredRelays = Array.from(this.sessionRelayPublishStats.entries()).map(([url, s]) => ({
url,
successCount: s.successCount,
@ -746,7 +768,9 @@ class ClientService extends EventTarget { @@ -746,7 +768,9 @@ class ClientService extends EventTarget {
.map((u) => normalizeUrl(u) || u)
.filter((n) => n && !readOnlySet.has(n))
const unique = Array.from(new Set(normalizedCandidates))
const notStruckOut = unique.filter((n) => (this.publishStrikeCount.get(n) ?? 0) < ClientService.PUBLISH_STRIKES_THRESHOLD)
const notStruckOut = unique.filter(
(n) => (this.publishStrikeCount.get(n) ?? 0) < ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD
)
const preferred: string[] = []
const rest: string[] = []
for (const url of notStruckOut) {
@ -789,7 +813,7 @@ class ClientService extends EventTarget { @@ -789,7 +813,7 @@ class ClientService extends EventTarget {
if (readOnlySet.has(n)) return false
if (event.kind === kinds.ShortTextNote && kind1BlockedSet.has(n)) return false
const strikes = this.publishStrikeCount.get(n) ?? 0
if (strikes >= ClientService.PUBLISH_STRIKES_THRESHOLD) return false
if (strikes >= ClientService.SESSION_RELAY_FAILURE_STRIKE_THRESHOLD) return false
return true
})
filtered = Array.from(new Set(filtered))
@ -854,6 +878,7 @@ class ClientService extends EventTarget { @@ -854,6 +878,7 @@ class ClientService extends EventTarget {
if (!alreadyFinished) {
logger.warn('[PublishEvent] Marking relay as timed out', { url })
relayStatuses.push({ url, success: false, error: 'Timeout: Operation took too long' })
client.recordSessionRelayFailure(url)
finishedCount++
}
})
@ -861,7 +886,6 @@ class ClientService extends EventTarget { @@ -861,7 +886,6 @@ class ClientService extends EventTarget {
// Ensure we resolve even if not all relays finished
if (!hasResolved) {
hasResolved = true
client.recordPublishFailures(relayStatuses)
logger.debug('[PublishEvent] Resolving due to timeout', {
success: successCount >= uniqueRelayUrls.length / 3,
successCount,
@ -955,11 +979,13 @@ class ClientService extends EventTarget { @@ -955,11 +979,13 @@ class ClientService extends EventTarget {
logger.error(`[PublishEvent] Auth or publish failed`, { url, error: authError.message })
errors.push({ url, error: authError })
relayStatuses.push({ url, success: false, error: authError.message })
that.recordSessionRelayFailure(url)
})
} else {
logger.error(`[PublishEvent] Publish failed`, { url, error: error.message })
errors.push({ url, error })
relayStatuses.push({ url, success: false, error: error.message })
that.recordSessionRelayFailure(url)
}
})
@ -978,6 +1004,7 @@ class ClientService extends EventTarget { @@ -978,6 +1004,7 @@ class ClientService extends EventTarget {
success: false,
error: error instanceof Error ? error.message : 'Connection failed'
})
that.recordSessionRelayFailure(url)
} finally {
clearTimeout(relayTimeout)
const currentFinished = ++finishedCount
@ -995,7 +1022,6 @@ class ClientService extends EventTarget { @@ -995,7 +1022,6 @@ class ClientService extends EventTarget {
}
if (currentFinished >= uniqueRelayUrls.length && !hasResolved) {
hasResolved = true
client.recordPublishFailures(relayStatuses)
logger.debug('[PublishEvent] All relays finished, resolving', {
success: successCount >= uniqueRelayUrls.length / 3,
successCount,
@ -1018,7 +1044,6 @@ class ClientService extends EventTarget { @@ -1018,7 +1044,6 @@ class ClientService extends EventTarget {
setTimeout(() => {
if (!hasResolved) {
hasResolved = true
client.recordPublishFailures(relayStatuses)
logger.debug('[PublishEvent] Resolving early with enough successes', {
success: true,
successCount,
@ -1304,6 +1329,7 @@ class ClientService extends EventTarget { @@ -1304,6 +1329,7 @@ class ClientService extends EventTarget {
const kind1BlockedSet = new Set(KIND_1_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u))
relays = relays.filter((url) => !kind1BlockedSet.has(normalizeUrl(url) || url))
}
relays = this.filterSessionStrikedRelays(relays)
// eslint-disable-next-line @typescript-eslint/no-this-alias
const that = this
@ -1423,6 +1449,7 @@ class ClientService extends EventTarget { @@ -1423,6 +1449,7 @@ class ClientService extends EventTarget {
try {
relay = await that.pool.ensureRelay(url, { connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS })
} catch (err) {
that.recordSessionRelayFailure(url)
that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
@ -1465,6 +1492,7 @@ class ClientService extends EventTarget { @@ -1465,6 +1492,7 @@ class ClientService extends EventTarget {
connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS
})
} catch (err) {
that.recordSessionRelayFailure(url)
that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
@ -1931,9 +1959,13 @@ class ClientService extends EventTarget { @@ -1931,9 +1959,13 @@ class ClientService extends EventTarget {
if (!normalized) {
return { events: [], connectionError: 'Invalid relay URL' }
}
if (this.filterSessionStrikedRelays([normalized]).length === 0) {
return { events: [], connectionError: 'Relay skipped this session (repeated failures)' }
}
try {
await this.pool.ensureRelay(normalized, { connectionTimeout: 12_000 })
} catch (e) {
this.recordSessionRelayFailure(normalized)
const msg = e instanceof Error ? e.message : String(e)
return { events: [], connectionError: msg }
}

Loading…
Cancel
Save