Browse Source

bug-fixes

imwald
Silberengel 1 month ago
parent
commit
ba6cfbdfd8
  1. 2
      src/App.tsx
  2. 102
      src/components/VersionUpdateBanner/index.tsx
  3. 46
      src/lib/fetch-with-timeout.ts
  4. 11
      src/lib/index-relay-http.ts
  5. 6
      src/lib/nip05.ts
  6. 6
      src/lib/read-aloud.ts
  7. 3
      src/main.tsx
  8. 4
      src/pages/primary/ExplorePage/index.tsx
  9. 2
      src/pages/primary/NoteListPage/index.tsx
  10. 25
      src/services/client.service.ts
  11. 59
      src/services/discussion-feed-cache.service.ts
  12. 98
      src/services/indexed-db.service.ts
  13. 13
      src/services/lightning.service.ts
  14. 6
      src/services/media-upload.service.ts
  15. 11
      src/services/relay-info.service.ts
  16. 39
      src/services/rss-feed.service.ts
  17. 9
      src/services/web.service.ts

2
src/App.tsx

@ -26,6 +26,7 @@ import { UserPreferencesProvider } from '@/providers/UserPreferencesProvider'
import { UserTrustProvider } from '@/providers/UserTrustProvider' import { UserTrustProvider } from '@/providers/UserTrustProvider'
import { ZapProvider } from '@/providers/ZapProvider' import { ZapProvider } from '@/providers/ZapProvider'
import StartupSessionBanner from '@/components/StartupSessionBanner' import StartupSessionBanner from '@/components/StartupSessionBanner'
import VersionUpdateBanner from '@/components/VersionUpdateBanner'
import { PageManager } from './PageManager' import { PageManager } from './PageManager'
export default function App(): JSX.Element { export default function App(): JSX.Element {
@ -37,6 +38,7 @@ export default function App(): JSX.Element {
<DeletedEventProvider> <DeletedEventProvider>
<NostrProvider> <NostrProvider>
<div className="flex min-h-[100dvh] flex-col"> <div className="flex min-h-[100dvh] flex-col">
<VersionUpdateBanner />
<StartupSessionBanner /> <StartupSessionBanner />
<div className="flex min-h-0 min-w-0 flex-1 flex-col"> <div className="flex min-h-0 min-w-0 flex-1 flex-col">
<ZapProvider> <ZapProvider>

102
src/components/VersionUpdateBanner/index.tsx

@ -17,62 +17,93 @@ export default function VersionUpdateBanner() {
return return
} }
let registration: ServiceWorkerRegistration | null = null /**
* Workbox is built with skipWaiting + clientsClaim, so `registration.waiting` is almost never
 * set — the new worker activates immediately. The reliable signal is `controllerchange`.
* Skip the first such event when we started without a controller (first install for this origin).
*/
let ignoreNextControllerChange = !navigator.serviceWorker.controller
let cancelled = false
const cleanups: Array<() => void> = []
const runCleanup = () => {
for (let i = cleanups.length - 1; i >= 0; i--) {
try {
cleanups[i]?.()
} catch {
// ignore
}
}
cleanups.length = 0
}
const checkForUpdates = async () => { const onControllerChange = () => {
if (ignoreNextControllerChange) {
ignoreNextControllerChange = false
return
}
if (navigator.serviceWorker.controller) {
setUpdateAvailable(true)
}
}
;(async () => {
try { try {
registration = await navigator.serviceWorker.ready const registration = await navigator.serviceWorker.ready
if (!registration) return if (cancelled || !registration) return
navigator.serviceWorker.addEventListener('controllerchange', onControllerChange)
cleanups.push(() => navigator.serviceWorker.removeEventListener('controllerchange', onControllerChange))
// Check if there's a waiting service worker (new version ready)
if (registration.waiting) { if (registration.waiting) {
// There's already a new version waiting
setUpdateAvailable(true) setUpdateAvailable(true)
} }
// Listen for updates const installingListeners: Array<{ worker: ServiceWorker; fn: () => void }> = []
const handleUpdateFound = () => { const handleUpdateFound = () => {
const newWorker = registration?.installing const newWorker = registration.installing
if (!newWorker) return if (!newWorker) return
const handleStateChange = () => { const onState = () => {
if (newWorker.state === 'installed') { if (newWorker.state === 'installed' && navigator.serviceWorker.controller) {
// New version installed setUpdateAvailable(true)
if (navigator.serviceWorker.controller) {
// There's a new version ready (not the first install)
setUpdateAvailable(true)
}
} }
} }
// May already be `installed` before we attach (skipWaiting race)
newWorker.addEventListener('statechange', handleStateChange) onState()
newWorker.addEventListener('statechange', onState)
installingListeners.push({ worker: newWorker, fn: onState })
} }
registration.addEventListener('updatefound', handleUpdateFound) registration.addEventListener('updatefound', handleUpdateFound)
cleanups.push(() => registration.removeEventListener('updatefound', handleUpdateFound))
// Check for updates periodically cleanups.push(() => {
const checkInterval = setInterval(() => { for (const { worker, fn } of installingListeners) {
if (registration) { worker.removeEventListener('statechange', fn)
registration.update()
} }
}, 60000) // Check every minute installingListeners.length = 0
})
// Initial update check const checkUpdate = () => {
registration.update() if (document.hidden) return
registration.update().catch(() => {})
return () => {
clearInterval(checkInterval)
if (registration) {
registration.removeEventListener('updatefound', handleUpdateFound as EventListener)
}
} }
const interval = window.setInterval(checkUpdate, 60_000)
cleanups.push(() => window.clearInterval(interval))
document.addEventListener('visibilitychange', checkUpdate)
cleanups.push(() => document.removeEventListener('visibilitychange', checkUpdate))
checkUpdate()
} catch (error) { } catch (error) {
// In non-secure contexts or when no SW is registered, ready can reject with "The operation is insecure"
logger.debug('Service worker update check skipped or failed', { error }) logger.debug('Service worker update check skipped or failed', { error })
} }
} })()
checkForUpdates().catch(() => {}) return () => {
cancelled = true
runCleanup()
}
}, []) }, [])
const handleUpdate = () => { const handleUpdate = () => {
@ -83,7 +114,7 @@ export default function VersionUpdateBanner() {
const handleDismiss = () => { const handleDismiss = () => {
setIsDismissed(true) setIsDismissed(true)
// Store dismissal in localStorage to avoid showing it again this session // Store dismissal in sessionStorage to avoid showing it again this session
sessionStorage.setItem('versionUpdateDismissed', 'true') sessionStorage.setItem('versionUpdateDismissed', 'true')
} }
@ -145,4 +176,3 @@ export default function VersionUpdateBanner() {
</div> </div>
) )
} }

46
src/lib/fetch-with-timeout.ts

@ -0,0 +1,46 @@
/** Default cap for HTTP fetches so tabs cannot hang indefinitely on bad networks or servers. */
export const DEFAULT_FETCH_TIMEOUT_MS = 30_000

/**
 * `fetch` with a wall-clock timeout. Honors an optional caller `signal` (abort propagates both ways).
 */
export async function fetchWithTimeout(
  input: RequestInfo | URL,
  init: RequestInit & { timeoutMs?: number } = {}
): Promise<Response> {
  const { timeoutMs = DEFAULT_FETCH_TIMEOUT_MS, signal: callerSignal, ...fetchInit } = init

  // An already-aborted caller signal short-circuits before any timer or request is created.
  if (callerSignal?.aborted) {
    throw new DOMException('The operation was aborted.', 'AbortError')
  }

  const abortController = new AbortController()

  // Wall-clock cap: when it fires, the in-flight request is aborted.
  let timerHandle: ReturnType<typeof setTimeout> | null = setTimeout(() => {
    timerHandle = null
    abortController.abort()
  }, timeoutMs)

  const cancelTimer = () => {
    if (timerHandle !== null) {
      clearTimeout(timerHandle)
      timerHandle = null
    }
  }

  // Forward a caller-initiated abort into our controller (and stop the timer early).
  const forwardAbort = () => {
    cancelTimer()
    abortController.abort()
  }
  callerSignal?.addEventListener('abort', forwardAbort, { once: true })

  try {
    return await fetch(input, { ...fetchInit, signal: abortController.signal })
  } finally {
    // Whatever happened — success, timeout, or abort — leave no timer or listener behind.
    cancelTimer()
    callerSignal?.removeEventListener('abort', forwardAbort)
  }
}

11
src/lib/index-relay-http.ts

@ -6,6 +6,7 @@
* the Vite same-origin proxy `/dev-index-relay` `VITE_DEV_INDEX_RELAY_TARGET` (default in `vite.config.ts`). * the Vite same-origin proxy `/dev-index-relay` `VITE_DEV_INDEX_RELAY_TARGET` (default in `vite.config.ts`).
* Production and remote HTTPS relays are unchanged; those need CORS on the relay or a real reverse proxy. * Production and remote HTTPS relays are unchanged; those need CORS on the relay or a real reverse proxy.
*/ */
import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
import logger from '@/lib/logger' import logger from '@/lib/logger'
import { normalizeHttpRelayUrl } from '@/lib/url' import { normalizeHttpRelayUrl } from '@/lib/url'
import type { Filter, Event as NEvent } from 'nostr-tools' import type { Filter, Event as NEvent } from 'nostr-tools'
@ -170,14 +171,15 @@ export async function queryIndexRelay(
for (const f of filters) { for (const f of filters) {
const body = nostrFilterToIndexRelayBody(filterForIndexRelay(f)) const body = nostrFilterToIndexRelayBody(filterForIndexRelay(f))
try { try {
const res = await fetch(endpoint, { const res = await fetchWithTimeout(endpoint, {
method: 'POST', method: 'POST',
headers: { headers: {
Accept: 'application/json', Accept: 'application/json',
'Content-Type': 'application/json' 'Content-Type': 'application/json'
}, },
body: JSON.stringify(body), body: JSON.stringify(body),
signal: options?.signal signal: options?.signal,
timeoutMs: 25_000
}) })
if (!res.ok) { if (!res.ok) {
sawHardFailure = true sawHardFailure = true
@ -231,7 +233,7 @@ export async function publishEventToIndexRelay(
const base = devProxyLoopbackIndexRelayBase(normalizeHttpRelayUrl(baseUrl) || baseUrl) const base = devProxyLoopbackIndexRelayBase(normalizeHttpRelayUrl(baseUrl) || baseUrl)
const endpoint = indexRelayPublishUrl(base) const endpoint = indexRelayPublishUrl(base)
try { try {
const res = await fetch(endpoint, { const res = await fetchWithTimeout(endpoint, {
method: 'POST', method: 'POST',
headers: { headers: {
Accept: 'application/json', Accept: 'application/json',
@ -248,7 +250,8 @@ export async function publishEventToIndexRelay(
sig: event.sig sig: event.sig
} }
}), }),
signal: options?.signal signal: options?.signal,
timeoutMs: 25_000
}) })
if (!res.ok) { if (!res.ok) {
if (isDevViteIndexRelayProxyPath(endpoint) && res.status === 500) { if (isDevViteIndexRelayProxyPath(endpoint) && res.status === 500) {

6
src/lib/nip05.ts

@ -1,6 +1,7 @@
import { LRUCache } from 'lru-cache' import { LRUCache } from 'lru-cache'
import { buildViteProxySitesFetchUrl } from '@/lib/vite-proxy-url' import { buildViteProxySitesFetchUrl } from '@/lib/vite-proxy-url'
import { isValidPubkey } from './pubkey' import { isValidPubkey } from './pubkey'
import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
import logger from '@/lib/logger' import logger from '@/lib/logger'
type TVerifyNip05Result = { type TVerifyNip05Result = {
@ -78,9 +79,10 @@ async function fetchWellKnownNostrJson(domain: string, name?: string): Promise<R
const proxyServer = import.meta.env.VITE_PROXY_SERVER?.trim() const proxyServer = import.meta.env.VITE_PROXY_SERVER?.trim()
const fetchUrl = proxyServer ? buildViteProxySitesFetchUrl(targetUrl, proxyServer) : targetUrl const fetchUrl = proxyServer ? buildViteProxySitesFetchUrl(targetUrl, proxyServer) : targetUrl
try { try {
const res = await fetch(fetchUrl, { const res = await fetchWithTimeout(fetchUrl, {
credentials: 'omit', credentials: 'omit',
headers: { Accept: 'application/json, text/plain;q=0.9,*/*;q=0.8' } headers: { Accept: 'application/json, text/plain;q=0.9,*/*;q=0.8' },
timeoutMs: 15_000
}) })
if (!res.ok) return null if (!res.ok) return null
const data: unknown = await res.json() const data: unknown = await res.json()

6
src/lib/read-aloud.ts

@ -5,6 +5,7 @@ import {
getPiperTtsCacheTtlMs getPiperTtsCacheTtlMs
} from '@/lib/piper-tts-cache-policy' } from '@/lib/piper-tts-cache-policy'
import indexedDb from '@/services/indexed-db.service' import indexedDb from '@/services/indexed-db.service'
import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
import { getLongFormArticleMetadataFromEvent } from '@/lib/event-metadata' import { getLongFormArticleMetadataFromEvent } from '@/lib/event-metadata'
import logger from '@/lib/logger' import logger from '@/lib/logger'
import { Event, kinds } from 'nostr-tools' import { Event, kinds } from 'nostr-tools'
@ -316,11 +317,12 @@ async function fetchPiperTtsBlobForChunk(
let response: Response let response: Response
try { try {
response = await fetch(url, { response = await fetchWithTimeout(url, {
method: 'POST', method: 'POST',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ text, speed }), body: JSON.stringify({ text, speed }),
signal signal,
timeoutMs: 120_000
}) })
} catch (e) { } catch (e) {
if (isAbortError(e)) { if (isAbortError(e)) {

3
src/main.tsx

@ -3,6 +3,7 @@ import './polyfill'
import './services/lightning.service' import './services/lightning.service'
import './lib/error-suppression' import './lib/error-suppression'
import './lib/debug-utils' import './lib/debug-utils'
import { fetchWithTimeout } from './lib/fetch-with-timeout'
import { StrictMode } from 'react' import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client' import { createRoot } from 'react-dom/client'
@ -68,7 +69,7 @@ async function bootstrap() {
storage.initAsync(), storage.initAsync(),
(async () => { (async () => {
try { try {
const r = await fetch('/config.json') const r = await fetchWithTimeout('/config.json', { timeoutMs: 10_000 })
if (r.ok) { if (r.ok) {
window.__RUNTIME_CONFIG__ = (await r.json()) as { window.__RUNTIME_CONFIG__ = (await r.json()) as {
NIP66_MONITOR_NPUB?: string NIP66_MONITOR_NPUB?: string

4
src/pages/primary/ExplorePage/index.tsx

@ -3,7 +3,6 @@ import ExploreFavoriteRelays from '@/components/Explore/ExploreFavoriteRelays'
import ExploreRelayReviews from '@/components/Explore/ExploreRelayReviews' import ExploreRelayReviews from '@/components/Explore/ExploreRelayReviews'
import FollowingFavoriteRelayList from '@/components/FollowingFavoriteRelayList' import FollowingFavoriteRelayList from '@/components/FollowingFavoriteRelayList'
import Tabs from '@/components/Tabs' import Tabs from '@/components/Tabs'
import VersionUpdateBanner from '@/components/VersionUpdateBanner'
import { Button } from '@/components/ui/button' import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input' import { Input } from '@/components/ui/input'
import { toRelay } from '@/lib/link' import { toRelay } from '@/lib/link'
@ -138,9 +137,6 @@ const ExplorePage = forwardRef<TPageRef>((_, ref) => {
displayScrollToTopButton displayScrollToTopButton
> >
<div className="min-w-0 pt-2"> <div className="min-w-0 pt-2">
<div className="px-2">
<VersionUpdateBanner />
</div>
{tab === 'explore' && ( {tab === 'explore' && (
<div key={contentRefreshKey} className="min-w-0"> <div key={contentRefreshKey} className="min-w-0">
<ExploreFavoriteRelays /> <ExploreFavoriteRelays />

2
src/pages/primary/NoteListPage/index.tsx

@ -1,7 +1,6 @@
import BookmarkList from '@/components/BookmarkList' import BookmarkList from '@/components/BookmarkList'
import RelayInfo from '@/components/RelayInfo' import RelayInfo from '@/components/RelayInfo'
import { RefreshButton } from '@/components/RefreshButton' import { RefreshButton } from '@/components/RefreshButton'
import VersionUpdateBanner from '@/components/VersionUpdateBanner'
import { Button } from '@/components/ui/button' import { Button } from '@/components/ui/button'
import PrimaryPageLayout from '@/layouts/PrimaryPageLayout' import PrimaryPageLayout from '@/layouts/PrimaryPageLayout'
import { useCurrentRelays } from '@/providers/CurrentRelaysProvider' import { useCurrentRelays } from '@/providers/CurrentRelaysProvider'
@ -197,7 +196,6 @@ const NoteListPage = forwardRef<TPageRef>((_, ref) => {
displayScrollToTopButton displayScrollToTopButton
> >
<div className="min-w-0 pt-2"> <div className="min-w-0 pt-2">
<VersionUpdateBanner />
{content} {content}
</div> </div>
</PrimaryPageLayout> </PrimaryPageLayout>

25
src/services/client.service.ts

@ -2115,23 +2115,30 @@ class ClientService extends EventTarget {
} }
} }
void (async () => { try {
try { const st = await indexedDb.getTimelinePersistedState(key)
const st = await indexedDb.getTimelinePersistedState(key) if (st?.refs?.length) {
if (!st?.refs?.length) return const hexIds = st.refs.map((r) => r[0])
const list = await indexedDb.getArchivedEventsByIds(st.refs.map((r) => r[0])) const list = await indexedDb.getArchivedEventsByIds(hexIds)
if (list.length === 0) return
for (const ev of list) { for (const ev of list) {
if (shouldDropEventOnIngest(ev)) continue if (shouldDropEventOnIngest(ev)) continue
if (eventIds.has(ev.id)) continue if (eventIds.has(ev.id)) continue
eventIds.add(ev.id) eventIds.add(ev.id)
events.push(ev) events.push(ev)
} }
for (const refId of hexIds) {
if (eventIds.has(refId)) continue
const sess = that.eventService.peekSessionCachedEvent(refId)
if (sess && !shouldDropEventOnIngest(sess)) {
eventIds.add(refId)
events.push(sess)
}
}
flushStreamingSnapshot() flushStreamingSnapshot()
} catch (err) {
logger.warn('[ClientService] Timeline disk hydrate failed', err)
} }
})() } catch (err) {
logger.warn('[ClientService] Timeline disk hydrate failed', err)
}
const handleTimelineEose = (eosed: boolean) => { const handleTimelineEose = (eosed: boolean) => {
if (!eosed) return if (!eosed) return

59
src/services/discussion-feed-cache.service.ts

@ -34,6 +34,10 @@ class DiscussionFeedCacheService {
private discussionsListCache: CachedDiscussionsListData | null = null private discussionsListCache: CachedDiscussionsListData | null = null
private readonly CACHE_TTL_MS = 5 * 60 * 1000 // 5 minutes private readonly CACHE_TTL_MS = 5 * 60 * 1000 // 5 minutes
private readonly DISCUSSIONS_LIST_CACHE_TTL_MS = 2 * 60 * 1000 // 2 minutes for discussions list private readonly DISCUSSIONS_LIST_CACHE_TTL_MS = 2 * 60 * 1000 // 2 minutes for discussions list
/** Cap in-memory thread caches so long sessions do not retain unbounded reply payloads. */
private readonly MAX_THREAD_CACHE_KEYS = 100
/** Cap merged discussions list `eventMap` so unbounded merges cannot grow RAM without limit. */
private readonly MAX_DISCUSSIONS_LIST_THREADS = 400
static getInstance(): DiscussionFeedCacheService { static getInstance(): DiscussionFeedCacheService {
if (!DiscussionFeedCacheService.instance) { if (!DiscussionFeedCacheService.instance) {
@ -153,12 +157,64 @@ class DiscussionFeedCacheService {
this.cache.set(cacheKey, cachedData) this.cache.set(cacheKey, cachedData)
this.trimThreadCacheIfNeeded()
// Clean up stale entries periodically (every 10th set operation) // Clean up stale entries periodically (every 10th set operation)
if (this.cache.size > 50 && Math.random() < 0.1) { if (this.cache.size > 50 && Math.random() < 0.1) {
this.cleanupStaleEntries() this.cleanupStaleEntries()
} }
} }
/** Evict the oldest-fetched threads (by {@link CachedThreadData.timestamp}) once the cache exceeds {@link MAX_THREAD_CACHE_KEYS}. */
private trimThreadCacheIfNeeded(): void {
  const excess = this.cache.size - this.MAX_THREAD_CACHE_KEYS
  if (excess <= 0) return
  // Oldest entries first, so the slice below is exactly the eviction set.
  const oldestFirst = [...this.cache.entries()].sort(
    (left, right) => left[1].timestamp - right[1].timestamp
  )
  for (const [key] of oldestFirst.slice(0, excess)) {
    if (key) this.cache.delete(key)
  }
}
/** Best-effort recency for discussion thread rows; returns 0 for shapes it cannot interpret. */
private discussionsEntryRecency(entry: unknown): number {
  if (typeof entry !== 'object' || !entry) return 0
  const row = entry as Record<string, unknown>
  // Prefer explicit activity timestamps, in order of semantic strength.
  for (const key of ['lastReplyAt', 'lastActivityAt', 'updatedAt', 'fetchedAt']) {
    const candidate = row[key]
    if (typeof candidate === 'number' && candidate > 0) return candidate
  }
  // Fall back to the root event's created_at when the row carries one.
  const rootLike = row.rootEvent ?? row.event ?? row.threadRoot
  if (rootLike && typeof rootLike === 'object' && 'created_at' in rootLike) {
    const createdAt = (rootLike as { created_at?: unknown }).created_at
    if (typeof createdAt === 'number') return createdAt
  }
  return 0
}
/**
 * Cap the merged discussions map at {@link MAX_DISCUSSIONS_LIST_THREADS}: rows from the latest
 * fetch win first, ties broken by {@link discussionsEntryRecency} (most recent kept).
 */
private trimDiscussionsEventMap(
  map: Map<string, unknown>,
  prioritizeIds: ReadonlySet<string>
): Map<string, unknown> {
  const limit = this.MAX_DISCUSSIONS_LIST_THREADS
  if (map.size <= limit) return map
  const ranked = [...map.entries()].sort((left, right) => {
    const leftIsNew = prioritizeIds.has(left[0])
    const rightIsNew = prioritizeIds.has(right[0])
    // Latest-fetch rows sort ahead of everything else.
    if (leftIsNew !== rightIsNew) return leftIsNew ? -1 : 1
    return this.discussionsEntryRecency(right[1]) - this.discussionsEntryRecency(left[1])
  })
  return new Map(ranked.slice(0, limit))
}
/** /**
* Clear cache for a specific thread * Clear cache for a specific thread
*/ */
@ -248,6 +304,7 @@ class DiscussionFeedCacheService {
* When merge=true, ALWAYS preserves all existing threads and adds new ones * When merge=true, ALWAYS preserves all existing threads and adds new ones
*/ */
setCachedDiscussionsList(eventMap: Map<string, any>, dynamicTopics: { mainTopics: any[]; subtopics: any[]; allTopics: any[] }, merge = true): void { setCachedDiscussionsList(eventMap: Map<string, any>, dynamicTopics: { mainTopics: any[]; subtopics: any[]; allTopics: any[] }, merge = true): void {
const newIds = new Set(eventMap.keys())
let mergedEventMap: Map<string, any> let mergedEventMap: Map<string, any>
const existingCacheSize = this.discussionsListCache?.eventMap.size || 0 const existingCacheSize = this.discussionsListCache?.eventMap.size || 0
const newDataSize = eventMap.size const newDataSize = eventMap.size
@ -279,6 +336,8 @@ class DiscussionFeedCacheService {
mergedEventMap = new Map(eventMap) mergedEventMap = new Map(eventMap)
logger.debug('[DiscussionFeedCache] Cached new discussions list (no merge):', eventMap.size, 'threads') logger.debug('[DiscussionFeedCache] Cached new discussions list (no merge):', eventMap.size, 'threads')
} }
mergedEventMap = this.trimDiscussionsEventMap(mergedEventMap, newIds) as Map<string, any>
// Store merged event map // Store merged event map
this.discussionsListCache = { this.discussionsListCache = {

98
src/services/indexed-db.service.ts

@ -90,7 +90,7 @@ export const StoreNames = {
} }
/** Schema version we expect. When adding stores or migrations, bump this. */ /** Schema version we expect. When adding stores or migrations, bump this. */
const DB_VERSION = 33 const DB_VERSION = 34
/** Max age for profile and payment info cache before we refetch (5 min). */ /** Max age for profile and payment info cache before we refetch (5 min). */
const PROFILE_AND_PAYMENT_CACHE_MAX_AGE_MS = 5 * 60 * 1000 const PROFILE_AND_PAYMENT_CACHE_MAX_AGE_MS = 5 * 60 * 1000
@ -132,6 +132,13 @@ class IndexedDbService {
private db: IDBDatabase | null = null private db: IDBDatabase | null = null
private initPromise: Promise<void> | null = null private initPromise: Promise<void> | null = null
/** Browser timer id (DOM `setTimeout` returns a number). */
private cleanupTimer: number | null = null
/** First TTL sweep after DB open (profile / relay list rows). */
private static readonly CLEANUP_INITIAL_DELAY_MS = 60 * 1000
/** Repeat TTL sweeps on this interval so pruning is not a one-shot. */
private static readonly CLEANUP_INTERVAL_MS = 60 * 60 * 1000
init(): Promise<void> { init(): Promise<void> {
if (!this.initPromise) { if (!this.initPromise) {
@ -171,7 +178,7 @@ class IndexedDbService {
} }
openWithStored.onsuccess = () => { openWithStored.onsuccess = () => {
this.db = openWithStored.result this.db = openWithStored.result
setTimeout(() => this.cleanUp(), 1000 * 60) this.scheduleNextCleanUp(IndexedDbService.CLEANUP_INITIAL_DELAY_MS)
resolve() resolve()
} }
openWithStored.onupgradeneeded = () => { openWithStored.onupgradeneeded = () => {
@ -187,7 +194,7 @@ class IndexedDbService {
request.onsuccess = () => { request.onsuccess = () => {
this.db = request.result this.db = request.result
setTimeout(() => this.cleanUp(), 1000 * 60) this.scheduleNextCleanUp(IndexedDbService.CLEANUP_INITIAL_DELAY_MS)
resolve() resolve()
} }
@ -301,6 +308,9 @@ class IndexedDbService {
if (!db.objectStoreNames.contains(StoreNames.PIPER_TTS_CACHE)) { if (!db.objectStoreNames.contains(StoreNames.PIPER_TTS_CACHE)) {
db.createObjectStore(StoreNames.PIPER_TTS_CACHE, { keyPath: 'key' }) db.createObjectStore(StoreNames.PIPER_TTS_CACHE, { keyPath: 'key' })
} }
if (event.oldVersion < 34) {
// v34: app-side changes (fetch timeouts, timeline hydrate order, discussion list cap)
}
ensureMissingObjectStores(db) ensureMissingObjectStores(db)
} }
} }
@ -1288,7 +1298,7 @@ class IndexedDbService {
const allStoreNames = Array.from(this.db.objectStoreNames) const allStoreNames = Array.from(this.db.objectStoreNames)
const transaction = this.db.transaction(allStoreNames, 'readwrite') const transaction = this.db.transaction(allStoreNames, 'readwrite')
await Promise.allSettled( const clearResults = await Promise.allSettled(
allStoreNames.map(storeName => { allStoreNames.map(storeName => {
return new Promise<void>((resolve, reject) => { return new Promise<void>((resolve, reject) => {
const store = transaction.objectStore(storeName) const store = transaction.objectStore(storeName)
@ -1298,6 +1308,15 @@ class IndexedDbService {
}) })
}) })
) )
for (let i = 0; i < clearResults.length; i++) {
const r = clearResults[i]
if (r?.status === 'rejected') {
logger.warn('[IndexedDB] clearAllCache failed for store', {
store: allStoreNames[i],
error: r.reason
})
}
}
} }
async getStoreInfo(): Promise<Record<string, number>> { async getStoreInfo(): Promise<Record<string, number>> {
@ -1306,25 +1325,30 @@ class IndexedDbService {
return {} return {}
} }
const storeInfo: Record<string, number> = {}
const allStoreNames = Array.from(this.db.objectStoreNames) const allStoreNames = Array.from(this.db.objectStoreNames)
if (allStoreNames.length === 0) {
await Promise.allSettled( return {}
allStoreNames.map(storeName => { }
return new Promise<void>((resolve, reject) => {
const transaction = this.db!.transaction(storeName, 'readonly')
const store = transaction.objectStore(storeName)
const request = store.count()
request.onsuccess = () => {
storeInfo[storeName] = request.result
resolve()
}
request.onerror = (event) => reject(idbEventToError(event))
})
})
)
return storeInfo return new Promise((resolve, reject) => {
const storeInfo: Record<string, number> = {}
const tx = this.db!.transaction(allStoreNames, 'readonly')
let pending = allStoreNames.length
for (const storeName of allStoreNames) {
const req = tx.objectStore(storeName).count()
req.onsuccess = () => {
storeInfo[storeName] = req.result
pending--
if (pending === 0) {
resolve(storeInfo)
}
}
req.onerror = (ev) => {
reject(idbEventToError(ev))
}
}
})
} }
async getStoreItems(storeName: string): Promise<TValue<any>[]> { async getStoreItems(storeName: string): Promise<TValue<any>[]> {
@ -1614,12 +1638,26 @@ class IndexedDbService {
}) })
} }
/** (Re)arm the one-shot TTL sweep timer; no-op outside a browser context or before the DB is open. */
private scheduleNextCleanUp(delayMs: number): void {
  if (typeof window === 'undefined') return
  // Only one sweep may ever be pending: cancel a previously armed timer first.
  if (this.cleanupTimer !== null) {
    clearTimeout(this.cleanupTimer)
    this.cleanupTimer = null
  }
  if (!this.db) return
  this.cleanupTimer = window.setTimeout(() => {
    this.cleanupTimer = null
    // Fire-and-forget: cleanUp reschedules itself when it finishes.
    void this.cleanUp()
  }, delayMs)
}
private async cleanUp() { private async cleanUp() {
await this.initPromise await this.initPromise
if (!this.db) { if (!this.db) {
return return
} }
try {
const stores = [ const stores = [
{ name: StoreNames.PROFILE_EVENTS, expirationTimestamp: Date.now() - 1000 * 60 * 60 * 24 }, // 1 day { name: StoreNames.PROFILE_EVENTS, expirationTimestamp: Date.now() - 1000 * 60 * 60 * 24 }, // 1 day
{ name: StoreNames.PAYMENT_INFO_EVENTS, expirationTimestamp: Date.now() - PROFILE_AND_PAYMENT_CACHE_MAX_AGE_MS }, // 5 min { name: StoreNames.PAYMENT_INFO_EVENTS, expirationTimestamp: Date.now() - PROFILE_AND_PAYMENT_CACHE_MAX_AGE_MS }, // 5 min
@ -1650,7 +1688,7 @@ class IndexedDbService {
existingStores.map((store) => store.name), existingStores.map((store) => store.name),
'readwrite' 'readwrite'
) )
await Promise.allSettled( const sweepResults = await Promise.allSettled(
existingStores.map(({ name, expirationTimestamp }) => { existingStores.map(({ name, expirationTimestamp }) => {
if (expirationTimestamp < 0) { if (expirationTimestamp < 0) {
return Promise.resolve() return Promise.resolve()
@ -1677,6 +1715,22 @@ class IndexedDbService {
}) })
}) })
) )
for (let i = 0; i < sweepResults.length; i++) {
const r = sweepResults[i]
if (r?.status === 'rejected') {
logger.warn('[IndexedDB] cleanUp store sweep failed', {
store: existingStores[i]?.name,
error: r.reason
})
}
}
} catch (error) {
logger.warn('[IndexedDB] cleanUp failed', { error })
} finally {
if (this.db) {
this.scheduleNextCleanUp(IndexedDbService.CLEANUP_INTERVAL_MS)
}
}
} }
/** /**

13
src/services/lightning.service.ts

@ -18,6 +18,7 @@ import { utf8Decoder } from 'nostr-tools/utils'
import client from './client.service' import client from './client.service'
import { queryService, replaceableEventService } from './client.service' import { queryService, replaceableEventService } from './client.service'
import { getProfileFromEvent } from '@/lib/event-metadata' import { getProfileFromEvent } from '@/lib/event-metadata'
import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
import logger from '@/lib/logger' import logger from '@/lib/logger'
export type TRecentSupporter = { pubkey: string; amount: number; comment?: string } export type TRecentSupporter = { pubkey: string; amount: number; comment?: string }
@ -82,8 +83,9 @@ class LightningService {
comment comment
}) })
const zapRequest = await client.signer.signEvent(zapRequestDraft) const zapRequest = await client.signer.signEvent(zapRequestDraft)
const zapRequestRes = await fetch( const zapRequestRes = await fetchWithTimeout(
`${callback}?amount=${amount}&nostr=${encodeURI(JSON.stringify(zapRequest))}&lnurl=${lnurl}` `${callback}?amount=${amount}&nostr=${encodeURI(JSON.stringify(zapRequest))}&lnurl=${lnurl}`,
{ timeoutMs: 25_000 }
) )
const zapRequestResBody = await zapRequestRes.json() const zapRequestResBody = await zapRequestRes.json()
if (zapRequestResBody.error) { if (zapRequestResBody.error) {
@ -205,8 +207,9 @@ class LightningService {
comment comment
}) })
const zapRequest = await client.signer.signEvent(zapRequestDraft) const zapRequest = await client.signer.signEvent(zapRequestDraft)
const zapRequestRes = await fetch( const zapRequestRes = await fetchWithTimeout(
`${callback}?amount=${amount}&nostr=${encodeURI(JSON.stringify(zapRequest))}&lnurl=${lnurl}` `${callback}?amount=${amount}&nostr=${encodeURI(JSON.stringify(zapRequest))}&lnurl=${lnurl}`,
{ timeoutMs: 25_000 }
) )
const zapRequestResBody = await zapRequestRes.json() const zapRequestResBody = await zapRequestRes.json()
if (zapRequestResBody.error) { if (zapRequestResBody.error) {
@ -354,7 +357,7 @@ class LightningService {
lnurl = utf8Decoder.decode(data) lnurl = utf8Decoder.decode(data)
} }
const res = await fetch(lnurl) const res = await fetchWithTimeout(lnurl, { timeoutMs: 15_000 })
const body = await res.json() const body = await res.json()
if (body.allowsNostr && body.nostrPubkey) { if (body.allowsNostr && body.nostrPubkey) {

6
src/services/media-upload.service.ts

@ -1,3 +1,4 @@
import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
import { simplifyUrl } from '@/lib/url' import { simplifyUrl } from '@/lib/url'
import { TDraftEvent, TMediaUploadServiceConfig } from '@/types' import { TDraftEvent, TMediaUploadServiceConfig } from '@/types'
import { BlossomClient } from 'blossom-client-sdk' import { BlossomClient } from 'blossom-client-sdk'
@ -122,7 +123,10 @@ class MediaUploadService {
} }
let uploadUrl = this.nip96ServiceUploadUrlMap.get(service) let uploadUrl = this.nip96ServiceUploadUrlMap.get(service)
if (!uploadUrl) { if (!uploadUrl) {
const response = await fetch(`${service}/.well-known/nostr/nip96.json`) const response = await fetchWithTimeout(`${service}/.well-known/nostr/nip96.json`, {
signal: options?.signal,
timeoutMs: 15_000
})
if (!response.ok) { if (!response.ok) {
throw new Error( throw new Error(
`${simplifyUrl(service)} does not work, please try another service in your settings` `${simplifyUrl(service)} does not work, please try another service in your settings`

11
src/services/relay-info.service.ts

@ -3,6 +3,7 @@ import indexDb from '@/services/indexed-db.service'
import { TAwesomeRelayCollection, TRelayInfo } from '@/types' import { TAwesomeRelayCollection, TRelayInfo } from '@/types'
import DataLoader from 'dataloader' import DataLoader from 'dataloader'
import FlexSearch from 'flexsearch' import FlexSearch from 'flexsearch'
import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
import logger from '@/lib/logger' import logger from '@/lib/logger'
class RelayInfoService { class RelayInfoService {
@ -97,8 +98,9 @@ class RelayInfoService {
this.awesomeRelayCollections = (async () => { this.awesomeRelayCollections = (async () => {
try { try {
const res = await fetch( const res = await fetchWithTimeout(
'https://raw.githubusercontent.com/CodyTseng/awesome-nostr-relays/master/dist/collections.json' 'https://raw.githubusercontent.com/CodyTseng/awesome-nostr-relays/master/dist/collections.json',
{ timeoutMs: 20_000 }
) )
if (!res.ok) { if (!res.ok) {
throw new Error('Failed to fetch awesome relay collections') throw new Error('Failed to fetch awesome relay collections')
@ -146,8 +148,9 @@ class RelayInfoService {
private async fetchRelayNip11(url: string) { private async fetchRelayNip11(url: string) {
try { try {
logger.debug('Fetching NIP-11 metadata', { url }) logger.debug('Fetching NIP-11 metadata', { url })
const res = await fetch(url.replace('ws://', 'http://').replace('wss://', 'https://'), { const res = await fetchWithTimeout(url.replace('ws://', 'http://').replace('wss://', 'https://'), {
headers: { Accept: 'application/nostr+json' } headers: { Accept: 'application/nostr+json' },
timeoutMs: 12_000
}) })
return res.json() as Omit<TRelayInfo, 'url' | 'shortUrl'> return res.json() as Omit<TRelayInfo, 'url' | 'shortUrl'>
} catch { } catch {

39
src/services/rss-feed.service.ts

@ -1,4 +1,5 @@
import { DEFAULT_RSS_FEEDS } from '@/constants' import { DEFAULT_RSS_FEEDS } from '@/constants'
import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
import { canonicalizeRssArticleUrl } from '@/lib/rss-article' import { canonicalizeRssArticleUrl } from '@/lib/rss-article'
import { cleanUrl } from '@/lib/url' import { cleanUrl } from '@/lib/url'
import logger from '@/lib/logger' import logger from '@/lib/logger'
@ -317,64 +318,38 @@ class RssFeedService {
*/ */
private async fetchWithStrategy(originalUrl: string, strategy: { name: string; getUrl: (url: string) => string }, externalSignal?: AbortSignal): Promise<string> { private async fetchWithStrategy(originalUrl: string, strategy: { name: string; getUrl: (url: string) => string }, externalSignal?: AbortSignal): Promise<string> {
const fetchUrl = strategy.getUrl(originalUrl) const fetchUrl = strategy.getUrl(originalUrl)
// Check if external signal is already aborted
if (externalSignal?.aborted) { if (externalSignal?.aborted) {
throw new DOMException('The operation was aborted.', 'AbortError') throw new DOMException('The operation was aborted.', 'AbortError')
} }
const controller = new AbortController()
// Use a longer timeout for RSS feeds (30 seconds) since they can be slow
// Don't abort on timeout - just log a warning, let the fetch continue
const timeoutId = setTimeout(() => {
logger.warn('[RssFeedService] Fetch taking longer than expected', {
url: originalUrl,
strategy: strategy.name,
elapsed: '30s'
})
// Don't abort - just log. The fetch will continue or fail naturally
}, 30000) // 30 second warning (but don't abort)
// If external signal is provided, abort our controller when external signal aborts
if (externalSignal) {
externalSignal.addEventListener('abort', () => {
clearTimeout(timeoutId)
controller.abort()
}, { once: true })
}
try { try {
const res = await fetch(fetchUrl, { const res = await fetchWithTimeout(fetchUrl, {
signal: controller.signal, signal: externalSignal,
timeoutMs: 60_000,
mode: 'cors', mode: 'cors',
credentials: 'omit', credentials: 'omit',
headers: { headers: {
'Accept': 'application/rss+xml, application/xml, application/atom+xml, text/xml, */*' Accept: 'application/rss+xml, application/xml, application/atom+xml, text/xml, */*'
} }
}) })
clearTimeout(timeoutId)
if (!res.ok) { if (!res.ok) {
throw new Error(`HTTP ${res.status}: ${res.statusText}`) throw new Error(`HTTP ${res.status}: ${res.statusText}`)
} }
const xmlText = await res.text() const xmlText = await res.text()
// Validate that we got XML content
if (!xmlText || xmlText.trim().length === 0) { if (!xmlText || xmlText.trim().length === 0) {
throw new Error('Empty response') throw new Error('Empty response')
} }
// Basic validation - check if it looks like XML
if (!xmlText.trim().startsWith('<')) { if (!xmlText.trim().startsWith('<')) {
throw new Error('Response does not appear to be XML') throw new Error('Response does not appear to be XML')
} }
return xmlText return xmlText
} catch (error) { } catch (error) {
clearTimeout(timeoutId)
// Re-throw abort errors as-is
if (error instanceof DOMException && error.name === 'AbortError') { if (error instanceof DOMException && error.name === 'AbortError') {
throw error throw error
} }

9
src/services/web.service.ts

@ -1,3 +1,4 @@
import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
import { buildViteProxySitesFetchUrl, urlLooksLikeViteProxyRequest } from '@/lib/vite-proxy-url' import { buildViteProxySitesFetchUrl, urlLooksLikeViteProxyRequest } from '@/lib/vite-proxy-url'
import { TWebMetadata } from '@/types' import { TWebMetadata } from '@/types'
import DataLoader from 'dataloader' import DataLoader from 'dataloader'
@ -20,11 +21,9 @@ const HTML_FETCH_HEADERS = {
} }
async function tryFetchHtml(fetchUrl: string, timeoutMs: number): Promise<string | null> { async function tryFetchHtml(fetchUrl: string, timeoutMs: number): Promise<string | null> {
const controller = new AbortController()
const timeoutId = setTimeout(() => controller.abort(), timeoutMs)
try { try {
const res = await fetch(fetchUrl, { const res = await fetchWithTimeout(fetchUrl, {
signal: controller.signal, timeoutMs,
mode: 'cors', mode: 'cors',
credentials: 'omit', credentials: 'omit',
headers: HTML_FETCH_HEADERS headers: HTML_FETCH_HEADERS
@ -36,8 +35,6 @@ async function tryFetchHtml(fetchUrl: string, timeoutMs: number): Promise<string
return html return html
} catch { } catch {
return null return null
} finally {
clearTimeout(timeoutId)
} }
} }

Loading…
Cancel
Save