diff --git a/src/App.tsx b/src/App.tsx
index cd083170..62bb53ad 100644
--- a/src/App.tsx
+++ b/src/App.tsx
@@ -26,6 +26,7 @@ import { UserPreferencesProvider } from '@/providers/UserPreferencesProvider'
 import { UserTrustProvider } from '@/providers/UserTrustProvider'
 import { ZapProvider } from '@/providers/ZapProvider'
 import StartupSessionBanner from '@/components/StartupSessionBanner'
+import VersionUpdateBanner from '@/components/VersionUpdateBanner'
 import { PageManager } from './PageManager'
 
 export default function App(): JSX.Element {
@@ -37,6 +38,7 @@
+
diff --git a/src/components/VersionUpdateBanner/index.tsx b/src/components/VersionUpdateBanner/index.tsx
index 4993c8b8..74c907f8 100644
--- a/src/components/VersionUpdateBanner/index.tsx
+++ b/src/components/VersionUpdateBanner/index.tsx
@@ -17,62 +17,93 @@ export default function VersionUpdateBanner() {
       return
     }
 
-    let registration: ServiceWorkerRegistration | null = null
+    /**
+     * Workbox is built with skipWaiting + clientsClaim, so `registration.waiting` is almost never
+     * set — the new worker activates immediately. The reliable signal is `controllerchange`.
+     * Skip the first such event when we started without a controller (first install for this origin).
+     */
+    let ignoreNextControllerChange = !navigator.serviceWorker.controller
+    let cancelled = false
+    const cleanups: Array<() => void> = []
+
+    const runCleanup = () => {
+      for (let i = cleanups.length - 1; i >= 0; i--) {
+        try {
+          cleanups[i]?.()
+        } catch {
+          // ignore
+        }
+      }
+      cleanups.length = 0
+    }
 
-    const checkForUpdates = async () => {
+    const onControllerChange = () => {
+      if (ignoreNextControllerChange) {
+        ignoreNextControllerChange = false
+        return
+      }
+      if (navigator.serviceWorker.controller) {
+        setUpdateAvailable(true)
+      }
+    }
+
+    ;(async () => {
       try {
-        registration = await navigator.serviceWorker.ready
-        if (!registration) return
+        const registration = await navigator.serviceWorker.ready
+        if (cancelled || !registration) return
+
+        navigator.serviceWorker.addEventListener('controllerchange', onControllerChange)
+        cleanups.push(() => navigator.serviceWorker.removeEventListener('controllerchange', onControllerChange))
 
-        // Check if there's a waiting service worker (new version ready)
         if (registration.waiting) {
-          // There's already a new version waiting
           setUpdateAvailable(true)
         }
 
-        // Listen for updates
+        const installingListeners: Array<{ worker: ServiceWorker; fn: () => void }> = []
+
         const handleUpdateFound = () => {
-          const newWorker = registration?.installing
+          const newWorker = registration.installing
           if (!newWorker) return
 
-          const handleStateChange = () => {
-            if (newWorker.state === 'installed') {
-              // New version installed
-              if (navigator.serviceWorker.controller) {
-                // There's a new version ready (not the first install)
-                setUpdateAvailable(true)
-              }
+          const onState = () => {
+            if (newWorker.state === 'installed' && navigator.serviceWorker.controller) {
+              setUpdateAvailable(true)
             }
           }
-
-          newWorker.addEventListener('statechange', handleStateChange)
+          // May already be `installed` before we attach (skipWaiting race)
+          onState()
+          newWorker.addEventListener('statechange', onState)
+          installingListeners.push({ worker: newWorker, fn: onState })
         }
 
         registration.addEventListener('updatefound', handleUpdateFound)
-
-        // Check for updates periodically
-        const checkInterval = setInterval(() => {
-          if (registration) {
-            registration.update()
+        cleanups.push(() => registration.removeEventListener('updatefound', handleUpdateFound))
+        cleanups.push(() => {
+          for (const { worker, fn } of installingListeners) {
+            worker.removeEventListener('statechange', fn)
           }
-        }, 60000) // Check every minute
+          installingListeners.length = 0
+        })
 
-        // Initial update check
-        registration.update()
-
-        return () => {
-          clearInterval(checkInterval)
-          if (registration) {
-            registration.removeEventListener('updatefound', handleUpdateFound as EventListener)
-          }
+        const checkUpdate = () => {
+          if (document.hidden) return
+          registration.update().catch(() => {})
         }
+        const interval = window.setInterval(checkUpdate, 60_000)
+        cleanups.push(() => window.clearInterval(interval))
+        document.addEventListener('visibilitychange', checkUpdate)
+        cleanups.push(() => document.removeEventListener('visibilitychange', checkUpdate))
+
+        checkUpdate()
       } catch (error) {
-        // In non-secure contexts or when no SW is registered, ready can reject with "The operation is insecure"
         logger.debug('Service worker update check skipped or failed', { error })
       }
-    }
+    })()
 
-    checkForUpdates().catch(() => {})
+    return () => {
+      cancelled = true
+      runCleanup()
+    }
   }, [])
 
   const handleUpdate = () => {
@@ -83,7 +114,7 @@ export default function VersionUpdateBanner() {
   const handleDismiss = () => {
     setIsDismissed(true)
-    // Store dismissal in localStorage to avoid showing it again this session
+    // Store dismissal in sessionStorage to avoid showing it again this session
     sessionStorage.setItem('versionUpdateDismissed', 'true')
   }
 
@@ -145,4 +176,3 @@
   )
 }
-
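// Illustrative sketch (not part of the diff): the update-detection pattern the new effect relies on,
// reduced to plain DOM APIs. With a Workbox worker built with skipWaiting + clientsClaim, an update
// shows up as a `controllerchange` event rather than a `waiting` worker, and the very first
// controller (initial install) must be ignored. The function name is hypothetical.
function watchForServiceWorkerUpdate(onUpdate: () => void): () => void {
  if (!('serviceWorker' in navigator)) return () => {}
  // A page that loaded without a controller gets its first controller on install, not on update.
  let ignoreNext = !navigator.serviceWorker.controller
  const onControllerChange = () => {
    if (ignoreNext) {
      ignoreNext = false
      return
    }
    if (navigator.serviceWorker.controller) onUpdate()
  }
  navigator.serviceWorker.addEventListener('controllerchange', onControllerChange)
  return () => navigator.serviceWorker.removeEventListener('controllerchange', onControllerChange)
}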
diff --git a/src/lib/fetch-with-timeout.ts b/src/lib/fetch-with-timeout.ts
new file mode 100644
index 00000000..43680d5c
--- /dev/null
+++ b/src/lib/fetch-with-timeout.ts
@@ -0,0 +1,46 @@
+/** Default cap for HTTP fetches so tabs cannot hang indefinitely on bad networks or servers. */
+export const DEFAULT_FETCH_TIMEOUT_MS = 30_000
+
+/**
+ * `fetch` with a wall-clock timeout. Honors an optional caller `signal` (abort propagates both ways).
+ */
+export async function fetchWithTimeout(
+  input: RequestInfo | URL,
+  init: RequestInit & { timeoutMs?: number } = {}
+): Promise<Response> {
+  const { timeoutMs = DEFAULT_FETCH_TIMEOUT_MS, signal: userSignal, ...rest } = init
+  const controller = new AbortController()
+
+  let timeoutId: ReturnType<typeof setTimeout> | null = setTimeout(() => {
+    timeoutId = null
+    controller.abort()
+  }, timeoutMs)
+
+  const onUserAbort = () => {
+    if (timeoutId !== null) {
+      clearTimeout(timeoutId)
+      timeoutId = null
+    }
+    controller.abort()
+  }
+
+  if (userSignal) {
+    if (userSignal.aborted) {
+      if (timeoutId !== null) {
+        clearTimeout(timeoutId)
+        timeoutId = null
+      }
+      throw new DOMException('The operation was aborted.', 'AbortError')
+    }
+    userSignal.addEventListener('abort', onUserAbort, { once: true })
+  }
+
+  try {
+    return await fetch(input, { ...rest, signal: controller.signal })
+  } finally {
+    if (timeoutId !== null) {
+      clearTimeout(timeoutId)
+    }
+    userSignal?.removeEventListener('abort', onUserAbort)
+  }
+}
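// Usage sketch for fetchWithTimeout (caller-side code, not part of the diff; loadNip11Document is a
// hypothetical helper). A per-call timeoutMs aborts via the helper's own AbortController, while an
// external signal can still cancel earlier; omitting timeoutMs falls back to DEFAULT_FETCH_TIMEOUT_MS.
import { fetchWithTimeout } from '@/lib/fetch-with-timeout'

async function loadNip11Document(url: string, signal?: AbortSignal): Promise<unknown> {
  const res = await fetchWithTimeout(url, {
    timeoutMs: 10_000,
    signal,
    headers: { Accept: 'application/nostr+json' }
  })
  if (!res.ok) throw new Error(`HTTP ${res.status}`)
  return res.json()
}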
diff --git a/src/lib/index-relay-http.ts b/src/lib/index-relay-http.ts
index 0ee51237..deccdd84 100644
--- a/src/lib/index-relay-http.ts
+++ b/src/lib/index-relay-http.ts
@@ -6,6 +6,7 @@
  * the Vite same-origin proxy `/dev-index-relay` → `VITE_DEV_INDEX_RELAY_TARGET` (default in `vite.config.ts`).
  * Production and remote HTTPS relays are unchanged; those need CORS on the relay or a real reverse proxy.
  */
+import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
 import logger from '@/lib/logger'
 import { normalizeHttpRelayUrl } from '@/lib/url'
 import type { Filter, Event as NEvent } from 'nostr-tools'
@@ -170,14 +171,15 @@ export async function queryIndexRelay(
   for (const f of filters) {
     const body = nostrFilterToIndexRelayBody(filterForIndexRelay(f))
     try {
-      const res = await fetch(endpoint, {
+      const res = await fetchWithTimeout(endpoint, {
         method: 'POST',
         headers: {
           Accept: 'application/json',
           'Content-Type': 'application/json'
         },
         body: JSON.stringify(body),
-        signal: options?.signal
+        signal: options?.signal,
+        timeoutMs: 25_000
       })
       if (!res.ok) {
         sawHardFailure = true
@@ -231,7 +233,7 @@ export async function publishEventToIndexRelay(
   const base = devProxyLoopbackIndexRelayBase(normalizeHttpRelayUrl(baseUrl) || baseUrl)
   const endpoint = indexRelayPublishUrl(base)
   try {
-    const res = await fetch(endpoint, {
+    const res = await fetchWithTimeout(endpoint, {
       method: 'POST',
       headers: {
         Accept: 'application/json',
@@ -248,7 +250,8 @@ export async function publishEventToIndexRelay(
           sig: event.sig
         }
       }),
-      signal: options?.signal
+      signal: options?.signal,
+      timeoutMs: 25_000
     })
     if (!res.ok) {
       if (isDevViteIndexRelayProxyPath(endpoint) && res.status === 500) {
diff --git a/src/lib/nip05.ts b/src/lib/nip05.ts
index 5b906e29..d428fe60 100644
--- a/src/lib/nip05.ts
+++ b/src/lib/nip05.ts
@@ -1,6 +1,7 @@
 import { LRUCache } from 'lru-cache'
 import { buildViteProxySitesFetchUrl } from '@/lib/vite-proxy-url'
 import { isValidPubkey } from './pubkey'
+import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
 import logger from '@/lib/logger'
 
 type TVerifyNip05Result = {
@@ -78,9 +79,10 @@ async function fetchWellKnownNostrJson(domain: string, name?: string): Promise {
   try {
-    const r = await fetch('/config.json')
+    const r = await fetchWithTimeout('/config.json', { timeoutMs: 10_000 })
     if (r.ok) {
       window.__RUNTIME_CONFIG__ = (await r.json()) as {
         NIP66_MONITOR_NPUB?: string
diff --git a/src/pages/primary/ExplorePage/index.tsx b/src/pages/primary/ExplorePage/index.tsx
index 4141a250..c7701953 100644
--- a/src/pages/primary/ExplorePage/index.tsx
+++ b/src/pages/primary/ExplorePage/index.tsx
@@ -3,7 +3,6 @@ import ExploreFavoriteRelays from '@/components/Explore/ExploreFavoriteRelays'
 import ExploreRelayReviews from '@/components/Explore/ExploreRelayReviews'
 import FollowingFavoriteRelayList from '@/components/FollowingFavoriteRelayList'
 import Tabs from '@/components/Tabs'
-import VersionUpdateBanner from '@/components/VersionUpdateBanner'
 import { Button } from '@/components/ui/button'
 import { Input } from '@/components/ui/input'
 import { toRelay } from '@/lib/link'
@@ -138,9 +137,6 @@ const ExplorePage = forwardRef((_, ref) => {
       displayScrollToTopButton
     >
-
-
-
       {tab === 'explore' && (
diff --git a/src/pages/primary/NoteListPage/index.tsx b/src/pages/primary/NoteListPage/index.tsx
index 8f42459e..54163fd9 100644
--- a/src/pages/primary/NoteListPage/index.tsx
+++ b/src/pages/primary/NoteListPage/index.tsx
@@ -1,7 +1,6 @@
 import BookmarkList from '@/components/BookmarkList'
 import RelayInfo from '@/components/RelayInfo'
 import { RefreshButton } from '@/components/RefreshButton'
-import VersionUpdateBanner from '@/components/VersionUpdateBanner'
 import { Button } from '@/components/ui/button'
 import PrimaryPageLayout from '@/layouts/PrimaryPageLayout'
 import { useCurrentRelays } from '@/providers/CurrentRelaysProvider'
@@ -197,7 +196,6 @@ const NoteListPage = forwardRef((_, ref) => {
       displayScrollToTopButton
     >
-
       {content}
diff --git a/src/services/client.service.ts b/src/services/client.service.ts
index 6b739df0..506b9bbb 100644
--- a/src/services/client.service.ts
+++ b/src/services/client.service.ts
@@ -2115,23 +2115,30 @@ class ClientService extends EventTarget {
         }
       }
 
-      void (async () => {
-        try {
-          const st = await indexedDb.getTimelinePersistedState(key)
-          if (!st?.refs?.length) return
-          const list = await indexedDb.getArchivedEventsByIds(st.refs.map((r) => r[0]))
-          if (list.length === 0) return
+      try {
+        const st = await indexedDb.getTimelinePersistedState(key)
+        if (st?.refs?.length) {
+          const hexIds = st.refs.map((r) => r[0])
+          const list = await indexedDb.getArchivedEventsByIds(hexIds)
           for (const ev of list) {
             if (shouldDropEventOnIngest(ev)) continue
             if (eventIds.has(ev.id)) continue
             eventIds.add(ev.id)
             events.push(ev)
           }
+          for (const refId of hexIds) {
+            if (eventIds.has(refId)) continue
+            const sess = that.eventService.peekSessionCachedEvent(refId)
+            if (sess && !shouldDropEventOnIngest(sess)) {
+              eventIds.add(refId)
+              events.push(sess)
+            }
+          }
           flushStreamingSnapshot()
-        } catch (err) {
-          logger.warn('[ClientService] Timeline disk hydrate failed', err)
         }
-      })()
+      } catch (err) {
+        logger.warn('[ClientService] Timeline disk hydrate failed', err)
+      }
 
       const handleTimelineEose = (eosed: boolean) => {
         if (!eosed) return
diff --git a/src/services/discussion-feed-cache.service.ts b/src/services/discussion-feed-cache.service.ts
index 1889c5f6..75ad0a44 100644
--- a/src/services/discussion-feed-cache.service.ts
+++ b/src/services/discussion-feed-cache.service.ts
@@ -34,6 +34,10 @@ class DiscussionFeedCacheService {
   private discussionsListCache: CachedDiscussionsListData | null = null
   private readonly CACHE_TTL_MS = 5 * 60 * 1000 // 5 minutes
   private readonly DISCUSSIONS_LIST_CACHE_TTL_MS = 2 * 60 * 1000 // 2 minutes for discussions list
+  /** Cap in-memory thread caches so long sessions do not retain unbounded reply payloads. */
+  private readonly MAX_THREAD_CACHE_KEYS = 100
+  /** Cap merged discussions list `eventMap` so unbounded merges cannot grow RAM without limit. */
+  private readonly MAX_DISCUSSIONS_LIST_THREADS = 400
 
   static getInstance(): DiscussionFeedCacheService {
     if (!DiscussionFeedCacheService.instance) {
@@ -153,12 +157,64 @@ class DiscussionFeedCacheService {
     this.cache.set(cacheKey, cachedData)
 
+    this.trimThreadCacheIfNeeded()
+
     // Clean up stale entries periodically (every 10th set operation)
     if (this.cache.size > 50 && Math.random() < 0.1) {
       this.cleanupStaleEntries()
     }
   }
 
+  /** Drop oldest threads by {@link CachedThreadData.timestamp} when over {@link MAX_THREAD_CACHE_KEYS}. */
+  private trimThreadCacheIfNeeded(): void {
+    if (this.cache.size <= this.MAX_THREAD_CACHE_KEYS) return
+    const entries = [...this.cache.entries()].sort((a, b) => a[1].timestamp - b[1].timestamp)
+    const overflow = this.cache.size - this.MAX_THREAD_CACHE_KEYS
+    for (let i = 0; i < overflow; i++) {
+      const key = entries[i]?.[0]
+      if (key) this.cache.delete(key)
+    }
+  }
+
+  /** Best-effort recency for discussion thread rows (unknown shapes → 0). */
+  private discussionsEntryRecency(entry: unknown): number {
+    if (!entry || typeof entry !== 'object') return 0
+    const o = entry as Record
+    for (const k of ['lastReplyAt', 'lastActivityAt', 'updatedAt', 'fetchedAt']) {
+      const v = o[k]
+      if (typeof v === 'number' && v > 0) return v
+    }
+    const root = o.rootEvent ?? o.event ?? o.threadRoot
+    if (root && typeof root === 'object' && 'created_at' in root) {
+      const ca = (root as { created_at?: unknown }).created_at
+      if (typeof ca === 'number') return ca
+    }
+    return 0
+  }
+
+  /**
+   * When over {@link MAX_DISCUSSIONS_LIST_THREADS}, keep rows from the latest fetch first, then
+   * next-most-recent by {@link discussionsEntryRecency}.
+   */
+  private trimDiscussionsEventMap(
+    map: Map,
+    prioritizeIds: ReadonlySet
+  ): Map {
+    if (map.size <= this.MAX_DISCUSSIONS_LIST_THREADS) return map
+    const entries = [...map.entries()].sort((a, b) => {
+      const pa = prioritizeIds.has(a[0]) ? 1 : 0
+      const pb = prioritizeIds.has(b[0]) ? 1 : 0
+      if (pa !== pb) return pb - pa
+      return this.discussionsEntryRecency(b[1]) - this.discussionsEntryRecency(a[1])
+    })
+    const next = new Map()
+    for (let i = 0; i < this.MAX_DISCUSSIONS_LIST_THREADS && i < entries.length; i++) {
+      const row = entries[i]
+      if (row) next.set(row[0], row[1])
+    }
+    return next
+  }
+
   /**
    * Clear cache for a specific thread
    */
@@ -248,6 +304,7 @@ class DiscussionFeedCacheService {
    * When merge=true, ALWAYS preserves all existing threads and adds new ones
    */
   setCachedDiscussionsList(eventMap: Map, dynamicTopics: { mainTopics: any[]; subtopics: any[]; allTopics: any[] }, merge = true): void {
+    const newIds = new Set(eventMap.keys())
     let mergedEventMap: Map
     const existingCacheSize = this.discussionsListCache?.eventMap.size || 0
     const newDataSize = eventMap.size
@@ -279,6 +336,8 @@ class DiscussionFeedCacheService {
       mergedEventMap = new Map(eventMap)
       logger.debug('[DiscussionFeedCache] Cached new discussions list (no merge):', eventMap.size, 'threads')
     }
+
+    mergedEventMap = this.trimDiscussionsEventMap(mergedEventMap, newIds) as Map
 
     // Store merged event map
     this.discussionsListCache = {
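// Illustrative sketch (not part of the diff) of the eviction rule trimThreadCacheIfNeeded applies:
// once a cache map exceeds its cap, drop entries with the oldest `timestamp` first. Types simplified.
function evictOldest<K>(cache: Map<K, { timestamp: number }>, maxKeys: number): void {
  if (cache.size <= maxKeys) return
  const oldestFirst = [...cache.entries()].sort((a, b) => a[1].timestamp - b[1].timestamp)
  const overflow = cache.size - maxKeys
  for (let i = 0; i < overflow; i++) {
    const entry = oldestFirst[i]
    if (entry) cache.delete(entry[0])
  }
}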
diff --git a/src/services/indexed-db.service.ts b/src/services/indexed-db.service.ts
index 6b7cd1ed..1a69a47c 100644
--- a/src/services/indexed-db.service.ts
+++ b/src/services/indexed-db.service.ts
@@ -90,7 +90,7 @@ export const StoreNames = {
 }
 
 /** Schema version we expect. When adding stores or migrations, bump this. */
-const DB_VERSION = 33
+const DB_VERSION = 34
 
 /** Max age for profile and payment info cache before we refetch (5 min). */
 const PROFILE_AND_PAYMENT_CACHE_MAX_AGE_MS = 5 * 60 * 1000
@@ -132,6 +132,13 @@ class IndexedDbService {
   private db: IDBDatabase | null = null
   private initPromise: Promise<void> | null = null
+  /** Browser timer id (DOM `setTimeout` returns a number). */
+  private cleanupTimer: number | null = null
+
+  /** First TTL sweep after DB open (profile / relay list rows). */
+  private static readonly CLEANUP_INITIAL_DELAY_MS = 60 * 1000
+  /** Repeat TTL sweeps on this interval so pruning is not a one-shot. */
+  private static readonly CLEANUP_INTERVAL_MS = 60 * 60 * 1000
 
   init(): Promise<void> {
     if (!this.initPromise) {
@@ -171,7 +178,7 @@ class IndexedDbService {
       }
       openWithStored.onsuccess = () => {
         this.db = openWithStored.result
-        setTimeout(() => this.cleanUp(), 1000 * 60)
+        this.scheduleNextCleanUp(IndexedDbService.CLEANUP_INITIAL_DELAY_MS)
         resolve()
       }
       openWithStored.onupgradeneeded = () => {
@@ -187,7 +194,7 @@ class IndexedDbService {
       request.onsuccess = () => {
         this.db = request.result
-        setTimeout(() => this.cleanUp(), 1000 * 60)
+        this.scheduleNextCleanUp(IndexedDbService.CLEANUP_INITIAL_DELAY_MS)
         resolve()
       }
 
@@ -301,6 +308,9 @@ class IndexedDbService {
       if (!db.objectStoreNames.contains(StoreNames.PIPER_TTS_CACHE)) {
         db.createObjectStore(StoreNames.PIPER_TTS_CACHE, { keyPath: 'key' })
       }
+      if (event.oldVersion < 34) {
+        // v34: app-side changes (fetch timeouts, timeline hydrate order, discussion list cap)
+      }
       ensureMissingObjectStores(db)
     }
   }
@@ -1288,7 +1298,7 @@ class IndexedDbService {
     const allStoreNames = Array.from(this.db.objectStoreNames)
     const transaction = this.db.transaction(allStoreNames, 'readwrite')
 
-    await Promise.allSettled(
+    const clearResults = await Promise.allSettled(
       allStoreNames.map(storeName => {
         return new Promise((resolve, reject) => {
           const store = transaction.objectStore(storeName)
@@ -1298,6 +1308,15 @@ class IndexedDbService {
         })
       })
     )
+    for (let i = 0; i < clearResults.length; i++) {
+      const r = clearResults[i]
+      if (r?.status === 'rejected') {
+        logger.warn('[IndexedDB] clearAllCache failed for store', {
+          store: allStoreNames[i],
+          error: r.reason
+        })
+      }
+    }
   }
 
   async getStoreInfo(): Promise<Record<string, number>> {
@@ -1306,25 +1325,30 @@ class IndexedDbService {
       return {}
     }
 
-    const storeInfo: Record<string, number> = {}
     const allStoreNames = Array.from(this.db.objectStoreNames)
-
-    await Promise.allSettled(
-      allStoreNames.map(storeName => {
-        return new Promise((resolve, reject) => {
-          const transaction = this.db!.transaction(storeName, 'readonly')
-          const store = transaction.objectStore(storeName)
-          const request = store.count()
-          request.onsuccess = () => {
-            storeInfo[storeName] = request.result
-            resolve()
-          }
-          request.onerror = (event) => reject(idbEventToError(event))
-        })
-      })
-    )
+    if (allStoreNames.length === 0) {
+      return {}
+    }
 
-    return storeInfo
+    return new Promise((resolve, reject) => {
+      const storeInfo: Record<string, number> = {}
+      const tx = this.db!.transaction(allStoreNames, 'readonly')
+      let pending = allStoreNames.length
+
+      for (const storeName of allStoreNames) {
+        const req = tx.objectStore(storeName).count()
+        req.onsuccess = () => {
+          storeInfo[storeName] = req.result
+          pending--
+          if (pending === 0) {
+            resolve(storeInfo)
+          }
+        }
+        req.onerror = (ev) => {
+          reject(idbEventToError(ev))
+        }
+      }
+    })
   }
 
   async getStoreItems(storeName: string): Promise[]> {
@@ -1614,12 +1638,26 @@ class IndexedDbService {
     })
   }
 
+  private scheduleNextCleanUp(delayMs: number): void {
+    if (typeof window === 'undefined') return
+    if (this.cleanupTimer !== null) {
+      clearTimeout(this.cleanupTimer)
+      this.cleanupTimer = null
+    }
+    if (!this.db) return
+    this.cleanupTimer = window.setTimeout(() => {
+      this.cleanupTimer = null
+      void this.cleanUp()
+    }, delayMs)
+  }
+
   private async cleanUp() {
     await this.initPromise
 
     if (!this.db) {
       return
     }
+    try {
       const stores = [
         { name: StoreNames.PROFILE_EVENTS, expirationTimestamp: Date.now() - 1000 * 60 * 60 * 24 }, // 1 day
         { name: StoreNames.PAYMENT_INFO_EVENTS, expirationTimestamp: Date.now() - PROFILE_AND_PAYMENT_CACHE_MAX_AGE_MS }, // 5 min
@@ -1650,7 +1688,7 @@ class IndexedDbService {
         existingStores.map((store) => store.name),
         'readwrite'
       )
-      await Promise.allSettled(
+      const sweepResults = await Promise.allSettled(
         existingStores.map(({ name, expirationTimestamp }) => {
           if (expirationTimestamp < 0) {
             return Promise.resolve()
@@ -1677,6 +1715,22 @@ class IndexedDbService {
           })
         })
       )
+      for (let i = 0; i < sweepResults.length; i++) {
+        const r = sweepResults[i]
+        if (r?.status === 'rejected') {
+          logger.warn('[IndexedDB] cleanUp store sweep failed', {
+            store: existingStores[i]?.name,
+            error: r.reason
+          })
+        }
+      }
+    } catch (error) {
+      logger.warn('[IndexedDB] cleanUp failed', { error })
+    } finally {
+      if (this.db) {
+        this.scheduleNextCleanUp(IndexedDbService.CLEANUP_INTERVAL_MS)
+      }
+    }
   }
 
   /**
diff --git a/src/services/lightning.service.ts b/src/services/lightning.service.ts
index f9c9f747..6022589a 100644
--- a/src/services/lightning.service.ts
+++ b/src/services/lightning.service.ts
@@ -18,6 +18,7 @@ import { utf8Decoder } from 'nostr-tools/utils'
 import client from './client.service'
 import { queryService, replaceableEventService } from './client.service'
 import { getProfileFromEvent } from '@/lib/event-metadata'
+import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
 import logger from '@/lib/logger'
 
 export type TRecentSupporter = { pubkey: string; amount: number; comment?: string }
@@ -82,8 +83,9 @@ class LightningService {
       comment
     })
     const zapRequest = await client.signer.signEvent(zapRequestDraft)
-    const zapRequestRes = await fetch(
-      `${callback}?amount=${amount}&nostr=${encodeURI(JSON.stringify(zapRequest))}&lnurl=${lnurl}`
+    const zapRequestRes = await fetchWithTimeout(
+      `${callback}?amount=${amount}&nostr=${encodeURI(JSON.stringify(zapRequest))}&lnurl=${lnurl}`,
+      { timeoutMs: 25_000 }
     )
     const zapRequestResBody = await zapRequestRes.json()
     if (zapRequestResBody.error) {
@@ -205,8 +207,9 @@ class LightningService {
       comment
     })
     const zapRequest = await client.signer.signEvent(zapRequestDraft)
-    const zapRequestRes = await fetch(
-      `${callback}?amount=${amount}&nostr=${encodeURI(JSON.stringify(zapRequest))}&lnurl=${lnurl}`
+    const zapRequestRes = await fetchWithTimeout(
+      `${callback}?amount=${amount}&nostr=${encodeURI(JSON.stringify(zapRequest))}&lnurl=${lnurl}`,
+      { timeoutMs: 25_000 }
     )
     const zapRequestResBody = await zapRequestRes.json()
     if (zapRequestResBody.error) {
@@ -354,7 +357,7 @@ class LightningService {
       lnurl = utf8Decoder.decode(data)
     }
 
-    const res = await fetch(lnurl)
+    const res = await fetchWithTimeout(lnurl, { timeoutMs: 15_000 })
     const body = await res.json()
 
     if (body.allowsNostr && body.nostrPubkey) {
diff --git a/src/services/media-upload.service.ts b/src/services/media-upload.service.ts
index e39442ac..f520adfb 100644
--- a/src/services/media-upload.service.ts
+++ b/src/services/media-upload.service.ts
@@ -1,3 +1,4 @@
+import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
 import { simplifyUrl } from '@/lib/url'
 import { TDraftEvent, TMediaUploadServiceConfig } from '@/types'
 import { BlossomClient } from 'blossom-client-sdk'
@@ -122,7 +123,10 @@ class MediaUploadService {
     }
     let uploadUrl = this.nip96ServiceUploadUrlMap.get(service)
     if (!uploadUrl) {
-      const response = await fetch(`${service}/.well-known/nostr/nip96.json`)
+      const response = await fetchWithTimeout(`${service}/.well-known/nostr/nip96.json`, {
+        signal: options?.signal,
+        timeoutMs: 15_000
+      })
       if (!response.ok) {
         throw new Error(
           `${simplifyUrl(service)} does not work, please try another service in your settings`
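// Illustrative sketch (not part of the diff): the self-rescheduling sweep that scheduleNextCleanUp /
// cleanUp implement above — a chained setTimeout rather than setInterval, rescheduled in `finally`,
// so a slow or failing sweep never overlaps the next run. Names here are hypothetical.
function startSweepLoop(sweep: () => Promise<void>, intervalMs: number): () => void {
  let timer: number | null = null
  let stopped = false
  const run = async () => {
    try {
      await sweep()
    } catch {
      // a failed sweep should not break the loop
    } finally {
      if (!stopped) timer = window.setTimeout(run, intervalMs)
    }
  }
  timer = window.setTimeout(run, intervalMs)
  return () => {
    stopped = true
    if (timer !== null) window.clearTimeout(timer)
  }
}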
diff --git a/src/services/relay-info.service.ts b/src/services/relay-info.service.ts
index 209d4c46..6d794b2b 100644
--- a/src/services/relay-info.service.ts
+++ b/src/services/relay-info.service.ts
@@ -3,6 +3,7 @@ import indexDb from '@/services/indexed-db.service'
 import { TAwesomeRelayCollection, TRelayInfo } from '@/types'
 import DataLoader from 'dataloader'
 import FlexSearch from 'flexsearch'
+import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
 import logger from '@/lib/logger'
 
 class RelayInfoService {
@@ -97,8 +98,9 @@ class RelayInfoService {
     this.awesomeRelayCollections = (async () => {
       try {
-        const res = await fetch(
-          'https://raw.githubusercontent.com/CodyTseng/awesome-nostr-relays/master/dist/collections.json'
+        const res = await fetchWithTimeout(
+          'https://raw.githubusercontent.com/CodyTseng/awesome-nostr-relays/master/dist/collections.json',
+          { timeoutMs: 20_000 }
         )
         if (!res.ok) {
           throw new Error('Failed to fetch awesome relay collections')
@@ -146,8 +148,9 @@ class RelayInfoService {
   private async fetchRelayNip11(url: string) {
     try {
       logger.debug('Fetching NIP-11 metadata', { url })
-      const res = await fetch(url.replace('ws://', 'http://').replace('wss://', 'https://'), {
-        headers: { Accept: 'application/nostr+json' }
+      const res = await fetchWithTimeout(url.replace('ws://', 'http://').replace('wss://', 'https://'), {
+        headers: { Accept: 'application/nostr+json' },
+        timeoutMs: 12_000
       })
       return res.json() as Omit
     } catch {
diff --git a/src/services/rss-feed.service.ts b/src/services/rss-feed.service.ts
index 61a0d082..1651c92c 100644
--- a/src/services/rss-feed.service.ts
+++ b/src/services/rss-feed.service.ts
@@ -1,4 +1,5 @@
 import { DEFAULT_RSS_FEEDS } from '@/constants'
+import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
 import { canonicalizeRssArticleUrl } from '@/lib/rss-article'
 import { cleanUrl } from '@/lib/url'
 import logger from '@/lib/logger'
@@ -317,64 +318,38 @@ class RssFeedService {
    */
   private async fetchWithStrategy(originalUrl: string, strategy: { name: string; getUrl: (url: string) => string }, externalSignal?: AbortSignal): Promise {
     const fetchUrl = strategy.getUrl(originalUrl)
-
-    // Check if external signal is already aborted
+
     if (externalSignal?.aborted) {
       throw new DOMException('The operation was aborted.', 'AbortError')
     }
 
-    const controller = new AbortController()
-    // Use a longer timeout for RSS feeds (30 seconds) since they can be slow
-    // Don't abort on timeout - just log a warning, let the fetch continue
-    const timeoutId = setTimeout(() => {
-      logger.warn('[RssFeedService] Fetch taking longer than expected', {
-        url: originalUrl,
-        strategy: strategy.name,
-        elapsed: '30s'
-      })
-      // Don't abort - just log. The fetch will continue or fail naturally
-    }, 30000) // 30 second warning (but don't abort)
-
-    // If external signal is provided, abort our controller when external signal aborts
-    if (externalSignal) {
-      externalSignal.addEventListener('abort', () => {
-        clearTimeout(timeoutId)
-        controller.abort()
-      }, { once: true })
-    }
-
     try {
-      const res = await fetch(fetchUrl, {
-        signal: controller.signal,
+      const res = await fetchWithTimeout(fetchUrl, {
+        signal: externalSignal,
+        timeoutMs: 60_000,
         mode: 'cors',
         credentials: 'omit',
         headers: {
-          'Accept': 'application/rss+xml, application/xml, application/atom+xml, text/xml, */*'
+          Accept: 'application/rss+xml, application/xml, application/atom+xml, text/xml, */*'
         }
       })
 
-      clearTimeout(timeoutId)
-
       if (!res.ok) {
         throw new Error(`HTTP ${res.status}: ${res.statusText}`)
       }
 
       const xmlText = await res.text()
-
-      // Validate that we got XML content
+
       if (!xmlText || xmlText.trim().length === 0) {
         throw new Error('Empty response')
       }
 
-      // Basic validation - check if it looks like XML
       if (!xmlText.trim().startsWith('<')) {
         throw new Error('Response does not appear to be XML')
       }
 
       return xmlText
     } catch (error) {
-      clearTimeout(timeoutId)
-      // Re-throw abort errors as-is
       if (error instanceof DOMException && error.name === 'AbortError') {
         throw error
       }
diff --git a/src/services/web.service.ts b/src/services/web.service.ts
index 6e58247a..f7ac3f33 100644
--- a/src/services/web.service.ts
+++ b/src/services/web.service.ts
@@ -1,3 +1,4 @@
+import { fetchWithTimeout } from '@/lib/fetch-with-timeout'
 import { buildViteProxySitesFetchUrl, urlLooksLikeViteProxyRequest } from '@/lib/vite-proxy-url'
 import { TWebMetadata } from '@/types'
 import DataLoader from 'dataloader'
@@ -20,11 +21,9 @@ const HTML_FETCH_HEADERS = {
 }
 
 async function tryFetchHtml(fetchUrl: string, timeoutMs: number): Promise {
-  const controller = new AbortController()
-  const timeoutId = setTimeout(() => controller.abort(), timeoutMs)
   try {
-    const res = await fetch(fetchUrl, {
-      signal: controller.signal,
+    const res = await fetchWithTimeout(fetchUrl, {
+      timeoutMs,
       mode: 'cors',
       credentials: 'omit',
       headers: HTML_FETCH_HEADERS
@@ -36,8 +35,6 @@ async function tryFetchHtml(fetchUrl: string, timeoutMs: number): Promise