
make electron app session storage persistent

imwald · Silberengel, 1 month ago
parent commit daf9336772
15 changed files:
  1. package-lock.json (4)
  2. package.json (2)
  3. src/components/CacheRelaysSetting/index.tsx (10)
  4. src/components/EventArchiveCacheSettings/index.tsx (167)
  5. src/constants.ts (8)
  6. src/i18n/locales/en.ts (19)
  7. src/lib/client-platform.ts (15)
  8. src/lib/event-archive-config.ts (83)
  9. src/lib/piper-tts-cache-policy.ts (31)
  10. src/lib/read-aloud.ts (35)
  11. src/pages/secondary/CacheSettingsPage/index.tsx (2)
  12. src/services/client-events.service.ts (43)
  13. src/services/client.service.ts (55)
  14. src/services/event-archive.service.ts (131)
  15. src/services/indexed-db.service.ts (423)

package-lock.json generated (4)

@@ -1,12 +1,12 @@
{
"name": "jumble-imwald",
"version": "21.1.2",
"version": "21.2.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "jumble-imwald",
"version": "21.1.2",
"version": "21.2.0",
"license": "MIT",
"dependencies": {
"@asciidoctor/core": "^3.0.4",

package.json (2)

@@ -1,6 +1,6 @@
{
"name": "jumble-imwald",
"version": "21.1.2",
"version": "21.2.0",
"description": "A user-friendly Nostr client focused on relay feed browsing and relay discovery, forked from Jumble",
"private": true,
"type": "module",

src/components/CacheRelaysSetting/index.tsx (10)

@@ -33,7 +33,7 @@ import { CloudUpload, Trash2, RefreshCw, Database, WrapText, Search, X, Triangle
import { Input } from '@/components/ui/input'
import { Skeleton } from '@/components/ui/skeleton'
import client from '@/services/client.service'
import indexedDb from '@/services/indexed-db.service'
import indexedDb, { StoreNames } from '@/services/indexed-db.service'
import postEditorCache from '@/services/post-editor-cache.service'
import { StorageKey } from '@/constants'
import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogDescription } from '@/components/ui/dialog'
@@ -216,8 +216,9 @@ export default function CacheRelaysSetting() {
}
try {
// Clear IndexedDB
// Clear IndexedDB (all stores, including Piper read-aloud blobs)
await indexedDb.clearAllCache()
await indexedDb.clearPiperTtsCache()
// Clear localStorage (but keep essential settings like theme, accounts, etc.)
// We'll only clear Jumble-specific cache keys, not all localStorage
@@ -726,6 +727,11 @@ export default function CacheRelaysSetting() {
// If neither exists, it's invalid
return true
}
if (storeName === StoreNames.PIPER_TTS_CACHE) {
const v = item.value as { blob?: unknown; mimeType?: string } | null
return !(v && typeof v.mimeType === 'string' && v.blob instanceof Blob)
}
// For other stores, check if value exists
if (!item.value) return true

src/components/EventArchiveCacheSettings/index.tsx (167)

@@ -0,0 +1,167 @@
import { Button } from '@/components/ui/button'
import { Label } from '@/components/ui/label'
import { Switch } from '@/components/ui/switch'
import { Input } from '@/components/ui/input'
import { StorageKey } from '@/constants'
import {
EVENT_ARCHIVE_DEFAULTS,
getEventArchiveConfig
} from '@/lib/event-archive-config'
import { isJumbleElectron, isMobileBrowserProfile } from '@/lib/client-platform'
import client from '@/services/client.service'
import { invalidateArchiveFootprintCache } from '@/services/event-archive.service'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { toast } from 'sonner'
function platformLabel(): string {
if (isJumbleElectron()) return 'desktop-app'
if (isMobileBrowserProfile()) return 'mobile-web'
return 'desktop-web'
}
export default function EventArchiveCacheSettings() {
const { t } = useTranslation()
const [enabled, setEnabled] = useState(true)
const [maxMb, setMaxMb] = useState('')
const [maxEvents, setMaxEvents] = useState('')
const [sessionLru, setSessionLru] = useState('')
const defaultsHint = useMemo(() => {
const p = platformLabel()
if (p === 'mobile-web') {
return t('eventArchive.defaultsMobile', {
lru: EVENT_ARCHIVE_DEFAULTS.sessionLruMobile,
mb: EVENT_ARCHIVE_DEFAULTS.maxMbMobile,
ev: EVENT_ARCHIVE_DEFAULTS.maxEventsMobile
})
}
if (p === 'desktop-app') {
return t('eventArchive.defaultsElectron', {
lru: EVENT_ARCHIVE_DEFAULTS.sessionLruElectron,
mb: EVENT_ARCHIVE_DEFAULTS.maxMbElectron,
ev: EVENT_ARCHIVE_DEFAULTS.maxEventsElectron
})
}
return t('eventArchive.defaultsDesktopWeb', {
lru: EVENT_ARCHIVE_DEFAULTS.sessionLruDesktopBrowser,
mb: EVENT_ARCHIVE_DEFAULTS.maxMbDesktopBrowser,
ev: EVENT_ARCHIVE_DEFAULTS.maxEventsDesktopBrowser
})
}, [t])
useEffect(() => {
setEnabled(window.localStorage.getItem(StorageKey.EVENT_ARCHIVE_ENABLED) !== 'false')
setMaxMb(window.localStorage.getItem(StorageKey.EVENT_ARCHIVE_MAX_MB) ?? '')
setMaxEvents(window.localStorage.getItem(StorageKey.EVENT_ARCHIVE_MAX_EVENTS) ?? '')
setSessionLru(window.localStorage.getItem(StorageKey.SESSION_EVENT_LRU_MAX) ?? '')
}, [])
const apply = useCallback(() => {
window.localStorage.setItem(StorageKey.EVENT_ARCHIVE_ENABLED, enabled ? 'true' : 'false')
const mb = maxMb.trim()
if (mb) window.localStorage.setItem(StorageKey.EVENT_ARCHIVE_MAX_MB, mb)
else window.localStorage.removeItem(StorageKey.EVENT_ARCHIVE_MAX_MB)
const ev = maxEvents.trim()
if (ev) window.localStorage.setItem(StorageKey.EVENT_ARCHIVE_MAX_EVENTS, ev)
else window.localStorage.removeItem(StorageKey.EVENT_ARCHIVE_MAX_EVENTS)
const lru = sessionLru.trim()
if (lru) window.localStorage.setItem(StorageKey.SESSION_EVENT_LRU_MAX, lru)
else window.localStorage.removeItem(StorageKey.SESSION_EVENT_LRU_MAX)
client.reapplySessionLruFromSettings()
invalidateArchiveFootprintCache()
toast.success(t('eventArchive.appliedToast'))
}, [enabled, maxMb, maxEvents, sessionLru, t])
const effective = getEventArchiveConfig()
return (
<div className="mt-8 space-y-4 border-t border-border pt-6">
<h3 className="text-base font-medium">{t('eventArchive.sectionTitle')}</h3>
<p className="text-muted-foreground text-sm">{t('eventArchive.sectionBlurb')}</p>
<p className="text-muted-foreground text-xs">{defaultsHint}</p>
<div className="flex items-center justify-between gap-3">
<Label htmlFor="event-archive-enabled" className="text-sm font-normal">
{t('eventArchive.enablePersist')}
</Label>
<Switch
id="event-archive-enabled"
checked={enabled}
onCheckedChange={(v) => setEnabled(Boolean(v))}
/>
</div>
<div className="grid gap-3 sm:grid-cols-2">
<div className="space-y-1">
<Label htmlFor="archive-max-mb" className="text-sm font-normal">
{t('eventArchive.maxMb')}
</Label>
<Input
id="archive-max-mb"
inputMode="numeric"
placeholder={String(
isJumbleElectron()
? EVENT_ARCHIVE_DEFAULTS.maxMbElectron
: isMobileBrowserProfile()
? EVENT_ARCHIVE_DEFAULTS.maxMbMobile
: EVENT_ARCHIVE_DEFAULTS.maxMbDesktopBrowser
)}
value={maxMb}
onChange={(e) => setMaxMb(e.target.value)}
/>
</div>
<div className="space-y-1">
<Label htmlFor="archive-max-events" className="text-sm font-normal">
{t('eventArchive.maxEvents')}
</Label>
<Input
id="archive-max-events"
inputMode="numeric"
placeholder={String(
isJumbleElectron()
? EVENT_ARCHIVE_DEFAULTS.maxEventsElectron
: isMobileBrowserProfile()
? EVENT_ARCHIVE_DEFAULTS.maxEventsMobile
: EVENT_ARCHIVE_DEFAULTS.maxEventsDesktopBrowser
)}
value={maxEvents}
onChange={(e) => setMaxEvents(e.target.value)}
/>
</div>
</div>
<div className="space-y-1">
<Label htmlFor="session-lru" className="text-sm font-normal">
{t('eventArchive.sessionLru')}
</Label>
<Input
id="session-lru"
inputMode="numeric"
placeholder={String(
isJumbleElectron()
? EVENT_ARCHIVE_DEFAULTS.sessionLruElectron
: isMobileBrowserProfile()
? EVENT_ARCHIVE_DEFAULTS.sessionLruMobile
: EVENT_ARCHIVE_DEFAULTS.sessionLruDesktopBrowser
)}
value={sessionLru}
onChange={(e) => setSessionLru(e.target.value)}
/>
</div>
<p className="text-muted-foreground text-xs">
{t('eventArchive.effectiveSummary', {
enabled: effective.enabled ? t('eventArchive.on') : t('eventArchive.off'),
mb: Math.round(effective.maxBytes / (1024 * 1024)),
events: effective.maxEvents,
lru: effective.sessionLruMax
})}
</p>
<Button type="button" variant="secondary" onClick={apply}>
{t('eventArchive.apply')}
</Button>
</div>
)
}

src/constants.ts (8)

@@ -162,6 +162,14 @@ export const StorageKey = {
SHOW_PUBLISH_SUCCESS_TOASTS: 'showPublishSuccessToasts',
/** When not `'false'`, show NIP-53 live activity banner (default on). */
SHOW_LIVE_ACTIVITIES_BANNER: 'showLiveActivitiesBanner',
/** Persist timeline notes/reactions to IndexedDB (platform defaults; disable for relay-only). */
EVENT_ARCHIVE_ENABLED: 'eventArchiveEnabled',
/** Max approximate archive size (MB). `0` in UI means “use platform default”. */
EVENT_ARCHIVE_MAX_MB: 'eventArchiveMaxMb',
/** Max rows in event archive. `0` means use platform default. */
EVENT_ARCHIVE_MAX_EVENTS: 'eventArchiveMaxEvents',
/** In-memory session LRU max (events). Platform default if unset. */
SESSION_EVENT_LRU_MAX: 'sessionEventLruMax',
/** Temporary draft cache: new notes and replies. Persisted after 30s idle; restored on refresh; cleared on logout/switch. */
POST_EDITOR_DRAFT: 'postEditorDraft',
MEDIA_UPLOAD_SERVICE: 'mediaUploadService', // deprecated

src/i18n/locales/en.ts (19)

@@ -625,6 +625,25 @@ export default {
successes: 'successes',
None: 'None',
'Cache & offline storage': 'Cache & offline storage',
'eventArchive.sectionTitle': 'Notes & feed archive',
'eventArchive.sectionBlurb':
'Keeps notes, reactions, and timeline order on disk so feeds can load offline or on slow links. Replaceable data (profiles, relay lists, publications) stays in its existing stores — this archive only fills gaps for “firehose” events. Turn off to rely on relays only.',
'eventArchive.defaultsMobile':
'This device profile uses small defaults: about {{lru}} events in memory, ~{{mb}} MB / {{ev}} archived events (reactions/zaps drop first).',
'eventArchive.defaultsElectron':
'Desktop app defaults: ~{{lru}} in-memory events, ~{{mb}} MB / {{ev}} archived events.',
'eventArchive.defaultsDesktopWeb':
'Desktop browser defaults: ~{{lru}} in-memory events, ~{{mb}} MB / {{ev}} archived events.',
'eventArchive.enablePersist': 'Persist feed events to disk',
'eventArchive.maxMb': 'Max archive size (MB), blank = default for this device',
'eventArchive.maxEvents': 'Max archived events, blank = default',
'eventArchive.sessionLru': 'In-memory session cache (event count), blank = default',
'eventArchive.effectiveSummary':
'Currently: {{enabled}} — ~{{mb}} MB budget, {{events}} events, {{lru}} session LRU.',
'eventArchive.on': 'on',
'eventArchive.off': 'off',
'eventArchive.apply': 'Apply cache settings',
'eventArchive.appliedToast': 'Cache settings saved. Session memory updated.',
'Paste or drop media files to upload': 'Paste or drop media files to upload',
Preview: 'Preview',
'You are about to publish an event signed by [{{eventAuthorName}}]. You are currently logged in as [{{currentUsername}}]. Are you sure?':

src/lib/client-platform.ts (15)

@@ -0,0 +1,15 @@
/** True when running inside the packaged Electron shell ({@link electron/preload.cjs}). */
export function isJumbleElectron(): boolean {
return typeof window !== 'undefined' && window.jumbleElectron?.isElectron === true
}
/**
* Coarse phone / mobile browser profile: touch-first or narrow viewport, excluding Electron.
* Used for smaller in-memory LRU and tighter disk archive defaults (not a substitute for real UA tests).
*/
export function isMobileBrowserProfile(): boolean {
if (typeof window === 'undefined' || isJumbleElectron()) return false
const narrow = window.matchMedia?.('(max-width: 768px)')?.matches ?? false
const coarse = window.matchMedia?.('(pointer: coarse)')?.matches ?? false
return narrow || (coarse && (window.innerWidth ?? 1024) <= 900)
}

src/lib/event-archive-config.ts (83)

@@ -0,0 +1,83 @@
import { StorageKey } from '@/constants'
import { isJumbleElectron, isMobileBrowserProfile } from '@/lib/client-platform'
/** Platform defaults (overridable in Cache settings). */
export const EVENT_ARCHIVE_DEFAULTS = {
sessionLruMobile: 100,
sessionLruDesktopBrowser: 2500,
sessionLruElectron: 5000,
maxMbMobile: 48,
maxMbElectron: 512,
maxMbDesktopBrowser: 2048,
maxEventsMobile: 500,
maxEventsElectron: 400_000,
maxEventsDesktopBrowser: 80_000
} as const
export type TEventArchiveConfig = {
enabled: boolean
/** Soft byte budget (approximate, from JSON size). */
maxBytes: number
maxEvents: number
sessionLruMax: number
}
function readBool(key: string, defaultTrue: boolean): boolean {
try {
const v = window.localStorage.getItem(key)
if (v === null) return defaultTrue
return v !== 'false' && v !== '0'
} catch {
return defaultTrue
}
}
function readPositiveInt(key: string, fallback: number): number {
try {
const v = window.localStorage.getItem(key)
if (v === null || v === '' || v === '0') return fallback
const n = Number.parseInt(v, 10)
return Number.isFinite(n) && n > 0 ? n : fallback
} catch {
return fallback
}
}
function defaultSessionLruMax(): number {
if (isJumbleElectron()) return EVENT_ARCHIVE_DEFAULTS.sessionLruElectron
if (isMobileBrowserProfile()) return EVENT_ARCHIVE_DEFAULTS.sessionLruMobile
return EVENT_ARCHIVE_DEFAULTS.sessionLruDesktopBrowser
}
function defaultMaxMb(): number {
if (isJumbleElectron()) return EVENT_ARCHIVE_DEFAULTS.maxMbElectron
if (isMobileBrowserProfile()) return EVENT_ARCHIVE_DEFAULTS.maxMbMobile
return EVENT_ARCHIVE_DEFAULTS.maxMbDesktopBrowser
}
function defaultMaxEvents(): number {
if (isJumbleElectron()) return EVENT_ARCHIVE_DEFAULTS.maxEventsElectron
if (isMobileBrowserProfile()) return EVENT_ARCHIVE_DEFAULTS.maxEventsMobile
return EVENT_ARCHIVE_DEFAULTS.maxEventsDesktopBrowser
}
/**
* Effective archive + session LRU limits (reads Cache settings from localStorage).
*/
export function getEventArchiveConfig(): TEventArchiveConfig {
const enabled = readBool(StorageKey.EVENT_ARCHIVE_ENABLED, true)
const maxMb = readPositiveInt(StorageKey.EVENT_ARCHIVE_MAX_MB, defaultMaxMb())
const maxEvents = readPositiveInt(StorageKey.EVENT_ARCHIVE_MAX_EVENTS, defaultMaxEvents())
const sessionLruMax = readPositiveInt(StorageKey.SESSION_EVENT_LRU_MAX, defaultSessionLruMax())
return {
enabled,
maxBytes: Math.max(8, maxMb) * 1024 * 1024,
maxEvents: Math.max(50, maxEvents),
sessionLruMax: Math.max(32, Math.min(200_000, sessionLruMax))
}
}
/** Session LRU max before localStorage overrides (for EventService constructor). */
export function getDefaultSessionLruMaxSync(): number {
return readPositiveInt(StorageKey.SESSION_EVENT_LRU_MAX, defaultSessionLruMax())
}
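A minimal usage sketch (not part of this commit) showing how the overrides and clamps interact; the StorageKey names are the real ones added in src/constants.ts, but the values are illustrative:

import { StorageKey } from '@/constants'
import { getEventArchiveConfig } from '@/lib/event-archive-config'

// Illustrative overrides: a 100 MB budget and a 10-entry session LRU.
window.localStorage.setItem(StorageKey.EVENT_ARCHIVE_MAX_MB, '100')
window.localStorage.setItem(StorageKey.SESSION_EVENT_LRU_MAX, '10')

const cfg = getEventArchiveConfig()
// cfg.maxBytes === 100 * 1024 * 1024 (values below 8 MB are raised to 8 MB)
// cfg.sessionLruMax === 32, because the LRU override is clamped to the 32..200_000 range
// cfg.enabled === true, since EVENT_ARCHIVE_ENABLED was never set to 'false'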

src/lib/piper-tts-cache-policy.ts (31)

@@ -0,0 +1,31 @@
import { isJumbleElectron, isMobileBrowserProfile } from '@/lib/client-platform'
/** How long we keep Piper WAV blobs (per device class). */
export function getPiperTtsCacheTtlMs(): number {
if (isJumbleElectron()) return 7 * 24 * 60 * 60 * 1000
if (isMobileBrowserProfile()) return 24 * 60 * 60 * 1000
return 48 * 60 * 60 * 1000
}
/** Caps so TTS audio cannot grow without bound (evicts oldest after TTL pass). */
export function getPiperTtsCacheBudget(): { maxEntries: number; maxBytes: number } {
if (isJumbleElectron()) return { maxEntries: 400, maxBytes: 400 * 1024 * 1024 }
if (isMobileBrowserProfile()) return { maxEntries: 80, maxBytes: 45 * 1024 * 1024 }
return { maxEntries: 200, maxBytes: 180 * 1024 * 1024 }
}
/**
* Stable key for a Piper request: same URL + text + speed → same audio.
* Server upgrades / voice changes require a new endpoint URL or speed to bust the cache.
*/
export async function buildPiperTtsCacheKey(
endpointUrl: string,
text: string,
speed: number
): Promise<string> {
const payload = new TextEncoder().encode(JSON.stringify({ u: endpointUrl, t: text, s: speed }))
const digest = await crypto.subtle.digest('SHA-256', payload)
return Array.from(new Uint8Array(digest))
.map((b) => b.toString(16).padStart(2, '0'))
.join('')
}
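A hedged sketch of how the key behaves inside an async context (the endpoint URL is made up): identical inputs hash to the same 64-character hex key, so a repeat request is served from the blob cache, while any change to text or speed produces a different key.

import { buildPiperTtsCacheKey } from '@/lib/piper-tts-cache-policy'

// Hypothetical endpoint URL, for illustration only.
const url = 'https://tts.example.com/api/tts'

const a = await buildPiperTtsCacheKey(url, 'Hello, Nostr', 1)
const b = await buildPiperTtsCacheKey(url, 'Hello, Nostr', 1)
const c = await buildPiperTtsCacheKey(url, 'Hello, Nostr', 1.25)

// a === b          -> same request, same cache row
// a !== c          -> different speed busts the cache
// a.length === 64  -> SHA-256 rendered as lowercase hex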

src/lib/read-aloud.ts (35)

@@ -1,4 +1,10 @@
import { ExtendedKind, READ_ALOUD_TTS_URL } from '@/constants'
import {
buildPiperTtsCacheKey,
getPiperTtsCacheBudget,
getPiperTtsCacheTtlMs
} from '@/lib/piper-tts-cache-policy'
import indexedDb from '@/services/indexed-db.service'
import { getLongFormArticleMetadataFromEvent } from '@/lib/event-metadata'
import logger from '@/lib/logger'
import { Event, kinds } from 'nostr-tools'
@@ -294,12 +300,26 @@ async function fetchPiperTtsBlobForChunk(
throw new Error(`Part ${chunkIndex + 1} of ${totalChunks}: TTS URL not configured`)
}
const speed = 1
const ttlMs = getPiperTtsCacheTtlMs()
const budget = getPiperTtsCacheBudget()
let cacheKey: string | undefined
try {
cacheKey = await buildPiperTtsCacheKey(url, text, speed)
const hit = await indexedDb.getPiperTtsBlobCache(cacheKey, ttlMs)
if (hit && hit.size > 0) {
return hit
}
} catch {
/* IndexedDB or crypto unavailable — fetch without cache */
}
let response: Response
try {
response = await fetch(url, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ text, speed: 1 }),
body: JSON.stringify({ text, speed }),
signal
})
} catch (e) {
@@ -328,6 +348,19 @@ async function fetchPiperTtsBlobForChunk(
throw new Error(`Part ${chunkIndex + 1} of ${totalChunks}: empty audio response`)
}
if (cacheKey) {
try {
const mime = blob.type || response.headers.get('Content-Type') || 'audio/wav'
await indexedDb.putPiperTtsBlobCache(cacheKey, blob, mime, {
ttlMs,
maxEntries: budget.maxEntries,
maxBytes: budget.maxBytes
})
} catch {
/* cache write failure should not break playback */
}
}
return blob
}
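The cache-first pattern in this hunk boils down to: try IndexedDB, fall back to the network, then write back on a best-effort basis. A condensed, self-contained sketch of that shape (the names are generic, not the actual helpers in this file):

// Generic cache-or-fetch wrapper: cache read failures never block the network path,
// and write-back failures never block returning the fetched data.
async function cacheFirst<T>(
  read: () => Promise<T | null>,
  fetchFresh: () => Promise<T>,
  write: (value: T) => Promise<void>
): Promise<T> {
  try {
    const hit = await read()
    if (hit) return hit
  } catch {
    // IndexedDB unavailable (private mode, quota, etc.) — fall through to fetch
  }
  const fresh = await fetchFresh()
  try {
    await write(fresh)
  } catch {
    // best-effort write-back; playback continues with the in-memory blob
  }
  return fresh
}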

src/pages/secondary/CacheSettingsPage/index.tsx (2)

@@ -1,4 +1,5 @@
import CacheRelaysSetting from '@/components/CacheRelaysSetting'
import EventArchiveCacheSettings from '@/components/EventArchiveCacheSettings'
import { RefreshButton } from '@/components/RefreshButton'
import SecondaryPageLayout from '@/layouts/SecondaryPageLayout'
import { usePrimaryNoteView } from '@/contexts/primary-note-view-context'
@@ -30,6 +31,7 @@ const CacheSettingsPage = forwardRef(
>
<div key={contentKey} className="px-4 py-3">
<CacheRelaysSetting />
<EventArchiveCacheSettings />
</div>
</SecondaryPageLayout>
)

src/services/client-events.service.ts (43)

@@ -19,6 +19,13 @@ import { LRUCache } from 'lru-cache'
import indexedDb from './indexed-db.service'
import type { QueryService } from './client-query.service'
import client from './client.service'
import {
invalidateArchiveFootprintCache,
loadArchivedEventForFetch,
prefetchArchivedEvents,
queuePersistSeenEvent
} from './event-archive.service'
import { getDefaultSessionLruMaxSync } from '@/lib/event-archive-config'
import { shouldDropEventOnIngest } from '@/lib/event-ingest-filter'
import { buildComprehensiveRelayList } from '@/lib/relay-list-builder'
import { normalizeUrl } from '@/lib/url'
@@ -54,8 +61,8 @@ export class EventService {
* In-memory session cache: events seen this tab session (timelines, queries, fetches).
* Larger cap + no TTL so navigation and repeat fetches reuse data until reload.
*/
/** Large cap: timelines + note-stats (reactions, replies, zaps, reposts per note) share one LRU. */
private sessionEventCache = new LRUCache<string, NEvent>({ max: 5_000 })
/** Timelines + note-stats; cap is platform-aware (see Cache settings). */
private sessionEventCache = new LRUCache<string, NEvent>({ max: getDefaultSessionLruMaxSync() })
/** Latest kind-0 per pubkey from {@link sessionEventCache} for batch profile short-circuit. */
private sessionMetadataByPubkey = new Map<string, NEvent>()
/** Callbacks waiting for an event id to appear in {@link sessionEventCache} (e.g. embed loads before timeline caches the note). */
@@ -248,7 +255,14 @@ export class EventService {
.filter((id) => /^[0-9a-f]{64}$/.test(id))
)
]
const toFetch = hexIds.filter((id) => !this.getSessionEventIfAllowed(id))
let toFetch = hexIds.filter((id) => !this.getSessionEventIfAllowed(id))
if (toFetch.length === 0) return
const archived = await prefetchArchivedEvents(toFetch)
for (const ev of archived) {
if (!shouldDropEventOnIngest(ev)) this.addEventToCache(ev)
}
toFetch = toFetch.filter((id) => !this.getSessionEventIfAllowed(id))
if (toFetch.length === 0) return
const relayUrls = await buildComprehensiveRelayListForEvents(undefined, [], [], [])
@@ -367,6 +381,17 @@ export class EventService {
}
}
this.notifySessionEventWaiters(id)
queuePersistSeenEvent(cleanEvent as NEvent)
}
/** Apply {@link StorageKey.SESSION_EVENT_LRU_MAX} without reload (copies entries into a new LRU). */
reapplySessionLruMax(): void {
const max = getDefaultSessionLruMaxSync()
const entries = [...this.sessionEventCache.entries()]
this.sessionEventCache = new LRUCache<string, NEvent>({ max })
for (const [k, v] of entries) {
this.sessionEventCache.set(k, v)
}
}
/** Kind 0 already ingested this session (e.g. from a timeline REQ). */
@@ -600,6 +625,7 @@ export class EventService {
this.eventCacheMap.clear()
this.sessionEventWaiters.clear()
this.fetchEventFromBigRelaysDataloader.clearAll()
invalidateArchiveFootprintCache()
logger.info('[EventService] In-memory caches cleared')
}
@@ -638,6 +664,17 @@ export class EventService {
if (!filter) return undefined
if (filter.ids?.length === 1) {
const hid = filter.ids[0]!.toLowerCase()
if (/^[0-9a-f]{64}$/.test(hid)) {
const fromArchive = await loadArchivedEventForFetch(hid)
if (fromArchive && !shouldDropEventOnIngest(fromArchive)) {
this.addEventToCache(fromArchive)
return fromArchive
}
}
}
// Try cache first
if (filter.ids?.length) {
const cached = await indexedDb.getEventFromPublicationStore(filter.ids[0])
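reapplySessionLruMax() resizes the session cache by copying entries into a fresh LRU. A standalone sketch of that move, assuming the same lru-cache package used in this service: entries() yields pairs from most to least recently used, so re-inserting in reverse keeps the recency order and lets a smaller cap evict the coldest entries first.

import { LRUCache } from 'lru-cache'

function resizeLru<V extends {}>(old: LRUCache<string, V>, max: number): LRUCache<string, V> {
  const next = new LRUCache<string, V>({ max })
  // entries() iterates newest -> oldest; reverse so the newest entry is set last
  // and therefore survives longest once the smaller cap starts evicting.
  for (const [k, v] of [...old.entries()].reverse()) {
    next.set(k, v)
  }
  return next
}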

src/services/client.service.ts (55)

@@ -73,6 +73,7 @@ import {
} from 'nostr-tools'
import { AbstractRelay } from 'nostr-tools/abstract-relay'
import indexedDb from './indexed-db.service'
import { invalidateArchiveFootprintCache } from './event-archive.service'
import { notifyLiveActivitiesPrewarmComplete } from './live-activities-prewarm-bridge'
import nip66Service from './nip66.service'
import { patchRelayNoticeForFetchFailures } from '@/services/relay-notice-strike'
@@ -173,6 +174,7 @@ class ClientService extends EventTarget {
| string[]
| undefined
> = {}
private timelinePersistTimers = new Map<string, ReturnType<typeof setTimeout>>()
/** In-flight {@link fetchRelayList} dedupe: key = viewer pubkey + target pubkey (sanitization depends on viewer). */
private relayListRequestCache = new Map<string, Promise<TRelayList>>()
private userIndex = new FlexSearch.Index({
@@ -1439,6 +1441,30 @@ class ClientService extends EventTarget {
/** =========== Timeline =========== */
private scheduleTimelinePersist(timelineKey: string): void {
const prev = this.timelinePersistTimers.get(timelineKey)
if (prev) clearTimeout(prev)
const t = setTimeout(() => {
this.timelinePersistTimers.delete(timelineKey)
void this.flushTimelinePersist(timelineKey)
}, 1600)
this.timelinePersistTimers.set(timelineKey, t)
}
private async flushTimelinePersist(timelineKey: string): Promise<void> {
const tl = this.timelines[timelineKey]
if (!tl || Array.isArray(tl) || !tl.refs?.length) return
try {
await indexedDb.putTimelinePersistedState(timelineKey, {
refs: [...tl.refs],
filter: { ...(tl.filter as object) } as Record<string, unknown>,
urls: [...tl.urls]
})
} catch (e) {
logger.warn('[ClientService] Timeline persist failed', { timelineKey: timelineKey.slice(0, 12), e })
}
}
private generateTimelineKey(urls: string[], filter: Filter) {
const stableFilter: any = {}
Object.entries(filter)
@@ -2089,6 +2115,24 @@ class ClientService extends EventTarget {
}
}
void (async () => {
try {
const st = await indexedDb.getTimelinePersistedState(key)
if (!st?.refs?.length) return
const list = await indexedDb.getArchivedEventsByIds(st.refs.map((r) => r[0]))
if (list.length === 0) return
for (const ev of list) {
if (shouldDropEventOnIngest(ev)) continue
if (eventIds.has(ev.id)) continue
eventIds.add(ev.id)
events.push(ev)
}
flushStreamingSnapshot()
} catch (err) {
logger.warn('[ClientService] Timeline disk hydrate failed', err)
}
})()
const handleTimelineEose = (eosed: boolean) => {
if (!eosed) return
if (eosedAt != null) return
@@ -2125,6 +2169,7 @@ class ClientService extends EventTarget {
}
}
onEvents([...events], true)
that.scheduleTimelinePersist(key)
}
const subCloser = this.subscribe(relays, filter, {
@@ -2161,6 +2206,7 @@ class ClientService extends EventTarget {
timeline.refs = events
.map((e) => [e.id, e.created_at] as TTimelineRef)
.sort((a, b) => b[1] - a[1])
that.scheduleTimelinePersist(key)
}
return
}
@@ -2175,6 +2221,7 @@ class ClientService extends EventTarget {
if (timeline.refs.length === 0) {
timeline.refs = events.map((e) => [e.id, e.created_at] as TTimelineRef).sort((a, b) => b[1] - a[1])
that.scheduleTimelinePersist(key)
return
}
@@ -2191,6 +2238,7 @@ class ClientService extends EventTarget {
if (idx >= timeline.refs.length) return
timeline.refs.splice(idx, 0, [evt.id, evt.created_at])
that.scheduleTimelinePersist(key)
},
oneose: handleTimelineEose,
onclose: onClose
@@ -2247,6 +2295,7 @@ class ClientService extends EventTarget {
timeline.refs.push(...newRefs)
}
this.scheduleTimelinePersist(key)
return events
}
@@ -2420,6 +2469,10 @@ class ClientService extends EventTarget {
this.eventService.addEventToCache(event)
}
reapplySessionLruFromSettings(): void {
this.eventService.reapplySessionLruMax()
}
peekSessionCachedEvent(noteId: string): NEvent | undefined {
return this.eventService.peekSessionCachedEvent(noteId)
}
@@ -2548,6 +2601,7 @@ class ClientService extends EventTarget {
if (removed > 0) {
logger.info('[ClientService] Removed tombstoned events from cache', { count: removed })
}
invalidateArchiveFootprintCache()
dispatchTombstonesUpdated()
}
@@ -2613,6 +2667,7 @@ class ClientService extends EventTarget {
count: removed
})
}
invalidateArchiveFootprintCache()
dispatchTombstonesUpdated()
}
} catch (e) {
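scheduleTimelinePersist() is a per-key trailing debounce: each new timeline update resets a 1.6 s timer, and only the final state is flushed to IndexedDB. A small generic sketch of that pattern (the 1600 ms delay comes from the diff above; everything else is illustrative):

// Trailing debounce keyed by an arbitrary string (e.g. a timeline key).
const timers = new Map<string, ReturnType<typeof setTimeout>>()

function debouncePerKey(key: string, flush: (key: string) => Promise<void>, delayMs = 1600): void {
  const prev = timers.get(key)
  if (prev) clearTimeout(prev)
  timers.set(
    key,
    setTimeout(() => {
      timers.delete(key)
      void flush(key) // fire-and-forget; persist errors are logged by the caller
    }, delayMs)
  )
}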

src/services/event-archive.service.ts (131)

@@ -0,0 +1,131 @@
import { ExtendedKind } from '@/constants'
import { shouldDropEventOnIngest } from '@/lib/event-ingest-filter'
import { getEventArchiveConfig } from '@/lib/event-archive-config'
import { isNip18RepostKind, isNip25ReactionKind, isReplaceableEvent } from '@/lib/event'
import logger from '@/lib/logger'
import type { Event } from 'nostr-tools'
import { kinds } from 'nostr-tools'
import indexedDb from '@/services/indexed-db.service'
/** “Primary” notes / threads — evicted last. */
const CORE_FEED_KINDS = new Set<number>([
kinds.ShortTextNote,
11,
ExtendedKind.COMMENT,
20,
21,
22,
9802 // highlights
])
let footprint: { count: number; bytes: number } | null = null
const pending = new Map<string, Event>()
let flushTimer: ReturnType<typeof setTimeout> | null = null
export function invalidateArchiveFootprintCache(): void {
footprint = null
}
async function ensureFootprint(): Promise<void> {
if (footprint === null) {
footprint = await indexedDb.getArchiveFootprint()
}
}
function archiveTierForEvent(ev: Event): number {
if (isNip25ReactionKind(ev.kind) || ev.kind === kinds.Zap || isNip18RepostKind(ev.kind)) {
return 0
}
if (CORE_FEED_KINDS.has(ev.kind)) return 2
return 1
}
function shouldSkipArchiving(ev: Event): boolean {
if (shouldDropEventOnIngest(ev)) return true
if (isReplaceableEvent(ev.kind) && indexedDb.hasReplaceableEventStoreForKind(ev.kind)) {
return true
}
return false
}
function approxEventBytes(ev: Event): number {
try {
return new Blob([JSON.stringify(ev)]).size
} catch {
return 512
}
}
async function trimArchiveIfNeeded(): Promise<void> {
const cfg = getEventArchiveConfig()
if (!cfg.enabled) return
await ensureFootprint()
let guard = 0
while (
footprint !== null &&
guard < 5000 &&
(footprint.count > cfg.maxEvents || footprint.bytes > cfg.maxBytes)
) {
guard++
const victim = await indexedDb.deleteNextEvictionArchiveCandidate()
if (!victim) {
footprint = await indexedDb.getArchiveFootprint()
break
}
footprint.count = Math.max(0, footprint.count - 1)
footprint.bytes = Math.max(0, footprint.bytes - victim.approxBytes)
}
}
async function flushArchiveQueue(): Promise<void> {
const cfg = getEventArchiveConfig()
if (!cfg.enabled || pending.size === 0) return
const batch = [...pending.values()]
pending.clear()
for (const ev of batch) {
if (shouldSkipArchiving(ev)) continue
const id = /^[0-9a-f]{64}$/i.test(ev.id) ? ev.id.toLowerCase() : ev.id
const tier = archiveTierForEvent(ev)
const bytes = approxEventBytes(ev)
try {
await indexedDb.putArchivedEventRow(ev, tier, bytes)
} catch (e) {
logger.warn('[EventArchive] put failed', { id: id.slice(0, 8), e })
}
}
footprint = await indexedDb.getArchiveFootprint()
await trimArchiveIfNeeded()
}
function scheduleFlush(): void {
if (flushTimer !== null) return
flushTimer = setTimeout(() => {
flushTimer = null
void flushArchiveQueue().catch((e) => logger.warn('[EventArchive] flush', e))
}, 450)
}
/** Queue a non-replaceable event for IndexedDB archive (Electron + mobile + desktop web; caps differ). */
export function queuePersistSeenEvent(ev: Event): void {
const cfg = getEventArchiveConfig()
if (!cfg.enabled) return
if (shouldSkipArchiving(ev)) return
const id = /^[0-9a-f]{64}$/i.test(ev.id) ? ev.id.toLowerCase() : ev.id
if (!/^[0-9a-f]{64}$/.test(id)) return
pending.set(id, ev)
scheduleFlush()
}
export async function loadArchivedEventForFetch(hexId: string): Promise<Event | undefined> {
const cfg = getEventArchiveConfig()
if (!cfg.enabled) return undefined
const ev = await indexedDb.getArchivedEventById(hexId, true)
if (!ev || shouldDropEventOnIngest(ev)) return undefined
return ev
}
export async function prefetchArchivedEvents(hexIds: string[]): Promise<Event[]> {
const cfg = getEventArchiveConfig()
if (!cfg.enabled || hexIds.length === 0) return []
return indexedDb.getArchivedEventsByIds(hexIds)
}
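Eviction order comes from the archiveTierForEvent() tiers combined with the compound ['archiveTier', 'lastAccessAt'] index created in indexed-db.service.ts below: IndexedDB compares array keys element by element, so the cursor yields tier-0 rows (reactions, zaps, reposts) before tier-1 and tier-2 rows, and within a tier the least recently accessed row first. A plain-TypeScript comparator expressing the same ordering, for illustration only:

type EvictionKey = { archiveTier: number; lastAccessAt: number }

// Mirrors IndexedDB's ordering of the compound ['archiveTier', 'lastAccessAt'] index:
// lower tier first, then older access time first.
function compareEvictionOrder(a: EvictionKey, b: EvictionKey): number {
  return a.archiveTier - b.archiveTier || a.lastAccessAt - b.lastAccessAt
}

// Example: a week-old reaction (tier 0) is evicted before a core note (tier 2),
// even if that note has never been read back from the archive.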

src/services/indexed-db.service.ts (423)

@@ -10,6 +10,28 @@ import { kinds } from 'nostr-tools'
import { isReplaceableEvent, getReplaceableCoordinateFromEvent } from '@/lib/event'
import logger from '@/lib/logger'
/** Hot archive row in {@link StoreNames.EVENT_ARCHIVE}. */
export type TArchivedEventRow = {
key: string
value: Event
addedAt: number
lastAccessAt: number
approxBytes: number
archiveTier: number
}
/** Persisted feed state for cold-start (filter JSON must round-trip). */
export type TTimelinePersistedPayload = {
refs: [string, number][]
filter: Record<string, unknown>
urls: string[]
}
export type TPiperTtsCacheValue = {
blob: Blob
mimeType: string
}
type TValue<T = any> = {
key: string
value: T | null
@@ -58,11 +80,17 @@ export const StoreNames = {
/** Tombstone list for deleted events (kind 5). Key: event id or replaceable coordinate. */
TOMBSTONE_LIST: 'tombstoneList',
/** NIP-58 badge definitions (kind 30009). Key: pubkey:d */
BADGE_DEFINITION_EVENTS: 'badgeDefinitionEvents'
BADGE_DEFINITION_EVENTS: 'badgeDefinitionEvents',
/** Hot timeline / REQ events (non-replaceable kinds not stored elsewhere). Key: event id hex. */
EVENT_ARCHIVE: 'eventArchive',
/** Persisted timeline refs + filter for cold-start hydration. Key: {@link ClientService.generateTimelineKey} hash. */
TIMELINE_STATE: 'timelineState',
/** Piper / read-aloud WAV blobs keyed by SHA-256 of endpoint + text + speed. */
PIPER_TTS_CACHE: 'piperTtsCache'
}
/** Schema version we expect. When adding stores or migrations, bump this. */
const DB_VERSION = 31
const DB_VERSION = 33
/** Max age for profile and payment info cache before we refetch (5 min). */
const PROFILE_AND_PAYMENT_CACHE_MAX_AGE_MS = 5 * 60 * 1000
@@ -83,6 +111,9 @@ function ensureMissingObjectStores(db: IDBDatabase): void {
const store = db.createObjectStore(storeName, { keyPath: 'key' })
store.createIndex('feedUrl', 'feedUrl', { unique: false })
store.createIndex('pubDate', 'pubDate', { unique: false })
} else if (storeName === StoreNames.EVENT_ARCHIVE) {
const store = db.createObjectStore(storeName, { keyPath: 'key' })
store.createIndex('eviction', ['archiveTier', 'lastAccessAt'], { unique: false })
} else {
db.createObjectStore(storeName, { keyPath: 'key' })
}
@@ -260,6 +291,16 @@ class IndexedDbService {
if (!db.objectStoreNames.contains(StoreNames.BADGE_DEFINITION_EVENTS)) {
db.createObjectStore(StoreNames.BADGE_DEFINITION_EVENTS, { keyPath: 'key' })
}
if (!db.objectStoreNames.contains(StoreNames.EVENT_ARCHIVE)) {
const arc = db.createObjectStore(StoreNames.EVENT_ARCHIVE, { keyPath: 'key' })
arc.createIndex('eviction', ['archiveTier', 'lastAccessAt'], { unique: false })
}
if (!db.objectStoreNames.contains(StoreNames.TIMELINE_STATE)) {
db.createObjectStore(StoreNames.TIMELINE_STATE, { keyPath: 'key' })
}
if (!db.objectStoreNames.contains(StoreNames.PIPER_TTS_CACHE)) {
db.createObjectStore(StoreNames.PIPER_TTS_CACHE, { keyPath: 'key' })
}
ensureMissingObjectStores(db)
}
}
@@ -1234,6 +1275,10 @@ class IndexedDbService {
}
}
/**
* Clears every object store in the `jumble` database, including
* {@link StoreNames.PIPER_TTS_CACHE} (read-aloud / Piper WAV blobs).
*/
async clearAllCache(): Promise<void> {
await this.initPromise
if (!this.db) {
@@ -1336,6 +1381,11 @@ class IndexedDbService {
})
}
/** Clear cached Piper / read-aloud audio blobs. No-op if the store is absent. */
async clearPiperTtsCache(): Promise<void> {
await this.clearStore(StoreNames.PIPER_TTS_CACHE)
}
async clearStore(storeName: string): Promise<void> {
await this.initPromise
if (!this.db || !this.db.objectStoreNames.contains(storeName)) {
@@ -2102,6 +2152,362 @@ class IndexedDbService {
})
}
/** Hot archive row (kinds already persisted in replaceable stores should not use this). */
async putArchivedEventRow(
event: Event,
archiveTier: number,
approxBytes: number
): Promise<void> {
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.EVENT_ARCHIVE)) return
const id = /^[0-9a-f]{64}$/i.test(event.id) ? event.id.toLowerCase() : event.id
const clean = { ...event }
delete (clean as any).relayStatuses
const now = Date.now()
const row: TArchivedEventRow = {
key: id,
value: clean as Event,
addedAt: now,
lastAccessAt: now,
approxBytes: Math.max(80, approxBytes),
archiveTier
}
return new Promise((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.EVENT_ARCHIVE, 'readwrite')
const store = tx.objectStore(StoreNames.EVENT_ARCHIVE)
const put = store.put(row)
put.onsuccess = () => {
tx.commit()
resolve()
}
put.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
}
async touchArchivedEventAccess(eventId: string): Promise<void> {
const id = eventId.toLowerCase()
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.EVENT_ARCHIVE)) return
return new Promise((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.EVENT_ARCHIVE, 'readwrite')
const store = tx.objectStore(StoreNames.EVENT_ARCHIVE)
const get = store.get(id)
get.onsuccess = () => {
const row = get.result as TArchivedEventRow | undefined
if (!row?.value) {
tx.commit()
resolve()
return
}
row.lastAccessAt = Date.now()
const put = store.put(row)
put.onsuccess = () => {
tx.commit()
resolve()
}
put.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
}
get.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
}
async getArchivedEventById(eventId: string, touchAccess: boolean): Promise<Event | undefined> {
const id = eventId.toLowerCase()
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.EVENT_ARCHIVE)) return undefined
return new Promise((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.EVENT_ARCHIVE, touchAccess ? 'readwrite' : 'readonly')
const store = tx.objectStore(StoreNames.EVENT_ARCHIVE)
const get = store.get(id)
get.onsuccess = () => {
const row = get.result as TArchivedEventRow | undefined
const ev = row?.value
if (touchAccess && row && ev) {
row.lastAccessAt = Date.now()
const put = store.put(row)
put.onsuccess = () => {
tx.commit()
resolve(ev)
}
put.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
return
}
tx.commit()
resolve(ev)
}
get.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
}
async getArchivedEventsByIds(ids: string[]): Promise<Event[]> {
const uniq = [...new Set(ids.map((x) => x.toLowerCase()))].filter((x) => /^[0-9a-f]{64}$/.test(x))
if (uniq.length === 0) return []
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.EVENT_ARCHIVE)) return []
const out: Event[] = []
await Promise.all(
uniq.map(
(id) =>
new Promise<void>((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.EVENT_ARCHIVE, 'readonly')
const get = tx.objectStore(StoreNames.EVENT_ARCHIVE).get(id)
get.onsuccess = () => {
const row = get.result as TArchivedEventRow | undefined
if (row?.value) out.push(row.value)
tx.commit()
resolve()
}
get.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
)
)
return out
}
async deleteArchivedEvent(eventId: string): Promise<void> {
const id = eventId.toLowerCase()
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.EVENT_ARCHIVE)) return
return new Promise((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.EVENT_ARCHIVE, 'readwrite')
const del = tx.objectStore(StoreNames.EVENT_ARCHIVE).delete(id)
del.onsuccess = () => {
tx.commit()
resolve()
}
del.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
}
/** Delete lowest (tier, then oldest access) row for archive eviction. */
async deleteNextEvictionArchiveCandidate(): Promise<{ id: string; approxBytes: number } | null> {
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.EVENT_ARCHIVE)) return null
return new Promise((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.EVENT_ARCHIVE, 'readwrite')
const store = tx.objectStore(StoreNames.EVENT_ARCHIVE)
const idx = store.index('eviction')
const req = idx.openCursor()
req.onsuccess = () => {
const cursor = req.result as IDBCursorWithValue | null
if (!cursor) {
tx.commit()
resolve(null)
return
}
const row = cursor.value as TArchivedEventRow
const id = row.key
const approxBytes = row.approxBytes ?? 0
cursor.delete()
tx.commit()
resolve({ id, approxBytes })
}
req.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
}
async getArchiveFootprint(): Promise<{ count: number; bytes: number }> {
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.EVENT_ARCHIVE)) {
return { count: 0, bytes: 0 }
}
return new Promise((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.EVENT_ARCHIVE, 'readonly')
const store = tx.objectStore(StoreNames.EVENT_ARCHIVE)
const req = store.openCursor()
let count = 0
let bytes = 0
req.onsuccess = () => {
const cursor = req.result as IDBCursorWithValue | null
if (!cursor) {
tx.commit()
resolve({ count, bytes })
return
}
const row = cursor.value as TArchivedEventRow
count++
bytes += row.approxBytes ?? 0
cursor.continue()
}
req.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
}
async putTimelinePersistedState(
timelineKey: string,
payload: TTimelinePersistedPayload
): Promise<void> {
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.TIMELINE_STATE)) return
const row = this.formatValue(timelineKey, payload)
return new Promise((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.TIMELINE_STATE, 'readwrite')
const put = tx.objectStore(StoreNames.TIMELINE_STATE).put(row)
put.onsuccess = () => {
tx.commit()
resolve()
}
put.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
}
async getTimelinePersistedState(timelineKey: string): Promise<TTimelinePersistedPayload | null> {
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.TIMELINE_STATE)) return null
return new Promise((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.TIMELINE_STATE, 'readonly')
const get = tx.objectStore(StoreNames.TIMELINE_STATE).get(timelineKey)
get.onsuccess = () => {
const row = get.result as TValue<TTimelinePersistedPayload> | undefined
tx.commit()
resolve(row?.value ?? null)
}
get.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
}
async getPiperTtsBlobCache(cacheKey: string, ttlMs: number): Promise<Blob | null> {
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.PIPER_TTS_CACHE)) return null
return new Promise((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.PIPER_TTS_CACHE, 'readwrite')
const store = tx.objectStore(StoreNames.PIPER_TTS_CACHE)
const get = store.get(cacheKey)
get.onsuccess = () => {
const row = get.result as TValue<TPiperTtsCacheValue> | undefined
if (!row?.value?.blob) {
tx.commit()
resolve(null)
return
}
if (Date.now() - row.addedAt > ttlMs) {
const del = store.delete(cacheKey)
del.onsuccess = () => {
tx.commit()
resolve(null)
}
del.onerror = () => {
tx.commit()
resolve(null)
}
return
}
tx.commit()
resolve(row.value.blob)
}
get.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
}
async putPiperTtsBlobCache(
cacheKey: string,
blob: Blob,
mimeType: string,
opts: { ttlMs: number; maxEntries: number; maxBytes: number }
): Promise<void> {
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.PIPER_TTS_CACHE)) return
const row = this.formatValue(cacheKey, { blob, mimeType })
await new Promise<void>((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.PIPER_TTS_CACHE, 'readwrite')
const put = tx.objectStore(StoreNames.PIPER_TTS_CACHE).put(row)
put.onsuccess = () => {
tx.commit()
resolve()
}
put.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
await this.prunePiperTtsBlobCache(opts.ttlMs, opts.maxEntries, opts.maxBytes)
}
/** Drop expired Piper blobs, then oldest rows until under entry / byte caps. */
async prunePiperTtsBlobCache(ttlMs: number, maxEntries: number, maxBytes: number): Promise<void> {
await this.initPromise
if (!this.db?.objectStoreNames.contains(StoreNames.PIPER_TTS_CACHE)) return
const now = Date.now()
const rows: Array<{ key: string; addedAt: number; bytes: number }> = []
await new Promise<void>((resolve, reject) => {
const tx = this.db!.transaction(StoreNames.PIPER_TTS_CACHE, 'readonly')
const req = tx.objectStore(StoreNames.PIPER_TTS_CACHE).openCursor()
req.onsuccess = () => {
const cursor = req.result as IDBCursorWithValue | null
if (!cursor) {
tx.commit()
resolve()
return
}
const row = cursor.value as TValue<TPiperTtsCacheValue>
const key = cursor.key as string
const bytes = row.value?.blob?.size ?? 0
rows.push({ key, addedAt: row.addedAt, bytes })
cursor.continue()
}
req.onerror = (e) => {
tx.commit()
reject(idbEventToError(e))
}
})
const toDelete = new Set<string>()
for (const r of rows) {
if (now - r.addedAt > ttlMs) toDelete.add(r.key)
}
const survivors = rows.filter((r) => !toDelete.has(r.key)).sort((a, b) => a.addedAt - b.addedAt)
let totalBytes = survivors.reduce((s, r) => s + r.bytes, 0)
let totalCount = survivors.length
while (totalCount > maxEntries || totalBytes > maxBytes) {
const victim = survivors.shift()
if (!victim) break
toDelete.add(victim.key)
totalBytes -= victim.bytes
totalCount--
}
for (const key of toDelete) {
await this.deleteStoreItem(StoreNames.PIPER_TTS_CACHE, key)
}
}
/**
* Get all tombstoned keys
*/
@@ -2149,13 +2555,12 @@ class IndexedDbService {
// Or just event ID for non-replaceable events
const parts = key.split(':')
if (parts.length === 1) {
// Event ID - remove from publication store
try {
await this.deleteStoreItem(StoreNames.PUBLICATION_EVENTS, key)
removed++
} catch {
// Ignore errors
}
// Event ID - remove from publication store + hot archive
await Promise.allSettled([
this.deleteStoreItem(StoreNames.PUBLICATION_EVENTS, key),
this.deleteArchivedEvent(key)
])
removed++
} else if (parts.length >= 2) {
// Replaceable coordinate: kind:64-hex-pubkey[:d...] (d may contain ':' per NIP-33)
const kind = parseInt(parts[0]!, 10)
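A round-trip sketch of the Piper blob cache added above, using the real methods and the budgets from piper-tts-cache-policy.ts; the key and blob below are hypothetical stand-ins, and in the app the key comes from buildPiperTtsCacheKey():

import indexedDb from '@/services/indexed-db.service'
import { getPiperTtsCacheBudget, getPiperTtsCacheTtlMs } from '@/lib/piper-tts-cache-policy'

const ttlMs = getPiperTtsCacheTtlMs()
const { maxEntries, maxBytes } = getPiperTtsCacheBudget()

// Hypothetical 64-hex key and 1 KiB placeholder WAV blob.
const key = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
const wav = new Blob([new Uint8Array(1024)], { type: 'audio/wav' })

await indexedDb.putPiperTtsBlobCache(key, wav, 'audio/wav', { ttlMs, maxEntries, maxBytes })

const hit = await indexedDb.getPiperTtsBlobCache(key, ttlMs) // Blob while within TTL
const miss = await indexedDb.getPiperTtsBlobCache(key, -1)   // treated as expired: row deleted, null returned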
