56 changed files with 1030 additions and 321 deletions
@ -0,0 +1,100 @@ |
|||||||
|
/** |
||||||
|
* Background archive scheduler |
||||||
|
* Automatically archives old events in the background without blocking operations |
||||||
|
*/ |
||||||
|
|
||||||
|
import { archiveOldEvents } from './event-archive.js'; |
||||||
|
|
||||||
|
// Archive threshold: 30 days (events older than this will be archived)
const ARCHIVE_THRESHOLD = 30 * 24 * 60 * 60 * 1000; // 30 days in ms

// How often to check for events to archive (once per day)
const ARCHIVE_CHECK_INTERVAL = 24 * 60 * 60 * 1000; // 24 hours

// Handle returned by window.setInterval; null while the scheduler is stopped.
let archiveTimer: number | null = null;
// Re-entrancy guard: true while an archive pass is in flight.
let isArchiving = false;
// Epoch ms of the last archive pass; used to rate-limit runs.
let lastArchiveTime = 0;
||||||
|
|
||||||
|
/** |
||||||
|
* Start the archive scheduler |
||||||
|
* This will periodically archive old events in the background |
||||||
|
*/ |
||||||
|
export function startArchiveScheduler(): void { |
||||||
|
// Don't start multiple schedulers
|
||||||
|
if (archiveTimer !== null) return; |
||||||
|
|
||||||
|
// Run initial archive check after a delay (don't block startup)
|
||||||
|
setTimeout(() => { |
||||||
|
checkAndArchive(); |
||||||
|
}, 5 * 60 * 1000); // Wait 5 minutes after startup
|
||||||
|
|
||||||
|
// Schedule periodic checks
|
||||||
|
archiveTimer = window.setInterval(() => { |
||||||
|
checkAndArchive(); |
||||||
|
}, ARCHIVE_CHECK_INTERVAL); |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Stop the archive scheduler |
||||||
|
*/ |
||||||
|
export function stopArchiveScheduler(): void { |
||||||
|
if (archiveTimer !== null) { |
||||||
|
clearInterval(archiveTimer); |
||||||
|
archiveTimer = null; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Check for old events and archive them |
||||||
|
* This runs in the background and doesn't block operations |
||||||
|
*/ |
||||||
|
async function checkAndArchive(): Promise<void> { |
||||||
|
// Don't run if already archiving
|
||||||
|
if (isArchiving) return; |
||||||
|
|
||||||
|
// Don't run too frequently (at least 1 hour between runs)
|
||||||
|
const now = Date.now(); |
||||||
|
if (now - lastArchiveTime < 60 * 60 * 1000) return; |
||||||
|
|
||||||
|
isArchiving = true; |
||||||
|
lastArchiveTime = now; |
||||||
|
|
||||||
|
try { |
||||||
|
// Archive old events in the background
|
||||||
|
// This is non-blocking and won't affect day-to-day operations
|
||||||
|
const archived = await archiveOldEvents(ARCHIVE_THRESHOLD); |
||||||
|
|
||||||
|
if (archived > 0) { |
||||||
|
// Archive completed successfully
|
||||||
|
} |
||||||
|
} catch (error) { |
||||||
|
// Archive failed (non-critical, continue)
|
||||||
|
} finally { |
||||||
|
isArchiving = false; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Manually trigger archive (for testing or manual cleanup) |
||||||
|
*/ |
||||||
|
export async function triggerArchive(): Promise<number> { |
||||||
|
if (isArchiving) return 0; |
||||||
|
|
||||||
|
isArchiving = true; |
||||||
|
try { |
||||||
|
const archived = await archiveOldEvents(ARCHIVE_THRESHOLD); |
||||||
|
lastArchiveTime = Date.now(); |
||||||
|
return archived; |
||||||
|
} catch (error) { |
||||||
|
return 0; |
||||||
|
} finally { |
||||||
|
isArchiving = false; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Check if archive is currently running |
||||||
|
*/ |
||||||
|
export function isArchiveRunning(): boolean { |
||||||
|
return isArchiving; |
||||||
|
} |
||||||
@ -0,0 +1,395 @@ |
|||||||
|
/** |
||||||
|
* Event archiving with compression |
||||||
|
* Archives old events to save space while keeping them accessible |
||||||
|
*/ |
||||||
|
|
||||||
|
import { getDB } from './indexeddb-store.js'; |
||||||
|
import type { NostrEvent } from '../../types/nostr.js'; |
||||||
|
import type { CachedEvent } from './event-cache.js'; |
||||||
|
|
||||||
|
/**
 * A compressed, archived copy of a cached event.
 * The full event JSON is stored gzip-compressed in `compressed`; the
 * remaining fields are kept in the clear so the archive store can be
 * keyed and indexed without decompressing records.
 */
export interface ArchivedEvent {
  id: string; // Original event id (archive store key)
  compressed: Uint8Array; // Compressed JSON bytes
  created_at: number; // Original event created_at
  kind: number; // Duplicated for index lookups by kind
  pubkey: string; // Duplicated for index lookups by author
  archived_at: number; // When it was archived
}
||||||
|
|
||||||
|
// Default archive threshold: 30 days
// Used by archiveOldEvents() when the caller supplies no explicit threshold.
const DEFAULT_ARCHIVE_THRESHOLD = 30 * 24 * 60 * 60 * 1000; // 30 days in ms
|
||||||
|
|
||||||
|
/** |
||||||
|
* Check if CompressionStream API is available |
||||||
|
*/ |
||||||
|
function isCompressionSupported(): boolean { |
||||||
|
return typeof CompressionStream !== 'undefined'; |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Compress a JSON object to Uint8Array using CompressionStream API |
||||||
|
*/ |
||||||
|
async function compressJSON(data: unknown): Promise<Uint8Array> { |
||||||
|
if (!isCompressionSupported()) { |
||||||
|
// Fallback: Use JSON string (no compression if API not available)
|
||||||
|
// In practice, modern browsers support CompressionStream
|
||||||
|
const jsonString = JSON.stringify(data); |
||||||
|
return new TextEncoder().encode(jsonString); |
||||||
|
} |
||||||
|
|
||||||
|
const jsonString = JSON.stringify(data); |
||||||
|
const stream = new CompressionStream('gzip'); |
||||||
|
const writer = stream.writable.getWriter(); |
||||||
|
const reader = stream.readable.getReader(); |
||||||
|
|
||||||
|
// Write data
|
||||||
|
writer.write(new TextEncoder().encode(jsonString)); |
||||||
|
writer.close(); |
||||||
|
|
||||||
|
// Read compressed chunks
|
||||||
|
const chunks: Uint8Array[] = []; |
||||||
|
while (true) { |
||||||
|
const { done, value } = await reader.read(); |
||||||
|
if (done) break; |
||||||
|
chunks.push(value); |
||||||
|
} |
||||||
|
|
||||||
|
// Combine chunks
|
||||||
|
const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0); |
||||||
|
const result = new Uint8Array(totalLength); |
||||||
|
let offset = 0; |
||||||
|
for (const chunk of chunks) { |
||||||
|
result.set(chunk, offset); |
||||||
|
offset += chunk.length; |
||||||
|
} |
||||||
|
|
||||||
|
return result; |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Decompress Uint8Array to JSON object |
||||||
|
*/ |
||||||
|
async function decompressJSON(compressed: Uint8Array): Promise<unknown> { |
||||||
|
if (!isCompressionSupported()) { |
||||||
|
// Fallback: Assume it's uncompressed JSON
|
||||||
|
return JSON.parse(new TextDecoder().decode(compressed)); |
||||||
|
} |
||||||
|
|
||||||
|
const stream = new DecompressionStream('gzip'); |
||||||
|
const writer = stream.writable.getWriter(); |
||||||
|
const reader = stream.readable.getReader(); |
||||||
|
|
||||||
|
// Write compressed data
|
||||||
|
writer.write(new Uint8Array(compressed)); |
||||||
|
writer.close(); |
||||||
|
|
||||||
|
// Read decompressed chunks
|
||||||
|
const chunks: Uint8Array[] = []; |
||||||
|
while (true) { |
||||||
|
const { done, value } = await reader.read(); |
||||||
|
if (done) break; |
||||||
|
chunks.push(value); |
||||||
|
} |
||||||
|
|
||||||
|
// Combine chunks
|
||||||
|
const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0); |
||||||
|
const result = new Uint8Array(totalLength); |
||||||
|
let offset = 0; |
||||||
|
for (const chunk of chunks) { |
||||||
|
result.set(chunk, offset); |
||||||
|
offset += chunk.length; |
||||||
|
} |
||||||
|
|
||||||
|
// Parse JSON
|
||||||
|
return JSON.parse(new TextDecoder().decode(result)); |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Archive an event (compress and move to archive store) |
||||||
|
*/ |
||||||
|
export async function archiveEvent(event: CachedEvent): Promise<void> { |
||||||
|
try { |
||||||
|
const db = await getDB(); |
||||||
|
|
||||||
|
// Compress the event
|
||||||
|
const compressed = await compressJSON(event); |
||||||
|
|
||||||
|
// Create archived event
|
||||||
|
const archived: ArchivedEvent = { |
||||||
|
id: event.id, |
||||||
|
compressed, |
||||||
|
created_at: event.created_at, |
||||||
|
kind: event.kind, |
||||||
|
pubkey: event.pubkey, |
||||||
|
archived_at: Date.now() |
||||||
|
}; |
||||||
|
|
||||||
|
// Store in archive
|
||||||
|
await db.put('eventArchive', archived); |
||||||
|
|
||||||
|
// Remove from main events store
|
||||||
|
await db.delete('events', event.id); |
||||||
|
} catch (error) { |
||||||
|
// Archive failed (non-critical)
|
||||||
|
// Don't throw - archiving failures shouldn't break the app
|
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Archive multiple events in batch |
||||||
|
*/ |
||||||
|
export async function archiveEvents(events: CachedEvent[]): Promise<void> { |
||||||
|
if (events.length === 0) return; |
||||||
|
|
||||||
|
try { |
||||||
|
const db = await getDB(); |
||||||
|
const tx = db.transaction(['events', 'eventArchive'], 'readwrite'); |
||||||
|
|
||||||
|
// Compress all events in parallel
|
||||||
|
const archivePromises = events.map(async (event) => { |
||||||
|
const compressed = await compressJSON(event); |
||||||
|
return { |
||||||
|
id: event.id, |
||||||
|
compressed, |
||||||
|
created_at: event.created_at, |
||||||
|
kind: event.kind, |
||||||
|
pubkey: event.pubkey, |
||||||
|
archived_at: Date.now() |
||||||
|
} as ArchivedEvent; |
||||||
|
}); |
||||||
|
|
||||||
|
const archived = await Promise.all(archivePromises); |
||||||
|
|
||||||
|
// Store all archived events
|
||||||
|
await Promise.all(archived.map(a => tx.objectStore('eventArchive').put(a))); |
||||||
|
|
||||||
|
// Delete from main events store
|
||||||
|
await Promise.all(events.map(e => tx.objectStore('events').delete(e.id))); |
||||||
|
|
||||||
|
await tx.done; |
||||||
|
} catch (error) { |
||||||
|
// Archive failed (non-critical)
|
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Retrieve an archived event (decompress and return) |
||||||
|
*/ |
||||||
|
export async function getArchivedEvent(id: string): Promise<CachedEvent | undefined> { |
||||||
|
try { |
||||||
|
const db = await getDB(); |
||||||
|
const archived = await db.get('eventArchive', id); |
||||||
|
|
||||||
|
if (!archived) return undefined; |
||||||
|
|
||||||
|
// Decompress
|
||||||
|
const decompressed = await decompressJSON(archived.compressed); |
||||||
|
|
||||||
|
// Return as CachedEvent
|
||||||
|
return decompressed as CachedEvent; |
||||||
|
} catch (error) { |
||||||
|
// Archive read failed (non-critical)
|
||||||
|
return undefined; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Archive old events (older than threshold) |
||||||
|
* This is a background operation that doesn't block day-to-day operations |
||||||
|
*/ |
||||||
|
export async function archiveOldEvents( |
||||||
|
threshold: number = DEFAULT_ARCHIVE_THRESHOLD |
||||||
|
): Promise<number> { |
||||||
|
try { |
||||||
|
const db = await getDB(); |
||||||
|
const now = Date.now(); |
||||||
|
const cutoffTime = now - threshold; |
||||||
|
|
||||||
|
// Find old events
|
||||||
|
const tx = db.transaction('events', 'readonly'); |
||||||
|
const index = tx.store.index('created_at'); |
||||||
|
|
||||||
|
const oldEvents: CachedEvent[] = []; |
||||||
|
for await (const cursor of index.iterate()) { |
||||||
|
if (cursor.value.created_at < cutoffTime) { |
||||||
|
oldEvents.push(cursor.value as CachedEvent); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
await tx.done; |
||||||
|
|
||||||
|
if (oldEvents.length === 0) return 0; |
||||||
|
|
||||||
|
// Archive in batches to avoid blocking
|
||||||
|
const BATCH_SIZE = 50; |
||||||
|
let archived = 0; |
||||||
|
|
||||||
|
for (let i = 0; i < oldEvents.length; i += BATCH_SIZE) { |
||||||
|
const batch = oldEvents.slice(i, i + BATCH_SIZE); |
||||||
|
await archiveEvents(batch); |
||||||
|
archived += batch.length; |
||||||
|
|
||||||
|
// Yield to browser between batches to avoid blocking
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 0)); |
||||||
|
} |
||||||
|
|
||||||
|
return archived; |
||||||
|
} catch (error) { |
||||||
|
// Archive failed (non-critical)
|
||||||
|
return 0; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Get archived events by kind (decompressed) |
||||||
|
*/ |
||||||
|
export async function getArchivedEventsByKind( |
||||||
|
kind: number, |
||||||
|
limit?: number |
||||||
|
): Promise<CachedEvent[]> { |
||||||
|
try { |
||||||
|
const db = await getDB(); |
||||||
|
const tx = db.transaction('eventArchive', 'readonly'); |
||||||
|
const index = tx.store.index('kind'); |
||||||
|
|
||||||
|
const archived = await index.getAll(kind); |
||||||
|
await tx.done; |
||||||
|
|
||||||
|
// Decompress in parallel
|
||||||
|
const decompressed = await Promise.all( |
||||||
|
archived.map(a => decompressJSON(a.compressed)) |
||||||
|
); |
||||||
|
|
||||||
|
const events = decompressed as CachedEvent[]; |
||||||
|
|
||||||
|
// Sort and limit
|
||||||
|
const sorted = events.sort((a, b) => b.created_at - a.created_at); |
||||||
|
return limit ? sorted.slice(0, limit) : sorted; |
||||||
|
} catch (error) { |
||||||
|
// Archive read failed (non-critical)
|
||||||
|
return []; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Get archived events by pubkey (decompressed) |
||||||
|
*/ |
||||||
|
export async function getArchivedEventsByPubkey( |
||||||
|
pubkey: string, |
||||||
|
limit?: number |
||||||
|
): Promise<CachedEvent[]> { |
||||||
|
try { |
||||||
|
const db = await getDB(); |
||||||
|
const tx = db.transaction('eventArchive', 'readonly'); |
||||||
|
const index = tx.store.index('pubkey'); |
||||||
|
|
||||||
|
const archived = await index.getAll(pubkey); |
||||||
|
await tx.done; |
||||||
|
|
||||||
|
// Decompress in parallel
|
||||||
|
const decompressed = await Promise.all( |
||||||
|
archived.map(a => decompressJSON(a.compressed)) |
||||||
|
); |
||||||
|
|
||||||
|
const events = decompressed as CachedEvent[]; |
||||||
|
|
||||||
|
// Sort and limit
|
||||||
|
const sorted = events.sort((a, b) => b.created_at - a.created_at); |
||||||
|
return limit ? sorted.slice(0, limit) : sorted; |
||||||
|
} catch (error) { |
||||||
|
// Archive read failed (non-critical)
|
||||||
|
return []; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Restore an archived event back to main cache (unarchive) |
||||||
|
*/ |
||||||
|
export async function unarchiveEvent(id: string): Promise<void> { |
||||||
|
try { |
||||||
|
const archived = await getArchivedEvent(id); |
||||||
|
if (!archived) return; |
||||||
|
|
||||||
|
const db = await getDB(); |
||||||
|
|
||||||
|
// Put back in main events store
|
||||||
|
await db.put('events', archived); |
||||||
|
|
||||||
|
// Remove from archive
|
||||||
|
await db.delete('eventArchive', id); |
||||||
|
} catch (error) { |
||||||
|
// Unarchive failed (non-critical)
|
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Clear old archived events (permanent deletion) |
||||||
|
*/ |
||||||
|
export async function clearOldArchivedEvents(olderThan: number): Promise<number> { |
||||||
|
try { |
||||||
|
const db = await getDB(); |
||||||
|
const tx = db.transaction('eventArchive', 'readwrite'); |
||||||
|
const index = tx.store.index('created_at'); |
||||||
|
|
||||||
|
const idsToDelete: string[] = []; |
||||||
|
for await (const cursor of index.iterate()) { |
||||||
|
if (cursor.value.created_at < olderThan) { |
||||||
|
idsToDelete.push(cursor.value.id); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
await tx.done; |
||||||
|
|
||||||
|
// Delete in batch
|
||||||
|
if (idsToDelete.length > 0) { |
||||||
|
const deleteTx = db.transaction('eventArchive', 'readwrite'); |
||||||
|
await Promise.all(idsToDelete.map(id => deleteTx.store.delete(id))); |
||||||
|
await deleteTx.done; |
||||||
|
} |
||||||
|
|
||||||
|
return idsToDelete.length; |
||||||
|
} catch (error) { |
||||||
|
// Clear failed (non-critical)
|
||||||
|
return 0; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Get archive statistics |
||||||
|
*/ |
||||||
|
export async function getArchiveStats(): Promise<{ |
||||||
|
totalArchived: number; |
||||||
|
totalSize: number; // Approximate size in bytes
|
||||||
|
oldestArchived: number | null; |
||||||
|
}> { |
||||||
|
try { |
||||||
|
const db = await getDB(); |
||||||
|
const tx = db.transaction('eventArchive', 'readonly'); |
||||||
|
|
||||||
|
let totalArchived = 0; |
||||||
|
let totalSize = 0; |
||||||
|
let oldestArchived: number | null = null; |
||||||
|
|
||||||
|
for await (const cursor of tx.store.iterate()) { |
||||||
|
totalArchived++; |
||||||
|
totalSize += cursor.value.compressed.length; |
||||||
|
if (!oldestArchived || cursor.value.created_at < oldestArchived) { |
||||||
|
oldestArchived = cursor.value.created_at; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
await tx.done; |
||||||
|
|
||||||
|
return { |
||||||
|
totalArchived, |
||||||
|
totalSize, |
||||||
|
oldestArchived |
||||||
|
}; |
||||||
|
} catch (error) { |
||||||
|
return { |
||||||
|
totalArchived: 0, |
||||||
|
totalSize: 0, |
||||||
|
oldestArchived: null |
||||||
|
}; |
||||||
|
} |
||||||
|
} |
||||||
Loading…
Reference in new issue