Browse Source
background deletion removal corrected and expanded search and added cancel button show maintainers on the search result cards remove code search removed hard-coded theme classes Nostr-Signature: 8080f3cad9abacfc9a5fe08bc26744ff8444d0228ea8a6e8a449c8c2704885d6 573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc 70120c99f5e8a1e9df6d74af756a51641c4998265b9233d5a7d187d9e21302dc6377ae274b07be4d6515af1dabfada43fa9af1a087a34e2879b028ac34e551camain
11 changed files with 2373 additions and 234 deletions
@ -0,0 +1,978 @@ |
|||||||
|
/** |
||||||
|
* Persistent event cache using IndexedDB for client-side storage |
||||||
|
* Provides offline access and reduces relay load |
||||||
|
*
|
||||||
|
* Strategy: |
||||||
|
* - Client-side only (IndexedDB) - events are immutable and user-specific |
||||||
|
* - Check cache first, return immediately if available |
||||||
|
* - Fetch from relays in background and merge results |
||||||
|
* - Never delete valid events, only append/integrate new ones |
||||||
|
* - Replaceable events (kind 0, 3, 10002) use latest version per pubkey |
||||||
|
*/ |
||||||
|
|
||||||
|
import type { NostrEvent, NostrFilter } from '../../types/nostr.js'; |
||||||
|
import { KIND } from '../../types/nostr.js'; |
||||||
|
import logger from '../logger.js'; |
||||||
|
import type { NostrClient } from './nostr-client.js'; |
||||||
|
|
||||||
|
// IndexedDB database name and schema version (bump to trigger onupgradeneeded).
const DB_NAME = 'gitrepublic_events';
const DB_VERSION = 1;
// Object store names.
const STORE_EVENTS = 'events';
const STORE_FILTERS = 'filters';
const STORE_PROFILES = 'profiles'; // Optimized storage for kind 0 events

// Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
|
||||||
|
|
||||||
|
// A stored Nostr event plus cache bookkeeping.
interface CachedEvent {
  event: NostrEvent;
  cachedAt: number; // epoch ms when this record was written to the cache
  filterKey?: string; // Which filter(s) this event matches
}

// Maps a serialized filter key to the ids of events that matched it.
interface FilterCacheEntry {
  filterKey: string;
  eventIds: string[]; // ids of cached events associated with this filter
  cachedAt: number; // epoch ms when the entry was written
  ttl: number; // ms the entry is considered fresh
}
||||||
|
|
||||||
|
/** |
||||||
|
* Generate a deterministic cache key from a filter |
||||||
|
*/ |
||||||
|
function generateFilterKey(filter: NostrFilter): string { |
||||||
|
const sortedFilter = Object.keys(filter) |
||||||
|
.sort() |
||||||
|
.reduce((acc, key) => { |
||||||
|
const value = filter[key as keyof NostrFilter]; |
||||||
|
if (value !== undefined) { |
||||||
|
if (Array.isArray(value)) { |
||||||
|
acc[key] = [...value].sort(); |
||||||
|
} else { |
||||||
|
acc[key] = value; |
||||||
|
} |
||||||
|
} |
||||||
|
return acc; |
||||||
|
}, {} as Record<string, unknown>); |
||||||
|
|
||||||
|
return JSON.stringify(sortedFilter); |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Generate cache key for multiple filters |
||||||
|
*/ |
||||||
|
function generateMultiFilterKey(filters: NostrFilter[]): string { |
||||||
|
const keys = filters.map(f => generateFilterKey(f)).sort(); |
||||||
|
return keys.join('|'); |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Check if an event matches a filter |
||||||
|
*/ |
||||||
|
function eventMatchesFilter(event: NostrEvent, filter: NostrFilter): boolean { |
||||||
|
// Check kind
|
||||||
|
if (filter.kinds && !filter.kinds.includes(event.kind)) { |
||||||
|
return false; |
||||||
|
} |
||||||
|
|
||||||
|
// Check authors
|
||||||
|
if (filter.authors && filter.authors.length > 0) { |
||||||
|
if (!filter.authors.includes(event.pubkey)) { |
||||||
|
return false; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Check IDs
|
||||||
|
if (filter.ids && filter.ids.length > 0) { |
||||||
|
if (!filter.ids.includes(event.id)) { |
||||||
|
return false; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Check #d tag (for parameterized replaceable events)
|
||||||
|
if (filter['#d'] && filter['#d'].length > 0) { |
||||||
|
const dTag = event.tags.find(t => t[0] === 'd')?.[1]; |
||||||
|
if (!dTag || !filter['#d'].includes(dTag)) { |
||||||
|
return false; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Check #a tag (for parameterized replaceable events)
|
||||||
|
if (filter['#a'] && filter['#a'].length > 0) { |
||||||
|
const aTag = event.tags.find(t => t[0] === 'a')?.[1]; |
||||||
|
if (!aTag || !filter['#a'].includes(aTag)) { |
||||||
|
return false; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Check #e tag
|
||||||
|
if (filter['#e'] && filter['#e'].length > 0) { |
||||||
|
const eTags = event.tags.filter(t => t[0] === 'e').map(t => t[1]); |
||||||
|
if (!eTags.some(e => filter['#e']!.includes(e))) { |
||||||
|
return false; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Check #p tag
|
||||||
|
if (filter['#p'] && filter['#p'].length > 0) { |
||||||
|
const pTags = event.tags.filter(t => t[0] === 'p').map(t => t[1]); |
||||||
|
if (!pTags.some(p => filter['#p']!.includes(p))) { |
||||||
|
return false; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Check created_at range
|
||||||
|
if (filter.since && event.created_at < filter.since) { |
||||||
|
return false; |
||||||
|
} |
||||||
|
if (filter.until && event.created_at > filter.until) { |
||||||
|
return false; |
||||||
|
} |
||||||
|
|
||||||
|
return true; |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Check if an event matches any of the filters |
||||||
|
*/ |
||||||
|
function eventMatchesAnyFilter(event: NostrEvent, filters: NostrFilter[]): boolean { |
||||||
|
return filters.some(filter => eventMatchesFilter(event, filter)); |
||||||
|
} |
||||||
|
|
||||||
|
/**
 * Client-side persistent cache for Nostr events backed by IndexedDB.
 * See the file header for the overall caching strategy.
 */
export class PersistentEventCache {
  // Open database handle; stays null until init() succeeds (e.g. during
  // SSR or when the browser lacks IndexedDB support).
  private db: IDBDatabase | null = null;
  // Memoized init() promise so concurrent callers share a single DB open.
  private initPromise: Promise<void> | null = null;
  private defaultTTL: number = 5 * 60 * 1000; // 5 minutes
  private profileTTL: number = 30 * 60 * 1000; // 30 minutes for profiles
  private maxCacheAge: number = 7 * 24 * 60 * 60 * 1000; // 7 days max age
|
||||||
|
|
||||||
|
constructor() {
  // Kick off DB initialization eagerly. init() can reject (it rejects
  // when the browser refuses to open IndexedDB), so swallow that here:
  // otherwise merely constructing the cache could produce an unhandled
  // promise rejection. init() already logs the failure, and every public
  // method re-awaits init() itself.
  this.init().catch(() => {
    /* failure already logged inside init() */
  });
}
||||||
|
|
||||||
|
/** |
||||||
|
* Initialize IndexedDB |
||||||
|
*/ |
||||||
|
/**
 * Initialize IndexedDB. Idempotent: the first call creates and memoizes
 * the open promise; later calls await that same promise.
 *
 * Resolves silently (leaving this.db null) when IndexedDB is not
 * available (e.g. server-side rendering) — callers must check this.db
 * after awaiting.
 *
 * NOTE(review): if the open fails, initPromise stays rejected forever,
 * so the cache never retries — confirm this is intended.
 */
private async init(): Promise<void> {
  if (this.initPromise) {
    return this.initPromise;
  }

  // No IndexedDB (SSR or unsupported browser): degrade gracefully.
  if (typeof window === 'undefined' || !window.indexedDB) {
    logger.warn('IndexedDB not available, using in-memory cache only');
    return;
  }

  this.initPromise = new Promise((resolve, reject) => {
    const request = indexedDB.open(DB_NAME, DB_VERSION);

    request.onerror = () => {
      logger.error('Failed to open IndexedDB');
      reject(new Error('Failed to open IndexedDB'));
    };

    request.onsuccess = () => {
      this.db = request.result;
      resolve();
    };

    // Schema creation; runs only when DB_VERSION is newer than on disk.
    request.onupgradeneeded = (event) => {
      const db = (event.target as IDBOpenDBRequest).result;

      // Events store - stores all events by ID
      // NOTE(review): keyPath 'id' means records put here must carry a
      // top-level `id` property; CachedEvent itself has none, so writers
      // must add it explicitly.
      if (!db.objectStoreNames.contains(STORE_EVENTS)) {
        const eventStore = db.createObjectStore(STORE_EVENTS, { keyPath: 'id' });
        eventStore.createIndex('pubkey', 'event.pubkey', { unique: false });
        eventStore.createIndex('kind', 'event.kind', { unique: false });
        eventStore.createIndex('created_at', 'event.created_at', { unique: false });
        eventStore.createIndex('cachedAt', 'cachedAt', { unique: false });
      }

      // Filter cache store - maps filter keys to event IDs
      if (!db.objectStoreNames.contains(STORE_FILTERS)) {
        const filterStore = db.createObjectStore(STORE_FILTERS, { keyPath: 'filterKey' });
        filterStore.createIndex('cachedAt', 'cachedAt', { unique: false });
      }

      // Profiles store - optimized for kind 0 events (latest per pubkey)
      if (!db.objectStoreNames.contains(STORE_PROFILES)) {
        db.createObjectStore(STORE_PROFILES, { keyPath: 'pubkey' });
      }
    };
  });

  return this.initPromise;
}
||||||
|
|
||||||
|
/** |
||||||
|
* Get events from cache that match the filters |
||||||
|
*/ |
||||||
|
async get(filters: NostrFilter[]): Promise<NostrEvent[] | null> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db) { |
||||||
|
return null; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
const filterKey = generateMultiFilterKey(filters); |
||||||
|
|
||||||
|
// Check filter cache first
|
||||||
|
const filterEntry = await this.getFilterEntry(filterKey); |
||||||
|
if (!filterEntry) { |
||||||
|
return null; |
||||||
|
} |
||||||
|
|
||||||
|
// Check if filter cache is expired
|
||||||
|
const now = Date.now(); |
||||||
|
if (now - filterEntry.cachedAt > filterEntry.ttl) { |
||||||
|
// Expired, but we can still return events if they exist
|
||||||
|
// Don't delete, just mark as stale
|
||||||
|
} |
||||||
|
|
||||||
|
// Get events from events store
|
||||||
|
const events: NostrEvent[] = []; |
||||||
|
const eventStore = this.db.transaction([STORE_EVENTS], 'readonly').objectStore(STORE_EVENTS); |
||||||
|
|
||||||
|
for (const eventId of filterEntry.eventIds) { |
||||||
|
const request = eventStore.get(eventId); |
||||||
|
const cached = await new Promise<CachedEvent | undefined>((resolve) => { |
||||||
|
request.onsuccess = () => resolve(request.result); |
||||||
|
request.onerror = () => resolve(undefined); |
||||||
|
}); |
||||||
|
|
||||||
|
if (cached) { |
||||||
|
// Check if event is too old (beyond max cache age)
|
||||||
|
if (now - cached.cachedAt < this.maxCacheAge) { |
||||||
|
// Verify event still matches filters (in case filters changed)
|
||||||
|
if (eventMatchesAnyFilter(cached.event, filters)) { |
||||||
|
events.push(cached.event); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// For replaceable events, ensure we only return the latest per pubkey
|
||||||
|
const replaceableEvents = new Map<string, NostrEvent>(); |
||||||
|
const regularEvents: NostrEvent[] = []; |
||||||
|
|
||||||
|
for (const event of events) { |
||||||
|
if (REPLACEABLE_KINDS.includes(event.kind)) { |
||||||
|
const existing = replaceableEvents.get(event.pubkey); |
||||||
|
if (!existing || event.created_at > existing.created_at) { |
||||||
|
replaceableEvents.set(event.pubkey, event); |
||||||
|
} |
||||||
|
} else { |
||||||
|
regularEvents.push(event); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
const result = [...Array.from(replaceableEvents.values()), ...regularEvents]; |
||||||
|
|
||||||
|
// Sort by created_at descending
|
||||||
|
result.sort((a, b) => b.created_at - a.created_at); |
||||||
|
|
||||||
|
return result.length > 0 ? result : null; |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error, filters }, 'Error reading from event cache'); |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Get filter cache entry |
||||||
|
*/ |
||||||
|
private async getFilterEntry(filterKey: string): Promise<FilterCacheEntry | null> { |
||||||
|
if (!this.db) return null; |
||||||
|
|
||||||
|
try { |
||||||
|
const store = this.db.transaction([STORE_FILTERS], 'readonly').objectStore(STORE_FILTERS); |
||||||
|
const request = store.get(filterKey); |
||||||
|
|
||||||
|
return new Promise((resolve) => { |
||||||
|
request.onsuccess = () => resolve(request.result || null); |
||||||
|
request.onerror = () => resolve(null); |
||||||
|
}); |
||||||
|
} catch { |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Store events in cache, merging with existing events |
||||||
|
*/ |
||||||
|
async set(filters: NostrFilter[], events: NostrEvent[], ttl?: number): Promise<void> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db) { |
||||||
|
return; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
const filterKey = generateMultiFilterKey(filters); |
||||||
|
const now = Date.now(); |
||||||
|
const cacheTTL = ttl || this.defaultTTL; |
||||||
|
|
||||||
|
// Determine if this is a profile query
|
||||||
|
const isProfileQuery = filters.some(f =>
|
||||||
|
f.kinds?.includes(0) && f.authors && f.authors.length > 0 |
||||||
|
); |
||||||
|
|
||||||
|
// Use longer TTL for profile events
|
||||||
|
const effectiveTTL = isProfileQuery ? this.profileTTL : cacheTTL; |
||||||
|
|
||||||
|
// Get existing filter entry
|
||||||
|
const existingEntry = await this.getFilterEntry(filterKey); |
||||||
|
const existingEventIds = new Set(existingEntry?.eventIds || []); |
||||||
|
|
||||||
|
// Store/update events
|
||||||
|
const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS); |
||||||
|
const newEventIds: string[] = []; |
||||||
|
|
||||||
|
for (const event of events) { |
||||||
|
// For replaceable events, check if we have a newer version for this pubkey
|
||||||
|
if (REPLACEABLE_KINDS.includes(event.kind)) { |
||||||
|
// Check if we already have a newer replaceable event for this pubkey
|
||||||
|
const existingProfile = await this.getProfile(event.pubkey); |
||||||
|
if (existingProfile && existingProfile.kind === event.kind && existingProfile.created_at >= event.created_at) { |
||||||
|
// Existing event is newer or same, skip
|
||||||
|
if (existingEventIds.has(existingProfile.id)) { |
||||||
|
newEventIds.push(existingProfile.id); |
||||||
|
} |
||||||
|
continue; |
||||||
|
} |
||||||
|
} else { |
||||||
|
// For non-replaceable events, check if we already have this event
|
||||||
|
if (existingEventIds.has(event.id)) { |
||||||
|
newEventIds.push(event.id); |
||||||
|
continue; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Store the event
|
||||||
|
const cached: CachedEvent = { |
||||||
|
event, |
||||||
|
cachedAt: now, |
||||||
|
filterKey |
||||||
|
}; |
||||||
|
|
||||||
|
await new Promise<void>((resolve, reject) => { |
||||||
|
const request = eventStore.put(cached); |
||||||
|
request.onsuccess = () => resolve(); |
||||||
|
request.onerror = () => reject(request.error); |
||||||
|
}); |
||||||
|
|
||||||
|
newEventIds.push(event.id); |
||||||
|
|
||||||
|
// Also store in profiles store if it's a profile event
|
||||||
|
if (event.kind === 0) { |
||||||
|
const profileStore = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES); |
||||||
|
const existingProfile = await new Promise<CachedEvent | undefined>((resolve) => { |
||||||
|
const req = profileStore.get(event.pubkey); |
||||||
|
req.onsuccess = () => resolve(req.result); |
||||||
|
req.onerror = () => resolve(undefined); |
||||||
|
}); |
||||||
|
|
||||||
|
if (!existingProfile || event.created_at > existingProfile.event.created_at) { |
||||||
|
await new Promise<void>((resolve, reject) => { |
||||||
|
const req = profileStore.put({ pubkey: event.pubkey, ...cached }); |
||||||
|
req.onsuccess = () => resolve(); |
||||||
|
req.onerror = () => reject(req.error); |
||||||
|
}); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Merge with existing event IDs (don't delete valid events)
|
||||||
|
const mergedEventIds = Array.from(new Set([...existingEntry?.eventIds || [], ...newEventIds])); |
||||||
|
|
||||||
|
// Update filter cache entry
|
||||||
|
const filterStore = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS); |
||||||
|
const filterEntry: FilterCacheEntry = { |
||||||
|
filterKey, |
||||||
|
eventIds: mergedEventIds, |
||||||
|
cachedAt: now, |
||||||
|
ttl: effectiveTTL |
||||||
|
}; |
||||||
|
|
||||||
|
await new Promise<void>((resolve, reject) => { |
||||||
|
const request = filterStore.put(filterEntry); |
||||||
|
request.onsuccess = () => resolve(); |
||||||
|
request.onerror = () => reject(request.error); |
||||||
|
}); |
||||||
|
|
||||||
|
logger.debug({
|
||||||
|
filterKey,
|
||||||
|
eventCount: events.length,
|
||||||
|
mergedCount: mergedEventIds.length, |
||||||
|
ttl: effectiveTTL
|
||||||
|
}, 'Cached events in IndexedDB'); |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error, filters }, 'Error writing to event cache'); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Get a single event by ID |
||||||
|
*/ |
||||||
|
private async getEventById(eventId: string): Promise<NostrEvent | null> { |
||||||
|
if (!this.db) return null; |
||||||
|
|
||||||
|
try { |
||||||
|
const store = this.db.transaction([STORE_EVENTS], 'readonly').objectStore(STORE_EVENTS); |
||||||
|
const request = store.get(eventId); |
||||||
|
|
||||||
|
return new Promise((resolve) => { |
||||||
|
request.onsuccess = () => { |
||||||
|
const cached = request.result as CachedEvent | undefined; |
||||||
|
resolve(cached?.event || null); |
||||||
|
}; |
||||||
|
request.onerror = () => resolve(null); |
||||||
|
}); |
||||||
|
} catch { |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Get profile event (kind 0) for a pubkey |
||||||
|
*/ |
||||||
|
async getProfile(pubkey: string): Promise<NostrEvent | null> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db) { |
||||||
|
return null; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
const store = this.db.transaction([STORE_PROFILES], 'readonly').objectStore(STORE_PROFILES); |
||||||
|
const request = store.get(pubkey); |
||||||
|
|
||||||
|
return new Promise((resolve) => { |
||||||
|
request.onsuccess = () => { |
||||||
|
const cached = request.result as CachedEvent | undefined; |
||||||
|
if (cached) { |
||||||
|
// Check if not too old
|
||||||
|
const now = Date.now(); |
||||||
|
if (now - cached.cachedAt < this.maxCacheAge) { |
||||||
|
resolve(cached.event); |
||||||
|
} else { |
||||||
|
resolve(null); |
||||||
|
} |
||||||
|
} else { |
||||||
|
resolve(null); |
||||||
|
} |
||||||
|
}; |
||||||
|
request.onerror = () => resolve(null); |
||||||
|
}); |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error, pubkey }, 'Error reading profile from cache'); |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Set profile event (kind 0) |
||||||
|
*/ |
||||||
|
async setProfile(pubkey: string, event: NostrEvent): Promise<void> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db || event.kind !== 0) { |
||||||
|
return; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
// Check if we have a newer profile
|
||||||
|
const existing = await this.getProfile(pubkey); |
||||||
|
if (existing && existing.created_at >= event.created_at) { |
||||||
|
return; // Existing is newer or same
|
||||||
|
} |
||||||
|
|
||||||
|
const cached: CachedEvent = { |
||||||
|
event, |
||||||
|
cachedAt: Date.now() |
||||||
|
}; |
||||||
|
|
||||||
|
const store = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES); |
||||||
|
await new Promise<void>((resolve, reject) => { |
||||||
|
const request = store.put({ pubkey, ...cached }); |
||||||
|
request.onsuccess = () => resolve(); |
||||||
|
request.onerror = () => reject(request.error); |
||||||
|
}); |
||||||
|
|
||||||
|
// Also store in events store
|
||||||
|
const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS); |
||||||
|
await new Promise<void>((resolve, reject) => { |
||||||
|
const request = eventStore.put(cached); |
||||||
|
request.onsuccess = () => resolve(); |
||||||
|
request.onerror = () => reject(request.error); |
||||||
|
}); |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error, pubkey }, 'Error writing profile to cache'); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Invalidate cache for a specific pubkey |
||||||
|
*/ |
||||||
|
async invalidatePubkey(pubkey: string): Promise<void> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db) { |
||||||
|
return; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
// Remove from profiles store
|
||||||
|
const profileStore = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES); |
||||||
|
await new Promise<void>((resolve, reject) => { |
||||||
|
const request = profileStore.delete(pubkey); |
||||||
|
request.onsuccess = () => resolve(); |
||||||
|
request.onerror = () => reject(request.error); |
||||||
|
}); |
||||||
|
|
||||||
|
// Remove events from events store
|
||||||
|
const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS); |
||||||
|
const index = eventStore.index('pubkey'); |
||||||
|
const request = index.openKeyCursor(IDBKeyRange.only(pubkey)); |
||||||
|
|
||||||
|
await new Promise<void>((resolve) => { |
||||||
|
request.onsuccess = (event) => { |
||||||
|
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result; |
||||||
|
if (cursor) { |
||||||
|
eventStore.delete(cursor.primaryKey); |
||||||
|
cursor.continue(); |
||||||
|
} else { |
||||||
|
resolve(); |
||||||
|
} |
||||||
|
}; |
||||||
|
request.onerror = () => resolve(); |
||||||
|
}); |
||||||
|
|
||||||
|
// Invalidate filter entries that reference these events
|
||||||
|
const filterStore = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS); |
||||||
|
const filterRequest = filterStore.openCursor(); |
||||||
|
|
||||||
|
await new Promise<void>((resolve) => { |
||||||
|
filterRequest.onsuccess = (event) => { |
||||||
|
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result; |
||||||
|
if (cursor) { |
||||||
|
const entry = cursor.value as FilterCacheEntry; |
||||||
|
// Remove event IDs that match this pubkey
|
||||||
|
// We'd need to check each event, but for simplicity, just invalidate the filter
|
||||||
|
filterStore.delete(cursor.primaryKey); |
||||||
|
cursor.continue(); |
||||||
|
} else { |
||||||
|
resolve(); |
||||||
|
} |
||||||
|
}; |
||||||
|
filterRequest.onerror = () => resolve(); |
||||||
|
}); |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error, pubkey }, 'Error invalidating pubkey cache'); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Invalidate cache for specific filters |
||||||
|
*/ |
||||||
|
async invalidate(filters: NostrFilter[]): Promise<void> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db) { |
||||||
|
return; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
const filterKey = generateMultiFilterKey(filters); |
||||||
|
const store = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS); |
||||||
|
await new Promise<void>((resolve, reject) => { |
||||||
|
const request = store.delete(filterKey); |
||||||
|
request.onsuccess = () => resolve(); |
||||||
|
request.onerror = () => reject(request.error); |
||||||
|
}); |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error, filters }, 'Error invalidating filter cache'); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Clear all cache |
||||||
|
*/ |
||||||
|
async clear(): Promise<void> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db) { |
||||||
|
return; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
const stores = [STORE_EVENTS, STORE_FILTERS, STORE_PROFILES]; |
||||||
|
for (const storeName of stores) { |
||||||
|
const store = this.db.transaction([storeName], 'readwrite').objectStore(storeName); |
||||||
|
await new Promise<void>((resolve, reject) => { |
||||||
|
const request = store.clear(); |
||||||
|
request.onsuccess = () => resolve(); |
||||||
|
request.onerror = () => reject(request.error); |
||||||
|
}); |
||||||
|
} |
||||||
|
logger.debug('Cleared all event cache'); |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error }, 'Error clearing event cache'); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Cleanup old entries |
||||||
|
*/ |
||||||
|
async cleanup(): Promise<void> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db) { |
||||||
|
return; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
const now = Date.now(); |
||||||
|
let cleaned = 0; |
||||||
|
|
||||||
|
// Clean up expired filter entries
|
||||||
|
const filterStore = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS); |
||||||
|
const filterRequest = filterStore.openCursor(); |
||||||
|
|
||||||
|
await new Promise<void>((resolve) => { |
||||||
|
filterRequest.onsuccess = (event) => { |
||||||
|
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result; |
||||||
|
if (cursor) { |
||||||
|
const entry = cursor.value as FilterCacheEntry; |
||||||
|
if (now - entry.cachedAt > entry.ttl) { |
||||||
|
filterStore.delete(cursor.primaryKey); |
||||||
|
cleaned++; |
||||||
|
} |
||||||
|
cursor.continue(); |
||||||
|
} else { |
||||||
|
resolve(); |
||||||
|
} |
||||||
|
}; |
||||||
|
filterRequest.onerror = () => resolve(); |
||||||
|
}); |
||||||
|
|
||||||
|
// Clean up old events (beyond max cache age)
|
||||||
|
const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS); |
||||||
|
const eventRequest = eventStore.openCursor(); |
||||||
|
|
||||||
|
await new Promise<void>((resolve) => { |
||||||
|
eventRequest.onsuccess = (event) => { |
||||||
|
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result; |
||||||
|
if (cursor) { |
||||||
|
const cached = cursor.value as CachedEvent; |
||||||
|
if (now - cached.cachedAt > this.maxCacheAge) { |
||||||
|
eventStore.delete(cursor.primaryKey); |
||||||
|
cleaned++; |
||||||
|
} |
||||||
|
cursor.continue(); |
||||||
|
} else { |
||||||
|
resolve(); |
||||||
|
} |
||||||
|
}; |
||||||
|
eventRequest.onerror = () => resolve(); |
||||||
|
}); |
||||||
|
|
||||||
|
// Clean up old profiles
|
||||||
|
const profileStore = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES); |
||||||
|
const profileRequest = profileStore.openCursor(); |
||||||
|
|
||||||
|
await new Promise<void>((resolve) => { |
||||||
|
profileRequest.onsuccess = (event) => { |
||||||
|
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result; |
||||||
|
if (cursor) { |
||||||
|
const cached = cursor.value as CachedEvent; |
||||||
|
if (now - cached.cachedAt > this.maxCacheAge) { |
||||||
|
profileStore.delete(cursor.primaryKey); |
||||||
|
cleaned++; |
||||||
|
} |
||||||
|
cursor.continue(); |
||||||
|
} else { |
||||||
|
resolve(); |
||||||
|
} |
||||||
|
}; |
||||||
|
profileRequest.onerror = () => resolve(); |
||||||
|
}); |
||||||
|
|
||||||
|
if (cleaned > 0) { |
||||||
|
logger.debug({ cleaned }, 'Cleaned up old cache entries'); |
||||||
|
} |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error }, 'Error during cache cleanup'); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Get cache statistics |
||||||
|
*/ |
||||||
|
async getStats(): Promise<{ events: number; filters: number; profiles: number }> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db) { |
||||||
|
return { events: 0, filters: 0, profiles: 0 }; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
const eventStore = this.db.transaction([STORE_EVENTS], 'readonly').objectStore(STORE_EVENTS); |
||||||
|
const filterStore = this.db.transaction([STORE_FILTERS], 'readonly').objectStore(STORE_FILTERS); |
||||||
|
const profileStore = this.db.transaction([STORE_PROFILES], 'readonly').objectStore(STORE_PROFILES); |
||||||
|
|
||||||
|
const [eventCount, filterCount, profileCount] = await Promise.all([ |
||||||
|
new Promise<number>((resolve) => { |
||||||
|
const request = eventStore.count(); |
||||||
|
request.onsuccess = () => resolve(request.result); |
||||||
|
request.onerror = () => resolve(0); |
||||||
|
}), |
||||||
|
new Promise<number>((resolve) => { |
||||||
|
const request = filterStore.count(); |
||||||
|
request.onsuccess = () => resolve(request.result); |
||||||
|
request.onerror = () => resolve(0); |
||||||
|
}), |
||||||
|
new Promise<number>((resolve) => { |
||||||
|
const request = profileStore.count(); |
||||||
|
request.onsuccess = () => resolve(request.result); |
||||||
|
request.onerror = () => resolve(0); |
||||||
|
}) |
||||||
|
]); |
||||||
|
|
||||||
|
return { events: eventCount, filters: filterCount, profiles: profileCount }; |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error }, 'Error getting cache stats'); |
||||||
|
return { events: 0, filters: 0, profiles: 0 }; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Process deletion events (NIP-09) and remove deleted events from cache |
||||||
|
* @param deletionEvents - Array of kind 5 deletion events |
||||||
|
*/ |
||||||
|
async processDeletionEvents(deletionEvents: NostrEvent[]): Promise<void> { |
||||||
|
await this.init(); |
||||||
|
|
||||||
|
if (!this.db || deletionEvents.length === 0) { |
||||||
|
return; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
const deletedEventIds = new Set<string>(); |
||||||
|
const deletedAddresses = new Set<string>(); // Format: kind:pubkey:d-tag
|
||||||
|
|
||||||
|
// Extract deleted event IDs and addresses from deletion events
|
||||||
|
for (const deletionEvent of deletionEvents) { |
||||||
|
if (deletionEvent.kind !== KIND.DELETION_REQUEST) { |
||||||
|
continue; |
||||||
|
} |
||||||
|
|
||||||
|
// Extract 'e' tags (deleted event IDs)
|
||||||
|
for (const tag of deletionEvent.tags) { |
||||||
|
if (tag[0] === 'e' && tag[1]) { |
||||||
|
deletedEventIds.add(tag[1]); |
||||||
|
} |
||||||
|
// Extract 'a' tags (deleted parameterized replaceable events)
|
||||||
|
if (tag[0] === 'a' && tag[1]) { |
||||||
|
deletedAddresses.add(tag[1]); |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
if (deletedEventIds.size === 0 && deletedAddresses.size === 0) { |
||||||
|
return; // No deletions to process
|
||||||
|
} |
||||||
|
|
||||||
|
let removedCount = 0; |
||||||
|
|
||||||
|
// Remove events by ID
|
||||||
|
if (deletedEventIds.size > 0) { |
||||||
|
const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS); |
||||||
|
|
||||||
|
for (const eventId of deletedEventIds) { |
||||||
|
try { |
||||||
|
await new Promise<void>((resolve, reject) => { |
||||||
|
const request = eventStore.delete(eventId); |
||||||
|
request.onsuccess = () => resolve(); |
||||||
|
request.onerror = () => reject(request.error); |
||||||
|
}); |
||||||
|
removedCount++; |
||||||
|
} catch { |
||||||
|
// Event might not exist in cache, ignore
|
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Remove events by address (parameterized replaceable events)
|
||||||
|
if (deletedAddresses.size > 0) { |
||||||
|
const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS); |
||||||
|
const cursorRequest = eventStore.openCursor(); |
||||||
|
|
||||||
|
await new Promise<void>((resolve) => { |
||||||
|
cursorRequest.onsuccess = (event) => { |
||||||
|
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result; |
||||||
|
if (cursor) { |
||||||
|
const cached = cursor.value as CachedEvent; |
||||||
|
const event = cached.event; |
||||||
|
|
||||||
|
// Check if this event matches any deleted address
|
||||||
|
// Format: kind:pubkey:d-tag
|
||||||
|
for (const deletedAddr of deletedAddresses) { |
||||||
|
const parts = deletedAddr.split(':'); |
||||||
|
if (parts.length === 3) { |
||||||
|
const [kindStr, pubkey, dTag] = parts; |
||||||
|
const kind = parseInt(kindStr, 10); |
||||||
|
|
||||||
|
if (event.kind === kind && event.pubkey === pubkey) { |
||||||
|
const eventDTag = event.tags.find(t => t[0] === 'd')?.[1]; |
||||||
|
if (eventDTag === dTag) { |
||||||
|
// This event matches the deleted address
|
||||||
|
cursor.delete(); |
||||||
|
removedCount++; |
||||||
|
break; |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
cursor.continue(); |
||||||
|
} else { |
||||||
|
resolve(); |
||||||
|
} |
||||||
|
}; |
||||||
|
cursorRequest.onerror = () => resolve(); |
||||||
|
}); |
||||||
|
} |
||||||
|
|
||||||
|
// Remove deleted event IDs from filter cache entries
|
||||||
|
if (deletedEventIds.size > 0 || deletedAddresses.size > 0) { |
||||||
|
const filterStore = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS); |
||||||
|
const filterRequest = filterStore.openCursor(); |
||||||
|
|
||||||
|
await new Promise<void>((resolve) => { |
||||||
|
filterRequest.onsuccess = (event) => { |
||||||
|
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result; |
||||||
|
if (cursor) { |
||||||
|
const entry = cursor.value as FilterCacheEntry; |
||||||
|
const originalLength = entry.eventIds.length; |
||||||
|
|
||||||
|
// Remove deleted event IDs
|
||||||
|
entry.eventIds = entry.eventIds.filter(id => !deletedEventIds.has(id)); |
||||||
|
|
||||||
|
// If we removed any IDs, update the entry
|
||||||
|
if (entry.eventIds.length !== originalLength) { |
||||||
|
cursor.update(entry); |
||||||
|
} |
||||||
|
|
||||||
|
cursor.continue(); |
||||||
|
} else { |
||||||
|
resolve(); |
||||||
|
} |
||||||
|
}; |
||||||
|
filterRequest.onerror = () => resolve(); |
||||||
|
}); |
||||||
|
} |
||||||
|
|
||||||
|
// Also remove from profiles store if applicable
|
||||||
|
if (deletedEventIds.size > 0) { |
||||||
|
const profileStore = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES); |
||||||
|
const profileRequest = profileStore.openCursor(); |
||||||
|
|
||||||
|
await new Promise<void>((resolve) => { |
||||||
|
profileRequest.onsuccess = (event) => { |
||||||
|
const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result; |
||||||
|
if (cursor) { |
||||||
|
const cached = cursor.value as CachedEvent; |
||||||
|
if (deletedEventIds.has(cached.event.id)) { |
||||||
|
cursor.delete(); |
||||||
|
removedCount++; |
||||||
|
} |
||||||
|
cursor.continue(); |
||||||
|
} else { |
||||||
|
resolve(); |
||||||
|
} |
||||||
|
}; |
||||||
|
profileRequest.onerror = () => resolve(); |
||||||
|
}); |
||||||
|
} |
||||||
|
|
||||||
|
if (removedCount > 0) { |
||||||
|
logger.debug({ removedCount, deletedEventIds: deletedEventIds.size, deletedAddresses: deletedAddresses.size }, 'Processed deletion events and removed from cache'); |
||||||
|
} |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error, deletionEvents: deletionEvents.length }, 'Error processing deletion events'); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Fetch deletion events from relays and process them |
||||||
|
* @param nostrClient - NostrClient instance to fetch events |
||||||
|
* @param userPubkeys - Array of user pubkeys to fetch deletions for (optional) |
||||||
|
*/ |
||||||
|
async fetchAndProcessDeletions(nostrClient: NostrClient, userPubkeys: string[] = []): Promise<void> { |
||||||
|
try { |
||||||
|
// Fetch deletion events (kind 5) for the specified users
|
||||||
|
const filters: NostrFilter[] = []; |
||||||
|
|
||||||
|
if (userPubkeys.length > 0) { |
||||||
|
// Fetch deletions for specific users
|
||||||
|
filters.push({ |
||||||
|
kinds: [KIND.DELETION_REQUEST], |
||||||
|
authors: userPubkeys, |
||||||
|
limit: 100 |
||||||
|
}); |
||||||
|
} else { |
||||||
|
// If no specific users, we can't fetch all deletions (would be too many)
|
||||||
|
// In this case, we'll just process any deletions that are already in cache
|
||||||
|
// or skip this call
|
||||||
|
logger.debug('No user pubkeys provided, skipping deletion fetch'); |
||||||
|
return; |
||||||
|
} |
||||||
|
|
||||||
|
const deletionEvents = await nostrClient.fetchEvents(filters); |
||||||
|
|
||||||
|
if (deletionEvents.length > 0) { |
||||||
|
await this.processDeletionEvents(deletionEvents); |
||||||
|
} |
||||||
|
} catch (error) { |
||||||
|
logger.error({ error, userPubkeys: userPubkeys.length }, 'Error fetching and processing deletion events'); |
||||||
|
throw error; |
||||||
|
} |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Singleton instance — the whole app shares one IndexedDB-backed cache.
export const persistentEventCache = new PersistentEventCache();
||||||
|
|
||||||
|
// Run cleanup every hour and process deletions every 15 minutes
|
||||||
|
if (typeof window !== 'undefined') { |
||||||
|
setInterval(() => { |
||||||
|
persistentEventCache.cleanup().catch(err => { |
||||||
|
logger.error({ error: err }, 'Error during scheduled cache cleanup'); |
||||||
|
}); |
||||||
|
}, 60 * 60 * 1000); // 1 hour
|
||||||
|
|
||||||
|
// Process deletion events in the background every 15 minutes
|
||||||
|
// This will be triggered when a NostrClient is available
|
||||||
|
let deletionProcessingInterval: ReturnType<typeof setInterval> | null = null; |
||||||
|
|
||||||
|
// Start deletion processing when a client becomes available
|
||||||
|
// We'll set this up in the NostrClient or a service that has access to it
|
||||||
|
(window as any).__startDeletionProcessing = async (nostrClient: any, userPubkeys: string[] = []) => { |
||||||
|
if (deletionProcessingInterval) { |
||||||
|
clearInterval(deletionProcessingInterval); |
||||||
|
} |
||||||
|
|
||||||
|
// Process immediately, then every 15 minutes
|
||||||
|
persistentEventCache.fetchAndProcessDeletions(nostrClient, userPubkeys).catch((err: unknown) => { |
||||||
|
logger.debug({ error: err }, 'Error during initial deletion processing'); |
||||||
|
}); |
||||||
|
|
||||||
|
deletionProcessingInterval = setInterval(() => { |
||||||
|
persistentEventCache.fetchAndProcessDeletions(nostrClient, userPubkeys).catch((err: unknown) => { |
||||||
|
logger.debug({ error: err }, 'Error during scheduled deletion processing'); |
||||||
|
}); |
||||||
|
}, 15 * 60 * 1000); // 15 minutes
|
||||||
|
}; |
||||||
|
} |
||||||
@ -0,0 +1,99 @@ |
|||||||
|
/** |
||||||
|
* Utility to resolve various pubkey formats to hex pubkey |
||||||
|
* Supports: hex pubkey, npub, nprofile, NIP-05 |
||||||
|
*/ |
||||||
|
|
||||||
|
import { nip19 } from 'nostr-tools'; |
||||||
|
import logger from '../services/logger.js'; |
||||||
|
|
||||||
|
/** |
||||||
|
* Resolve a pubkey from various formats to hex |
||||||
|
* Supports: |
||||||
|
* - Hex pubkey (64 hex characters) |
||||||
|
* - npub (NIP-19 encoded pubkey) |
||||||
|
* - nprofile (NIP-19 encoded profile with relays) |
||||||
|
* - NIP-05 (e.g., user@domain.com) |
||||||
|
*/ |
||||||
|
export async function resolvePubkey(input: string): Promise<string | null> { |
||||||
|
if (!input || !input.trim()) { |
||||||
|
return null; |
||||||
|
} |
||||||
|
|
||||||
|
const trimmed = input.trim(); |
||||||
|
|
||||||
|
// Check if it's already a hex pubkey (64 hex characters)
|
||||||
|
if (/^[0-9a-f]{64}$/i.test(trimmed)) { |
||||||
|
return trimmed.toLowerCase(); |
||||||
|
} |
||||||
|
|
||||||
|
// Check if it's a NIP-19 encoded value (npub or nprofile)
|
||||||
|
if (trimmed.startsWith('npub') || trimmed.startsWith('nprofile')) { |
||||||
|
try { |
||||||
|
const decoded = nip19.decode(trimmed); |
||||||
|
if (decoded.type === 'npub') { |
||||||
|
return decoded.data as string; |
||||||
|
} else if (decoded.type === 'nprofile') { |
||||||
|
// nprofile contains pubkey in data.pubkey
|
||||||
|
return (decoded.data as { pubkey: string }).pubkey; |
||||||
|
} |
||||||
|
} catch (error) { |
||||||
|
logger.debug({ error, input: trimmed }, 'Failed to decode NIP-19 value'); |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
// Check if it's a NIP-05 identifier (e.g., user@domain.com)
|
||||||
|
if (trimmed.includes('@')) { |
||||||
|
try { |
||||||
|
const pubkey = await resolveNIP05(trimmed); |
||||||
|
return pubkey; |
||||||
|
} catch (error) { |
||||||
|
logger.debug({ error, input: trimmed }, 'Failed to resolve NIP-05'); |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
return null; |
||||||
|
} |
||||||
|
|
||||||
|
/** |
||||||
|
* Resolve NIP-05 identifier to hex pubkey |
||||||
|
* Fetches from https://<domain>/.well-known/nostr.json?name=<local-part>
|
||||||
|
*/ |
||||||
|
async function resolveNIP05(nip05: string): Promise<string | null> { |
||||||
|
const [localPart, domain] = nip05.split('@'); |
||||||
|
|
||||||
|
if (!localPart || !domain) { |
||||||
|
return null; |
||||||
|
} |
||||||
|
|
||||||
|
try { |
||||||
|
// Fetch from well-known endpoint
|
||||||
|
const url = `https://${domain}/.well-known/nostr.json?name=${encodeURIComponent(localPart)}`; |
||||||
|
const response = await fetch(url, { |
||||||
|
headers: { |
||||||
|
'Accept': 'application/json' |
||||||
|
} |
||||||
|
}); |
||||||
|
|
||||||
|
if (!response.ok) { |
||||||
|
return null; |
||||||
|
} |
||||||
|
|
||||||
|
const data = await response.json(); |
||||||
|
|
||||||
|
// NIP-05 format: { "names": { "<local-part>": "<hex-pubkey>" } }
|
||||||
|
if (data.names && data.names[localPart]) { |
||||||
|
const pubkey = data.names[localPart]; |
||||||
|
// Validate it's a hex pubkey
|
||||||
|
if (/^[0-9a-f]{64}$/i.test(pubkey)) { |
||||||
|
return pubkey.toLowerCase(); |
||||||
|
} |
||||||
|
} |
||||||
|
|
||||||
|
return null; |
||||||
|
} catch (error) { |
||||||
|
logger.debug({ error, nip05 }, 'Error fetching NIP-05'); |
||||||
|
return null; |
||||||
|
} |
||||||
|
} |
||||||
Loading…
Reference in new issue