diff --git a/nip66-cron/index.mjs b/nip66-cron/index.mjs index 4ef6cd82..80140b5c 100644 --- a/nip66-cron/index.mjs +++ b/nip66-cron/index.mjs @@ -30,40 +30,40 @@ const RELAY_MONITOR_ANNOUNCEMENT_KIND = 10166 /** * Default URLs to run NIP-11 checks against (30166); always merged with the monitor’s kind 10002 unless overridden. - * Union of relay presets in src/constants.ts: DEFAULT_FAVORITE_RELAYS, BIG_RELAY_URLS, + * Union of relay presets in src/constants.ts: DEFAULT_FAVORITE_RELAYS, FAST_READ_RELAY_URLS, * NIP66_DISCOVERY_RELAY_URLS, BOOKSTR_RELAY_URLS, READ_ONLY_RELAY_URLS, KIND_1_BLOCKED_RELAY_URLS, * FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS, GIF_RELAY_URLS, SEARCHABLE_RELAY_URLS, * PROFILE_RELAY_URLS, DEFAULT_NOSTRCONNECT_RELAY — deduped, sorted. */ +// Deduplicated list of default relays to monitor (normalized URLs, first occurrence preserved) const DEFAULT_RELAYS_TO_MONITOR = [ - 'wss://aggr.nostr.land', - 'wss://bucket.coracle.social', - 'wss://freelay.sovbit.host', - 'wss://nostr.sovbit.host', - 'wss://hist.nostr.land', - 'wss://nos.lol', - 'wss://nostr.land', - 'wss://nostr.mom', - 'wss://nostr.wine', - 'wss://relay.lumina.rocks', - 'wss://greensoul.space', - 'wss://nostr21.com', + 'wss://theforest.nostr1.com', 'wss://orly-relay.imwald.eu', + 'wss://nostr.land', + 'wss://thecitadel.nostr1.com', + 'wss://relay.nostr.watch', + 'wss://relaypag.es', + 'wss://hist.nostr.land', 'wss://profiles.nostr1.com', 'wss://purplepag.es', + 'wss://nostr.wine', + 'wss://nostr21.com', + 'wss://aggr.nostr.land', 'wss://relay.damus.io', - 'wss://relay.gifbuddy.lol', - 'wss://relay.nostr.watch', - 'wss://relay.nsec.app', 'wss://relay.primal.net', - 'wss://relay.snort.social', - 'wss://relaypag.es', + 'wss://nos.lol', + 'wss://relay.gifbuddy.lol', + 'wss://freelay.sovbit.host', 'wss://search.nos.today', - 'wss://thecitadel.nostr1.com', - 'wss://theforest.nostr1.com', - 'wss://christpill.nostr1.com', + 'wss://relay.snort.social', + 'wss://nostr.mom', + 
'wss://relay.noswhere.com', + 'wss://relay.wikifreedia.xyz', 'wss://nostr.einundzwanzig.space', - 'relay.wikifreedia.xyz' + 'wss://relay.lumina.rocks', + 'wss://nostrelites.org', + 'wss://relay.nsec.app', + 'wss://bucket.coracle.social' ] /** Relays to publish 30166/10166 and to REQ kind 10002 from; broad enough for Imwald + NIP-66 discovery. */ diff --git a/src/components/Embedded/EmbeddedNote.tsx b/src/components/Embedded/EmbeddedNote.tsx index 37847711..ec6eafb0 100644 --- a/src/components/Embedded/EmbeddedNote.tsx +++ b/src/components/Embedded/EmbeddedNote.tsx @@ -1,5 +1,5 @@ import { Skeleton } from '@/components/ui/skeleton' -import { BIG_RELAY_URLS, FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS, ExtendedKind } from '@/constants' +import { FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS, ExtendedKind } from '@/constants' import { useFetchEvent } from '@/hooks' import { normalizeUrl } from '@/lib/url' import { cn } from '@/lib/utils' @@ -137,7 +137,7 @@ function EmbeddedNoteNotFound({ useEffect(() => { const getExternalRelays = async () => { const alreadyTriedRelaysSet = new Set() - ;[...BIG_RELAY_URLS, ...FAST_READ_RELAY_URLS].forEach(url => { + ;[...FAST_READ_RELAY_URLS].forEach(url => { const normalized = normalizeUrl(url) if (normalized) alreadyTriedRelaysSet.add(normalized) }) diff --git a/src/components/FavoriteRelaysSetting/PullRelaySetsButton.tsx b/src/components/FavoriteRelaysSetting/PullRelaySetsButton.tsx index 463bcfe3..dca43e81 100644 --- a/src/components/FavoriteRelaysSetting/PullRelaySetsButton.tsx +++ b/src/components/FavoriteRelaysSetting/PullRelaySetsButton.tsx @@ -15,7 +15,7 @@ import { DrawerTitle, DrawerTrigger } from '@/components/ui/drawer' -import { BIG_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS } from '@/constants' import { getReplaceableEventIdentifier } from '@/lib/event' import { tagNameEquals } from '@/lib/tag' import { isWebsocketUrl, simplifyUrl } from '@/lib/url' @@ -94,7 +94,7 @@ function RemoteRelaySets({ close 
}: { close?: () => void }) { const init = async () => { setInitialed(false) const events = await queryService.fetchEvents( - (relayList?.write ?? []).concat(BIG_RELAY_URLS).slice(0, 4), + (relayList?.write ?? []).concat(FAST_READ_RELAY_URLS).slice(0, 4), { kinds: [kinds.Relaysets], authors: [pubkey], diff --git a/src/components/Note/Poll.tsx b/src/components/Note/Poll.tsx index 59dab008..792e4741 100644 --- a/src/components/Note/Poll.tsx +++ b/src/components/Note/Poll.tsx @@ -1,5 +1,5 @@ import { Button } from '@/components/ui/button' -import { BIG_RELAY_URLS, POLL_TYPE } from '@/constants' +import { FAST_READ_RELAY_URLS, POLL_TYPE } from '@/constants' import { useFetchPollResults } from '@/hooks/useFetchPollResults' import { createPollResponseDraftEvent } from '@/lib/draft-event' import { getPollMetadataFromEvent } from '@/lib/event-metadata' @@ -266,7 +266,7 @@ async function ensurePollRelays(_creator: string, poll: { relayUrls: string[] }) const relays = poll.relayUrls.slice(0, 4) // Privacy: Use defaults instead of fetching creator's relays if (!relays.length) { - relays.push(...BIG_RELAY_URLS.slice(0, 4)) + relays.push(...FAST_READ_RELAY_URLS.slice(0, 4)) } return relays } diff --git a/src/components/Note/PublicationIndex/PublicationIndex.tsx b/src/components/Note/PublicationIndex/PublicationIndex.tsx index 86540f3c..34722734 100644 --- a/src/components/Note/PublicationIndex/PublicationIndex.tsx +++ b/src/components/Note/PublicationIndex/PublicationIndex.tsx @@ -444,7 +444,7 @@ export default function PublicationIndex({ const buildComprehensiveRelayList = useCallback(async ( additionalRelays: string[] = [] ): Promise => { - const { FAST_READ_RELAY_URLS, BIG_RELAY_URLS, SEARCHABLE_RELAY_URLS } = await import('@/constants') + const { FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS } = await import('@/constants') const relayUrls = new Set() // Add FAST_READ_RELAY_URLS @@ -492,8 +492,8 @@ export default function PublicationIndex({ // Ignore if user relay list can't be 
fetched } - // Add BIG_RELAY_URLS as fallback - BIG_RELAY_URLS.forEach(url => { + // Add FAST_READ_RELAY_URLS as fallback + FAST_READ_RELAY_URLS.forEach(url => { const normalized = normalizeUrl(url) if (normalized) relayUrls.add(normalized) }) diff --git a/src/components/NoteOptions/useMenuActions.tsx b/src/components/NoteOptions/useMenuActions.tsx index 0e9023c9..203db7bc 100644 --- a/src/components/NoteOptions/useMenuActions.tsx +++ b/src/components/NoteOptions/useMenuActions.tsx @@ -11,7 +11,7 @@ import { useCurrentRelays } from '@/providers/CurrentRelaysProvider' import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider' import { useMuteList } from '@/providers/MuteListProvider' import { useNostr } from '@/providers/NostrProvider' -import { BIG_RELAY_URLS, FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' import client from '@/services/client.service' import { eventService, queryService } from '@/services/client.service' import { nip66Service } from '@/services/nip66.service' @@ -84,7 +84,6 @@ export function useMenuActions({ ...currentBrowsingRelayUrls.map(url => normalizeUrl(url) || url), ...favoriteRelays.map(url => normalizeUrl(url) || url), ...relaySets.flatMap(set => set.relayUrls.map(url => normalizeUrl(url) || url)), - ...BIG_RELAY_URLS.map(url => normalizeUrl(url) || url), ...FAST_READ_RELAY_URLS.map(url => normalizeUrl(url) || url), ...FAST_WRITE_RELAY_URLS.map(url => normalizeUrl(url) || url) ].filter(Boolean) as string[] @@ -115,7 +114,7 @@ export function useMenuActions({ const allRelays = [ ...(currentBrowsingRelayUrls || []), ...(favoriteRelays || []), - ...BIG_RELAY_URLS, + ...FAST_READ_RELAY_URLS, ...FAST_READ_RELAY_URLS, ...FAST_WRITE_RELAY_URLS ] @@ -159,7 +158,7 @@ export function useMenuActions({ const allRelays = [ ...(currentBrowsingRelayUrls || []), ...(favoriteRelays || []), - ...BIG_RELAY_URLS, + ...FAST_READ_RELAY_URLS, 
...FAST_READ_RELAY_URLS, ...FAST_WRITE_RELAY_URLS ] diff --git a/src/components/NotificationList/index.tsx b/src/components/NotificationList/index.tsx index c821cde0..e12c7fc5 100644 --- a/src/components/NotificationList/index.tsx +++ b/src/components/NotificationList/index.tsx @@ -102,11 +102,13 @@ const NotificationList = forwardRef( (event: NostrEvent) => { if (event.pubkey === pubkey) return setNotifications((oldEvents) => { - const index = oldEvents.findIndex((oldEvent) => compareEvents(oldEvent, event) <= 0) - if (index !== -1 && oldEvents[index].id === event.id) { - return oldEvents + // Check if event already exists + const existingIndex = oldEvents.findIndex((oldEvent) => oldEvent.id === event.id) + if (existingIndex !== -1) { + return oldEvents // Already exists, don't update } - + + const index = oldEvents.findIndex((oldEvent) => compareEvents(oldEvent, event) <= 0) noteStatsService.updateNoteStatsByEvents([event]) if (index === -1) { return [...oldEvents, event] @@ -176,9 +178,9 @@ const NotificationList = forwardRef( onEvents: (events, eosed) => { if (events.length > 0) { setNotifications(events.filter((event) => event.pubkey !== pubkey)) - setLoading(false) } if (eosed) { + setLoading(false) setUntil(events.length > 0 ? 
events[events.length - 1].created_at - 1 : undefined) noteStatsService.updateNoteStatsByEvents(events) } @@ -227,6 +229,23 @@ const NotificationList = forwardRef( setVisibleNotifications(notifications.slice(0, showCount)) }, [notifications, showCount]) + // Use refs to avoid infinite loops from dependency changes + const notificationsRef = useRef(notifications) + const showCountRef = useRef(showCount) + const loadingRef = useRef(loading) + + useEffect(() => { + notificationsRef.current = notifications + }, [notifications]) + + useEffect(() => { + showCountRef.current = showCount + }, [showCount]) + + useEffect(() => { + loadingRef.current = loading + }, [loading]) + useEffect(() => { const options = { root: null, @@ -235,31 +254,39 @@ const NotificationList = forwardRef( } const loadMore = async () => { - if (showCount < notifications.length) { + // Use refs to avoid dependency on notifications/showCount/loading + const currentNotifications = notificationsRef.current + const currentShowCount = showCountRef.current + const currentLoading = loadingRef.current + + if (currentShowCount < currentNotifications.length) { setShowCount((count) => count + SHOW_COUNT) // preload more - if (notifications.length - showCount > LIMIT / 2) { + if (currentNotifications.length - currentShowCount > LIMIT / 2) { return } } - if (!pubkey || !timelineKey || !until || loading) return + if (!pubkey || !timelineKey || !until || currentLoading) return setLoading(true) - const newNotifications = await client.loadMoreTimeline(timelineKey, until, LIMIT) - setLoading(false) - if (newNotifications.length === 0) { - setUntil(undefined) - return - } + try { + const newNotifications = await client.loadMoreTimeline(timelineKey, until, LIMIT) + if (newNotifications.length === 0) { + setUntil(undefined) + return + } - if (newNotifications.length > 0) { - setNotifications((oldNotifications) => [ - ...oldNotifications, - ...newNotifications.filter((event) => event.pubkey !== pubkey) - ]) - } + if 
(newNotifications.length > 0) { + setNotifications((oldNotifications) => [ + ...oldNotifications, + ...newNotifications.filter((event) => event.pubkey !== pubkey) + ]) + } - setUntil(newNotifications[newNotifications.length - 1].created_at - 1) + setUntil(newNotifications[newNotifications.length - 1].created_at - 1) + } finally { + setLoading(false) + } } const observerInstance = new IntersectionObserver((entries) => { @@ -279,7 +306,7 @@ const NotificationList = forwardRef( observerInstance.unobserve(currentBottomRef) } } - }, [pubkey, timelineKey, until, loading, showCount, notifications]) + }, [pubkey, timelineKey, until]) // Removed notifications, showCount, loading to prevent infinite loops const refresh = () => { topRef.current?.scrollIntoView({ behavior: 'instant', block: 'start' }) diff --git a/src/components/Profile/ProfileBookmarksAndHashtags.tsx b/src/components/Profile/ProfileBookmarksAndHashtags.tsx index 8e5020b1..f762ef7b 100644 --- a/src/components/Profile/ProfileBookmarksAndHashtags.tsx +++ b/src/components/Profile/ProfileBookmarksAndHashtags.tsx @@ -5,7 +5,7 @@ import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider' import { useNostr } from '@/providers/NostrProvider' import client from '@/services/client.service' import { queryService, replaceableEventService } from '@/services/client.service' -import { BIG_RELAY_URLS, FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' import logger from '@/lib/logger' import { normalizeUrl } from '@/lib/url' import NoteCard from '../NoteCard' @@ -72,7 +72,6 @@ const ProfileBookmarksAndHashtags = forwardRef<{ refresh: () => void }, { ...(myRelayList.read || []), // User's inboxes (kind 10002) ...(myRelayList.write || []), // User's outboxes (kind 10002) ...(favoriteRelays || []), // User's favorite relays (kind 10012) - ...BIG_RELAY_URLS, // Big relays ...FAST_READ_RELAY_URLS, // Fast read relays 
...FAST_WRITE_RELAY_URLS // Fast write relays ] diff --git a/src/components/RelayInfo/RelayReviewsPreview.tsx b/src/components/RelayInfo/RelayReviewsPreview.tsx index b44b7651..bd4d3411 100644 --- a/src/components/RelayInfo/RelayReviewsPreview.tsx +++ b/src/components/RelayInfo/RelayReviewsPreview.tsx @@ -7,7 +7,7 @@ import { CarouselNext, CarouselPrevious } from '@/components/ui/carousel' -import { BIG_RELAY_URLS, ExtendedKind } from '@/constants' +import { FAST_READ_RELAY_URLS, ExtendedKind } from '@/constants' import { compareEvents } from '@/lib/event' import { getStarsFromRelayReviewEvent } from '@/lib/event-metadata' import { toRelayReviews } from '@/lib/link' @@ -59,7 +59,7 @@ export default function RelayReviewsPreview({ relayUrl }: { relayUrl: string }) if (pubkey) { filters.push({ kinds: [ExtendedKind.RELAY_REVIEW], authors: [pubkey], '#d': [relayUrl] }) } - const events = await queryService.fetchEvents([relayUrl, ...BIG_RELAY_URLS], filters) + const events = await queryService.fetchEvents([relayUrl, ...FAST_READ_RELAY_URLS], filters) const pubkeySet = new Set() const reviews: NostrEvent[] = [] diff --git a/src/components/SearchResult/index.tsx b/src/components/SearchResult/index.tsx index beaddaa9..ae9664fd 100644 --- a/src/components/SearchResult/index.tsx +++ b/src/components/SearchResult/index.tsx @@ -1,4 +1,4 @@ -import { BIG_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' import { TSearchParams } from '@/types' import NormalFeed from '../NormalFeed' import Profile from '../Profile' @@ -27,7 +27,7 @@ export default function SearchResult({ searchParams }: { searchParams: TSearchPa if (searchParams.type === 'hashtag') { return ( ) diff --git a/src/components/TrendingNotes/index.tsx b/src/components/TrendingNotes/index.tsx index 05b26ffe..386c89fd 100644 --- a/src/components/TrendingNotes/index.tsx +++ b/src/components/TrendingNotes/index.tsx @@ -177,7 +177,7 @@ export 
default function TrendingNotes() { relays.push(...FAST_READ_RELAY_URLS) } } else { - // User is not logged in: use FAST_READ_RELAY_URLS (includes all BIG_RELAY_URLS) + // User is not logged in: fall back to the default FAST_READ_RELAY_URLS set relays.push(...FAST_READ_RELAY_URLS) } diff --git a/src/components/WebPreview/index.tsx b/src/components/WebPreview/index.tsx index 7f8d3756..84efdb97 100644 --- a/src/components/WebPreview/index.tsx +++ b/src/components/WebPreview/index.tsx @@ -16,7 +16,7 @@ import { cleanUrl, isSafeMediaUrl } from '@/lib/url' import { tagNameEquals } from '@/lib/tag' import { queryService } from '@/services/client.service' import { Event } from 'nostr-tools' -import { BIG_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS } from '@/constants' import { getImetaInfosFromEvent } from '@/lib/event' import MarkdownArticle from '../Note/MarkdownArticle/MarkdownArticle' import AsciidocArticle from '../Note/AsciidocArticle/AsciidocArticle' @@ -271,7 +271,7 @@ export default function WebPreview({ url, className }: { url: string; className? return filter }) - const events = await queryService.fetchEvents(BIG_RELAY_URLS, filters) + const events = await queryService.fetchEvents(FAST_READ_RELAY_URLS, filters) // Find all events with matching d-tag const matchingEvents = events.filter(event => { diff --git a/src/constants.ts b/src/constants.ts index 8a8c8eec..c29f45e0 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -84,20 +84,13 @@ export const FONT_SIZE = { LARGE: 'large' } as const -export const BIG_RELAY_URLS = [ - 'wss://theforest.nostr1.com', - 'wss://orly-relay.imwald.eu', - 'wss://nostr.land', - 'wss://thecitadel.nostr1.com', -] - /** * Random public relays (from NIP-66 lively list; write-tested monitors preferred) merged into the * publish relay picker. More candidates improve odds some accept open writes. 
*/ export const RANDOM_PUBLISH_RELAY_COUNT = 5 -/** Relays to query for NIP-66 relay monitoring events (30166), in addition to BIG_RELAY_URLS. */ +/** Relays to query for NIP-66 relay monitoring events (30166), in addition to FAST_READ_RELAY_URLS. */ export const NIP66_DISCOVERY_RELAY_URLS = [ 'wss://thecitadel.nostr1.com', 'wss://relay.nostr.watch', @@ -128,7 +121,7 @@ export const FAST_READ_RELAY_URLS = [ 'wss://nostr.land', 'wss://nostr21.com', 'wss://thecitadel.nostr1.com', - 'wss://aggr.nostr.land' + 'wss://aggr.nostr.land', ] // Optimized relay list for write operations (no aggregator since it's read-only) @@ -160,17 +153,23 @@ export const SEARCHABLE_RELAY_URLS = [ 'wss://relay.damus.io', 'wss://relay.snort.social', 'wss://nos.lol', - 'wss://nostr.mom' + 'wss://nostr.mom', + 'wss://relay.noswhere.com', + 'wss://relay.wikifreedia.xyz', + 'wss://nostr.einundzwanzig.space', + 'wss://relay.lumina.rocks', + 'wss://nostrelites.org' ] export const PROFILE_RELAY_URLS = [ 'wss://nos.lol', 'wss://relay.damus.io', - 'wss://profiles.nostr1.com' + 'wss://profiles.nostr1.com', + 'wss://purplepag.es' ] -// Combined relay URLs for profile fetching - includes both BIG_RELAY_URLS and SEARCHABLE_RELAY_URLS +// Combined relay URLs for profile fetching - combines SEARCHABLE_RELAY_URLS, FAST_READ_RELAY_URLS and PROFILE_RELAY_URLS export const PROFILE_FETCH_RELAY_URLS = [...SEARCHABLE_RELAY_URLS, ...FAST_READ_RELAY_URLS, ...PROFILE_RELAY_URLS] export const GROUP_METADATA_EVENT_KIND = 39000 diff --git a/src/hooks/useFetchProfile.tsx b/src/hooks/useFetchProfile.tsx index f4cf4ea2..bc7115ca 100644 --- a/src/hooks/useFetchProfile.tsx +++ b/src/hooks/useFetchProfile.tsx @@ -2,7 +2,7 @@ import { getProfileFromEvent } from '@/lib/event-metadata' import { userIdToPubkey } from '@/lib/pubkey' import { useNostr } from '@/providers/NostrProvider' import { replaceableEventService } from '@/services/client.service' 
-import indexedDb from '@/services/indexed-db.service' import { kinds } from 'nostr-tools' import { TProfile } from '@/types' import { useEffect, useState } from 'react' @@ -30,22 +29,28 @@ export function useFetchProfile(id?: string, skipCache = false) { const run = async () => { setIsFetching(true) try { - // Get cached profile from IndexedDB - const cachedEvent = await indexedDb.getReplaceableEvent(pubkey, kinds.Metadata) - const cached = cachedEvent ? getProfileFromEvent(cachedEvent) : undefined - - // Fetch fresh profile + // fetchReplaceableEvent now checks in-memory cache first (instant), then IndexedDB, then network + // This is optimized for speed - memory cache is synchronous const profileEvent = await replaceableEventService.fetchReplaceableEvent(pubkey, kinds.Metadata) - const profile = profileEvent ? getProfileFromEvent(profileEvent) : undefined if (cancelled) return - if (cached) setProfile(cached) - if (profile) setProfile(profile) + if (profileEvent) { + const profile = getProfileFromEvent(profileEvent) + if (profile) { + setProfile(profile) + setIsFetching(false) + return // Return immediately with cached/fetched profile + } + } + + // If we get here, no profile was found + setIsFetching(false) } catch (err) { - if (!cancelled) setError(err as Error) - } finally { - if (!cancelled) setIsFetching(false) + if (!cancelled) { + setError(err as Error) + setIsFetching(false) + } } } diff --git a/src/lib/event-metadata.ts b/src/lib/event-metadata.ts index b492e0e6..d15dd27b 100644 --- a/src/lib/event-metadata.ts +++ b/src/lib/event-metadata.ts @@ -1,4 +1,4 @@ -import { BIG_RELAY_URLS, POLL_TYPE } from '@/constants' +import { FAST_READ_RELAY_URLS, POLL_TYPE } from '@/constants' import { TEmoji, TPollType, TRelayList, TRelaySet, TPaymentInfo, TProfile } from '@/types' import { Event, kinds } from 'nostr-tools' import { buildATag } from './draft-event' @@ -12,7 +12,7 @@ import logger from '@/lib/logger' export function getRelayListFromEvent(event?: Event | 
null, blockedRelays?: string[]) { if (!event) { - return { write: BIG_RELAY_URLS, read: BIG_RELAY_URLS, originalRelays: [] } + return { write: FAST_READ_RELAY_URLS, read: FAST_READ_RELAY_URLS, originalRelays: [] } } const torBrowserDetected = isTorBrowser() @@ -48,11 +48,11 @@ export function getRelayListFromEvent(event?: Event | null, blockedRelays?: stri } }) - // If there are too many relays, use the default BIG_RELAY_URLS + // If there are too many relays, use the default FAST_READ_RELAY_URLS // Because they don't know anything about relays, their settings cannot be trusted return { - write: relayList.write.length && relayList.write.length <= 8 ? relayList.write : BIG_RELAY_URLS, - read: relayList.read.length && relayList.write.length <= 8 ? relayList.read : BIG_RELAY_URLS, + write: relayList.write.length && relayList.write.length <= 8 ? relayList.write : FAST_READ_RELAY_URLS, + read: relayList.read.length && relayList.write.length <= 8 ? relayList.read : FAST_READ_RELAY_URLS, originalRelays: relayList.originalRelays } } diff --git a/src/pages/primary/DiscussionsPage/index.tsx b/src/pages/primary/DiscussionsPage/index.tsx index 8248f468..c94529cc 100644 --- a/src/pages/primary/DiscussionsPage/index.tsx +++ b/src/pages/primary/DiscussionsPage/index.tsx @@ -9,7 +9,7 @@ import logger from '@/lib/logger' import { NostrEvent, Event as NostrEventType } from 'nostr-tools' import { kinds } from 'nostr-tools' import { normalizeUrl } from '@/lib/url' -import { BIG_RELAY_URLS, FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' import client from '@/services/client.service' import { queryService } from '@/services/client.service' import discussionFeedCache from '@/services/discussion-feed-cache.service' @@ -365,7 +365,6 @@ const DiscussionsPage = forwardRef((_, ref) => { ...(myRelayList.read || []), // User's inboxes (kind 10002) ...(myRelayList.write || []), // User's outboxes (kind 
10002) ...(favoriteRelays || []), // User's favorite relays (kind 10012) - ...BIG_RELAY_URLS, // Big relays ...FAST_READ_RELAY_URLS, // Fast read relays ...FAST_WRITE_RELAY_URLS // Fast write relays ] diff --git a/src/pages/secondary/NoteListPage/index.tsx b/src/pages/secondary/NoteListPage/index.tsx index e3b1fad5..89bfe29c 100644 --- a/src/pages/secondary/NoteListPage/index.tsx +++ b/src/pages/secondary/NoteListPage/index.tsx @@ -1,7 +1,7 @@ import { Favicon } from '@/components/Favicon' import NormalFeed from '@/components/NormalFeed' import { Button } from '@/components/ui/button' -import { BIG_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' import { normalizeUrl } from '@/lib/url' import SecondaryPageLayout from '@/layouts/SecondaryPageLayout' import { toProfileList } from '@/lib/link' @@ -79,7 +79,7 @@ const NoteListPage = forwardRef(({ index, hid setSubRequests([ { filter: { '#t': [hashtag], ...(kinds.length > 0 ? { kinds } : {}) }, - urls: BIG_RELAY_URLS + urls: FAST_READ_RELAY_URLS } ]) // Set controls for hashtag subscribe button - check subscription status @@ -118,7 +118,7 @@ const NoteListPage = forwardRef(({ index, hid { filter: { '#I': [externalContentId], ...(kinds.length > 0 ? 
{ kinds } : {}) }, urls: Array.from(new Set([ - ...BIG_RELAY_URLS.map(url => normalizeUrl(url) || url), + ...FAST_READ_RELAY_URLS.map(url => normalizeUrl(url) || url), ...(relayList?.write || []).map(url => normalizeUrl(url) || url) ])) } @@ -176,7 +176,7 @@ const NoteListPage = forwardRef(({ index, hid setSubRequests([ { filter, - urls: BIG_RELAY_URLS + urls: FAST_READ_RELAY_URLS } ]) } diff --git a/src/pages/secondary/NotePage/NotFound.tsx b/src/pages/secondary/NotePage/NotFound.tsx index 552bed18..25831840 100644 --- a/src/pages/secondary/NotePage/NotFound.tsx +++ b/src/pages/secondary/NotePage/NotFound.tsx @@ -1,6 +1,6 @@ import ClientSelect from '@/components/ClientSelect' import { Button } from '@/components/ui/button' -import { BIG_RELAY_URLS, FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' import { normalizeUrl } from '@/lib/url' import client from '@/services/client.service' import { AlertCircle, Search } from 'lucide-react' @@ -27,10 +27,10 @@ export default function NotFound({ if (!bech32Id) return const getExternalRelays = async () => { - // Get all relays that have already been tried (BIG_RELAY_URLS + FAST_READ_RELAY_URLS) + // Get all relays that have already been tried (FAST_READ_RELAY_URLS) // These are the relays used in the initial fetch const alreadyTriedRelaysSet = new Set() - ;[...BIG_RELAY_URLS, ...FAST_READ_RELAY_URLS].forEach(url => { + ;[...FAST_READ_RELAY_URLS].forEach(url => { const normalized = normalizeUrl(url) if (normalized) alreadyTriedRelaysSet.add(normalized) }) diff --git a/src/pages/secondary/RelayReviewsPage/index.tsx b/src/pages/secondary/RelayReviewsPage/index.tsx index 390ff940..e700e767 100644 --- a/src/pages/secondary/RelayReviewsPage/index.tsx +++ b/src/pages/secondary/RelayReviewsPage/index.tsx @@ -1,5 +1,5 @@ import NoteList from '@/components/NoteList' -import { BIG_RELAY_URLS, ExtendedKind } from '@/constants' +import { 
FAST_READ_RELAY_URLS, ExtendedKind } from '@/constants' import SecondaryPageLayout from '@/layouts/SecondaryPageLayout' import { normalizeUrl, simplifyUrl } from '@/lib/url' import { forwardRef, useMemo } from 'react' @@ -24,7 +24,7 @@ const RelayReviewsPage = forwardRef(({ url, index, hideTitlebar = false }: { url showKinds={[ExtendedKind.RELAY_REVIEW]} subRequests={[ { - urls: [normalizedUrl, ...BIG_RELAY_URLS], + urls: [normalizedUrl, ...FAST_READ_RELAY_URLS], filter: { '#d': [normalizedUrl] } } ]} diff --git a/src/providers/BookmarksProvider.tsx b/src/providers/BookmarksProvider.tsx index 4de2d706..2bc0242e 100644 --- a/src/providers/BookmarksProvider.tsx +++ b/src/providers/BookmarksProvider.tsx @@ -1,7 +1,7 @@ import { buildATag, buildETag, createBookmarkDraftEvent } from '@/lib/draft-event' import { getReplaceableCoordinateFromEvent, isReplaceableEvent } from '@/lib/event' import { normalizeUrl } from '@/lib/url' -import { BIG_RELAY_URLS, FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' import logger from '@/lib/logger' import client from '@/services/client.service' import { replaceableEventService } from '@/services/client.service' @@ -37,7 +37,6 @@ export function BookmarksProvider({ children }: { children: React.ReactNode }) { ...(myRelayList.read || []), // User's inboxes (kind 10002) ...(myRelayList.write || []), // User's outboxes (kind 10002) ...(favoriteRelays || []), // User's favorite relays (kind 10012) - ...BIG_RELAY_URLS, // Big relays ...FAST_READ_RELAY_URLS, // Fast read relays ...FAST_WRITE_RELAY_URLS // Fast write relays ] diff --git a/src/providers/FavoriteRelaysProvider.tsx b/src/providers/FavoriteRelaysProvider.tsx index 4246043d..84788094 100644 --- a/src/providers/FavoriteRelaysProvider.tsx +++ b/src/providers/FavoriteRelaysProvider.tsx @@ -1,4 +1,4 @@ -import { BIG_RELAY_URLS, DEFAULT_FAVORITE_RELAYS } from '@/constants' +import { 
FAST_READ_RELAY_URLS, DEFAULT_FAVORITE_RELAYS } from '@/constants' import { createFavoriteRelaysDraftEvent, createBlockedRelaysDraftEvent, createRelaySetDraftEvent } from '@/lib/draft-event' import { getReplaceableEventIdentifier } from '@/lib/event' import { getRelaySetFromEvent } from '@/lib/event-metadata' @@ -47,9 +47,9 @@ export function FavoriteRelaysProvider({ children }: { children: React.ReactNode useEffect(() => { if (!favoriteRelaysEvent) { - // For anonymous users (no login), only use relays from BIG_RELAY_URLS + // For anonymous users (no login), only use relays from FAST_READ_RELAY_URLS // Don't load potentially untrusted relays from local storage - const favoriteRelays: string[] = pubkey ? DEFAULT_FAVORITE_RELAYS : BIG_RELAY_URLS.slice() + const favoriteRelays: string[] = pubkey ? DEFAULT_FAVORITE_RELAYS : FAST_READ_RELAY_URLS.slice() if (pubkey) { // Only add stored relay sets if user is logged in @@ -107,7 +107,7 @@ export function FavoriteRelaysProvider({ children }: { children: React.ReactNode const normalizedRelays = [ ...(relayList?.write ?? 
[]).map(url => normalizeUrl(url) || url), - ...BIG_RELAY_URLS.map(url => normalizeUrl(url) || url) + ...FAST_READ_RELAY_URLS.map(url => normalizeUrl(url) || url) ] const newRelaySetEvents = await queryService.fetchEvents( Array.from(new Set(normalizedRelays)).slice(0, 5), diff --git a/src/providers/FeedProvider.tsx b/src/providers/FeedProvider.tsx index 7cf48b0d..ccb2e897 100644 --- a/src/providers/FeedProvider.tsx +++ b/src/providers/FeedProvider.tsx @@ -174,7 +174,7 @@ export function FeedProvider({ children }: { children: React.ReactNode }) { // Wait for favoriteRelays to be initialized (should have at least default relays) // If favoriteRelays is empty, it might not be initialized yet, so wait if (favoriteRelays.length === 0 && !pubkey) { - // For anonymous users, favoriteRelays should be initialized from BIG_RELAY_URLS + // For anonymous users, favoriteRelays should be initialized from FAST_READ_RELAY_URLS // If it's still empty, something is wrong, but we'll use defaults logger.debug('FeedProvider: favoriteRelays is empty, using defaults') } diff --git a/src/providers/GroupListProvider.tsx b/src/providers/GroupListProvider.tsx index 3604519f..20403b91 100644 --- a/src/providers/GroupListProvider.tsx +++ b/src/providers/GroupListProvider.tsx @@ -3,7 +3,7 @@ import { useNostr } from '@/providers/NostrProvider' import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider' import { ExtendedKind } from '@/constants' import { normalizeUrl } from '@/lib/url' -import { BIG_RELAY_URLS, FAST_READ_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS } from '@/constants' import client from '@/services/client.service' import { queryService } from '@/services/client.service' import logger from '@/lib/logger' @@ -38,7 +38,6 @@ export function GroupListProvider({ children }: { children: React.ReactNode }) { ...(myRelayList.read || []), // User's inboxes (kind 10002) ...(myRelayList.write || []), // User's outboxes (kind 10002) ...(favoriteRelays || []), // 
User's favorite relays (kind 10012) - ...BIG_RELAY_URLS, // Big relays ...FAST_READ_RELAY_URLS // Fast read relays ] diff --git a/src/providers/InterestListProvider.tsx b/src/providers/InterestListProvider.tsx index 54ad81e1..cdc032c2 100644 --- a/src/providers/InterestListProvider.tsx +++ b/src/providers/InterestListProvider.tsx @@ -1,7 +1,7 @@ import { createInterestListDraftEvent } from '@/lib/draft-event' import { normalizeTopic } from '@/lib/discussion-topics' import { normalizeUrl } from '@/lib/url' -import { BIG_RELAY_URLS, FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants' import logger from '@/lib/logger' import client from '@/services/client.service' import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react' @@ -44,7 +44,6 @@ export function InterestListProvider({ children }: { children: React.ReactNode } ...(myRelayList.read || []), // User's inboxes (kind 10002) ...(myRelayList.write || []), // User's outboxes (kind 10002) ...(favoriteRelays || []), // User's favorite relays (kind 10012) - ...BIG_RELAY_URLS, // Big relays ...FAST_READ_RELAY_URLS, // Fast read relays ...FAST_WRITE_RELAY_URLS // Fast write relays ] diff --git a/src/providers/NostrProvider/index.tsx b/src/providers/NostrProvider/index.tsx index eae42b5f..30a46341 100644 --- a/src/providers/NostrProvider/index.tsx +++ b/src/providers/NostrProvider/index.tsx @@ -1,5 +1,5 @@ import LoginDialog from '@/components/LoginDialog' -import { BIG_RELAY_URLS, ExtendedKind, FAST_WRITE_RELAY_URLS, PROFILE_FETCH_RELAY_URLS, PROFILE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, ExtendedKind, FAST_WRITE_RELAY_URLS, PROFILE_FETCH_RELAY_URLS, PROFILE_RELAY_URLS } from '@/constants' import { buildAltTag, buildClientTag, @@ -374,11 +374,11 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { } const [relayListEvents, cacheRelayListEvents] = 
await Promise.all([ - queryService.fetchEvents(BIG_RELAY_URLS, { + queryService.fetchEvents(FAST_READ_RELAY_URLS, { kinds: [kinds.RelayList], authors: [account.pubkey] }), - queryService.fetchEvents(BIG_RELAY_URLS, { + queryService.fetchEvents(FAST_READ_RELAY_URLS, { kinds: [ExtendedKind.CACHE_RELAYS], authors: [account.pubkey] }) @@ -771,12 +771,12 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { const setupNewUser = async (signer: ISigner) => { await Promise.allSettled([ - client.publishEvent(BIG_RELAY_URLS, await signer.signEvent(createFollowListDraftEvent([]))), - client.publishEvent(BIG_RELAY_URLS, await signer.signEvent(createMuteListDraftEvent([]))), + client.publishEvent(FAST_READ_RELAY_URLS, await signer.signEvent(createFollowListDraftEvent([]))), + client.publishEvent(FAST_READ_RELAY_URLS, await signer.signEvent(createMuteListDraftEvent([]))), client.publishEvent( - BIG_RELAY_URLS, + FAST_READ_RELAY_URLS, await signer.signEvent( - createRelayListDraftEvent(BIG_RELAY_URLS.map((url) => ({ url, scope: 'both' }))) + createRelayListDraftEvent(FAST_READ_RELAY_URLS.map((url) => ({ url, scope: 'both' }))) ) ) ]) diff --git a/src/services/client-cache.service.ts b/src/services/client-cache.service.ts index f19ed9b0..2f126150 100644 --- a/src/services/client-cache.service.ts +++ b/src/services/client-cache.service.ts @@ -103,6 +103,7 @@ class ClientCacheService { fetchRelayList: (pubkey: string) => Promise fetchFollowList?: (pubkey: string) => Promise fetchMuteList?: (pubkey: string) => Promise + fetchDeletionEvents?: (relayUrls: string[]) => Promise }): Promise { if (this.warmingUp) { logger.debug('[CacheService] Already warming up, skipping') @@ -161,6 +162,14 @@ class ClientCacheService { } } + // Fetch deletion events in background to update tombstone list + if (fetchFn.fetchDeletionEvents) { + // This will run in background and update tombstone list + fetchFn.fetchDeletionEvents([]).catch(err => + logger.warn('[CacheService] 
Failed to fetch deletion events', { error: err }) + ) + } + await Promise.allSettled(promises) logger.info('[CacheService] Cache warmup completed', { count: promises.length }) } finally { diff --git a/src/services/client-events.service.ts b/src/services/client-events.service.ts index 2cfafdba..21a87412 100644 --- a/src/services/client-events.service.ts +++ b/src/services/client-events.service.ts @@ -1,4 +1,4 @@ -import { BIG_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS } from '@/constants' import logger from '@/lib/logger' import type { Event as NEvent, Filter } from 'nostr-tools' import { nip19 } from 'nostr-tools' @@ -221,9 +221,9 @@ export class EventService { ): Promise { if (!relayUrls.length && filter.authors?.length) { // Would need relay list service - for now use big relays - relayUrls = BIG_RELAY_URLS + relayUrls = FAST_READ_RELAY_URLS } else if (!relayUrls.length && !alreadyFetchedFromBigRelays) { - relayUrls = BIG_RELAY_URLS + relayUrls = FAST_READ_RELAY_URLS } if (!relayUrls.length) return undefined @@ -240,8 +240,8 @@ export class EventService { * Private: Fetch events from big relays (batch) */ private async fetchEventsFromBigRelays(ids: readonly string[]): Promise<(NEvent | undefined)[]> { - const initialRelays = BIG_RELAY_URLS - const relayUrls = initialRelays.length > 0 ? initialRelays : BIG_RELAY_URLS + const initialRelays = FAST_READ_RELAY_URLS + const relayUrls = initialRelays.length > 0 ? 
initialRelays : FAST_READ_RELAY_URLS const isSingleEventFetch = ids.length === 1 const events = await this.queryService.query(relayUrls, { diff --git a/src/services/client-query.service.ts b/src/services/client-query.service.ts index fcced971..90fdd4d8 100644 --- a/src/services/client-query.service.ts +++ b/src/services/client-query.service.ts @@ -421,8 +421,8 @@ export class QueryService { ): Promise { let relays = Array.from(new Set(urls)) if (relays.length === 0) { - const { BIG_RELAY_URLS } = await import('@/constants') - relays = [...BIG_RELAY_URLS] + const { FAST_READ_RELAY_URLS } = await import('@/constants') + relays = [...FAST_READ_RELAY_URLS] } const filters = Array.isArray(filter) ? filter : [filter] const hasKind1 = filters.some((f) => f.kinds && (Array.isArray(f.kinds) ? f.kinds.includes(1) : f.kinds === 1)) diff --git a/src/services/client-replaceable-events.service.ts b/src/services/client-replaceable-events.service.ts index ef0e9c03..9d678640 100644 --- a/src/services/client-replaceable-events.service.ts +++ b/src/services/client-replaceable-events.service.ts @@ -1,4 +1,4 @@ -import { BIG_RELAY_URLS, ExtendedKind, PROFILE_FETCH_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, ExtendedKind, PROFILE_FETCH_RELAY_URLS } from '@/constants' import { kinds, nip19 } from 'nostr-tools' import type { Event as NEvent, Filter } from 'nostr-tools' import DataLoader from 'dataloader' @@ -10,6 +10,7 @@ import { TProfile } from '@/types' import { LRUCache } from 'lru-cache' import indexedDb from './indexed-db.service' import type { QueryService } from './client-query.service' +import { isReplaceableEvent, getReplaceableCoordinateFromEvent } from '@/lib/event' export class ReplaceableEventService { private queryService: QueryService @@ -18,6 +19,18 @@ export class ReplaceableEventService { max: 50, ttl: 1000 * 60 * 60 }) + // In-memory cache for profiles - instant access, no IndexedDB blocking + private profileMemoryCache = new LRUCache({ + max: 1000, 
// Cache up to 1000 profiles in memory + ttl: 1000 * 60 * 30, // 30 minutes TTL + updateAgeOnGet: true // Refresh TTL on access + }) + // In-memory cache for all replaceable events - fast access + private replaceableEventMemoryCache = new LRUCache({ + max: 2000, // Cache up to 2000 events in memory + ttl: 1000 * 60 * 30, // 30 minutes TTL + updateAgeOnGet: true + }) private replaceableEventFromBigRelaysDataloader: DataLoader< { pubkey: string; kind: number }, NEvent | null, @@ -58,42 +71,163 @@ export class ReplaceableEventService { /** * Fetch replaceable event (profile, relay list, etc.) + * Always checks in-memory cache FIRST (instant), then IndexedDB, then fetches from relays */ async fetchReplaceableEvent(pubkey: string, kind: number, d?: string): Promise { - if (d) { - const event = await this.replaceableEventDataLoader.load({ pubkey, kind, d }) - return event || undefined + const cacheKey = d ? `${kind}:${pubkey}:${d}` : `${kind}:${pubkey}` + + // 1. Check in-memory cache FIRST - instant return, no async overhead + const memoryCached = this.replaceableEventMemoryCache.get(cacheKey) + if (memoryCached) { + // Check tombstone in background (non-blocking) + this.checkTombstoneAndUpdateCache(memoryCached, kind).catch(() => {}) + // Fetch in background to update cache if newer version exists + this.refreshInBackground(pubkey, kind, d).catch(() => {}) + return memoryCached + } + + // 2. Check IndexedDB (async but faster than network) + try { + const indexedDbCached = await indexedDb.getReplaceableEvent(pubkey, kind, d) + if (indexedDbCached) { + // Check tombstone (non-blocking - check in background) + const tombstoneKey = isReplaceableEvent(kind) + ? 
getReplaceableCoordinateFromEvent(indexedDbCached) + : indexedDbCached.id + // Check tombstone in background, don't block + indexedDb.isTombstoned(tombstoneKey).then(isTombstoned => { + if (isTombstoned) { + // Remove from caches if tombstoned + this.replaceableEventMemoryCache.delete(cacheKey) + } else { + // Add to memory cache for next time + this.replaceableEventMemoryCache.set(cacheKey, indexedDbCached) + } + }).catch(() => {}) + + // Fetch in background to update cache if newer version exists + this.refreshInBackground(pubkey, kind, d).catch(() => {}) + return indexedDbCached + } + } catch (error) { + // IndexedDB error - continue to network fetch + } + + // 3. Not in cache, fetch from network + const event = d + ? await this.replaceableEventDataLoader.load({ pubkey, kind, d }) + : await this.replaceableEventFromBigRelaysDataloader.load({ pubkey, kind }) + + if (event) { + // Add to memory cache for instant access next time + this.replaceableEventMemoryCache.set(cacheKey, event) + return event + } + + return undefined + } + + /** + * Check tombstone and update cache (non-blocking background operation) + */ + private async checkTombstoneAndUpdateCache(event: NEvent, kind: number): Promise { + const tombstoneKey = isReplaceableEvent(kind) + ? getReplaceableCoordinateFromEvent(event) + : event.id + const isTombstoned = await indexedDb.isTombstoned(tombstoneKey) + if (isTombstoned) { + const cacheKey = isReplaceableEvent(kind) + ? 
`${kind}:${event.pubkey}` + : `${kind}:${event.pubkey}:${event.id}` + this.replaceableEventMemoryCache.delete(cacheKey) + } + } + + /** + * Refresh event in background (non-blocking) + */ + private async refreshInBackground(pubkey: string, kind: number, d?: string): Promise { + try { + if (d) { + await this.replaceableEventDataLoader.load({ pubkey, kind, d }) + } else { + const event = await this.replaceableEventFromBigRelaysDataloader.load({ pubkey, kind }) + if (event) { + const cacheKey = `${kind}:${pubkey}` + this.replaceableEventMemoryCache.set(cacheKey, event) + } + } + } catch { + // Ignore errors in background refresh } - const event = await this.replaceableEventFromBigRelaysDataloader.load({ pubkey, kind }) - return event || undefined } /** - * Batch fetch replaceable events from big relays + * Batch fetch replaceable events from profile fetch relays + * Optimized: checks memory cache first (instant), then IndexedDB, then network */ - async fetchReplaceableEventsFromBigRelays(pubkeys: string[], kind: number): Promise<(NEvent | undefined)[]> { - const events = await indexedDb.getManyReplaceableEvents(pubkeys, kind) - const nonExistingPubkeyIndexMap = new Map() + async fetchReplaceableEventsFromProfileFetchRelays(pubkeys: string[], kind: number): Promise<(NEvent | undefined)[]> { + // First check memory cache (instant) + const memoryCached: (NEvent | undefined)[] = [] + const memoryMisses: { pubkey: string; index: number }[] = [] + pubkeys.forEach((pubkey, i) => { - if (events[i] === undefined) { - nonExistingPubkeyIndexMap.set(pubkey, i) + const cacheKey = `${kind}:${pubkey}` + const cached = this.replaceableEventMemoryCache.get(cacheKey) + if (cached) { + memoryCached[i] = cached + } else { + memoryMisses.push({ pubkey, index: i }) } }) - const newEvents = await this.replaceableEventFromBigRelaysDataloader.loadMany( - Array.from(nonExistingPubkeyIndexMap.keys()).map((pubkey) => ({ pubkey, kind })) - ) - newEvents.forEach((event, idx) => { - if (event && 
!(event instanceof Error)) { - const pubkey = Array.from(nonExistingPubkeyIndexMap.keys())[idx] - if (pubkey) { - const index = nonExistingPubkeyIndexMap.get(pubkey) - if (index !== undefined) { - events[index] = event ?? undefined + + // For memory misses, check IndexedDB in parallel + const indexedDbPromises = memoryMisses.map(async ({ pubkey, index }) => { + try { + const event = await indexedDb.getReplaceableEvent(pubkey, kind) + if (event) { + // Add to memory cache + const cacheKey = `${kind}:${pubkey}` + this.replaceableEventMemoryCache.set(cacheKey, event) + if (kind === kinds.Metadata) { + this.profileMemoryCache.set(pubkey, event) } + memoryCached[index] = event + return { index, event } } + } catch { + // Ignore errors } + return null }) - return events.map(e => e ?? undefined) + + await Promise.allSettled(indexedDbPromises) + + // Find what's still missing and fetch from network + const stillMissing = memoryMisses.filter(({ index }) => memoryCached[index] === undefined) + if (stillMissing.length > 0) { + const newEvents = await this.replaceableEventFromBigRelaysDataloader.loadMany( + stillMissing.map(({ pubkey }) => ({ pubkey, kind })) + ) + newEvents.forEach((event, idx) => { + if (event && !(event instanceof Error)) { + const { index } = stillMissing[idx]! + if (index !== undefined) { + memoryCached[index] = event ?? 
undefined + // Add to memory cache + if (event) { + const cacheKey = `${kind}:${stillMissing[idx]!.pubkey}` + this.replaceableEventMemoryCache.set(cacheKey, event) + if (kind === kinds.Metadata) { + this.profileMemoryCache.set(stillMissing[idx]!.pubkey, event) + } + } + } + } + }) + } + + return memoryCached } /** @@ -109,6 +243,28 @@ export class ReplaceableEventService { clearCaches(): void { this.replaceableEventFromBigRelaysDataloader.clearAll() this.replaceableEventDataLoader.clearAll() + this.replaceableEventMemoryCache.clear() + this.profileMemoryCache.clear() + } + + /** + * Pre-load profiles into memory cache for instant access + */ + async preloadProfiles(pubkeys: string[]): Promise { + // Load from IndexedDB in parallel + const promises = pubkeys.map(async (pubkey) => { + try { + const event = await indexedDb.getReplaceableEvent(pubkey, kinds.Metadata) + if (event) { + const cacheKey = `${kinds.Metadata}:${pubkey}` + this.replaceableEventMemoryCache.set(cacheKey, event) + this.profileMemoryCache.set(pubkey, event) + } + } catch { + // Ignore errors + } + }) + await Promise.allSettled(promises) } /** @@ -128,15 +284,22 @@ export class ReplaceableEventService { const eventsMap = new Map() await Promise.allSettled( Array.from(groups.entries()).map(async ([kind, pubkeys]) => { + // Use more relays in parallel for better performance + // Browsers can handle many concurrent subscriptions, so we use all available relays let relayUrls: string[] if (kind === kinds.Metadata || kind === kinds.RelayList) { - const base = Array.from(new Set([...BIG_RELAY_URLS, ...PROFILE_FETCH_RELAY_URLS])) + // Combine all available relays for profiles and relay lists + const base = Array.from(new Set([...FAST_READ_RELAY_URLS, ...PROFILE_FETCH_RELAY_URLS])) // TODO: Inject relay list service to get user's relays relayUrls = base } else { - relayUrls = BIG_RELAY_URLS + // Use all big relays for other replaceable events + relayUrls = FAST_READ_RELAY_URLS } + // Use all relays in 
parallel - browsers can handle many concurrent subscriptions + // The QueryService manages per-relay concurrency limits to avoid overloading individual relays + const events = await this.queryService.query(relayUrls, { authors: pubkeys, kinds: [kind] @@ -147,10 +310,25 @@ export class ReplaceableEventService { }) for (const event of events) { + // Check tombstone in background (non-blocking) + const tombstoneKey = isReplaceableEvent(event.kind) + ? getReplaceableCoordinateFromEvent(event) + : event.id + // Don't block on tombstone check - do it in background + indexedDb.isTombstoned(tombstoneKey).then(isTombstoned => { + if (isTombstoned) { + const cacheKey = `${event.kind}:${event.pubkey}` + this.replaceableEventMemoryCache.delete(cacheKey) + } + }).catch(() => {}) + const key = `${event.pubkey}:${event.kind}` const existing = eventsMap.get(key) if (!existing || existing.created_at < event.created_at) { eventsMap.set(key, event) + // Add to memory cache + const cacheKey = `${event.kind}:${event.pubkey}` + this.replaceableEventMemoryCache.set(cacheKey, event) } } }) @@ -160,6 +338,12 @@ export class ReplaceableEventService { const key = `${pubkey}:${kind}` const event = eventsMap.get(key) if (event) { + // Add to memory cache for instant access + const cacheKey = `${kind}:${pubkey}` + this.replaceableEventMemoryCache.set(cacheKey, event) + if (kind === kinds.Metadata) { + this.profileMemoryCache.set(pubkey, event) + } indexedDb.putReplaceableEvent(event) return event } else { @@ -189,7 +373,7 @@ export class ReplaceableEventService { Array.from(groups.entries()).map(async ([, items]) => { const { kind, d } = items[0]! 
const pubkeys = items.map(item => item.pubkey) - const relayUrls = BIG_RELAY_URLS + const relayUrls = FAST_READ_RELAY_URLS const filter: Filter = { authors: pubkeys, @@ -206,10 +390,25 @@ export class ReplaceableEventService { }) for (const event of events) { + // Check tombstone in background (non-blocking) + const tombstoneKey = isReplaceableEvent(event.kind) + ? getReplaceableCoordinateFromEvent(event) + : event.id + // Don't block on tombstone check - do it in background + indexedDb.isTombstoned(tombstoneKey).then(isTombstoned => { + if (isTombstoned) { + const cacheKey = `${event.kind}:${event.pubkey}:${d ?? ''}` + this.replaceableEventMemoryCache.delete(cacheKey) + } + }).catch(() => {}) + const eventKey = `${event.pubkey}:${event.kind}:${d ?? ''}` const existing = eventsMap.get(eventKey) if (!existing || existing.created_at < event.created_at) { eventsMap.set(eventKey, event) + // Add to memory cache + const cacheKey = `${event.kind}:${event.pubkey}:${d ?? ''}` + this.replaceableEventMemoryCache.set(cacheKey, event) } } }) @@ -219,6 +418,12 @@ export class ReplaceableEventService { const eventKey = `${pubkey}:${kind}:${d ?? ''}` const event = eventsMap.get(eventKey) if (event) { + // Add to memory cache for instant access + const cacheKey = `${kind}:${pubkey}:${d ?? 
''}` + this.replaceableEventMemoryCache.set(cacheKey, event) + if (kind === kinds.Metadata) { + this.profileMemoryCache.set(pubkey, event) + } indexedDb.putReplaceableEvent(event) return event } else { @@ -354,7 +559,7 @@ export class ReplaceableEventService { async fetchProfilesForPubkeys(pubkeys: string[]): Promise { const deduped = Array.from(new Set(pubkeys.filter((p) => p && p.length === 64))) if (deduped.length === 0) return [] - const events = await this.fetchReplaceableEventsFromBigRelays(deduped, kinds.Metadata) + const events = await this.fetchReplaceableEventsFromProfileFetchRelays(deduped, kinds.Metadata) const profiles: TProfile[] = [] for (let i = 0; i < deduped.length; i++) { const ev = events[i] @@ -485,7 +690,7 @@ export class ReplaceableEventService { private async _fetchFollowingFavoriteRelays(pubkey: string): Promise<[string, string[]][]> { const followings = await this.fetchFollowings(pubkey) - const favoriteRelaysEvents = await this.fetchReplaceableEventsFromBigRelays( + const favoriteRelaysEvents = await this.fetchReplaceableEventsFromProfileFetchRelays( followings.slice(0, 100), ExtendedKind.FAVORITE_RELAYS ) diff --git a/src/services/client.service.refactored.ts b/src/services/client.service.refactored.ts index 1874f11f..f273c7aa 100644 --- a/src/services/client.service.refactored.ts +++ b/src/services/client.service.refactored.ts @@ -11,7 +11,7 @@ * This maintains backward compatibility while improving maintainability. 
*/ -import { BIG_RELAY_URLS, ExtendedKind, FAST_WRITE_RELAY_URLS, KIND_1_BLOCKED_RELAY_URLS, NIP66_DISCOVERY_RELAY_URLS, PROFILE_RELAY_URLS, READ_ONLY_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, ExtendedKind, FAST_WRITE_RELAY_URLS, KIND_1_BLOCKED_RELAY_URLS, NIP66_DISCOVERY_RELAY_URLS, PROFILE_RELAY_URLS, READ_ONLY_RELAY_URLS } from '@/constants' import { getProfileFromEvent, getRelayListFromEvent } from '@/lib/event-metadata' import logger from '@/lib/logger' import { formatPubkey, isValidPubkey, pubkeyToNpub, userIdToPubkey } from '@/lib/pubkey' @@ -122,7 +122,7 @@ class ClientService extends EventTarget { private async fetchNip66RelayDiscovery(): Promise { try { - const discoveryRelays = Array.from(new Set([...BIG_RELAY_URLS, ...NIP66_DISCOVERY_RELAY_URLS])) + const discoveryRelays = Array.from(new Set([...FAST_READ_RELAY_URLS, ...NIP66_DISCOVERY_RELAY_URLS])) const events = await this.queryService.query( discoveryRelays, { kinds: [ExtendedKind.RELAY_DISCOVERY] }, @@ -139,7 +139,7 @@ class ClientService extends EventTarget { } async fetchNip66DiscoveryForRelay(relayUrl: string): Promise { - const discoveryRelays = Array.from(new Set([...BIG_RELAY_URLS, ...NIP66_DISCOVERY_RELAY_URLS])) + const discoveryRelays = Array.from(new Set([...FAST_READ_RELAY_URLS, ...NIP66_DISCOVERY_RELAY_URLS])) const dTag = normalizeUrl(relayUrl) || relayUrl const { simplifyUrl } = await import('@/lib/url') const shortForm = simplifyUrl(dTag) @@ -818,7 +818,7 @@ class ClientService extends EventTarget { ExtendedKind.RELAY_REVIEW ].includes(event.kind) ) { - _additionalRelayUrls.push(...BIG_RELAY_URLS, ...PROFILE_RELAY_URLS) + _additionalRelayUrls.push(...FAST_READ_RELAY_URLS, ...PROFILE_RELAY_URLS) } else if (event.kind === ExtendedKind.FAVORITE_RELAYS) { _additionalRelayUrls.push(...FAST_WRITE_RELAY_URLS) } else if (event.kind === ExtendedKind.RSS_FEED_LIST) { diff --git a/src/services/client.service.ts b/src/services/client.service.ts index 2b5f1fbf..f252a559 
100644 --- a/src/services/client.service.ts +++ b/src/services/client.service.ts @@ -1,4 +1,4 @@ -import { BIG_RELAY_URLS, ExtendedKind, FAST_WRITE_RELAY_URLS, KIND_1_BLOCKED_RELAY_URLS, NIP66_DISCOVERY_RELAY_URLS, PROFILE_RELAY_URLS, READ_ONLY_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS, ExtendedKind, FAST_WRITE_RELAY_URLS, KIND_1_BLOCKED_RELAY_URLS, NIP66_DISCOVERY_RELAY_URLS, PROFILE_FETCH_RELAY_URLS, READ_ONLY_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' /** NIP-01 filter keys only; NIP-50 adds `search` which non-searchable relays reject. */ function filterForRelay(f: Filter, relaySupportsSearch: boolean): Filter { @@ -126,7 +126,7 @@ class ClientService extends EventTarget { /** NIP-66: fetch relay discovery events (30166) in background to supplement search/NIP support. */ private async fetchNip66RelayDiscovery(): Promise { try { - const discoveryRelays = Array.from(new Set([...BIG_RELAY_URLS, ...NIP66_DISCOVERY_RELAY_URLS])) + const discoveryRelays = Array.from(new Set([...FAST_READ_RELAY_URLS, ...NIP66_DISCOVERY_RELAY_URLS])) const events = await this.queryService.query( discoveryRelays, { kinds: [ExtendedKind.RELAY_DISCOVERY] }, @@ -147,7 +147,7 @@ class ClientService extends EventTarget { * filter by #d so we get the newest report for this relay and can show monitor (author) info. */ async fetchNip66DiscoveryForRelay(relayUrl: string): Promise { - const discoveryRelays = Array.from(new Set([...BIG_RELAY_URLS, ...NIP66_DISCOVERY_RELAY_URLS])) + const discoveryRelays = Array.from(new Set([...FAST_READ_RELAY_URLS, ...NIP66_DISCOVERY_RELAY_URLS])) const dTag = normalizeUrl(relayUrl) || relayUrl const shortForm = simplifyUrl(dTag) const dValues = dTag !== shortForm ? [dTag, shortForm] : [dTag] @@ -177,7 +177,7 @@ class ClientService extends EventTarget { * Determine which relays to publish an event to. 
* Fallbacks (used when user relay list is empty or fetch fails): * - General events (reactions, notes, etc.): FAST_WRITE_RELAY_URLS - * - Relay list / cache relays / contacts: BIG_RELAY_URLS + PROFILE_RELAY_URLS (added to additional) + * - Relay list / cache relays / contacts: PROFILE_FETCH_RELAY_URLS (added to additional) * - Favorite relays: FAST_WRITE_RELAY_URLS (added to additional) * - Report events: FAST_WRITE_RELAY_URLS when no user/seen relays */ @@ -331,11 +331,10 @@ ExtendedKind.RELAY_REVIEW ].includes(event.kind) ) { - _additionalRelayUrls.push(...BIG_RELAY_URLS, ...PROFILE_RELAY_URLS) - logger.debug('[DetermineTargetRelays] Relay list event detected, adding BIG_RELAY_URLS and PROFILE_RELAY_URLS', { + _additionalRelayUrls.push(...PROFILE_FETCH_RELAY_URLS) + logger.debug('[DetermineTargetRelays] Relay list event detected, adding PROFILE_FETCH_RELAY_URLS', { kind: event.kind, - bigRelays: BIG_RELAY_URLS, - profileRelays: PROFILE_RELAY_URLS, + profileFetchRelays: PROFILE_FETCH_RELAY_URLS, additionalRelayCount: _additionalRelayUrls.length }) } else if (event.kind === ExtendedKind.FAVORITE_RELAYS) { @@ -347,7 +346,7 @@ additionalRelayCount: _additionalRelayUrls.length }) } else if (event.kind === ExtendedKind.RSS_FEED_LIST) { - _additionalRelayUrls.push(...FAST_WRITE_RELAY_URLS, ...PROFILE_RELAY_URLS) + _additionalRelayUrls.push(...FAST_WRITE_RELAY_URLS, ...PROFILE_FETCH_RELAY_URLS) } if (event.kind === kinds.RelayList || event.kind === ExtendedKind.FAVORITE_RELAYS) { @@ -466,7 +465,7 @@ presetStriked: string[] } { const presetSet = new Set() - for (const u of [...FAST_WRITE_RELAY_URLS, ...BIG_RELAY_URLS]) { + for (const u of [...FAST_WRITE_RELAY_URLS, ...FAST_READ_RELAY_URLS]) { const n = normalizeUrl(u) || u if (n) presetSet.add(n) } @@ -1388,7 +1387,7 @@ } = {} ) { let 
relays = Array.from(new Set(urls)) - if (relays.length === 0) relays = [...BIG_RELAY_URLS] + if (relays.length === 0) relays = [...FAST_READ_RELAY_URLS] const filters = Array.isArray(filter) ? filter : [filter] const hasKind1 = filters.some((f) => f.kinds && (Array.isArray(f.kinds) ? f.kinds.includes(1) : f.kinds === 1)) if (hasKind1 && KIND_1_BLOCKED_RELAY_URLS.length > 0) { @@ -1442,7 +1441,7 @@ /** * Fetch a single event by id (hex, note1, nevent1, naddr1). - * Relay order: (1) session/DataLoader cache (2) buildInitialRelayList (user's FAST_READ + favorite + read) or BIG_RELAY_URLS + * Relay order: (1) session/DataLoader cache (2) buildInitialRelayList (user's FAST_READ + favorite + read) or FAST_READ_RELAY_URLS * (3) for nevent/naddr: bech32 relay hints + author's read (inbox) + author's write (outbox) from kind 10002 * (4) if still missing and filter has authors: author's read+write again in tryHarderToFetchEvent * (5) SEARCHABLE_RELAY_URLS as final fallback. Author relays are used so embedded notes load from the author's relays. @@ -1537,6 +1536,64 @@ * (2) local index, (3) relay search on SEARCHABLE_RELAY_URLS (same as search page). * Returns cached results immediately, then streams relay results via callback. */ + /** + * Fetch deletion events (kind 5) and update tombstone list + * This should be called during cache warmup to remove deleted events from cache + */ + async fetchDeletionEvents(relayUrls: string[] = []): Promise { + // Fall back to PROFILE_FETCH_RELAY_URLS when no relays are specified + const relays = relayUrls.length > 0 + ? 
relayUrls + : Array.from(new Set([...PROFILE_FETCH_RELAY_URLS])) + + logger.info('[ClientService] Fetching deletion events', { profileFetchRelays: PROFILE_FETCH_RELAY_URLS, relayCount: relays.length }) + + try { + // Fetch latest 100 deletion events + const deletionEvents = await this.queryService.query(relays, { + kinds: [kinds.EventDeletion], + limit: 100 + }, undefined, { + replaceableRace: true, + eoseTimeout: 500, + globalTimeout: 5000 + }) + + logger.debug('[ClientService] Fetched deletion events', { count: deletionEvents.length }) + + // Process each deletion event and add to tombstone list + for (const deletionEvent of deletionEvents) { + // Deletion events have 'e' tags for non-replaceable events or 'a' tags for replaceable events + const eTag = deletionEvent.tags.find(tag => tag[0] === 'e') + const aTag = deletionEvent.tags.find(tag => tag[0] === 'a') + const kTag = deletionEvent.tags.find(tag => tag[0] === 'k') + + if (eTag && eTag[1]) { + // Non-replaceable event - use event ID + await indexedDb.addTombstone(eTag[1]) + } else if (aTag && aTag[1]) { + // Replaceable event - a tag format is "kind:pubkey:d" which is already the coordinate + await indexedDb.addTombstone(aTag[1]) + } else if (kTag && kTag[1] && deletionEvent.pubkey) { + // Fallback: if we have kind and pubkey, construct coordinate + const kind = parseInt(kTag[1], 10) + if (!isNaN(kind)) { + const coordinate = `${kind}:${deletionEvent.pubkey}` + await indexedDb.addTombstone(coordinate) + } + } + } + + // Remove tombstoned events from cache + const removed = await indexedDb.removeTombstonedFromCache() + if (removed > 0) { + logger.info('[ClientService] Removed tombstoned events from cache', { count: removed }) + } + } catch (error) { + logger.warn('[ClientService] Failed to fetch deletion events', { error }) + } + } + async searchNpubsForMention( query: string, limit: number = 100, @@ -1774,9 +1831,9 @@ class ClientService extends EventTarget { ) // Then fetch from relays (will update cache if 
newer) - const relayEvents = await this.replaceableEventService.fetchReplaceableEventsFromBigRelays(pubkeys, kinds.RelayList) + const relayEvents = await this.replaceableEventService.fetchReplaceableEventsFromProfileFetchRelays(pubkeys, kinds.RelayList) - // Fetch cache relays from multiple sources: BIG_RELAY_URLS, PROFILE_FETCH_RELAY_URLS, and user's inboxes/outboxes + // Fetch cache relays from multiple sources: PROFILE_FETCH_RELAY_URLS and user's inboxes/outboxes const cacheRelayEvents = await this.fetchCacheRelayEventsFromMultipleSources(pubkeys, relayEvents, storedRelayEvents) return pubkeys.map((_pubkey, index) => { @@ -1829,14 +1886,14 @@ if (storedCacheEvent) { const cacheRelayList = getRelayListFromEvent(storedCacheEvent) return { - write: cacheRelayList.write.length > 0 ? cacheRelayList.write : BIG_RELAY_URLS, - read: cacheRelayList.read.length > 0 ? cacheRelayList.read : BIG_RELAY_URLS, + write: cacheRelayList.write.length > 0 ? cacheRelayList.write : PROFILE_FETCH_RELAY_URLS, + read: cacheRelayList.read.length > 0 ? 
cacheRelayList.read : PROFILE_FETCH_RELAY_URLS, originalRelays: cacheRelayList.originalRelays } } return { - write: BIG_RELAY_URLS, - read: BIG_RELAY_URLS, + write: PROFILE_FETCH_RELAY_URLS, + read: PROFILE_FETCH_RELAY_URLS, originalRelays: [] } } @@ -1851,7 +1908,6 @@ class ClientService extends EventTarget { /** * Fetch cache relay events (kind 10432) from multiple sources: - * - BIG_RELAY_URLS * - PROFILE_FETCH_RELAY_URLS * - User's inboxes (read relays from kind 10002) * - User's outboxes (write relays from kind 10002) @@ -1873,8 +1929,8 @@ class ClientService extends EventTarget { return storedCacheRelayEvents } - // Fetch from BIG_RELAY_URLS and PROFILE_FETCH_RELAY_URLS - const cacheRelayEvents = await this.replaceableEventService.fetchReplaceableEventsFromBigRelays( + // Fetch from PROFILE_FETCH_RELAY_URLS + const cacheRelayEvents = await this.replaceableEventService.fetchReplaceableEventsFromProfileFetchRelays( pubkeysToFetch, ExtendedKind.CACHE_RELAYS ) @@ -1995,10 +2051,10 @@ class ClientService extends EventTarget { // If many websocket connections are initiated simultaneously, it will be // very slow on Safari (for unknown reason) if (isSafari()) { - let urls = BIG_RELAY_URLS + let urls = FAST_READ_RELAY_URLS if (myPubkey) { const relayList = await this.fetchRelayList(myPubkey) - urls = relayList.read.concat(BIG_RELAY_URLS).slice(0, 5) + urls = relayList.read.concat(FAST_READ_RELAY_URLS).slice(0, 5) } return [{ urls, filter: { authors: pubkeys } }] } diff --git a/src/services/indexed-db.service.ts b/src/services/indexed-db.service.ts index 58cf5719..ebd94f9a 100644 --- a/src/services/indexed-db.service.ts +++ b/src/services/indexed-db.service.ts @@ -3,7 +3,7 @@ import { tagNameEquals } from '@/lib/tag' import { TNip66RelayDiscovery, TRelayInfo } from '@/types' import type { Event } from 'nostr-tools' import { kinds } from 'nostr-tools' -import { isReplaceableEvent } from '@/lib/event' +import { isReplaceableEvent, getReplaceableCoordinateFromEvent } 
from '@/lib/event' import logger from '@/lib/logger' type TValue = { @@ -46,11 +46,13 @@ export const StoreNames = { /** App settings (replaces in-memory/localStorage for persisted settings). Key: setting key, value: string. */ SETTINGS: 'settings', /** NIP-A7 spell events (kind 777). Key: event id. */ - SPELL_EVENTS: 'spellEvents' + SPELL_EVENTS: 'spellEvents', + /** Tombstone list for deleted events (kind 5). Key: event id or replaceable coordinate. */ + TOMBSTONE_LIST: 'tombstoneList' } /** Schema version we expect. When adding stores or migrations, bump this. */ -const DB_VERSION = 26 +const DB_VERSION = 27 /** Max age for profile and payment info cache before we refetch (5 min). */ const PROFILE_AND_PAYMENT_CACHE_MAX_AGE_MS = 5 * 60 * 1000 @@ -225,6 +227,9 @@ class IndexedDbService { if (!db.objectStoreNames.contains(StoreNames.SPELL_EVENTS)) { db.createObjectStore(StoreNames.SPELL_EVENTS, { keyPath: 'key' }) } + if (!db.objectStoreNames.contains(StoreNames.TOMBSTONE_LIST)) { + db.createObjectStore(StoreNames.TOMBSTONE_LIST, { keyPath: 'key' }) + } } } ); @@ -271,6 +276,16 @@ class IndexedDbService { } async putReplaceableEvent(event: Event): Promise { + // Check if tombstoned before caching + const tombstoneKey = isReplaceableEvent(event.kind) + ? 
getReplaceableCoordinateFromEvent(event) + : event.id + const isTombstoned = await this.isTombstoned(tombstoneKey) + if (isTombstoned) { + logger.debug('[IndexedDB] Skipping tombstoned event', { tombstoneKey, eventId: event.id?.substring(0, 8) }) + return Promise.reject(new Error('Event is tombstoned')) + } + // Remove relayStatuses before storing (it's metadata for logging, not part of the event) const cleanEvent = { ...event } delete (cleanEvent as any).relayStatuses @@ -1917,6 +1932,141 @@ class IndexedDbService { async setSpellFavoriteIds(ids: string[]): Promise { await this.setSetting(IndexedDbService.SPELL_FAVORITE_IDS_KEY, JSON.stringify(ids)) } + + /** + * Check if an event is tombstoned (deleted) + */ + async isTombstoned(key: string): Promise { + await this.initPromise + return new Promise((resolve) => { + if (!this.db) { + return resolve(false) + } + if (!this.db.objectStoreNames.contains(StoreNames.TOMBSTONE_LIST)) { + return resolve(false) + } + const transaction = this.db.transaction(StoreNames.TOMBSTONE_LIST, 'readonly') + const store = transaction.objectStore(StoreNames.TOMBSTONE_LIST) + const request = store.get(key) + + request.onsuccess = () => { + const row = request.result as TValue | undefined + transaction.commit() + resolve(row !== undefined && row.value !== null) + } + + request.onerror = () => { + transaction.commit() + resolve(false) + } + }) + } + + /** + * Add event to tombstone list (mark as deleted) + * Key format: event ID for non-replaceable events, or "kind:pubkey" or "kind:pubkey:d" for replaceable events + */ + async addTombstone(key: string, deletedAt: number = Date.now()): Promise { + await this.initPromise + return new Promise((resolve, reject) => { + if (!this.db) { + return reject(new Error('Database not initialized')) + } + if (!this.db.objectStoreNames.contains(StoreNames.TOMBSTONE_LIST)) { + return reject(new Error('Tombstone store not found')) + } + const transaction = this.db.transaction(StoreNames.TOMBSTONE_LIST, 
'readwrite') + const store = transaction.objectStore(StoreNames.TOMBSTONE_LIST) + const value = this.formatValue(key, { deletedAt }) + const request = store.put(value) + + request.onsuccess = () => { + transaction.commit() + resolve() + } + + request.onerror = (event) => { + transaction.commit() + reject(idbEventToError(event)) + } + }) + } + + /** + * Get all tombstoned keys + */ + async getAllTombstones(): Promise> { + await this.initPromise + return new Promise((resolve, reject) => { + if (!this.db) { + return resolve(new Set()) + } + if (!this.db.objectStoreNames.contains(StoreNames.TOMBSTONE_LIST)) { + return resolve(new Set()) + } + const transaction = this.db.transaction(StoreNames.TOMBSTONE_LIST, 'readonly') + const store = transaction.objectStore(StoreNames.TOMBSTONE_LIST) + const request = store.getAll() + + request.onsuccess = () => { + const rows = request.result as TValue[] + const keys = new Set() + for (const row of rows) { + if (row.value !== null) { + keys.add(row.key) + } + } + transaction.commit() + resolve(keys) + } + + request.onerror = (event) => { + transaction.commit() + reject(idbEventToError(event)) + } + }) + } + + /** + * Remove tombstoned events from cache (cleanup) + */ + async removeTombstonedFromCache(): Promise { + const tombstones = await this.getAllTombstones() + let removed = 0 + + for (const key of tombstones) { + // Parse key format: could be event id or "kind:pubkey" or "kind:pubkey:d" (replaceable coordinate) + // Or just event ID for non-replaceable events + const parts = key.split(':') + if (parts.length === 1) { + // Event ID - remove from publication store + try { + await this.deleteStoreItem(StoreNames.PUBLICATION_EVENTS, key) + removed++ + } catch { + // Ignore errors + } + } else if (parts.length >= 2) { + // Replaceable event coordinate format: "kind:pubkey" or "kind:pubkey:d" + const kind = parseInt(parts[0]!, 10) + const pubkey = parts[1]! 
+ const d = parts[2] + if (!isNaN(kind)) { + try { + const storeName = this.getStoreNameByKind(kind) + if (storeName) { + await this.deleteStoreItem(storeName, this.getReplaceableEventKey(pubkey, d)) + removed++ + } + } catch { + // Ignore errors + } + } + } + } + + return removed + } } const instance = IndexedDbService.getInstance() diff --git a/src/services/lightning.service.ts b/src/services/lightning.service.ts index ca45057d..b0272978 100644 --- a/src/services/lightning.service.ts +++ b/src/services/lightning.service.ts @@ -1,4 +1,4 @@ -import { BIG_RELAY_URLS, CODY_PUBKEY, JUMBLE_PUBKEY } from '@/constants' +import { FAST_READ_RELAY_URLS, CODY_PUBKEY, JUMBLE_PUBKEY } from '@/constants' import { getZapInfoFromEvent } from '@/lib/event-metadata' import { TProfile } from '@/types' import { init, launchPaymentModal } from '@getalby/bitcoin-connect-react' @@ -58,7 +58,7 @@ class LightningService { })(), sender ? client.fetchRelayList(sender) // Keep using client for relay list merging - : Promise.resolve({ read: BIG_RELAY_URLS, write: BIG_RELAY_URLS }) + : Promise.resolve({ read: FAST_READ_RELAY_URLS, write: FAST_READ_RELAY_URLS }) ]) if (!profile) { throw new Error('Recipient not found') @@ -73,7 +73,7 @@ class LightningService { ...(event ? 
{ event } : { pubkey: recipient }), amount, // Privacy: Only use sender's relays + defaults, not recipient's relays - relays: senderRelayList.write.slice(0, 4).concat(BIG_RELAY_URLS), + relays: senderRelayList.write.slice(0, 4).concat(FAST_READ_RELAY_URLS), comment }) const zapRequest = await client.signer.signEvent(zapRequestDraft) @@ -134,7 +134,7 @@ class LightningService { filter['#e'] = [event.id] } subCloser = client.subscribe( - senderRelayList.write.concat(BIG_RELAY_URLS).slice(0, 4), + senderRelayList.write.concat(FAST_READ_RELAY_URLS).slice(0, 4), filter, { onevent: (evt) => { @@ -180,7 +180,7 @@ class LightningService { return this.recentSupportersCache } // Privacy: Use defaults instead of fetching CODY_PUBKEY's relays - const events = await queryService.fetchEvents(BIG_RELAY_URLS.slice(0, 4), { + const events = await queryService.fetchEvents(FAST_READ_RELAY_URLS.slice(0, 4), { authors: ['79f00d3f5a19ec806189fcab03c1be4ff81d18ee4f653c88fac41fe03570f432'], // alby kinds: [kinds.Zap], '#p': OFFICIAL_PUBKEYS, diff --git a/src/services/nip66-monitor.ts b/src/services/nip66-monitor.ts index ec984f3d..860aa256 100644 --- a/src/services/nip66-monitor.ts +++ b/src/services/nip66-monitor.ts @@ -4,7 +4,7 @@ * and no-op builders so relay-info and bootstrap can keep calling without branching. 
*/ -import { BIG_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS } from '@/constants' import { normalizeUrl } from '@/lib/url' import { TRelayInfo } from '@/types' import { Event as NEvent, finalizeEvent } from 'nostr-tools' @@ -97,7 +97,7 @@ export function publishMonitorAnnouncementOnce(): void { if (!event) return publishedAnnouncementThisSession = true logger.info('NIP-66: publishing monitor announcement (10166)') - client.publishEvent([...BIG_RELAY_URLS.slice(0, 4)], event).then((res) => { + client.publishEvent([...FAST_READ_RELAY_URLS.slice(0, 4)], event).then((res) => { if (res.successCount > 0) { logger.info('NIP-66: published monitor announcement (10166)', { successCount: res.successCount }) } diff --git a/src/services/relay-info.service.ts b/src/services/relay-info.service.ts index 2364b343..fb3c666d 100644 --- a/src/services/relay-info.service.ts +++ b/src/services/relay-info.service.ts @@ -1,4 +1,4 @@ -import { BIG_RELAY_URLS } from '@/constants' +import { FAST_READ_RELAY_URLS } from '@/constants' import { simplifyUrl } from '@/lib/url' import indexDb from '@/services/indexed-db.service' import { TAwesomeRelayCollection, TRelayInfo } from '@/types' @@ -212,7 +212,7 @@ class RelayInfoService { } this.lastNip66PublishByUrl.set(key, now) - const urls = [relayInfo.url, ...BIG_RELAY_URLS.slice(0, 3)] + const urls = [relayInfo.url, ...FAST_READ_RELAY_URLS.slice(0, 3)] logger.info('NIP-66: publishing relay discovery (30166)', { url: relayInfo.url }) client.publishEvent(urls, event).then((res) => { if (res.successCount > 0) {