diff --git a/src/PageManager.tsx b/src/PageManager.tsx
index 4088b800..a2dc0c4e 100644
--- a/src/PageManager.tsx
+++ b/src/PageManager.tsx
@@ -1,11 +1,8 @@
-import Sidebar from '@/components/Sidebar'
import { Button } from '@/components/ui/button'
import { cn } from '@/lib/utils'
import logger from '@/lib/logger'
import { ChevronLeft } from 'lucide-react'
import { NavigationService } from '@/services/navigation.service'
-import NoteListPage from '@/pages/primary/NoteListPage'
-import SecondaryNoteListPage from '@/pages/secondary/NoteListPage'
// Page imports needed for primary note view
import NoteDrawer from '@/components/NoteDrawer'
import SecondaryProfilePage from '@/pages/secondary/ProfilePage'
@@ -28,6 +25,7 @@ import {
createRef,
isValidElement,
lazy,
+ type ReactElement,
type ReactNode,
RefObject,
Suspense,
@@ -38,26 +36,39 @@ import {
useRef,
useState
} from 'react'
-import BottomNavigationBar from './components/BottomNavigationBar'
import { useTranslation } from 'react-i18next'
-import TooManyRelaysAlertDialog from './components/TooManyRelaysAlertDialog'
+import { KeyboardShortcutsHelpProvider } from '@/components/KeyboardShortcutsHelp'
import { normalizeUrl } from './lib/url'
-import ExplorePage from './pages/primary/ExplorePage'
-import MePage from './pages/primary/MePage'
-import ProfilePage from './pages/primary/ProfilePage'
-import RelayPage from './pages/primary/RelayPage'
-import SearchPage from './pages/primary/SearchPage'
-import RssPage from './pages/primary/RssPage'
-import SettingsPrimaryPage from './pages/primary/SettingsPrimaryPage'
+import modalManager from './services/modal-manager.service'
+import { routes } from './routes'
import { useScreenSize } from './providers/ScreenSizeProvider'
/** Lazy-loaded so PageManager does not synchronously import SpellsPage (avoids HMR cycle: SpellsPage → PrimaryPageLayout → PageManager → SpellsPage). */
const SpellsPageLazy = lazy(() => import('./pages/primary/SpellsPage'))
-import { routes } from './routes'
-import modalManager from './services/modal-manager.service'
-import CreateWalletGuideToast from './components/CreateWalletGuideToast'
-import { KeyboardShortcutsHelpProvider } from '@/components/KeyboardShortcutsHelp'
-
+/** Lazy-loading the NoteList pages breaks the HMR cycle: PageManager → … → NoteList → NoteCard → useSmartNoteNavigation → PageManager */
+const NoteListPageLazy = lazy(() => import('@/pages/primary/NoteListPage'))
+const SecondaryNoteListPageLazy = lazy(() => import('@/pages/secondary/NoteListPage'))
+
+const primaryPageLazyFallback = (
+  <div className="flex h-full w-full items-center justify-center text-muted-foreground">
+    Loading…
+  </div>
+)
+
+/** Lazy primary pages: each may import PrimaryPageLayout → usePrimaryPage → would sync-import PageManager. */
+const ExplorePageLazy = lazy(() => import('./pages/primary/ExplorePage'))
+const MePageLazy = lazy(() => import('./pages/primary/MePage'))
+const ProfilePageLazy = lazy(() => import('./pages/primary/ProfilePage'))
+const RelayPageLazy = lazy(() => import('./pages/primary/RelayPage'))
+const SearchPageLazy = lazy(() => import('./pages/primary/SearchPage'))
+const RssPageLazy = lazy(() => import('./pages/primary/RssPage'))
+const SettingsPrimaryPageLazy = lazy(() => import('./pages/primary/SettingsPrimaryPage'))
+
+/** Lazy chrome: Sidebar / bottom bar / dialogs import hooks from PageManager — must not be sync-imported here. */
+const SidebarLazy = lazy(() => import('@/components/Sidebar'))
+const BottomNavigationBarLazy = lazy(() => import('@/components/BottomNavigationBar'))
+const TooManyRelaysAlertDialogLazy = lazy(() => import('@/components/TooManyRelaysAlertDialog'))
+const CreateWalletGuideToastLazy = lazy(() => import('@/components/CreateWalletGuideToast'))
type TPrimaryPageContext = {
navigate: (page: TPrimaryPageName, props?: object) => void
@@ -96,22 +107,48 @@ const PRIMARY_PAGE_REF_MAP = {
// Lazy function to create PRIMARY_PAGE_MAP to avoid circular dependency
// This is only evaluated when called, not at module load time
const getPrimaryPageMap = () => ({
- home: ,
- feed: ,
- me: ,
- profile: ,
- relay: ,
- search: ,
- rss: ,
- settings: ,
+ home: (
+
+
+
+ ),
+ feed: (
+
+
+
+ ),
+ me: (
+
+
+
+ ),
+ profile: (
+
+
+
+ ),
+ relay: (
+
+
+
+ ),
+ search: (
+
+
+
+ ),
+ rss: (
+
+
+
+ ),
+ settings: (
+
+
+
+ ),
spells: (
-
- Loading…
-
- }
- >
+    <Suspense fallback={primaryPageLazyFallback}>
+      <SpellsPageLazy />
+    </Suspense>
)
@@ -443,7 +480,12 @@ export function useSmartHashtagNavigation() {
// Use a key based on the hashtag and navigation counter to force remounting when hashtag changes
// This ensures the component reads the new URL parameters when it mounts
// setPrimaryNoteView will increment the counter, so we use counter + 1 for the key
- setPrimaryNoteView(, 'hashtag')
+ setPrimaryNoteView(
+
+
+ ,
+ 'hashtag'
+ )
// Dispatch custom event as a fallback for components that might be reused
window.dispatchEvent(new CustomEvent('hashtag-navigation', { detail: { url: parsedUrl } }))
}
@@ -1520,9 +1562,15 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) {
noteId={drawerNoteId}
/>
)}
-
-
-
+          <Suspense fallback={null}>
+            <BottomNavigationBarLazy />
+          </Suspense>
+          <Suspense fallback={null}>
+            <TooManyRelaysAlertDialogLazy />
+          </Suspense>
+          <Suspense fallback={null}>
+            <CreateWalletGuideToastLazy />
+          </Suspense>
@@ -1562,7 +1610,9 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) {
maxWidth: '1920px'
}}
>
-
+              <Suspense fallback={null}>
+                <SidebarLazy />
+              </Suspense>
{(() => {
if (panelMode === 'double') {
// Double-pane mode: show feed on left (flexible, maintains width), secondary stack on right (1042px, same as drawer)
@@ -1667,8 +1717,12 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) {
)}
-
-
+      <Suspense fallback={null}>
+        <TooManyRelaysAlertDialogLazy />
+      </Suspense>
+      <Suspense fallback={null}>
+        <CreateWalletGuideToastLazy />
+      </Suspense>
@@ -1715,6 +1769,20 @@ function isCurrentPage(stack: TStackItem[], url: string) {
return currentPage.url === url
}
+/** Route elements are `<Suspense>…</Suspense>` wrappers — props must be applied to the lazy leaf, not Suspense. */
+function cloneSecondaryRouteElement(
+  element: ReactElement,
+  props: Record<string, unknown>
+): ReactElement {
+ if (element.type === Suspense) {
+ const inner = element.props.children
+ if (isValidElement(inner)) {
+ return cloneElement(element, undefined, cloneElement(inner, props as any))
+ }
+ }
+ return cloneElement(element, props as any)
+}
+
function findAndCreateComponent(url: string, index: number) {
const path = url.split('?')[0].split('#')[0]
logger.component('PageManager', 'findAndCreateComponent called', { url, path, routes: routes.length })
@@ -1739,7 +1807,7 @@ function findAndCreateComponent(url: string, index: number) {
logger.component('PageManager', 'Creating component with params', { params, index })
try {
- const component = cloneElement(element, { ...params, index, ref } as any)
+ const component = cloneSecondaryRouteElement(element, { ...params, index, ref })
logger.component('PageManager', 'Component created successfully', { hasComponent: !!component })
return { component, ref }
} catch (error) {
diff --git a/src/components/NoteList/index.tsx b/src/components/NoteList/index.tsx
index 02a437a0..bcc5cea7 100644
--- a/src/components/NoteList/index.tsx
+++ b/src/components/NoteList/index.tsx
@@ -161,6 +161,11 @@ const NoteList = forwardRef(
]
)
+ const shouldHideEventRef = useRef(shouldHideEvent)
+ useEffect(() => {
+ shouldHideEventRef.current = shouldHideEvent
+ }, [shouldHideEvent])
+
const filteredEvents = useMemo(() => {
const idSet = new Set()
@@ -292,6 +297,8 @@ const NoteList = forwardRef(
onEvents: (events: Event[], eosed: boolean) => {
if (events.length > 0) {
setEvents(events)
+ // Do not wait for full EOSE across many relays — otherwise loading/skeleton stays up for 10–30s+
+ setLoading(false)
// CRITICAL: Prefetch profiles for initial events (optimized for faster initial load)
// Only prefetch for first 50 events to reduce initial load time
@@ -369,7 +376,7 @@ const NoteList = forwardRef(
if (!isReply && !showKind1OPs) return
}
if (event.kind === ExtendedKind.COMMENT && !showKind1111) return
- if (shouldHideEvent(event)) return
+ if (shouldHideEventRef.current(event)) return
if (pubkey && event.pubkey === pubkey) {
// If the new event is from the current user, insert it directly into the feed
setEvents((oldEvents) =>
@@ -414,7 +421,7 @@ const NoteList = forwardRef(
showKind1Replies,
showKind1111,
useFilterAsIs,
- shouldHideEvent
+ areAlgoRelays
])
// Use refs to avoid dependency issues and ensure latest values in async callbacks
diff --git a/src/components/SimpleNoteFeed/index.tsx b/src/components/SimpleNoteFeed/index.tsx
index fda3171e..31006069 100644
--- a/src/components/SimpleNoteFeed/index.tsx
+++ b/src/components/SimpleNoteFeed/index.tsx
@@ -3,7 +3,7 @@ import { useTranslation } from 'react-i18next'
import { RefreshCw } from 'lucide-react'
import { useNostr } from '@/providers/NostrProvider'
import { normalizeUrl } from '@/lib/url'
-import { FAST_READ_RELAY_URLS } from '@/constants'
+import { FAST_READ_RELAY_URLS, FIRST_RELAY_RESULT_GRACE_MS } from '@/constants'
import client from '@/services/client.service'
import { Event } from 'nostr-tools'
import { kinds } from 'nostr-tools'
@@ -87,7 +87,9 @@ const SimpleNoteFeed = forwardRef<
// Fetch events
logger.component('SimpleNoteFeed', 'Calling client.fetchEvents')
const { queryService } = await import('@/services/client.service')
- const fetchedEvents = await queryService.fetchEvents(allRelays, [filter])
+ const fetchedEvents = await queryService.fetchEvents(allRelays, [filter], {
+ firstRelayResultGraceMs: FIRST_RELAY_RESULT_GRACE_MS
+ })
logger.component('SimpleNoteFeed', 'Fetched events', { count: fetchedEvents.length })
diff --git a/src/constants.ts b/src/constants.ts
index b524081e..32bbcd1f 100644
--- a/src/constants.ts
+++ b/src/constants.ts
@@ -14,6 +14,28 @@ export const DEFAULT_FAVORITE_RELAYS = [
'wss://nostr.land'
]
+/** Multi-relay queries and timeline initial REQ: after the first event, wait this long then close (query) or finalize EOSE (live feed) while keeping the subscription open for new events. */
+export const FIRST_RELAY_RESULT_GRACE_MS = 2000
+
+/**
+ * Implicit query feed grace ({@link FIRST_RELAY_RESULT_GRACE_MS}) applies only when the largest `limit` among
+ * filters is at least this value. Omitting `limit` counts as 0 (no implicit grace).
+ */
+export const FEED_FIRST_RELAY_RESULT_GRACE_MIN_LIMIT = 200
+
+/**
+ * Batched kind-0 queries (ReplaceableEventService) over many relays (inbox, favorites, cache, defaults).
+ * Too low causes empty profiles and NIP-05 gaps when relays are slow or many URLs are queried.
+ */
+export const METADATA_BATCH_QUERY_GLOBAL_TIMEOUT_MS = 16000
+export const METADATA_BATCH_QUERY_EOSE_TIMEOUT_MS = 500
+
+/**
+ * useFetchProfile: outer Promise.race on fetchProfileEvent and wait-for-shared-promise timeouts.
+ * Must be greater than {@link METADATA_BATCH_QUERY_GLOBAL_TIMEOUT_MS} so the batch can finish first.
+ */
+export const PROFILE_FETCH_PROMISE_TIMEOUT_MS = 20000
+
export const RECOMMENDED_RELAYS = DEFAULT_FAVORITE_RELAYS.concat([])
export const RECOMMENDED_BLOSSOM_SERVERS = [
diff --git a/src/hooks/useFetchCalendarRsvps.tsx b/src/hooks/useFetchCalendarRsvps.tsx
index 144a49a6..365307b4 100644
--- a/src/hooks/useFetchCalendarRsvps.tsx
+++ b/src/hooks/useFetchCalendarRsvps.tsx
@@ -65,11 +65,15 @@ export function useFetchCalendarRsvps(calendarEvent: Event | undefined) {
.then((relayUrls: string[] | undefined) => {
if (cancelled) return
const urls = relayUrls?.length ? relayUrls : Array.from(baseUrls)
- return queryService.fetchEvents(urls, {
- kinds: [ExtendedKind.CALENDAR_EVENT_RSVP],
- '#a': [coordinate],
- limit: 200
- })
+ return queryService.fetchEvents(
+ urls,
+ {
+ kinds: [ExtendedKind.CALENDAR_EVENT_RSVP],
+ '#a': [coordinate],
+ limit: 200
+ },
+ { firstRelayResultGraceMs: false }
+ )
})
.then((events) => {
if (cancelled) return
diff --git a/src/hooks/useFetchProfile.tsx b/src/hooks/useFetchProfile.tsx
index 79b15a14..dd042206 100644
--- a/src/hooks/useFetchProfile.tsx
+++ b/src/hooks/useFetchProfile.tsx
@@ -1,3 +1,4 @@
+import { PROFILE_FETCH_PROMISE_TIMEOUT_MS } from '@/constants'
import { getProfileFromEvent } from '@/lib/event-metadata'
import { userIdToPubkey } from '@/lib/pubkey'
import { useNostr } from '@/providers/NostrProvider'
@@ -79,7 +80,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
pubkey: pubkey.substring(0, 8)
})
resolve(null)
- }, 5000) // 5 seconds
+ }, PROFILE_FETCH_PROMISE_TIMEOUT_MS)
})
const existingProfile = await Promise.race([existingPromise, timeoutPromise])
@@ -135,8 +136,8 @@ export function useFetchProfile(id?: string, skipCache = false) {
logger.warn('[useFetchProfile] Retry promise timeout, not starting duplicate fetch', {
pubkey: pubkey.substring(0, 8)
})
- resolve(null)
- }, 5000) // 5 seconds
+ resolve(null)
+ }, PROFILE_FETCH_PROMISE_TIMEOUT_MS)
})
const retryProfile = await Promise.race([retryPromise, timeoutPromise])
@@ -183,12 +184,15 @@ export function useFetchProfile(id?: string, skipCache = false) {
globalFetchingPubkeys.add(pubkey)
const startTime = Date.now()
- // CRITICAL: Add timeout to prevent infinite hangs
- // Use Promise.race to timeout after 5 seconds
+ // CRITICAL: Add timeout to prevent infinite hangs (must exceed batched metadata query globalTimeout)
const timeoutPromise = new Promise((_, reject) => {
setTimeout(() => {
- reject(new Error(`Profile fetch timeout after 5s for pubkey ${pubkey.substring(0, 8)}`))
- }, 5000) // 5 second timeout
+ reject(
+ new Error(
+ `Profile fetch timeout after ${PROFILE_FETCH_PROMISE_TIMEOUT_MS}ms for pubkey ${pubkey.substring(0, 8)}`
+ )
+ )
+ }, PROFILE_FETCH_PROMISE_TIMEOUT_MS)
})
// Use fetchProfileEvent which includes author's relay list for better profile discovery
@@ -207,12 +211,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
fetchTime: `${fetchTime}ms`
})
}
-
- if (cancelled.current) {
- logger.info('[useFetchProfile] Fetch cancelled after fetch', { pubkey })
- return null
- }
-
+
if (profileEvent) {
// getProfileFromEvent always returns a profile object (with fallback username)
const newProfile = getProfileFromEvent(profileEvent)
@@ -221,8 +220,13 @@ export function useFetchProfile(id?: string, skipCache = false) {
pubkey: pubkey.substring(0, 8),
username: newProfile.username,
hasAvatar: !!newProfile.avatar,
- fetchTime: `${fetchTime}ms`
+ fetchTime: `${fetchTime}ms`,
+ unmounted: cancelled.current
})
+ // CRITICAL: Always return the profile from this shared promise, even when the
+ // originating hook cleaned up (list virtualization, Strict Mode, feed switch).
+ // Returning null here made every waiter treat the result like a timeout, applied
+ // cooldowns, and left avatars empty (especially busy feeds e.g. all-favorites).
return newProfile
}
// Only log warnings for missing profiles if skipCache is true (user explicitly requested)
diff --git a/src/lib/spell-list-import.ts b/src/lib/spell-list-import.ts
index 53ecff17..a41662dc 100644
--- a/src/lib/spell-list-import.ts
+++ b/src/lib/spell-list-import.ts
@@ -180,8 +180,14 @@ export async function resolveSpellListATags(
try {
const events =
relays.length > 0
- ? await queryService.fetchEvents(relays, filter, { globalTimeout: 12_000 })
- : await queryService.fetchEvents([], filter, { globalTimeout: 12_000 })
+ ? await queryService.fetchEvents(relays, filter, {
+ globalTimeout: 12_000,
+ firstRelayResultGraceMs: false
+ })
+ : await queryService.fetchEvents([], filter, {
+ globalTimeout: 12_000,
+ firstRelayResultGraceMs: false
+ })
if (!events.length) {
notices.push(`listImportATagNotFound:${at.slice(0, 48)}`)
return
diff --git a/src/pages/primary/DiscussionsPage/index.tsx b/src/pages/primary/DiscussionsPage/index.tsx
index 53462f0f..0b9cebb6 100644
--- a/src/pages/primary/DiscussionsPage/index.tsx
+++ b/src/pages/primary/DiscussionsPage/index.tsx
@@ -422,12 +422,16 @@ const DiscussionsPage = forwardRef(function Di
logger.debug('[DiscussionsPage] Using relays:', allRelays.slice(0, 10), '... (total:', allRelays.length, ')')
// Step 1: Fetch all discussion threads (kind 11)
- const discussionThreads = await queryService.fetchEvents(allRelays, [
- {
- kinds: [11], // ExtendedKind.DISCUSSION
- limit: 500 // Increased from 100 to load more threads per request
- }
- ])
+ const discussionThreads = await queryService.fetchEvents(
+ allRelays,
+ [
+ {
+ kinds: [11], // ExtendedKind.DISCUSSION
+ limit: 500 // Increased from 100 to load more threads per request
+ }
+ ],
+ { firstRelayResultGraceMs: false }
+ )
logger.debug('[DiscussionsPage] Fetched', discussionThreads.length, 'discussion threads')
if (discussionThreads.length > 0) {
@@ -456,20 +460,32 @@ const DiscussionsPage = forwardRef(function Di
const allThreadIdsArray = Array.from(allThreadIds)
const [comments, reactions] = await Promise.all([
- allThreadIdsArray.length > 0 ? queryService.fetchEvents(allRelays, [
- {
- kinds: [1111], // ExtendedKind.COMMENT
- '#e': allThreadIdsArray,
- limit: 500 // Increased from 100 to load more comments per request
- }
- ]) : Promise.resolve([]),
- allThreadIdsArray.length > 0 ? queryService.fetchEvents(allRelays, [
- {
- kinds: [kinds.Reaction],
- '#e': allThreadIdsArray,
- limit: 500 // Increased from 100 to load more reactions per request
- }
- ]) : Promise.resolve([])
+ allThreadIdsArray.length > 0
+ ? queryService.fetchEvents(
+ allRelays,
+ [
+ {
+ kinds: [1111], // ExtendedKind.COMMENT
+ '#e': allThreadIdsArray,
+ limit: 500 // Increased from 100 to load more comments per request
+ }
+ ],
+ { firstRelayResultGraceMs: false }
+ )
+ : Promise.resolve([]),
+ allThreadIdsArray.length > 0
+ ? queryService.fetchEvents(
+ allRelays,
+ [
+ {
+ kinds: [kinds.Reaction],
+ '#e': allThreadIdsArray,
+ limit: 500 // Increased from 100 to load more reactions per request
+ }
+ ],
+ { firstRelayResultGraceMs: false }
+ )
+ : Promise.resolve([])
])
logger.debug('[DiscussionsPage] Fetched', comments.length, 'comments and', reactions.length, 'reactions for', allThreadIdsArray.length, 'threads (', threadIds.length, 'new,', (cachedDataBeforeFetch?.eventMap.size || 0), 'cached)')
diff --git a/src/routes.tsx b/src/routes.tsx
index 7d15641b..6ce89458 100644
--- a/src/routes.tsx
+++ b/src/routes.tsx
@@ -1,58 +1,75 @@
import { match } from 'path-to-regexp'
-import { isValidElement } from 'react'
-import FollowingListPage from './pages/secondary/FollowingListPage'
-import GeneralSettingsPage from './pages/secondary/GeneralSettingsPage'
-import MuteListPage from './pages/secondary/MuteListPage'
-import NoteListPage from './pages/secondary/NoteListPage'
-import NotePage from './pages/secondary/NotePage'
-import OthersRelaySettingsPage from './pages/secondary/OthersRelaySettingsPage'
-import PostSettingsPage from './pages/secondary/PostSettingsPage'
-import ProfileEditorPage from './pages/secondary/ProfileEditorPage'
-import ProfileListPage from './pages/secondary/ProfileListPage'
-import ProfilePage from './pages/secondary/ProfilePage'
-import RelayPage from './pages/secondary/RelayPage'
-import RelayReviewsPage from './pages/secondary/RelayReviewsPage'
-import RelaySettingsPage from './pages/secondary/RelaySettingsPage'
-import CacheSettingsPage from './pages/secondary/CacheSettingsPage'
-import RssFeedSettingsPage from './pages/secondary/RssFeedSettingsPage'
-import SearchPage from './pages/secondary/SearchPage'
-import SettingsPage from './pages/secondary/SettingsPage'
-import TranslationPage from './pages/secondary/TranslationPage'
-import WalletPage from './pages/secondary/WalletPage'
-import FollowPacksRedirect from './pages/secondary/FollowPacksRedirect'
+import {
+ isValidElement,
+ lazy,
+ Suspense,
+ type ComponentType,
+ type LazyExoticComponent,
+ type ReactElement
+} from 'react'
+
+/** Lazy + Suspense so importing `routes` does not sync-pull pages that depend on PageManager (breaks Vite HMR cycles). */
+const FollowingListPageLazy = lazy(() => import('./pages/secondary/FollowingListPage'))
+const GeneralSettingsPageLazy = lazy(() => import('./pages/secondary/GeneralSettingsPage'))
+const MuteListPageLazy = lazy(() => import('./pages/secondary/MuteListPage'))
+const NoteListPageLazy = lazy(() => import('./pages/secondary/NoteListPage'))
+const NotePageLazy = lazy(() => import('./pages/secondary/NotePage'))
+const OthersRelaySettingsPageLazy = lazy(() => import('./pages/secondary/OthersRelaySettingsPage'))
+const PostSettingsPageLazy = lazy(() => import('./pages/secondary/PostSettingsPage'))
+const ProfileEditorPageLazy = lazy(() => import('./pages/secondary/ProfileEditorPage'))
+const ProfileListPageLazy = lazy(() => import('./pages/secondary/ProfileListPage'))
+const ProfilePageLazy = lazy(() => import('./pages/secondary/ProfilePage'))
+const RelayPageLazy = lazy(() => import('./pages/secondary/RelayPage'))
+const RelayReviewsPageLazy = lazy(() => import('./pages/secondary/RelayReviewsPage'))
+const RelaySettingsPageLazy = lazy(() => import('./pages/secondary/RelaySettingsPage'))
+const CacheSettingsPageLazy = lazy(() => import('./pages/secondary/CacheSettingsPage'))
+const RssFeedSettingsPageLazy = lazy(() => import('./pages/secondary/RssFeedSettingsPage'))
+const SearchPageLazy = lazy(() => import('./pages/secondary/SearchPage'))
+const SettingsPageLazy = lazy(() => import('./pages/secondary/SettingsPage'))
+const TranslationPageLazy = lazy(() => import('./pages/secondary/TranslationPage'))
+const WalletPageLazy = lazy(() => import('./pages/secondary/WalletPage'))
+const FollowPacksRedirectLazy = lazy(() => import('./pages/secondary/FollowPacksRedirect'))
+
+const routeSuspenseFallback = null
+
+function SR(C: LazyExoticComponent<ComponentType<any>>): ReactElement {
+  return (
+    <Suspense fallback={routeSuspenseFallback}>
+      <C />
+    </Suspense>
+  )
+}
const ROUTES = [
- { path: '/notes', element: },
- { path: '/notes/:id', element: },
- // Contextual note routes (e.g., /discussions/notes/:id, /search/notes/:id)
- { path: '/discussions/notes/:id', element: },
- { path: '/search/notes/:id', element: },
- { path: '/profile/notes/:id', element: },
- { path: '/explore/notes/:id', element: },
- { path: '/home/notes/:id', element: },
- { path: '/feed/notes/:id', element: },
- { path: '/spells/notes/:id', element: },
- { path: '/users', element: },
- { path: '/users/:id', element: },
- { path: '/users/:id/following', element: },
- { path: '/users/:id/relays', element: },
- { path: '/relays/:url', element: },
- { path: '/relays/:url/reviews', element: },
- // Contextual relay routes (home = explore; legacy /explore)
- { path: '/home/relays/:url', element: },
- { path: '/explore/relays/:url', element: },
- { path: '/search', element: },
- { path: '/settings', element: },
- { path: '/settings/relays', element: },
- { path: '/settings/cache', element: },
- { path: '/settings/wallet', element: },
- { path: '/settings/posts', element: },
- { path: '/settings/general', element: },
- { path: '/settings/translation', element: },
- { path: '/settings/rss-feeds', element: },
- { path: '/profile-editor', element: },
- { path: '/mutes', element: },
- { path: '/follow-packs', element: }
+ { path: '/notes', element: SR(NoteListPageLazy) },
+ { path: '/notes/:id', element: SR(NotePageLazy) },
+ { path: '/discussions/notes/:id', element: SR(NotePageLazy) },
+ { path: '/search/notes/:id', element: SR(NotePageLazy) },
+ { path: '/profile/notes/:id', element: SR(NotePageLazy) },
+ { path: '/explore/notes/:id', element: SR(NotePageLazy) },
+ { path: '/home/notes/:id', element: SR(NotePageLazy) },
+ { path: '/feed/notes/:id', element: SR(NotePageLazy) },
+ { path: '/spells/notes/:id', element: SR(NotePageLazy) },
+ { path: '/users', element: SR(ProfileListPageLazy) },
+ { path: '/users/:id', element: SR(ProfilePageLazy) },
+ { path: '/users/:id/following', element: SR(FollowingListPageLazy) },
+ { path: '/users/:id/relays', element: SR(OthersRelaySettingsPageLazy) },
+ { path: '/relays/:url', element: SR(RelayPageLazy) },
+ { path: '/relays/:url/reviews', element: SR(RelayReviewsPageLazy) },
+ { path: '/home/relays/:url', element: SR(RelayPageLazy) },
+ { path: '/explore/relays/:url', element: SR(RelayPageLazy) },
+ { path: '/search', element: SR(SearchPageLazy) },
+ { path: '/settings', element: SR(SettingsPageLazy) },
+ { path: '/settings/relays', element: SR(RelaySettingsPageLazy) },
+ { path: '/settings/cache', element: SR(CacheSettingsPageLazy) },
+ { path: '/settings/wallet', element: SR(WalletPageLazy) },
+ { path: '/settings/posts', element: SR(PostSettingsPageLazy) },
+ { path: '/settings/general', element: SR(GeneralSettingsPageLazy) },
+ { path: '/settings/translation', element: SR(TranslationPageLazy) },
+ { path: '/settings/rss-feeds', element: SR(RssFeedSettingsPageLazy) },
+ { path: '/profile-editor', element: SR(ProfileEditorPageLazy) },
+ { path: '/mutes', element: SR(MuteListPageLazy) },
+ { path: '/follow-packs', element: SR(FollowPacksRedirectLazy) }
]
export const routes = ROUTES.map(({ path, element }) => ({
diff --git a/src/services/client-query.service.ts b/src/services/client-query.service.ts
index 62f6cb92..d730c0c6 100644
--- a/src/services/client-query.service.ts
+++ b/src/services/client-query.service.ts
@@ -1,4 +1,9 @@
-import { KIND_1_BLOCKED_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants'
+import {
+ FEED_FIRST_RELAY_RESULT_GRACE_MIN_LIMIT,
+ FIRST_RELAY_RESULT_GRACE_MS,
+ KIND_1_BLOCKED_RELAY_URLS,
+ SEARCHABLE_RELAY_URLS
+} from '@/constants'
import logger from '@/lib/logger'
import { normalizeUrl } from '@/lib/url'
import type { Filter, Event as NEvent } from 'nostr-tools'
@@ -23,6 +28,13 @@ export interface QueryOptions {
replaceableRaceWaitMs?: number
/** For non-replaceable single events: return immediately on first match */
immediateReturn?: boolean
+ /**
+ * Multi-relay feed / batch: after first event, wait this many ms then close and return.
+ * `false` disables (wait for normal EOSE / global timeout). When omitted, implicit grace uses
+ * {@link FIRST_RELAY_RESULT_GRACE_MS} only if the largest filter `limit` is at least
+ * {@link FEED_FIRST_RELAY_RESULT_GRACE_MIN_LIMIT} (and not replaceableRace / immediateReturn / single-event fetch).
+ */
+ firstRelayResultGraceMs?: number | false
}
export interface SubscribeCallbacks {
@@ -115,7 +127,7 @@ export class QueryService {
const eoseTimeout = options?.eoseTimeout ?? 500
const globalTimeout = options?.globalTimeout ?? 10000
const replaceableRace = options?.replaceableRace ?? false
- const replaceableRaceWaitMs = options?.replaceableRaceWaitMs ?? 2000
+ const replaceableRaceWaitMs = options?.replaceableRaceWaitMs ?? FIRST_RELAY_RESULT_GRACE_MS
const immediateReturn = options?.immediateReturn ?? false
const isExternalSearch = eoseTimeout > 1000
@@ -131,12 +143,25 @@ export class QueryService {
})
}
- const FIRST_RESULT_GRACE_MS = 1200
+ const filtersForGrace = Array.isArray(filter) ? filter : [filter]
+ const maxLimitForGrace = Math.max(...filtersForGrace.map((f) => (f.limit ?? 0) as number), 0)
+ const isSingleEventFetchForGrace = maxLimitForGrace === 1
+ const useImplicitFeedFirstRelayGrace =
+ maxLimitForGrace >= FEED_FIRST_RELAY_RESULT_GRACE_MIN_LIMIT && !isSingleEventFetchForGrace
+ const feedGraceMsResolved: number | null =
+ options?.firstRelayResultGraceMs === false
+ ? null
+ : typeof options?.firstRelayResultGraceMs === 'number'
+ ? options.firstRelayResultGraceMs
+ : !replaceableRace && !immediateReturn && useImplicitFeedFirstRelayGrace
+ ? FIRST_RELAY_RESULT_GRACE_MS
+ : null
     return await new Promise<NEvent[]>((resolve) => {
const events: NEvent[] = []
       let resolveTimeout: ReturnType<typeof setTimeout> | null = null
       let firstResultGraceTimeoutId: ReturnType<typeof setTimeout> | null = null
+      let feedFirstResultGraceTimeoutId: ReturnType<typeof setTimeout> | null = null
       let replaceableRaceTimeoutId: ReturnType<typeof setTimeout> | null = null
let allEosed = false
let eventCount = 0
@@ -178,6 +203,7 @@ export class QueryService {
resolved = true
if (resolveTimeout) clearTimeout(resolveTimeout)
if (firstResultGraceTimeoutId) clearTimeout(firstResultGraceTimeoutId)
+ if (feedFirstResultGraceTimeoutId) clearTimeout(feedFirstResultGraceTimeoutId)
if (replaceableRaceTimeoutId) clearTimeout(replaceableRaceTimeoutId)
if (globalTimeoutId) clearTimeout(globalTimeoutId)
@@ -220,11 +246,23 @@ export class QueryService {
}, replaceableRaceWaitMs)
}
+ if (
+ feedGraceMsResolved != null &&
+ events.length >= 1 &&
+ !feedFirstResultGraceTimeoutId &&
+ !replaceableRace
+ ) {
+ feedFirstResultGraceTimeoutId = setTimeout(() => {
+ feedFirstResultGraceTimeoutId = null
+ resolveWithEvents()
+ }, feedGraceMsResolved)
+ }
+
if (!replaceableRace && !immediateReturn && isSingleEventFetch && events.length === 1 && !firstResultGraceTimeoutId) {
firstResultGraceTimeoutId = setTimeout(() => {
firstResultGraceTimeoutId = null
resolveWithEvents()
- }, FIRST_RESULT_GRACE_MS)
+ }, FIRST_RELAY_RESULT_GRACE_MS)
}
if (hasIdFilter && isSingleEventFetch && events.length > 0 && allEosed && !replaceableRace && !immediateReturn) {
@@ -251,6 +289,7 @@ export class QueryService {
}
if (firstResultGraceTimeoutId) clearTimeout(firstResultGraceTimeoutId)
+ if (feedFirstResultGraceTimeoutId) clearTimeout(feedFirstResultGraceTimeoutId)
if (resolveTimeout) clearTimeout(resolveTimeout)
resolveTimeout = setTimeout(() => resolveWithEvents(), eoseTimeout)
}
@@ -446,8 +485,6 @@ export class QueryService {
filter: Filter | Filter[],
options?: {
onevent?: (evt: NEvent) => void
- eoseTimeout?: number
- globalTimeout?: number
} & QueryOptions
   ): Promise<NEvent[]> {
let relays = Array.from(new Set(urls))
@@ -461,6 +498,7 @@ export class QueryService {
const kind1BlockedSet = new Set(KIND_1_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u))
relays = relays.filter((url) => !kind1BlockedSet.has(normalizeUrl(url) || url))
}
- return this.query(relays, filter, options?.onevent, options)
+ const { onevent, ...queryOpts } = options ?? {}
+ return this.query(relays, filter, onevent, queryOpts)
}
}
diff --git a/src/services/client-replaceable-events.service.ts b/src/services/client-replaceable-events.service.ts
index 7a79651a..22b48c5e 100644
--- a/src/services/client-replaceable-events.service.ts
+++ b/src/services/client-replaceable-events.service.ts
@@ -1,4 +1,10 @@
-import { ExtendedKind, FAST_READ_RELAY_URLS, PROFILE_FETCH_RELAY_URLS } from '@/constants'
+import {
+ ExtendedKind,
+ FAST_READ_RELAY_URLS,
+ METADATA_BATCH_QUERY_EOSE_TIMEOUT_MS,
+ METADATA_BATCH_QUERY_GLOBAL_TIMEOUT_MS,
+ PROFILE_FETCH_RELAY_URLS
+} from '@/constants'
import { kinds, nip19 } from 'nostr-tools'
import type { Event as NEvent, Filter } from 'nostr-tools'
import DataLoader from 'dataloader'
@@ -105,7 +111,7 @@ export class ReplaceableEventService {
containingEventRelays: string[] = []
): Promise {
const cacheKey = d ? `${kind}:${pubkey}:${d}` : `${kind}:${pubkey}`
- logger.info('[ReplaceableEventService] fetchReplaceableEvent start', {
+ logger.debug('[ReplaceableEventService] fetchReplaceableEvent start', {
pubkey,
kind,
d,
@@ -119,14 +125,14 @@ export class ReplaceableEventService {
let event: NEvent | undefined
if (containingEventRelays.length > 0 && kind === kinds.Metadata && !d) {
// For profiles with containing event relays (author's relay list), check IndexedDB first, then query directly
- logger.info('[ReplaceableEventService] Checking IndexedDB for profile with containing relays', {
+ logger.debug('[ReplaceableEventService] Checking IndexedDB for profile with containing relays', {
pubkey,
kind
})
try {
const indexedDbCached = await indexedDb.getReplaceableEvent(pubkey, kind, d)
if (indexedDbCached) {
- logger.info('[ReplaceableEventService] Found in IndexedDB', {
+ logger.debug('[ReplaceableEventService] Found in IndexedDB', {
pubkey,
kind,
eventId: indexedDbCached.id
@@ -144,27 +150,32 @@ export class ReplaceableEventService {
}
// Not in IndexedDB, fetch from network with custom relay list
- logger.info('[ReplaceableEventService] Building relay list with containing event relays', {
+ logger.debug('[ReplaceableEventService] Building relay list with containing event relays', {
pubkey,
containingRelayCount: containingEventRelays.length
})
const relayUrls = await this.buildComprehensiveRelayListForAuthor(pubkey, kind, containingEventRelays, [])
- logger.info('[ReplaceableEventService] Querying relays', {
+ logger.debug('[ReplaceableEventService] Querying relays', {
pubkey,
relayCount: relayUrls.length,
relays: relayUrls.slice(0, 5)
})
const startTime = Date.now()
- const events = await this.queryService.query(relayUrls, {
- authors: [pubkey],
- kinds: [kind]
- }, undefined, {
- replaceableRace: true,
- eoseTimeout: 100, // Reduced from 200ms for faster early returns
- globalTimeout: 2000 // Reduced from 3000ms to prevent long waits when many relays are slow
- })
+ const events = await this.queryService.query(
+ relayUrls,
+ {
+ authors: [pubkey],
+ kinds: [kind]
+ },
+ undefined,
+ {
+ replaceableRace: true,
+ eoseTimeout: METADATA_BATCH_QUERY_EOSE_TIMEOUT_MS,
+ globalTimeout: METADATA_BATCH_QUERY_GLOBAL_TIMEOUT_MS
+ }
+ )
const queryTime = Date.now() - startTime
- logger.info('[ReplaceableEventService] Query completed', {
+ logger.debug('[ReplaceableEventService] Query completed', {
pubkey,
eventCount: events.length,
queryTime: `${queryTime}ms`
@@ -173,7 +184,7 @@ export class ReplaceableEventService {
event = sortedEvents.length > 0 ? sortedEvents[0] : undefined
} else {
// Use DataLoader for batching (IndexedDB checks and network fetches are batched)
- logger.info('[ReplaceableEventService] Using DataLoader (batches IndexedDB + network)', {
+ logger.debug('[ReplaceableEventService] Using DataLoader (batches IndexedDB + network)', {
pubkey,
kind,
d
@@ -183,7 +194,7 @@ export class ReplaceableEventService {
? await this.replaceableEventDataLoader.load({ pubkey, kind, d })
: await this.replaceableEventFromBigRelaysDataloader.load({ pubkey, kind })
const loadTime = Date.now() - startTime
- logger.info('[ReplaceableEventService] DataLoader completed', {
+ logger.debug('[ReplaceableEventService] DataLoader completed', {
pubkey,
found: !!loadedEvent,
loadTime: `${loadTime}ms`
@@ -192,7 +203,7 @@ export class ReplaceableEventService {
}
if (event) {
- logger.info('[ReplaceableEventService] Event found', {
+ logger.debug('[ReplaceableEventService] Event found', {
pubkey,
kind,
eventId: event.id,
@@ -225,7 +236,7 @@ export class ReplaceableEventService {
}
}
- logger.info('[ReplaceableEventService] fetchReplaceableEvent returning undefined', {
+ logger.debug('[ReplaceableEventService] fetchReplaceableEvent returning undefined', {
pubkey,
kind
})
@@ -316,7 +327,7 @@ export class ReplaceableEventService {
): Promise<(NEvent | null)[]> {
// CRITICAL: Reduce logging during rapid scrolling - only log large batches
if (params.length > 50) {
- logger.info('[ReplaceableEventService] Large batch load function called', {
+ logger.debug('[ReplaceableEventService] Large batch load function called', {
paramCount: params.length,
kind: params[0]?.kind
})
@@ -399,7 +410,7 @@ export class ReplaceableEventService {
// Only log at info level for large batches
if (missingParams.length > 50) {
- logger.info('[ReplaceableEventService] Fetching missing events from network', {
+ logger.debug('[ReplaceableEventService] Fetching missing events from network', {
missingCount: missingParams.length,
totalCount: params.length
})
@@ -427,14 +438,38 @@ export class ReplaceableEventService {
// For each pubkey, build comprehensive relay list
// CRITICAL FIX: For batch fetches, use default relays instead of fetching relay lists for each author
// Fetching relay lists for hundreds of authors causes infinite loops and browser crashes
- // Use PROFILE_FETCH_RELAY_URLS + FAST_READ_RELAY_URLS for profiles, or FAST_READ_RELAY_URLS for other kinds
- const relayUrls = kind === kinds.Metadata
- ? Array.from(new Set([...PROFILE_FETCH_RELAY_URLS, ...FAST_READ_RELAY_URLS]))
- : [...FAST_READ_RELAY_URLS]
+ // Use PROFILE_FETCH_RELAY_URLS + FAST_READ_RELAY_URLS for profiles, or FAST_READ_RELAY_URLS for other kinds.
+ // For metadata with a logged-in user, merge defaults with {@link buildComprehensiveRelayList}: inboxes (read),
+ // local/cache relays (10432), favorite relays (10012), plus profile + fast read — same idea as favorites feed
+ // / inbox-scoped discovery without per-author relay list fetches.
+ let relayUrls: string[]
+ if (kind === kinds.Metadata) {
+ const userPk = client.pubkey
+ if (userPk) {
+ try {
+ relayUrls = await buildComprehensiveRelayList({
+ userPubkey: userPk,
+ includeUserOwnRelays: false,
+ includeProfileFetchRelays: true,
+ includeFastReadRelays: true,
+ includeFavoriteRelays: true,
+ includeLocalRelays: true,
+ includeFastWriteRelays: false,
+ includeSearchableRelays: false
+ })
+ } catch {
+ relayUrls = Array.from(new Set([...PROFILE_FETCH_RELAY_URLS, ...FAST_READ_RELAY_URLS]))
+ }
+ } else {
+ relayUrls = Array.from(new Set([...PROFILE_FETCH_RELAY_URLS, ...FAST_READ_RELAY_URLS]))
+ }
+ } else {
+ relayUrls = [...FAST_READ_RELAY_URLS]
+ }
// Only log at info level for large batches
if (pubkeys.length > 50) {
- logger.info('[ReplaceableEventService] Starting query for large batch', {
+ logger.debug('[ReplaceableEventService] Starting query for large batch', {
kind,
pubkeyCount: pubkeys.length,
relayCount: relayUrls.length
@@ -446,17 +481,23 @@ export class ReplaceableEventService {
relayCount: relayUrls.length
})
}
- const events = await this.queryService.query(relayUrls, {
- authors: pubkeys,
- kinds: [kind]
- }, undefined, {
- replaceableRace: true,
- eoseTimeout: 100, // Reduced from 200ms for faster early returns
- globalTimeout: 2000 // Reduced from 3000ms to prevent long waits when many relays are slow
- })
+ const isMetadataBatch = kind === kinds.Metadata
+ const events = await this.queryService.query(
+ relayUrls,
+ {
+ authors: pubkeys,
+ kinds: [kind]
+ },
+ undefined,
+ {
+ replaceableRace: true,
+ eoseTimeout: isMetadataBatch ? METADATA_BATCH_QUERY_EOSE_TIMEOUT_MS : 100,
+ globalTimeout: isMetadataBatch ? METADATA_BATCH_QUERY_GLOBAL_TIMEOUT_MS : 2000
+ }
+ )
// Only log at info level for large batches or if many events found
if (pubkeys.length > 50 || events.length > 100) {
- logger.info('[ReplaceableEventService] Query completed for batch', {
+ logger.debug('[ReplaceableEventService] Query completed for batch', {
kind,
pubkeyCount: pubkeys.length,
eventCount: events.length
@@ -488,7 +529,7 @@ export class ReplaceableEventService {
}
// Convert back to array, but limit to reasonable size
const limitedEvents = Array.from(eventsByPubkey.values()).slice(0, 500)
- logger.info('[ReplaceableEventService] Limited batch size', {
+ logger.debug('[ReplaceableEventService] Limited batch size', {
originalCount: events.length,
limitedCount: limitedEvents.length
})
@@ -550,7 +591,7 @@ export class ReplaceableEventService {
// Only log at info level for large batches
if (params.length > 50) {
- logger.info('[ReplaceableEventService] Batch load function completed', {
+ logger.debug('[ReplaceableEventService] Batch load function completed', {
paramCount: params.length,
foundCount: results.filter(r => r !== null).length,
indexedDbCount: params.length - missingParams.length,
@@ -645,17 +686,17 @@ export class ReplaceableEventService {
* Fetch profile event by id (hex, npub, nprofile)
*/
async fetchProfileEvent(id: string, _skipCache: boolean = false): Promise {
- logger.info('[ReplaceableEventService] fetchProfileEvent start', { id })
+ logger.debug('[ReplaceableEventService] fetchProfileEvent start', { id })
let pubkey: string | undefined
let relays: string[] = []
if (/^[0-9a-f]{64}$/.test(id)) {
pubkey = id
- logger.info('[ReplaceableEventService] ID is hex pubkey', { pubkey })
+ logger.debug('[ReplaceableEventService] ID is hex pubkey', { pubkey })
} else {
try {
const { data, type } = nip19.decode(id)
- logger.info('[ReplaceableEventService] Decoded bech32 ID', { type })
+ logger.debug('[ReplaceableEventService] Decoded bech32 ID', { type })
switch (type) {
case 'npub':
pubkey = data
@@ -663,7 +704,7 @@ export class ReplaceableEventService {
case 'nprofile':
pubkey = data.pubkey
if (data.relays) relays = data.relays
- logger.info('[ReplaceableEventService] nprofile has relay hints', { relayCount: relays.length })
+ logger.debug('[ReplaceableEventService] nprofile has relay hints', { relayCount: relays.length })
break
}
} catch (error) {
@@ -687,7 +728,7 @@ export class ReplaceableEventService {
// CRITICAL: Do NOT pass relay hints here - passing any relays bypasses DataLoader and creates individual subscriptions
// DataLoader already uses default relays internally and batches all profile fetches
// We'll use relay hints in Step 2/3 only if Step 1 fails
- logger.info('[ReplaceableEventService] Step 1: Trying with DataLoader (checks cache first, uses default relays, batched)', {
+ logger.debug('[ReplaceableEventService] Step 1: Trying with DataLoader (checks cache first, uses default relays, batched)', {
pubkey,
relayHintCount: relayHints.length,
hasRelayHints: relayHints.length > 0
@@ -698,7 +739,7 @@ export class ReplaceableEventService {
const profileEvent = await this.fetchReplaceableEvent(pubkey, kinds.Metadata, undefined, [])
if (profileEvent) {
- logger.info('[ReplaceableEventService] Profile found with relay hints + default relays', {
+ logger.debug('[ReplaceableEventService] Profile found with relay hints + default relays', {
pubkey,
eventId: profileEvent.id
})
@@ -710,7 +751,7 @@ export class ReplaceableEventService {
// This prevents creating many individual subscriptions when profiles aren't found
// If we have relay hints, it's worth trying author relays. Otherwise, Step 1 should be sufficient.
if (relayHints.length > 0) {
- logger.info('[ReplaceableEventService] Step 2: Profile not found, but we have relay hints - fetching author relay list as fallback', {
+ logger.debug('[ReplaceableEventService] Step 2: Profile not found, but we have relay hints - fetching author relay list as fallback', {
pubkey,
relayHintCount: relayHints.length
})
@@ -730,7 +771,7 @@ export class ReplaceableEventService {
})
authorRelayList = await Promise.race([relayListPromise, timeoutPromise])
const relayListTime = Date.now() - relayListStartTime
- logger.info('[ReplaceableEventService] Author relay list fetched', {
+ logger.debug('[ReplaceableEventService] Author relay list fetched', {
pubkey,
hasRelayList: !!authorRelayList,
fetchTime: `${relayListTime}ms`
@@ -757,7 +798,7 @@ export class ReplaceableEventService {
...FAST_READ_RELAY_URLS // Fast read relays
])]
- logger.info('[ReplaceableEventService] Step 3: Trying with relay hints + author relays', {
+ logger.debug('[ReplaceableEventService] Step 3: Trying with relay hints + author relays', {
pubkey,
relayHintCount: relayHints.length,
authorRelayCount: authorRelays.length,
@@ -773,7 +814,7 @@ export class ReplaceableEventService {
)
if (profileEventFromAuthorRelays) {
- logger.info('[ReplaceableEventService] Profile found with relay hints + author relays', {
+ logger.debug('[ReplaceableEventService] Profile found with relay hints + author relays', {
pubkey,
eventId: profileEventFromAuthorRelays.id
})
@@ -795,7 +836,7 @@ export class ReplaceableEventService {
// when user explicitly navigates to a profile page. For feed rendering, missing profiles are acceptable.
// Only run comprehensive search if we have relay hints (suggesting user intent to find this specific profile)
if (relayHints.length > 0) {
- logger.info('[ReplaceableEventService] Step 3: Profile not found, trying comprehensive relay list (all available relays)', {
+ logger.debug('[ReplaceableEventService] Step 3: Profile not found, trying comprehensive relay list (all available relays)', {
pubkey,
hasRelayHints: relayHints.length > 0
})
@@ -815,7 +856,7 @@ export class ReplaceableEventService {
includeLocalRelays: true // Include local/cache relays
})
- logger.info('[ReplaceableEventService] Comprehensive relay list built', {
+ logger.debug('[ReplaceableEventService] Comprehensive relay list built', {
pubkey,
relayCount: comprehensiveRelays.length,
relays: comprehensiveRelays.slice(0, 10) // Log first 10 for debugging
@@ -834,7 +875,7 @@ export class ReplaceableEventService {
})
const queryTime = Date.now() - startTime
- logger.info('[ReplaceableEventService] Comprehensive search completed', {
+ logger.debug('[ReplaceableEventService] Comprehensive search completed', {
pubkey,
eventCount: events.length,
queryTime: `${queryTime}ms`,
@@ -844,7 +885,7 @@ export class ReplaceableEventService {
if (events.length > 0) {
const sortedEvents = events.sort((a, b) => b.created_at - a.created_at)
const profileEvent = sortedEvents[0]
- logger.info('[ReplaceableEventService] Profile found via comprehensive search', {
+ logger.debug('[ReplaceableEventService] Profile found via comprehensive search', {
pubkey,
eventId: profileEvent.id
})
diff --git a/src/services/client.service.ts b/src/services/client.service.ts
index e2f132a5..56e9be9a 100644
--- a/src/services/client.service.ts
+++ b/src/services/client.service.ts
@@ -1,4 +1,14 @@
-import { FAST_READ_RELAY_URLS, ExtendedKind, FAST_WRITE_RELAY_URLS, KIND_1_BLOCKED_RELAY_URLS, NIP66_DISCOVERY_RELAY_URLS, PROFILE_FETCH_RELAY_URLS, READ_ONLY_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants'
+import {
+ FAST_READ_RELAY_URLS,
+ ExtendedKind,
+ FAST_WRITE_RELAY_URLS,
+ FIRST_RELAY_RESULT_GRACE_MS,
+ KIND_1_BLOCKED_RELAY_URLS,
+ NIP66_DISCOVERY_RELAY_URLS,
+ PROFILE_FETCH_RELAY_URLS,
+ READ_ONLY_RELAY_URLS,
+ SEARCHABLE_RELAY_URLS
+} from '@/constants'
/** NIP-01 filter keys only; NIP-50 adds `search` which non-searchable relays reject. */
function filterForRelay(f: Filter, relaySupportsSearch: boolean): Filter {
@@ -1004,6 +1014,15 @@ class ClientService extends EventTarget {
return { url, filters: filtersForRelay }
})
+ // Kind-1 queries drop KIND_1_BLOCKED_RELAY_URLS; if every URL was removed, no subs run and
+ // oneose would never fire — timelines stay loading forever (e.g. favorites feed).
+ if (groupedRequests.length === 0) {
+ queueMicrotask(() => oneose?.(true))
+ return {
+ close: () => {}
+ }
+ }
+
const eosesReceived: boolean[] = []
const closesReceived: (string | undefined)[] = []
const handleEose = (i: number) => {
@@ -1243,6 +1262,7 @@ class ClientService extends EventTarget {
const PROGRESSIVE_INTERVAL_MS = 100 // Poll for more events while relays are still streaming
const MIN_NEW_EVENTS_AFTER_FIRST = 5 // After first paint, batch updates to limit re-renders
let progressiveIntervalId: ReturnType | null = null
+ let firstRelayResultGraceTimer: ReturnType | null = null
const deliverProgressive = () => {
if (eosedAt || events.length === 0) return
const sortedEvents = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
@@ -1261,6 +1281,88 @@ class ClientService extends EventTarget {
onEvents(needSort && useCache ? snap.concat(cachedEvents).slice(0, filter.limit) : snap, false)
}
}
+
+ const handleTimelineEose = (eosed: boolean) => {
+ if (eosed && eosedAt != null) return
+
+ if (eosed && !eosedAt) {
+ if (firstRelayResultGraceTimer != null) {
+ clearTimeout(firstRelayResultGraceTimer)
+ firstRelayResultGraceTimer = null
+ }
+ eosedAt = dayjs().unix()
+ if (progressiveIntervalId) {
+ clearInterval(progressiveIntervalId)
+ progressiveIntervalId = null
+ }
+ }
+ // (algo feeds) no need to sort and cache
+ if (!needSort) {
+ return onEvents([...events], !!eosedAt)
+ }
+ if (!eosed) {
+ events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
+ // Only include cached events if caching is enabled
+ return onEvents([...(useCache ? events.concat(cachedEvents).slice(0, filter.limit) : events)], false)
+ }
+
+ events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
+
+ // Only update timeline cache if caching is enabled
+ if (useCache) {
+ const timeline = that.timelines[key]
+ // no cache yet
+ if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
+ that.timelines[key] = {
+ refs: events.map((evt) => [evt.id, evt.created_at]),
+ filter,
+ urls
+ }
+ return onEvents([...events], true)
+ }
+
+ // Prevent concurrent requests from duplicating the same event
+ const firstRefCreatedAt = timeline.refs[0][1]
+ const newRefs = events
+ .filter((evt) => evt.created_at > firstRefCreatedAt)
+ .map((evt) => [evt.id, evt.created_at] as TTimelineRef)
+
+ if (events.length >= filter.limit) {
+ // if new refs are more than limit, means old refs are too old, replace them
+ timeline.refs = newRefs
+ onEvents([...events], true)
+ } else {
+ // merge new refs with old refs
+ timeline.refs = newRefs.concat(timeline.refs)
+ onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], true)
+ }
+ } else {
+ // No caching for initial load, but still need to initialize timeline.refs for loadMoreTimeline pagination
+ const timeline = that.timelines[key]
+ if (!timeline || Array.isArray(timeline)) {
+ // Initialize timeline with refs for pagination (even though we don't use cache for initial load)
+ that.timelines[key] = {
+ refs: events.map((evt) => [evt.id, evt.created_at]),
+ filter,
+ urls
+ }
+ } else {
+ // Update refs with new events for pagination tracking
+ const firstRefCreatedAt = timeline.refs.length > 0 ? timeline.refs[0][1] : dayjs().unix()
+ const newRefs = events
+ .filter((evt) => evt.created_at > firstRefCreatedAt)
+ .map((evt) => [evt.id, evt.created_at] as TTimelineRef)
+ if (events.length >= filter.limit) {
+ timeline.refs = newRefs
+ } else {
+ timeline.refs = newRefs.concat(timeline.refs)
+ }
+ }
+ // Return events directly (no cache concatenation)
+ onEvents([...events], true)
+ }
+ }
+
const subCloser = this.subscribe(relays, since ? { ...filter, since } : filter, {
startLogin,
onevent: (evt: NEvent) => {
@@ -1268,6 +1370,12 @@ class ClientService extends EventTarget {
// not eosed yet, push to events
if (!eosedAt) {
events.push(evt)
+ if (firstRelayResultGraceTimer == null) {
+ firstRelayResultGraceTimer = setTimeout(() => {
+ firstRelayResultGraceTimer = null
+ handleTimelineEose(true)
+ }, FIRST_RELAY_RESULT_GRACE_MS)
+ }
// Deliver as soon as we have any event while waiting for EOSE (then batch further updates)
if (needSort && events.length >= 1 && !initialBatchScheduled) {
initialBatchScheduled = true
@@ -1313,86 +1421,17 @@ class ClientService extends EventTarget {
// insert the event to the right position
timeline.refs.splice(idx, 0, [evt.id, evt.created_at])
},
- oneose: (eosed) => {
- if (eosed && !eosedAt) {
- eosedAt = dayjs().unix()
- if (progressiveIntervalId) {
- clearInterval(progressiveIntervalId)
- progressiveIntervalId = null
- }
- }
- // (algo feeds) no need to sort and cache
- if (!needSort) {
- return onEvents([...events], !!eosedAt)
- }
- if (!eosed) {
- events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
- // Only include cached events if caching is enabled
- return onEvents([...(useCache ? events.concat(cachedEvents).slice(0, filter.limit) : events)], false)
- }
-
- events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
-
- // Only update timeline cache if caching is enabled
- if (useCache) {
- const timeline = that.timelines[key]
- // no cache yet
- if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
- that.timelines[key] = {
- refs: events.map((evt) => [evt.id, evt.created_at]),
- filter,
- urls
- }
- return onEvents([...events], true)
- }
-
- // Prevent concurrent requests from duplicating the same event
- const firstRefCreatedAt = timeline.refs[0][1]
- const newRefs = events
- .filter((evt) => evt.created_at > firstRefCreatedAt)
- .map((evt) => [evt.id, evt.created_at] as TTimelineRef)
-
- if (events.length >= filter.limit) {
- // if new refs are more than limit, means old refs are too old, replace them
- timeline.refs = newRefs
- onEvents([...events], true)
- } else {
- // merge new refs with old refs
- timeline.refs = newRefs.concat(timeline.refs)
- onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], true)
- }
- } else {
- // No caching for initial load, but still need to initialize timeline.refs for loadMoreTimeline pagination
- const timeline = that.timelines[key]
- if (!timeline || Array.isArray(timeline)) {
- // Initialize timeline with refs for pagination (even though we don't use cache for initial load)
- that.timelines[key] = {
- refs: events.map((evt) => [evt.id, evt.created_at]),
- filter,
- urls
- }
- } else {
- // Update refs with new events for pagination tracking
- const firstRefCreatedAt = timeline.refs.length > 0 ? timeline.refs[0][1] : dayjs().unix()
- const newRefs = events
- .filter((evt) => evt.created_at > firstRefCreatedAt)
- .map((evt) => [evt.id, evt.created_at] as TTimelineRef)
- if (events.length >= filter.limit) {
- timeline.refs = newRefs
- } else {
- timeline.refs = newRefs.concat(timeline.refs)
- }
- }
- // Return events directly (no cache concatenation)
- onEvents([...events], true)
- }
- },
+ oneose: handleTimelineEose,
onclose: onClose
})
return {
timelineKey: key,
closer: () => {
+ if (firstRelayResultGraceTimer != null) {
+ clearTimeout(firstRelayResultGraceTimer)
+ firstRelayResultGraceTimer = null
+ }
if (progressiveIntervalId) {
clearInterval(progressiveIntervalId)
progressiveIntervalId = null
@@ -1529,6 +1568,7 @@ class ClientService extends EventTarget {
replaceableRace?: boolean
/** For non-replaceable single events: return immediately on first match */
immediateReturn?: boolean
+ firstRelayResultGraceMs?: number | false
}
) {
return this.queryService.query(urls, filter, onevent, options)
@@ -1543,12 +1583,18 @@ class ClientService extends EventTarget {
onevent,
cache = false,
eoseTimeout,
- globalTimeout
+ globalTimeout,
+ firstRelayResultGraceMs,
+ replaceableRace,
+ immediateReturn
}: {
onevent?: (evt: NEvent) => void
cache?: boolean
eoseTimeout?: number
globalTimeout?: number
+ firstRelayResultGraceMs?: number | false
+ replaceableRace?: boolean
+ immediateReturn?: boolean
} = {}
) {
let relays = Array.from(new Set(urls))
@@ -1559,12 +1605,13 @@ class ClientService extends EventTarget {
const kind1BlockedSet = new Set(KIND_1_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u))
relays = relays.filter((url) => !kind1BlockedSet.has(normalizeUrl(url) || url))
}
- const events = await this.queryService.query(
- relays,
- filter,
- onevent,
- { eoseTimeout, globalTimeout }
- )
+ const events = await this.queryService.query(relays, filter, onevent, {
+ eoseTimeout,
+ globalTimeout,
+ firstRelayResultGraceMs,
+ replaceableRace,
+ immediateReturn
+ })
if (cache) {
events.forEach((evt) => {
this.addEventToCache(evt)
diff --git a/src/services/navigation.service.ts b/src/services/navigation.service.ts
index 7a747529..33eb6262 100644
--- a/src/services/navigation.service.ts
+++ b/src/services/navigation.service.ts
@@ -5,7 +5,7 @@
* Handles all navigation logic in a clean, testable way.
*/
-import React, { ReactNode } from 'react'
+import React, { lazy, ReactNode, Suspense } from 'react'
// Page components
import SettingsPage from '@/pages/secondary/SettingsPage'
@@ -21,7 +21,14 @@ import FollowingListPage from '@/pages/secondary/FollowingListPage'
import MuteListPage from '@/pages/secondary/MuteListPage'
import OthersRelaySettingsPage from '@/pages/secondary/OthersRelaySettingsPage'
import SecondaryRelayPage from '@/pages/secondary/RelayPage'
-import SecondaryNoteListPage from '@/pages/secondary/NoteListPage'
+/** Lazy avoids: NavigationService → NoteListPage → NormalFeed → NoteList → PageManager → navigation.service */
+const SecondaryNoteListPageLazy = lazy(() => import('@/pages/secondary/NoteListPage'))
+
+const navLazyFallback = React.createElement(
+ 'div',
+ { className: 'flex flex-1 items-center justify-center p-8 text-sm text-muted-foreground' },
+ 'Loading…'
+)
export type ViewType = 'note' | 'settings' | 'settings-sub' | 'profile' | 'hashtag' | 'relay' | 'following' | 'mute' | 'others-relay-settings' | null
@@ -87,7 +94,11 @@ export class ComponentFactory {
}
static createHashtagPage(): ReactNode {
- return React.createElement(SecondaryNoteListPage, { hideTitlebar: true })
+ return React.createElement(
+ Suspense,
+ { fallback: navLazyFallback },
+ React.createElement(SecondaryNoteListPageLazy, { hideTitlebar: true })
+ )
}
static createFollowingListPage(profileId: string): ReactNode {