Browse Source

bug-fix

imwald
Silberengel 1 month ago
parent
commit
1f1f4bddbb
  1. 2
      src/components/Note/PublicationIndex/PublicationIndex.tsx
  2. 481
      src/components/NoteList/index.tsx
  3. 100
      src/components/NotificationList/index.tsx
  4. 3
      src/components/QuoteList/index.tsx
  5. 103
      src/hooks/useFetchProfile.tsx
  6. 2
      src/hooks/useProfileNotesTimeline.tsx
  7. 2
      src/hooks/useProfileTimeline.tsx
  8. 17
      src/pages/primary/DiscussionsPage/index.tsx
  9. 6
      src/pages/primary/SpellsPage/index.tsx
  10. 26
      src/providers/NotificationProvider.tsx
  11. 121
      src/services/client-replaceable-events.service.ts
  12. 81
      src/services/client.service.ts

2
src/components/Note/PublicationIndex/PublicationIndex.tsx

@@ -537,7 +537,7 @@ export default function PublicationIndex({
},
onNew: () => {} // Not needed for one-time fetch
},
{ needSort: false }
{ needSort: false, useCache: false } // NO CACHING - stream raw from relays
)
// Wait for up to 10 seconds for events to arrive or eosed

481
src/components/NoteList/index.tsx

@@ -2,6 +2,7 @@ import NewNotesButton from '@/components/NewNotesButton'
import { Button } from '@/components/ui/button'
import { ExtendedKind } from '@/constants'
import {
getEmbeddedNoteBech32Ids,
getReplaceableCoordinateFromEvent,
isMentioningMutedUsers,
isReplaceableEvent,
@@ -17,6 +18,7 @@ import { useNostr } from '@/providers/NostrProvider'
import { useUserTrust } from '@/providers/UserTrustProvider'
import { useZap } from '@/providers/ZapProvider'
import client from '@/services/client.service'
import logger from '@/lib/logger'
import { TFeedSubRequest } from '@/types'
import dayjs from 'dayjs'
import { Event, kinds } from 'nostr-tools'
@@ -35,9 +37,9 @@ import PullToRefresh from 'react-simple-pull-to-refresh'
import { toast } from 'sonner'
import NoteCard, { NoteCardLoadingSkeleton } from '../NoteCard'
const LIMIT = 200
const ALGO_LIMIT = 500
const SHOW_COUNT = 10
const LIMIT = 500 // Increased from 200 to load more events per request
const ALGO_LIMIT = 1000 // Increased from 500 for algorithm feeds
const SHOW_COUNT = 50 // Increased from 10 to show more events at once, reducing scroll load frequency
const NoteList = forwardRef(
(
@@ -88,6 +90,8 @@ const NoteList = forwardRef(
const supportTouch = useMemo(() => isTouchDevice(), [])
const bottomRef = useRef<HTMLDivElement | null>(null)
const topRef = useRef<HTMLDivElement | null>(null)
const consecutiveEmptyRef = useRef(0) // Track consecutive empty results to prevent infinite retries
const loadMoreTimeoutRef = useRef<NodeJS.Timeout | null>(null) // Throttle loadMore calls to prevent stuttering
// Memoize subRequests serialization to avoid expensive JSON.stringify on every render
const subRequestsKey = useMemo(() => {
@@ -212,6 +216,7 @@ const NoteList = forwardRef(
setEvents([])
setNewEvents([])
setHasMore(true)
consecutiveEmptyRef.current = 0 // Reset counter on refresh
if (showKinds.length === 0) {
setLoading(false)
@@ -234,13 +239,57 @@ const NoteList = forwardRef(
onEvents: (events, eosed) => {
if (events.length > 0) {
setEvents(events)
// CRITICAL: Prefetch profiles for initial events (reduced batch size for faster initial load)
// This ensures profiles are ready before user starts scrolling
// Reduced from 300 to 150 to reduce initial load time
const initialPubkeys = Array.from(
new Set(events.slice(0, 150).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
if (initialPubkeys.length > 0) {
// Filter out already prefetched pubkeys
const pubkeysToFetch = initialPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (pubkeysToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch in background (non-blocking)
client.fetchProfilesForPubkeys(pubkeysToFetch).catch(() => {
// On error, remove from prefetched set so we can retry later
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
}
}
// CRITICAL: Prefetch embedded events for initial events
// Extract embedded event IDs from initial events
const initialEmbeddedEventIds = new Set<string>()
events.slice(0, 150).forEach((ev) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id) => initialEmbeddedEventIds.add(id))
})
const eventIdsToFetch = Array.from(initialEmbeddedEventIds).filter(
(id) => !prefetchedEventIdsRef.current.has(id)
)
if (eventIdsToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
// Batch fetch embedded events in background (non-blocking)
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
// On error, remove from prefetched set so we can retry later
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
})
}
}
if (areAlgoRelays) {
setHasMore(false)
}
if (eosed) {
setLoading(false)
setHasMore(events.length > 0)
// CRITICAL FIX: Always set hasMore to true on eosed, even if we have few events
// The initial load might only return a few events due to filtering or relay limits
// We should still try to load more on scroll - the loadMore logic will handle stopping
// Only set to false if we explicitly know there are no more events (handled in loadMore)
setHasMore(true)
}
},
onNew: (event) => {
@@ -297,7 +346,8 @@ const NoteList = forwardRef(
},
{
startLogin,
needSort: !areAlgoRelays
needSort: !areAlgoRelays,
useCache: false // Main feeds should always fetch fresh from relays, not use cache
}
)
setTimelineKey(timelineKey)
@@ -310,39 +360,199 @@ const NoteList = forwardRef(
}
}, [subRequestsKey, refreshCount, showKinds, showKind1OPs, showKind1Replies, showKind1111, useFilterAsIs])
// Use refs to avoid dependency issues and ensure latest values in async callbacks
const eventsRef = useRef(events)
const showCountRef = useRef(showCount)
const loadingRef = useRef(loading)
const hasMoreRef = useRef(hasMore)
const timelineKeyRef = useRef(timelineKey)
useEffect(() => {
eventsRef.current = events
}, [events])
useEffect(() => {
showCountRef.current = showCount
}, [showCount])
useEffect(() => {
const options = {
loadingRef.current = loading
}, [loading])
useEffect(() => {
hasMoreRef.current = hasMore
}, [hasMore])
useEffect(() => {
timelineKeyRef.current = timelineKey
}, [timelineKey])
useEffect(() => {
const options: IntersectionObserverInit = {
root: null,
rootMargin: '10px',
threshold: 0.1
}
const loadMore = async () => {
if (showCount < events.length) {
setShowCount((prev) => prev + SHOW_COUNT)
// preload more
if (events.length - showCount > LIMIT / 2) {
const loadMore = async (): Promise<void> => {
const currentEvents = eventsRef.current
const currentShowCount = showCountRef.current
const currentLoading = loadingRef.current
const currentHasMore = hasMoreRef.current
const currentTimelineKey = timelineKeyRef.current
// CRITICAL: Throttle loadMore calls to prevent stuttering during rapid scrolling
if (loadMoreTimeoutRef.current) {
return // Already scheduled, skip
}
// Show more events immediately if we have them cached
if (currentShowCount < currentEvents.length) {
// Show more aggressively: increase by SHOW_COUNT, but also check if we should show even more
const remaining = currentEvents.length - currentShowCount
const increment = Math.min(SHOW_COUNT * 2, remaining) // Show up to 2x SHOW_COUNT if available
setShowCount((prev) => prev + increment)
// Only preload more if we have plenty cached (more than 3/4 of LIMIT)
// BUT: Always try to load more if we have very few events (might be due to filtering)
if (currentEvents.length - currentShowCount > LIMIT * 0.75 && currentEvents.length >= 50) {
return
}
// If we have very few events, always try to load more (might be aggressive filtering)
if (currentEvents.length < 50) {
// Continue to loadMore below even if we have cached events
// This ensures we keep loading when filtering is aggressive
}
}
if (!timelineKey || loading || !hasMore) return
if (!currentTimelineKey || currentLoading || !currentHasMore) return
// Schedule loadMore with a small delay to throttle rapid calls
loadMoreTimeoutRef.current = setTimeout(async () => {
loadMoreTimeoutRef.current = null
const latestEvents = eventsRef.current
const latestTimelineKey = timelineKeyRef.current
const latestLoading = loadingRef.current
const latestHasMore = hasMoreRef.current
if (!latestTimelineKey || latestLoading || !latestHasMore) return
setLoading(true)
const newEvents = await client.loadMoreTimeline(
timelineKey,
events.length ? events[events.length - 1].created_at - 1 : dayjs().unix(),
let newEvents: Event[] = []
try {
const until = latestEvents.length ? latestEvents[latestEvents.length - 1].created_at - 1 : dayjs().unix()
newEvents = await client.loadMoreTimeline(
latestTimelineKey,
until,
LIMIT
)
setLoading(false)
// CRITICAL FIX: Be extremely conservative about stopping the feed
// Only stop if we're absolutely certain there are no more events
if (newEvents.length === 0) {
// Check if timeline has more cached refs that we haven't loaded yet
const hasMoreCached = client.hasMoreTimelineEvents?.(latestTimelineKey, until) ?? false
if (hasMoreCached) {
// There are more cached events, keep hasMore true and try again
setLoading(false)
// Retry after a short delay to allow IndexedDB to catch up
setTimeout(() => {
if (hasMoreRef.current && !loadingRef.current) {
loadMore()
}
}, 300)
return
}
// No cached events and network returned empty
// Be VERY patient - don't stop too early, especially when we have few events
// This prevents stopping due to temporary relay issues or slow relays
consecutiveEmptyRef.current += 1
// CRITICAL FIX: Only stop if we have MANY consecutive empty results
// This ensures we don't stop prematurely when relays are slow or filtering is aggressive
// Even with few visible events, we might have many events that are filtered out
if (consecutiveEmptyRef.current >= 20) {
// After 20 consecutive empty results, assume we've reached the end
// Increased from 10 to 20 to be even more patient with slow relays
setHasMore(false)
}
// Otherwise, keep hasMore true to allow retry on next scroll
// This ensures the feed continues trying even if relays are slow
setLoading(false)
return
}
// Reset consecutive empty counter on success
consecutiveEmptyRef.current = 0
setEvents((oldEvents) => [...oldEvents, ...newEvents])
// NEVER automatically set hasMore to false based on result count
// Only stop when we get consecutive empty results
// This ensures the feed continues loading even with partial results
// CRITICAL: Prefetch profiles for newly loaded events (throttled to reduce frequency)
// This ensures profiles are ready before user scrolls to them
if (newEvents.length > 0) {
// Throttle profile prefetching for newly loaded events to reduce network load
setTimeout(() => {
const newPubkeys = Array.from(
new Set(newEvents.map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
if (newPubkeys.length > 0) {
// Filter out already prefetched pubkeys
const pubkeysToFetch = newPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (pubkeysToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch in background (non-blocking)
client.fetchProfilesForPubkeys(pubkeysToFetch).catch(() => {
// On error, remove from prefetched set so we can retry later
pubkeysToFetch.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
}
}
// CRITICAL: Prefetch embedded events for newly loaded events
const newEmbeddedEventIds = new Set<string>()
newEvents.forEach((ev) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id) => newEmbeddedEventIds.add(id))
})
const eventIdsToFetch = Array.from(newEmbeddedEventIds).filter(
(id) => !prefetchedEventIdsRef.current.has(id)
)
if (eventIdsToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
// Batch fetch embedded events in background (non-blocking)
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
// On error, remove from prefetched set so we can retry later
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
})
}
}, 100) // Small delay to batch with other profile fetches
}
} catch (error) {
// On error, don't set hasMore to false - might be temporary network issue
logger.error('[NoteList] Error loading more events', { error })
consecutiveEmptyRef.current += 1
// Only stop after MANY consecutive errors - be very patient with network issues
// This prevents stopping when relays are temporarily down or slow
if (consecutiveEmptyRef.current >= 25) {
// Increased from 15 to 25 to be even more patient with network issues
setHasMore(false)
}
} finally {
setLoading(false)
}
}, 50) // Reduced delay from 100ms to 50ms for more responsive scrolling
}
const observerInstance = new IntersectionObserver((entries) => {
if (entries[0].isIntersecting && hasMore) {
if (entries[0].isIntersecting && hasMoreRef.current && !loadingRef.current) {
// Throttle: only trigger if not already loading and not already scheduled
loadMore()
}
}, options)
@@ -357,22 +567,241 @@ const NoteList = forwardRef(
if (observerInstance && currentBottomRef) {
observerInstance.unobserve(currentBottomRef)
}
// Clean up timeout on unmount
if (loadMoreTimeoutRef.current) {
clearTimeout(loadMoreTimeoutRef.current)
loadMoreTimeoutRef.current = null
}
}, [loading, hasMore, events, showCount, timelineKey])
}
// Dependencies are handled via refs to avoid stale closures in async callbacks
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
// Prefetch profiles for visible authors in one batched request (IndexedDB + one relay request)
// CRITICAL: Prefetch profiles for visible authors + upcoming events in one batched request
// This prevents browser crashes during rapid scrolling by pre-loading profiles before they're needed
const visiblePubkeysRef = useRef<Set<string>>(new Set())
const prefetchedPubkeysRef = useRef<Set<string>>(new Set())
const prefetchTimeoutRef = useRef<NodeJS.Timeout | null>(null)
// CRITICAL: Prefetch embedded events (referenced in e tags, a tags, and content)
// This ensures embedded events are ready before user scrolls to them
const prefetchedEventIdsRef = useRef<Set<string>>(new Set())
const prefetchEmbeddedEventsTimeoutRef = useRef<NodeJS.Timeout | null>(null)
// Helper function to extract all embedded event IDs from an event
const extractEmbeddedEventIds = useCallback((evt: Event): string[] => {
const eventIds: string[] = []
// 1. Extract from 'e' tags (event references)
evt.tags
.filter((tag) => tag[0] === 'e' && tag[1] && tag[1].length === 64)
.forEach((tag) => {
const eventId = tag[1]
if (eventId && /^[0-9a-f]{64}$/.test(eventId)) {
eventIds.push(eventId)
}
})
// 2. Extract from 'a' tags (addressable events) - get event ID if present
evt.tags
.filter((tag) => tag[0] === 'a' && tag[3]) // tag[3] is the event ID for version tracking
.forEach((tag) => {
const eventId = tag[3]
if (eventId && /^[0-9a-f]{64}$/.test(eventId)) {
eventIds.push(eventId)
}
})
// 3. Extract from content (nostr: links)
// Note: getEmbeddedNoteBech32Ids returns hex IDs (despite the name)
const embeddedNoteIds = getEmbeddedNoteBech32Ids(evt)
embeddedNoteIds.forEach((id) => {
// The function already returns hex IDs, so use them directly
if (id && /^[0-9a-f]{64}$/.test(id)) {
eventIds.push(id)
}
})
return Array.from(new Set(eventIds)) // Deduplicate
}, [])
useEffect(() => {
const pubkeys = Array.from(
new Set(filteredEvents.slice(0, 80).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
// Throttle profile prefetching to reduce frequency during rapid scrolling
// Clear any existing timeout
if (prefetchTimeoutRef.current) {
clearTimeout(prefetchTimeoutRef.current)
}
// Debounce profile prefetching by 200ms to reduce frequency during rapid scrolling
prefetchTimeoutRef.current = setTimeout(() => {
// Prefetch profiles for:
// 1. Currently visible events (first 60, reduced from 80)
// 2. Upcoming events that will be visible when scrolling (next 150, reduced from 300)
// This ensures profiles are ready before they're needed during rapid scrolling
const visiblePubkeys = Array.from(
new Set(filteredEvents.slice(0, 60).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
const upcomingPubkeys = Array.from(
new Set(events.slice(0, 150).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
if (pubkeys.length === 0) return
// Combine visible and upcoming, but prioritize visible ones
const allPubkeys = Array.from(new Set([...visiblePubkeys, ...upcomingPubkeys]))
if (allPubkeys.length === 0) return
// Check if we've already prefetched these exact pubkeys
const prev = visiblePubkeysRef.current
const same = pubkeys.length === prev.size && pubkeys.every((p) => prev.has(p))
const same = allPubkeys.length === prev.size && allPubkeys.every((p) => prev.has(p))
if (same) return
visiblePubkeysRef.current = new Set(pubkeys)
client.fetchProfilesForPubkeys(pubkeys).catch(() => {})
}, [filteredEvents])
// Find pubkeys that haven't been prefetched yet
const newPubkeys = allPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (newPubkeys.length === 0) {
// All pubkeys already prefetched, just update the ref
visiblePubkeysRef.current = new Set(allPubkeys)
return
}
// Update refs
visiblePubkeysRef.current = new Set(allPubkeys)
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch profiles for new pubkeys (IndexedDB + network in one request)
// This is the key optimization: batch processing prevents individual fetches during scrolling
client.fetchProfilesForPubkeys(newPubkeys).catch(() => {
// On error, remove from prefetched set so we can retry later
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
}, 200) // Debounce by 200ms to reduce frequency
return () => {
if (prefetchTimeoutRef.current) {
clearTimeout(prefetchTimeoutRef.current)
prefetchTimeoutRef.current = null
}
}
}, [filteredEvents, events, extractEmbeddedEventIds])
// CRITICAL: Prefetch embedded events for visible events
useEffect(() => {
// Throttle embedded event prefetching to reduce frequency during rapid scrolling
// Clear any existing timeout
if (prefetchEmbeddedEventsTimeoutRef.current) {
clearTimeout(prefetchEmbeddedEventsTimeoutRef.current)
}
// Debounce embedded event prefetching by 300ms to reduce frequency during rapid scrolling
prefetchEmbeddedEventsTimeoutRef.current = setTimeout(() => {
// Extract embedded event IDs from visible events (first 60)
const visibleEmbeddedEventIds = new Set<string>()
filteredEvents.slice(0, 60).forEach((ev) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id) => visibleEmbeddedEventIds.add(id))
})
// Also extract from upcoming events (next 150)
const upcomingEmbeddedEventIds = new Set<string>()
events.slice(0, 150).forEach((ev) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id) => upcomingEmbeddedEventIds.add(id))
})
// Combine visible and upcoming
const allEmbeddedEventIds = Array.from(
new Set([...visibleEmbeddedEventIds, ...upcomingEmbeddedEventIds])
)
if (allEmbeddedEventIds.length === 0) return
// Filter out already prefetched event IDs
const eventIdsToFetch = allEmbeddedEventIds.filter(
(id) => !prefetchedEventIdsRef.current.has(id)
)
if (eventIdsToFetch.length === 0) return
// Mark as prefetched immediately to prevent duplicate requests
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
// Batch fetch embedded events in background (non-blocking)
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
// On error, remove from prefetched set so we can retry later
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
})
}, 300) // Debounce by 300ms to reduce frequency
return () => {
if (prefetchEmbeddedEventsTimeoutRef.current) {
clearTimeout(prefetchEmbeddedEventsTimeoutRef.current)
prefetchEmbeddedEventsTimeoutRef.current = null
}
}
}, [filteredEvents, events, extractEmbeddedEventIds])
// Also prefetch when loading more events (scrolling down)
// Throttled to reduce frequency during rapid scrolling
const prefetchNewEventsTimeoutRef = useRef<NodeJS.Timeout | null>(null)
useEffect(() => {
if (loading || !hasMore) return
// Clear any existing timeout
if (prefetchNewEventsTimeoutRef.current) {
clearTimeout(prefetchNewEventsTimeoutRef.current)
}
// Debounce profile prefetching for newly loaded events
prefetchNewEventsTimeoutRef.current = setTimeout(() => {
// When we have more events loaded, prefetch profiles for the newly loaded ones
// Reduced from 200 to 100 to reduce batch size
const newlyLoadedPubkeys = Array.from(
new Set(events.slice(showCount, showCount + 100).map((ev) => ev.pubkey).filter((p) => p?.length === 64))
)
if (newlyLoadedPubkeys.length > 0) {
// Filter out already prefetched pubkeys
const newPubkeys = newlyLoadedPubkeys.filter((p) => !prefetchedPubkeysRef.current.has(p))
if (newPubkeys.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.add(p))
// Batch fetch in background (non-blocking)
client.fetchProfilesForPubkeys(newPubkeys).catch(() => {
// On error, remove from prefetched set so we can retry later
newPubkeys.forEach((p) => prefetchedPubkeysRef.current.delete(p))
})
}
}
// CRITICAL: Prefetch embedded events for newly loaded events
const newlyLoadedEmbeddedEventIds = new Set<string>()
events.slice(showCount, showCount + 100).forEach((ev) => {
const embeddedIds = extractEmbeddedEventIds(ev)
embeddedIds.forEach((id) => newlyLoadedEmbeddedEventIds.add(id))
})
const eventIdsToFetch = Array.from(newlyLoadedEmbeddedEventIds).filter(
(id) => !prefetchedEventIdsRef.current.has(id)
)
if (eventIdsToFetch.length > 0) {
// Mark as prefetched immediately to prevent duplicate requests
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.add(id))
// Batch fetch embedded events in background (non-blocking)
Promise.all(eventIdsToFetch.map((id) => client.fetchEvent(id))).catch(() => {
// On error, remove from prefetched set so we can retry later
eventIdsToFetch.forEach((id) => prefetchedEventIdsRef.current.delete(id))
})
}
}, 300) // Debounce by 300ms to reduce frequency during rapid scrolling
return () => {
if (prefetchNewEventsTimeoutRef.current) {
clearTimeout(prefetchNewEventsTimeoutRef.current)
prefetchNewEventsTimeoutRef.current = null
}
}
}, [events.length, showCount, loading, hasMore])
const showNewEvents = () => {
setEvents((oldEvents) => [...newEvents, ...oldEvents])

100
src/components/NotificationList/index.tsx

@@ -24,8 +24,8 @@ import PullToRefresh from 'react-simple-pull-to-refresh'
import { NotificationItem } from './NotificationItem'
import { NotificationSkeleton } from './NotificationItem/Notification'
import { isTouchDevice } from '@/lib/utils'
const LIMIT = 100
const SHOW_COUNT = 30
const LIMIT = 500 // Increased from 100 to load more notifications per request
const SHOW_COUNT = 50 // Increased from 30 to show more notifications at once
const NotificationList = forwardRef(
(
@@ -52,6 +52,7 @@ const NotificationList = forwardRef(
const supportTouch = useMemo(() => isTouchDevice(), [])
const topRef = useRef<HTMLDivElement | null>(null)
const bottomRef = useRef<HTMLDivElement | null>(null)
const consecutiveEmptyRef = useRef(0) // Track consecutive empty results to prevent premature stopping
const filterKinds = useMemo(() => {
switch (notificationType) {
case 'mentions':
@@ -98,6 +99,21 @@ const NotificationList = forwardRef(
setShowCount(SHOW_COUNT)
}, [notificationType])
// Batch stats updates to avoid calling updateNoteStatsByEvents for every single event
const pendingStatsEventsRef = useRef<NostrEvent[]>([])
const statsBatchTimeoutRef = useRef<NodeJS.Timeout | null>(null)
const flushStatsBatch = useCallback(() => {
if (pendingStatsEventsRef.current.length > 0) {
noteStatsService.updateNoteStatsByEvents(pendingStatsEventsRef.current)
pendingStatsEventsRef.current = []
}
if (statsBatchTimeoutRef.current) {
clearTimeout(statsBatchTimeoutRef.current)
statsBatchTimeoutRef.current = null
}
}, [])
const handleNewEvent = useCallback(
(event: NostrEvent) => {
if (event.pubkey === pubkey) return
@@ -109,14 +125,20 @@ const NotificationList = forwardRef(
}
const index = oldEvents.findIndex((oldEvent) => compareEvents(oldEvent, event) <= 0)
noteStatsService.updateNoteStatsByEvents([event])
// Batch stats updates instead of calling for each event
pendingStatsEventsRef.current.push(event)
if (!statsBatchTimeoutRef.current) {
statsBatchTimeoutRef.current = setTimeout(flushStatsBatch, 500) // Batch every 500ms
}
if (index === -1) {
return [...oldEvents, event]
}
return [...oldEvents.slice(0, index), event, ...oldEvents.slice(index)]
})
},
[pubkey]
[pubkey, flushStatsBatch]
)
useEffect(() => {
@@ -182,12 +204,19 @@ const NotificationList = forwardRef(
if (eosed) {
setLoading(false)
setUntil(events.length > 0 ? events[events.length - 1].created_at - 1 : undefined)
// Batch stats update for initial load - only process events that don't have stats yet
// This avoids redundant processing since updateNoteStatsByEvents is idempotent but still expensive
if (events.length > 0) {
noteStatsService.updateNoteStatsByEvents(events)
}
}
},
onNew: (event) => {
handleNewEvent(event)
}
},
{
useCache: false // Notifications should always fetch fresh from relays, not use cache
}
)
setTimelineKey(timelineKey)
@@ -197,8 +226,15 @@ const NotificationList = forwardRef(
const promise = init()
return () => {
promise.then((closer) => closer?.())
// Clean up stats batch timeout on unmount
if (statsBatchTimeoutRef.current) {
clearTimeout(statsBatchTimeoutRef.current)
statsBatchTimeoutRef.current = null
}
flushStatsBatch() // Flush any pending stats updates
consecutiveEmptyRef.current = 0 // Reset counter on refresh
}
}, [pubkey, refreshCount, filterKinds, current])
}, [pubkey, refreshCount, filterKinds, current, flushStatsBatch])
useEffect(() => {
if (!active || !pubkey) return
@@ -260,22 +296,61 @@ const NotificationList = forwardRef(
const currentLoading = loadingRef.current
if (currentShowCount < currentNotifications.length) {
setShowCount((count) => count + SHOW_COUNT)
// preload more
if (currentNotifications.length - currentShowCount > LIMIT / 2) {
// Show more aggressively: increase by SHOW_COUNT, but also check if we should show even more
const remaining = currentNotifications.length - currentShowCount
const increment = Math.min(SHOW_COUNT * 2, remaining) // Show up to 2x SHOW_COUNT if available
setShowCount((count) => count + increment)
// Only preload more if we have plenty cached (more than 3/4 of LIMIT)
// BUT: Always try to load more if we have very few notifications (might be due to filtering)
if (currentNotifications.length - currentShowCount > LIMIT * 0.75 && currentNotifications.length >= 50) {
return
}
// If we have very few notifications, always try to load more (might be aggressive filtering)
if (currentNotifications.length < 50) {
// Continue to loadMore below even if we have cached notifications
// This ensures we keep loading when filtering is aggressive
}
}
if (!pubkey || !timelineKey || !until || currentLoading) return
setLoading(true)
try {
const newNotifications = await client.loadMoreTimeline(timelineKey, until, LIMIT)
// CRITICAL FIX: Don't stop immediately on empty results - might be temporary relay issues
// Only stop if we've tried many times with no results
if (newNotifications.length === 0) {
// Check if timeline has more cached refs that we haven't loaded yet
const hasMoreCached = client.hasMoreTimelineEvents?.(timelineKey, until) ?? false
if (hasMoreCached) {
// There are more cached notifications, keep trying
consecutiveEmptyRef.current = 0 // Reset counter when we have cached events
setLoading(false)
// Retry after a short delay to allow IndexedDB to catch up
setTimeout(() => {
if (until) {
loadMore()
}
}, 300)
return
}
// No cached notifications and network returned empty
// Be patient - don't stop too early, especially when we have few notifications
consecutiveEmptyRef.current += 1
// Only stop after MANY consecutive empty results (similar to NoteList)
if (consecutiveEmptyRef.current >= 20) {
// After 20 consecutive empty results, assume we've reached the end
setUntil(undefined)
setLoading(false)
return
}
// Otherwise, keep trying on next scroll
setLoading(false)
return
}
// Reset consecutive empty counter on success
consecutiveEmptyRef.current = 0
if (newNotifications.length > 0) {
setNotifications((oldNotifications) => [
...oldNotifications,
@@ -284,6 +359,14 @@ const NotificationList = forwardRef(
}
setUntil(newNotifications[newNotifications.length - 1].created_at - 1)
} catch (error) {
// On error, don't stop immediately - might be temporary network issue
logger.error('[NotificationList] Error loading more notifications', { error })
consecutiveEmptyRef.current += 1
// Only stop after MANY consecutive errors - be very patient with network issues
if (consecutiveEmptyRef.current >= 25) {
setUntil(undefined)
}
} finally {
setLoading(false)
}
@@ -310,6 +393,7 @@ const NotificationList = forwardRef(
const refresh = () => {
topRef.current?.scrollIntoView({ behavior: 'instant', block: 'start' })
consecutiveEmptyRef.current = 0 // Reset counter on refresh
setTimeout(() => {
setRefreshCount((count) => count + 1)
}, 500)

3
src/components/QuoteList/index.tsx

@@ -90,6 +90,9 @@ export default function QuoteList({ event, className }: { event: Event; classNam
[event, ...oldEvents].sort((a, b) => b.created_at - a.created_at)
)
}
},
{
useCache: false // NO CACHING - stream raw from relays
}
)
setTimelineKey(timelineKey)

103
src/hooks/useFetchProfile.tsx

@@ -29,38 +29,35 @@ export function useFetchProfile(id?: string, skipCache = false) {
// fetchProfileEvent already checks: 1) IndexedDB, 2) network (with author's relays)
// Memoize to prevent recreation on every render
const checkProfile = useCallback(async (pubkey: string, cancelled: { current: boolean }) => {
logger.info('[useFetchProfile] checkProfile called', {
pubkey,
// CRITICAL: Reduce logging during rapid scrolling to prevent performance issues
// Only log at debug level during normal operations
logger.debug('[useFetchProfile] checkProfile called', {
pubkey: pubkey.substring(0, 8),
cancelled: cancelled.current,
skipCache
})
if (cancelled.current) {
logger.info('[useFetchProfile] Already cancelled, returning false')
logger.debug('[useFetchProfile] Already cancelled, returning false')
return false
}
logger.info('[useFetchProfile] Starting profile fetch', {
pubkey,
skipCache
})
try {
const startTime = Date.now()
logger.info('[useFetchProfile] Calling fetchProfileEvent', {
pubkey
})
// Use fetchProfileEvent which includes author's relay list for better profile discovery
const profileEvent = await replaceableEventService.fetchProfileEvent(pubkey, skipCache)
const fetchTime = Date.now() - startTime
logger.info('[useFetchProfile] fetchProfileEvent returned', {
pubkey,
// Only log at info level if profile was found or if fetch took a long time
if (profileEvent || fetchTime > 1000) {
logger.info('[useFetchProfile] fetchProfileEvent completed', {
pubkey: pubkey.substring(0, 8),
hasEvent: !!profileEvent,
eventId: profileEvent?.id,
eventId: profileEvent?.id?.substring(0, 8),
fetchTime: `${fetchTime}ms`
})
}
if (cancelled.current) {
logger.info('[useFetchProfile] Fetch cancelled after fetch', { pubkey })
@ -70,11 +67,11 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -70,11 +67,11 @@ export function useFetchProfile(id?: string, skipCache = false) {
if (profileEvent) {
// getProfileFromEvent always returns a profile object (with fallback username)
const newProfile = getProfileFromEvent(profileEvent)
logger.info('[useFetchProfile] Profile found', {
pubkey,
// Only log at debug level to reduce noise during rapid scrolling
logger.debug('[useFetchProfile] Profile found', {
pubkey: pubkey.substring(0, 8),
username: newProfile.username,
hasAvatar: !!newProfile.avatar,
eventId: profileEvent.id,
fetchTime: `${fetchTime}ms`
})
setProfile(newProfile)
@ -91,10 +88,13 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -91,10 +88,13 @@ export function useFetchProfile(id?: string, skipCache = false) {
effectRunCountRef.current.delete(pubkey)
return true
}
logger.warn('[useFetchProfile] No profile event found', {
pubkey,
// Only log warnings for missing profiles if skipCache is true (user explicitly requested)
if (skipCache) {
logger.debug('[useFetchProfile] No profile event found', {
pubkey: pubkey.substring(0, 8),
fetchTime: `${fetchTime}ms`
})
}
return false
} catch (err) {
logger.error('[useFetchProfile] Profile fetch error', {
@ -214,22 +214,13 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -214,22 +214,13 @@ export function useFetchProfile(id?: string, skipCache = false) {
}
const cancelled = { current: false }
logger.info('[useFetchProfile] Attempting to extract pubkey', {
id,
// CRITICAL: Reduce logging during rapid scrolling - only log at debug level
logger.debug('[useFetchProfile] Extracting pubkey', {
idLength: id.length,
idStartsWithNpub: id.startsWith('npub1'),
idStartsWithNprofile: id.startsWith('nprofile1')
})
// Use the already-extracted pubkey from above
// const extractedPubkey = userIdToPubkey(id) // Already extracted above
logger.info('[useFetchProfile] Extracted pubkey result', {
id,
extractedPubkey: extractedPubkey || 'null',
pubkeyLength: extractedPubkey ? extractedPubkey.length : 0,
isValidPubkey: extractedPubkey ? /^[0-9a-f]{64}$/.test(extractedPubkey) : false
})
if (!extractedPubkey) {
logger.error('[useFetchProfile] Invalid id - could not extract pubkey', {
id,
@ -287,32 +278,27 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -287,32 +278,27 @@ export function useFetchProfile(id?: string, skipCache = false) {
if (pubkey !== extractedPubkey) {
setPubkey(extractedPubkey)
}
logger.info('[useFetchProfile] Starting profile fetch async', {
extractedPubkey,
currentPubkeyState: pubkey || 'null'
// CRITICAL: Reduce logging during rapid scrolling
logger.debug('[useFetchProfile] Starting profile fetch', {
pubkey: extractedPubkey?.substring(0, 8) || 'null'
})
const run = async () => {
logger.info('[useFetchProfile] run() async function started', {
pubkey: extractedPubkey
})
try {
setIsFetching(true)
setError(null)
logger.info('[useFetchProfile] Calling checkProfile', {
pubkey: extractedPubkey
})
// Initial fetch - fetchReplaceableEvent checks: 1) in-memory, 2) IndexedDB, 3) network
const found = await checkProfile(extractedPubkey, cancelled)
logger.info('[useFetchProfile] checkProfile returned', {
pubkey: extractedPubkey,
// Only log if profile was found or if cancelled (important events)
if (found || cancelled.current) {
logger.debug('[useFetchProfile] checkProfile completed', {
pubkey: extractedPubkey?.substring(0, 8),
found,
cancelled: cancelled.current
})
}
if (cancelled.current) {
logger.info('[useFetchProfile] Cancelled after checkProfile, cleaning up')
@ -321,22 +307,26 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -321,22 +307,26 @@ export function useFetchProfile(id?: string, skipCache = false) {
}
if (found) {
logger.info('[useFetchProfile] Profile found, done')
// Profile found (from cache or network), we're done
return
}
logger.info('[useFetchProfile] No profile found, setting up interval retry')
logger.debug('[useFetchProfile] No profile found, considering retry')
// No profile found yet - set fetching to false so UI can show fallback
// The profile will remain null, allowing components to show npub fallback
setIsFetching(false)
setError(null) // Clear any previous errors
// CRITICAL FIX: Disable retry intervals during rapid scrolling to prevent browser crashes
// Only retry if skipCache is true (user explicitly wants to refresh)
// For normal feed scrolling, missing profiles are acceptable and will be fetched on-demand
// This prevents accumulation of hundreds of intervals during rapid scrolling
if (skipCache) {
// If no profile was found, periodically re-check (profiles might load asynchronously)
// REDUCED: Check every 5 seconds for up to 20 seconds (4 checks) to prevent too many intervals
// REDUCED: Check every 10 seconds for up to 30 seconds (3 checks) to prevent too many intervals
// This reduces memory usage when many profiles are being fetched (e.g., trending page)
let checkCount = 0
const maxChecks = 4 // Reduced from 15 to prevent browser crashes
const maxChecks = 3 // Reduced from 4 to further reduce load
checkIntervalRef.current = setInterval(async () => {
if (cancelled.current || checkCount >= maxChecks) {
@ -356,7 +346,15 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -356,7 +346,15 @@ export function useFetchProfile(id?: string, skipCache = false) {
checkIntervalRef.current = null
}
}
}, 5000) // Increased from 2 seconds to 5 seconds to reduce load
}, 10000) // Increased from 5 seconds to 10 seconds to reduce load
} else {
// For normal feed scrolling, don't set up retry intervals
// Profiles will be fetched on-demand when user navigates to profile page
// This prevents accumulation of intervals during rapid scrolling
logger.debug('[useFetchProfile] Skipping retry intervals for normal feed scrolling', {
pubkey: extractedPubkey
})
}
} catch (err) {
logger.error('[useFetchProfile] run() error', {
pubkey: extractedPubkey,
@ -387,11 +385,18 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -387,11 +385,18 @@ export function useFetchProfile(id?: string, skipCache = false) {
if (processingPubkeyRef.current === extractedPubkey) {
processingPubkeyRef.current = null
}
// Clear interval on cleanup
// CRITICAL: Always clear interval on cleanup to prevent memory leaks
// This is especially important during rapid scrolling when many components mount/unmount
if (checkIntervalRef.current) {
clearInterval(checkIntervalRef.current)
checkIntervalRef.current = null
}
// Clear run count and initialized status on cleanup to allow fresh fetches if component remounts
if (extractedPubkey) {
effectRunCountRef.current.delete(extractedPubkey)
// Don't clear initializedPubkeysRef here - keep it to prevent re-fetching on remount
// Only clear it if explicitly requested via skipCache
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [id, skipCache]) // checkProfile is memoized and stable, no need to include it

2
src/hooks/useProfileNotesTimeline.tsx

@ -157,7 +157,7 @@ export function useProfileNotesTimeline({ @@ -157,7 +157,7 @@ export function useProfileNotesTimeline({
})
}
},
{ needSort: true }
{ needSort: true, useCache: false } // NO CACHING - stream raw from relays
)
subscriptionRef.current = () => closer()

2
src/hooks/useProfileTimeline.tsx

@ -199,7 +199,7 @@ export function useProfileTimeline({ @@ -199,7 +199,7 @@ export function useProfileTimeline({
})
}
},
{ needSort: true }
{ needSort: true, useCache: false } // NO CACHING - stream raw from relays
)
subscriptionRef.current = () => closer()

17
src/pages/primary/DiscussionsPage/index.tsx

@ -420,7 +420,7 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -420,7 +420,7 @@ const DiscussionsPage = forwardRef((_, ref) => {
const discussionThreads = await queryService.fetchEvents(allRelays, [
{
kinds: [11], // ExtendedKind.DISCUSSION
limit: 100
limit: 500 // Increased from 100 to load more threads per request
}
])
@ -455,14 +455,14 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -455,14 +455,14 @@ const DiscussionsPage = forwardRef((_, ref) => {
{
kinds: [1111], // ExtendedKind.COMMENT
'#e': allThreadIdsArray,
limit: 100
limit: 500 // Increased from 100 to load more comments per request
}
]) : Promise.resolve([]),
allThreadIdsArray.length > 0 ? queryService.fetchEvents(allRelays, [
{
kinds: [kinds.Reaction],
'#e': allThreadIdsArray,
limit: 100
limit: 500 // Increased from 100 to load more reactions per request
}
]) : Promise.resolve([])
])
@ -638,11 +638,20 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -638,11 +638,20 @@ const DiscussionsPage = forwardRef((_, ref) => {
logger.debug('[DiscussionsPage] Updated UI with', categorizedEventMap.size, 'threads (merged from cache and new fetch)')
} catch (error) {
logger.error('[DiscussionsPage] Error fetching events:', error)
// Get cached data for error logging (if available)
const cachedDataForError = discussionFeedCache.getCachedDiscussionsList()
logger.error('[DiscussionsPage] Error fetching events:', error, {
hasCachedData,
cachedThreadCount: cachedDataForError?.eventMap.size || 0
})
// If we had cached data and fetch failed, at least we have something to show
if (!hasCachedData) {
setLoading(false)
}
// Log specific relay errors if available
if (error instanceof Error && error.message) {
logger.warn('[DiscussionsPage] Fetch error details:', error.message)
}
} finally {
if (!hasCachedData || forceRefresh) {
setLoading(false)

6
src/pages/primary/SpellsPage/index.tsx

@ -255,9 +255,11 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(_, ref) { @@ -255,9 +255,11 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(_, ref) {
closer()
spellCatalogCloserRef.current = null
},
onNew: () => {}
onNew: () => {} // Not needed
},
{ needSort: true }
{
useCache: false // NO CACHING - stream raw from relays
}
)
if (cancelled) {
closer()

26
src/providers/NotificationProvider.tsx

@ -107,16 +107,24 @@ export function NotificationProvider({ children }: { children: React.ReactNode } @@ -107,16 +107,24 @@ export function NotificationProvider({ children }: { children: React.ReactNode }
if (evt.pubkey !== pubkey) {
const prev = notificationBufferRef.current
if (!discussionEosed) {
// Before EOSE: just buffer events, limit size
if (prev.length < 100) {
notificationBufferRef.current = [evt, ...prev]
}
return
}
if (prev.length && compareEvents(prev[0], evt) >= 0) {
return
}
client.emitNewEvent(evt)
// Limit buffer size to prevent memory issues
if (prev.length >= 50) {
notificationBufferRef.current = [evt, ...prev.slice(0, 49)]
} else {
notificationBufferRef.current = [evt, ...prev]
}
client.emitNewEvent(evt)
}
}
}
)
@ -145,25 +153,39 @@ export function NotificationProvider({ children }: { children: React.ReactNode } @@ -145,25 +153,39 @@ export function NotificationProvider({ children }: { children: React.ReactNode }
oneose: (e) => {
if (e) {
eosed = e
// Don't sort on every EOSE - sorting is expensive and buffer is already maintained in order
// Only sort if buffer is getting large and out of order
if (notificationBufferRef.current.length > 100) {
notificationBufferRef.current = [
...notificationBufferRef.current.sort((a, b) => compareEvents(b, a))
]
}
}
},
onevent: (evt) => {
if (evt.pubkey !== pubkey) {
const prev = notificationBufferRef.current
if (!eosed) {
// Before EOSE: just buffer events, don't emit yet
// Limit buffer size to prevent memory issues
if (prev.length < 100) {
notificationBufferRef.current = [evt, ...prev]
}
return
}
// After EOSE: only emit if it's newer than the most recent event
if (prev.length && compareEvents(prev[0], evt) >= 0) {
return
}
client.emitNewEvent(evt)
// Limit buffer size to prevent memory issues
if (prev.length >= 50) {
notificationBufferRef.current = [evt, ...prev.slice(0, 49)]
} else {
notificationBufferRef.current = [evt, ...prev]
}
client.emitNewEvent(evt)
}
},
onAllClose: (reasons) => {
if (reasons.every((reason) => reason === 'closed by caller')) {

121
src/services/client-replaceable-events.service.ts

@ -43,8 +43,8 @@ export class ReplaceableEventService { @@ -43,8 +43,8 @@ export class ReplaceableEventService {
>(
this.replaceableEventFromBigRelaysBatchLoadFn.bind(this),
{
batchScheduleFn: (callback) => setTimeout(callback, 50),
maxBatchSize: 500,
batchScheduleFn: (callback) => setTimeout(callback, 100), // Increased from 50ms to 100ms to better batch rapid scrolling
maxBatchSize: 200, // Reduced from 500 to prevent overwhelming the system during rapid scrolling
cacheKeyFn: ({ pubkey, kind }) => `${pubkey}:${kind}`
}
)
@ -314,10 +314,18 @@ export class ReplaceableEventService { @@ -314,10 +314,18 @@ export class ReplaceableEventService {
private async replaceableEventFromBigRelaysBatchLoadFn(
params: readonly { pubkey: string; kind: number }[]
): Promise<(NEvent | null)[]> {
logger.info('[ReplaceableEventService] Batch load function called', {
// CRITICAL: Reduce logging during rapid scrolling - only log large batches
if (params.length > 50) {
logger.info('[ReplaceableEventService] Large batch load function called', {
paramCount: params.length,
pubkeys: params.map(p => p.pubkey.substring(0, 8))
kind: params[0]?.kind
})
} else {
logger.debug('[ReplaceableEventService] Batch load function called', {
paramCount: params.length,
kind: params[0]?.kind
})
}
// Step 1: Batch check IndexedDB for all requested events
const groups = new Map<number, string[]>()
@ -338,7 +346,8 @@ export class ReplaceableEventService { @@ -338,7 +346,8 @@ export class ReplaceableEventService {
try {
// Use batched IndexedDB query
const indexedDbEvents = await indexedDb.getManyReplaceableEvents(pubkeys, kind)
logger.info('[ReplaceableEventService] IndexedDB batch query completed', {
// Only log at debug level to reduce noise during rapid scrolling
logger.debug('[ReplaceableEventService] IndexedDB batch query completed', {
kind,
pubkeyCount: pubkeys.length,
foundCount: indexedDbEvents.filter(e => e !== null && e !== undefined).length
@ -382,16 +391,24 @@ export class ReplaceableEventService { @@ -382,16 +391,24 @@ export class ReplaceableEventService {
// Step 2: Only fetch missing events from network
if (missingParams.length === 0) {
logger.info('[ReplaceableEventService] All events found in IndexedDB, skipping network fetch', {
logger.debug('[ReplaceableEventService] All events found in IndexedDB, skipping network fetch', {
totalCount: params.length
})
return results
}
// Only log at info level for large batches
if (missingParams.length > 50) {
logger.info('[ReplaceableEventService] Fetching missing events from network', {
missingCount: missingParams.length,
totalCount: params.length
})
} else {
logger.debug('[ReplaceableEventService] Fetching missing events from network', {
missingCount: missingParams.length,
totalCount: params.length
})
}
// Group missing params by kind for network fetch
const missingGroups = new Map<number, { pubkey: string; index: number }[]>()
@ -408,31 +425,27 @@ export class ReplaceableEventService { @@ -408,31 +425,27 @@ export class ReplaceableEventService {
// ALWAYS use comprehensive relay list: author's outboxes + user's inboxes + defaults
// For profiles/metadata: includes user's own relays (read/write/local) + PROFILE_FETCH_RELAY_URLS
// For each pubkey, build comprehensive relay list
logger.info('[ReplaceableEventService] Building relay lists for batch', {
kind,
pubkeyCount: pubkeys.length
})
// CRITICAL FIX: For batch fetches, use default relays instead of fetching relay lists for each author
// Fetching relay lists for hundreds of authors causes infinite loops and browser crashes
// Use PROFILE_FETCH_RELAY_URLS + FAST_READ_RELAY_URLS for profiles, or FAST_READ_RELAY_URLS for other kinds
const relayUrls = kind === kinds.Metadata
? Array.from(new Set([...PROFILE_FETCH_RELAY_URLS, ...FAST_READ_RELAY_URLS]))
: [...FAST_READ_RELAY_URLS]
logger.info('[ReplaceableEventService] Using comprehensive relay list', {
pubkeyCount: pubkeys.length,
totalRelayCount: relayUrls.length,
// Only log at info level for large batches
if (pubkeys.length > 50) {
logger.info('[ReplaceableEventService] Starting query for large batch', {
kind,
relays: relayUrls.slice(0, 5) // Show first 5 for debugging
pubkeyCount: pubkeys.length,
relayCount: relayUrls.length
})
// Use all relays in parallel - browsers can handle many concurrent subscriptions
// The QueryService manages per-relay concurrency limits to avoid overloading individual relays
logger.info('[ReplaceableEventService] Starting query for batch', {
} else {
logger.debug('[ReplaceableEventService] Starting query for batch', {
kind,
pubkeyCount: pubkeys.length,
relayCount: relayUrls.length
})
}
const events = await this.queryService.query(relayUrls, {
authors: pubkeys,
kinds: [kind]
@ -441,22 +454,46 @@ export class ReplaceableEventService { @@ -441,22 +454,46 @@ export class ReplaceableEventService {
eoseTimeout: 100, // Reduced from 200ms for faster early returns
globalTimeout: 2000 // Reduced from 3000ms to prevent long waits when many relays are slow
})
// Only log at info level for large batches or if many events found
if (pubkeys.length > 50 || events.length > 100) {
logger.info('[ReplaceableEventService] Query completed for batch', {
kind,
pubkeyCount: pubkeys.length,
eventCount: events.length
})
// Log when no events are found (helps debug relay failures)
if (kind === kinds.Metadata && events.length === 0 && pubkeys.length > 0) {
logger.debug('[ReplaceableEventService] No profile events found from relays', {
} else {
logger.debug('[ReplaceableEventService] Query completed for batch', {
kind,
pubkeyCount: pubkeys.length,
relayCount: relayUrls.length,
relays: relayUrls.slice(0, 3) // Show first 3 for brevity
eventCount: events.length
})
}
// CRITICAL: Limit the number of events processed to prevent memory issues during rapid scrolling
// If we have too many events, only process the most recent ones per pubkey
if (events.length > 1000) {
logger.warn('[ReplaceableEventService] Large batch detected, limiting processing', {
kind,
eventCount: events.length,
pubkeyCount: pubkeys.length
})
// Group by pubkey and keep only the most recent event per pubkey
const eventsByPubkey = new Map<string, NEvent>()
for (const event of events) {
const key = `${event.pubkey}:${event.kind}`
const existing = eventsByPubkey.get(key)
if (!existing || existing.created_at < event.created_at) {
eventsByPubkey.set(key, event)
}
}
// Convert back to array, but limit to reasonable size
const limitedEvents = Array.from(eventsByPubkey.values()).slice(0, 500)
logger.info('[ReplaceableEventService] Limited batch size', {
originalCount: events.length,
limitedCount: limitedEvents.length
})
// Use limited events for processing
for (const event of limitedEvents) {
const key = `${event.pubkey}:${event.kind}`
const existing = eventsMap.get(key)
if (!existing || existing.created_at < event.created_at) {
@ -469,6 +506,32 @@ export class ReplaceableEventService { @@ -469,6 +506,32 @@ export class ReplaceableEventService {
}
}
}
} else {
// Normal processing for smaller batches
for (const event of events) {
const key = `${event.pubkey}:${event.kind}`
const existing = eventsMap.get(key)
if (!existing || existing.created_at < event.created_at) {
eventsMap.set(key, event)
// Update results array for this event
const itemIndex = missingItems.findIndex(item => item.pubkey === event.pubkey)
if (itemIndex >= 0) {
const paramIndex = missingItems[itemIndex]!.index
results[paramIndex] = event
}
}
}
}
// Log when no events are found (helps debug relay failures)
if (kind === kinds.Metadata && events.length === 0 && pubkeys.length > 0) {
logger.debug('[ReplaceableEventService] No profile events found from relays', {
pubkeyCount: pubkeys.length,
relayCount: relayUrls.length,
relays: relayUrls.slice(0, 3) // Show first 3 for brevity
})
}
})
)
@ -485,12 +548,20 @@ export class ReplaceableEventService { @@ -485,12 +548,20 @@ export class ReplaceableEventService {
})
)
// Only log at info level for large batches
if (params.length > 50) {
logger.info('[ReplaceableEventService] Batch load function completed', {
paramCount: params.length,
foundCount: results.filter(r => r !== null).length,
indexedDbCount: params.length - missingParams.length,
networkCount: missingParams.length
})
} else {
logger.debug('[ReplaceableEventService] Batch load function completed', {
paramCount: params.length,
foundCount: results.filter(r => r !== null).length
})
}
return results
}

81
src/services/client.service.ts

@ -842,10 +842,12 @@ class ClientService extends EventTarget { @@ -842,10 +842,12 @@ class ClientService extends EventTarget {
},
{
startLogin,
needSort = true
needSort = true,
useCache = false
}: {
startLogin?: () => void
needSort?: boolean
useCache?: boolean
} = {}
) {
const newEventIdSet = new Set<string>()
@ -889,7 +891,7 @@ class ClientService extends EventTarget { @@ -889,7 +891,7 @@ class ClientService extends EventTarget {
},
onClose
},
{ startLogin, needSort }
{ startLogin, needSort, useCache }
)
})
)
@ -909,6 +911,29 @@ class ClientService extends EventTarget { @@ -909,6 +911,29 @@ class ClientService extends EventTarget {
}
}
/**
 * Check whether the timeline identified by `key` still has events at or
 * before the `until` timestamp, i.e. whether another page can be loaded.
 * Aggregate timelines (stored as an array of sub-timeline keys) report
 * "more" if any of their sub-timelines does.
 */
hasMoreTimelineEvents(key: string, until: number): boolean {
  const timeline = this.timelines[key]
  if (!timeline) return false

  if (!Array.isArray(timeline)) {
    // Single timeline: any ref whose created_at is at or before `until`
    // means there is still older content available.
    return timeline.refs.some(([, createdAt]) => createdAt <= until)
  }

  // Aggregate timeline: scan each sub-timeline for a qualifying ref.
  for (const subKey of timeline) {
    const sub = this.timelines[subKey]
    if (!sub || Array.isArray(sub)) continue
    if (sub.refs.some(([, createdAt]) => createdAt <= until)) {
      return true
    }
  }
  return false
}
async loadMoreTimeline(key: string, until: number, limit: number) {
const timeline = this.timelines[key]
if (!timeline) return []
@ -1146,10 +1171,12 @@ class ClientService extends EventTarget { @@ -1146,10 +1171,12 @@ class ClientService extends EventTarget {
},
{
startLogin,
needSort = true
needSort = true,
useCache = false
}: {
startLogin?: () => void
needSort?: boolean
useCache?: boolean
} = {}
) {
const relays = Array.from(new Set(urls))
@ -1157,16 +1184,42 @@ class ClientService extends EventTarget { @@ -1157,16 +1184,42 @@ class ClientService extends EventTarget {
const timeline = this.timelines[key]
let cachedEvents: NEvent[] = []
let since: number | undefined
if (timeline && !Array.isArray(timeline) && timeline.refs.length && needSort) {
// CRITICAL: Only use cache if explicitly enabled (for profile timelines)
// Main feeds (home, notifications) should always fetch fresh from relays
if (useCache && timeline && !Array.isArray(timeline) && timeline.refs.length && needSort) {
cachedEvents = (
await Promise.all(timeline.refs.slice(0, filter.limit).map(([id]) => this.eventService.fetchEvent(id)))
).filter((evt): evt is NEvent => !!evt)
if (cachedEvents.length) {
onEvents([...cachedEvents], false)
since = cachedEvents[0].created_at + 1
// Sort cached events by newest first
cachedEvents.sort((a, b) => b.created_at - a.created_at)
// CRITICAL FIX: Filter out very old cached events (older than 24 hours)
// This prevents showing 15+ hour old events when the cache is stale
const oneDayAgo = dayjs().subtract(24, 'hours').unix()
const recentCachedEvents = cachedEvents.filter(evt => evt.created_at >= oneDayAgo)
if (recentCachedEvents.length > 0) {
// Only show cached events if they're recent
onEvents([...recentCachedEvents], false)
// Use the NEWEST cached event's timestamp + 1 to fetch only newer events
since = recentCachedEvents[0].created_at + 1
} else {
// All cached events are too old, ignore them and start fresh
cachedEvents = []
}
}
}
// CRITICAL FIX: If no cached events (or all were too old), use a recent timestamp
// This prevents the feed from showing 15+ hour old events when relays are slow
if (!since && needSort) {
// Default to last 24 hours if no recent cached events
// This ensures we get recent content even if relays are slow
const oneDayAgo = dayjs().subtract(24, 'hours').unix()
since = oneDayAgo
}
// eslint-disable-next-line @typescript-eslint/no-this-alias
const that = this
let events: NEvent[] = []
@ -1178,7 +1231,8 @@ class ClientService extends EventTarget { @@ -1178,7 +1231,8 @@ class ClientService extends EventTarget {
const deliverProgressive = () => {
if (eosedAt || events.length === 0) return
const snap = [...events].sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
onEvents(needSort ? snap.concat(cachedEvents).slice(0, filter.limit) : snap, false)
// Only include cached events if caching is enabled
onEvents(needSort && useCache ? snap.concat(cachedEvents).slice(0, filter.limit) : snap, false)
}
const subCloser = this.subscribe(relays, since ? { ...filter, since } : filter, {
startLogin,
@ -1204,6 +1258,9 @@ class ClientService extends EventTarget { @@ -1204,6 +1258,9 @@ class ClientService extends EventTarget {
onNew(evt)
}
// Only update timeline cache if caching is enabled
if (!useCache) return
const timeline = that.timelines[key]
if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
return
@ -1241,10 +1298,14 @@ class ClientService extends EventTarget { @@ -1241,10 +1298,14 @@ class ClientService extends EventTarget {
}
if (!eosed) {
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
return onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], false)
// Only include cached events if caching is enabled
return onEvents([...(useCache ? events.concat(cachedEvents).slice(0, filter.limit) : events)], false)
}
events = events.sort((a, b) => b.created_at - a.created_at).slice(0, filter.limit)
// Only update timeline cache if caching is enabled
if (useCache) {
const timeline = that.timelines[key]
// no cache yet
if (!timeline || Array.isArray(timeline) || !timeline.refs.length) {
@ -1271,6 +1332,10 @@ class ClientService extends EventTarget { @@ -1271,6 +1332,10 @@ class ClientService extends EventTarget {
timeline.refs = newRefs.concat(timeline.refs)
onEvents([...events.concat(cachedEvents).slice(0, filter.limit)], true)
}
} else {
// No caching - just return events directly
onEvents([...events], true)
}
},
onclose: onClose
})

Loading…
Cancel
Save