diff --git a/src/components/NoteList/index.tsx b/src/components/NoteList/index.tsx
index ed5bc16..eb2e66d 100644
--- a/src/components/NoteList/index.tsx
+++ b/src/components/NoteList/index.tsx
@@ -149,6 +149,9 @@ const NoteList = forwardRef(
const refresh = () => {
scrollToTop()
+ // Clear relay connection state to force fresh connections
+ const relayUrls = subRequests.flatMap(req => req.urls)
+ client.clearRelayConnectionState(relayUrls)
setTimeout(() => {
setRefreshCount((count) => count + 1)
}, 500)
@@ -247,12 +250,13 @@ const NoteList = forwardRef(
)
// Add a fallback timeout to prevent infinite loading
+ // Increased timeout to 15 seconds to handle slow relay connections
const fallbackTimeout = setTimeout(() => {
if (loading) {
setLoading(false)
- logger.debug('NoteList loading timeout - stopping after 6 seconds')
+ logger.debug('NoteList loading timeout - stopping after 15 seconds')
}
- }, 6000)
+ }, 15000)
setTimelineKey(timelineKey)
return () => {
@@ -344,7 +348,12 @@ const NoteList = forwardRef(
{t('no more notes')}
) : (
-
diff --git a/src/components/Profile/ProfileBookmarksAndHashtags.tsx b/src/components/Profile/ProfileBookmarksAndHashtags.tsx
index edc88be..c46a1fc 100644
--- a/src/components/Profile/ProfileBookmarksAndHashtags.tsx
+++ b/src/components/Profile/ProfileBookmarksAndHashtags.tsx
@@ -52,8 +52,8 @@ export default function ProfileBookmarksAndHashtags({
.filter((url): url is string => !!url)
const comprehensiveRelays = Array.from(new Set(normalizedRelays))
- console.log('[ProfileBookmarksAndHashtags] Using', comprehensiveRelays.length, 'relays for bookmark/interest list events:', comprehensiveRelays)
- console.log('[ProfileBookmarksAndHashtags] Relay breakdown - inboxes:', myRelayList.read?.length || 0, 'outboxes:', myRelayList.write?.length || 0, 'favorites:', favoriteRelays?.length || 0, 'big:', BIG_RELAY_URLS.length, 'fast_read:', FAST_READ_RELAY_URLS.length, 'fast_write:', FAST_WRITE_RELAY_URLS.length)
+ // Debug: Relay configuration for bookmark/interest list events
+ // console.log('[ProfileBookmarksAndHashtags] Using', comprehensiveRelays.length, 'relays for bookmark/interest list events:', comprehensiveRelays)
return comprehensiveRelays
}, [myPubkey, favoriteRelays])
@@ -78,7 +78,7 @@ export default function ProfileBookmarksAndHashtags({
bookmarkList = await client.fetchBookmarkListEvent(pubkey)
}
- console.log('[ProfileBookmarksAndHashtags] Bookmark list event:', bookmarkList)
+ // console.log('[ProfileBookmarksAndHashtags] Bookmark list event:', bookmarkList)
setBookmarkListEvent(bookmarkList)
if (bookmarkList && bookmarkList.tags.length > 0) {
@@ -88,7 +88,7 @@ export default function ProfileBookmarksAndHashtags({
.map(tag => tag[1])
.reverse() // Reverse to show newest first
- console.log('[ProfileBookmarksAndHashtags] Found', eventIds.length, 'bookmark event IDs:', eventIds)
+ // console.log('[ProfileBookmarksAndHashtags] Found', eventIds.length, 'bookmark event IDs:', eventIds)
if (eventIds.length > 0) {
try {
@@ -137,7 +137,7 @@ export default function ProfileBookmarksAndHashtags({
interestList = await client.fetchInterestListEvent(pubkey)
}
- console.log('[ProfileBookmarksAndHashtags] Interest list event:', interestList)
+ // console.log('[ProfileBookmarksAndHashtags] Interest list event:', interestList)
setInterestListEvent(interestList)
if (interestList && interestList.tags.length > 0) {
@@ -146,7 +146,7 @@ export default function ProfileBookmarksAndHashtags({
.filter(tag => tag[0] === 't' && tag[1])
.map(tag => tag[1])
- console.log('[ProfileBookmarksAndHashtags] Found', hashtags.length, 'interest hashtags:', hashtags)
+ // console.log('[ProfileBookmarksAndHashtags] Found', hashtags.length, 'interest hashtags:', hashtags)
if (hashtags.length > 0) {
try {
@@ -156,7 +156,7 @@ export default function ProfileBookmarksAndHashtags({
'#t': hashtags,
limit: 100
})
- console.log('[ProfileBookmarksAndHashtags] Fetched', events.length, 'hashtag events')
+ // console.log('[ProfileBookmarksAndHashtags] Fetched', events.length, 'hashtag events')
setHashtagEvents(events)
} catch (error) {
console.warn('[ProfileBookmarksAndHashtags] Error fetching hashtag events:', error)
@@ -196,7 +196,7 @@ export default function ProfileBookmarksAndHashtags({
pinList = await client.fetchPinListEvent(pubkey)
}
- console.log('[ProfileBookmarksAndHashtags] Pin list event:', pinList)
+ // console.log('[ProfileBookmarksAndHashtags] Pin list event:', pinList)
setPinListEvent(pinList)
if (pinList && pinList.tags.length > 0) {
@@ -206,7 +206,7 @@ export default function ProfileBookmarksAndHashtags({
.map(tag => tag[1])
.reverse() // Reverse to show newest first
- console.log('[ProfileBookmarksAndHashtags] Found', eventIds.length, 'pin event IDs:', eventIds)
+ // console.log('[ProfileBookmarksAndHashtags] Found', eventIds.length, 'pin event IDs:', eventIds)
if (eventIds.length > 0) {
try {
diff --git a/src/components/Profile/ProfileFeed.tsx b/src/components/Profile/ProfileFeed.tsx
index 190774e..45e9f6b 100644
--- a/src/components/Profile/ProfileFeed.tsx
+++ b/src/components/Profile/ProfileFeed.tsx
@@ -48,8 +48,18 @@ export default function ProfileFeed({
// Privacy: Only use user's own relays + defaults, never connect to other users' relays
const myRelayList = myPubkey ? await client.fetchRelayList(myPubkey) : { write: [], read: [] }
- // Build comprehensive relay list: user's inboxes + user's favorite relays + big relays + fast read relays + fast write relays
- const allRelays = [
+ // Build comprehensive relay list: prioritize write relays when viewing own profile
+ const isOwnProfile = myPubkey === pubkey
+ const allRelays = isOwnProfile ? [
+ // For own profile: prioritize write relays first to find own responses
+ ...(myRelayList.write || []), // User's outboxes (kind 10002) - PRIORITY
+ ...(myRelayList.read || []), // User's inboxes (kind 10002)
+ ...(favoriteRelays || []), // User's favorite relays (kind 10012)
+ ...FAST_WRITE_RELAY_URLS, // Fast write relays - PRIORITY
+ ...BIG_RELAY_URLS, // Big relays
+ ...FAST_READ_RELAY_URLS // Fast read relays
+ ] : [
+ // For other profiles: use standard order
...(myRelayList.read || []), // User's inboxes (kind 10002)
...(myRelayList.write || []), // User's outboxes (kind 10002)
...(favoriteRelays || []), // User's favorite relays (kind 10012)
@@ -64,7 +74,13 @@ export default function ProfileFeed({
.filter((url): url is string => !!url)
const userRelays = Array.from(new Set(normalizedRelays))
- console.log('[ProfileFeed] Using', userRelays.length, 'relays for profile feed:', userRelays)
+
+ // Debug: Log relay usage for own profile to help troubleshoot missing responses
+ if (isOwnProfile) {
+ console.log('[ProfileFeed] Using', userRelays.length, 'relays for OWN profile (prioritizing write relays):', userRelays)
+ console.log('[ProfileFeed] Write relays:', myRelayList.write)
+ console.log('[ProfileFeed] Read relays:', myRelayList.read)
+ }
if (listMode === 'you') {
if (!myPubkey) {
diff --git a/src/components/ReplyNoteList/index.tsx b/src/components/ReplyNoteList/index.tsx
index cf8bf93..7248730 100644
--- a/src/components/ReplyNoteList/index.tsx
+++ b/src/components/ReplyNoteList/index.tsx
@@ -93,45 +93,27 @@ function ReplyNoteList({ index, event, sort = 'oldest' }: { index?: number; even
: event.id
// For replaceable events, also check the event ID in case replies are stored there
const eventIdKey = event.id
- let parentEventKeys = [currentEventKey]
+ const parentEventKeys = [currentEventKey]
if (isReplaceableEvent(event.kind) && currentEventKey !== eventIdKey) {
parentEventKeys.push(eventIdKey)
}
+ // FIXED: Only fetch direct replies to the original event, don't traverse reply chains
+ // This prevents the doom loop that was causing "too many concurrent REQS"
+ const events = parentEventKeys.flatMap((id) => repliesMap.get(id)?.events || [])
- const processedEventIds = new Set() // Prevent infinite loops
- let iterationCount = 0
- const MAX_ITERATIONS = 10 // Prevent infinite loops
-
- while (parentEventKeys.length > 0 && iterationCount < MAX_ITERATIONS) {
- iterationCount++
- const events = parentEventKeys.flatMap((id) => repliesMap.get(id)?.events || [])
-
- events.forEach((evt) => {
- if (replyIdSet.has(evt.id)) return
- if (mutePubkeySet.has(evt.pubkey)) {
- return
- }
- if (hideContentMentioningMutedUsers && isMentioningMutedUsers(evt, mutePubkeySet)) {
- return
- }
+ events.forEach((evt) => {
+ if (replyIdSet.has(evt.id)) return
+ if (mutePubkeySet.has(evt.pubkey)) {
+ return
+ }
+ if (hideContentMentioningMutedUsers && isMentioningMutedUsers(evt, mutePubkeySet)) {
+ return
+ }
- replyIdSet.add(evt.id)
- replyEvents.push(evt)
- })
-
- // Prevent infinite loops by tracking processed event IDs
- const newParentEventKeys = events
- .map((evt) => evt.id)
- .filter((id) => !processedEventIds.has(id))
-
- newParentEventKeys.forEach((id) => processedEventIds.add(id))
- parentEventKeys = newParentEventKeys
- }
-
- if (iterationCount >= MAX_ITERATIONS) {
- logger.warn('ReplyNoteList: Maximum iterations reached, possible circular reference in replies')
- }
+ replyIdSet.add(evt.id)
+ replyEvents.push(evt)
+ })
@@ -363,10 +345,11 @@ function ReplyNoteList({ index, event, sort = 'oldest' }: { index?: number; even
}, [rootInfo, currentIndex, index, onNewReply])
useEffect(() => {
- if (replies.length === 0 && !loading && timelineKey) {
+ // Only try to load more if we have no replies, not loading, have a timeline key, and haven't reached the end
+ if (replies.length === 0 && !loading && timelineKey && until !== undefined) {
loadMore()
}
- }, [replies.length, loading, timelineKey]) // More specific dependencies to prevent infinite loops
+ }, [replies.length, loading, timelineKey, until]) // Added until to prevent infinite loops
useEffect(() => {
const options = {
diff --git a/src/lib/error-suppression.ts b/src/lib/error-suppression.ts
index b0f5c52..cb84ddd 100644
--- a/src/lib/error-suppression.ts
+++ b/src/lib/error-suppression.ts
@@ -53,6 +53,40 @@ export function suppressExpectedErrors() {
return
}
+ // Suppress Workbox precaching errors for development modules
+ if (message.includes('Precaching did not find a match') && (
+ message.includes('@vite/client') ||
+ message.includes('main.tsx') ||
+ message.includes('src/') ||
+ message.includes('node_modules/')
+ )) {
+ return
+ }
+
+ // Suppress "too many concurrent REQs" errors (handled by circuit breaker)
+ if (message.includes('too many concurrent REQs')) {
+ return
+ }
+
+ // Suppress relay overload errors (handled by throttling)
+ if (message.includes('Relay overloaded - too many concurrent requests')) {
+ return
+ }
+
+ // Suppress nostr-tools "too many concurrent REQs" errors
+ if (message.includes('NOTICE from') && message.includes('ERROR: too many concurrent REQs')) {
+ return
+ }
+
+ // Suppress nostr-tools connection errors
+ if (message.includes('NOTICE from') && (
+ message.includes('ERROR:') ||
+ message.includes('connection closed') ||
+ message.includes('connection errored')
+ )) {
+ return
+ }
+
// Call original console.error for unexpected errors
originalConsoleError.apply(console, args)
}
diff --git a/src/pages/primary/DiscussionsPage/index.tsx b/src/pages/primary/DiscussionsPage/index.tsx
index 611c371..ba7b8d4 100644
--- a/src/pages/primary/DiscussionsPage/index.tsx
+++ b/src/pages/primary/DiscussionsPage/index.tsx
@@ -641,14 +641,14 @@ const DiscussionsPage = forwardRef(() => {
})
// Debug logging for subtopic detection
- if (entrySubtopics.length > 0) {
- console.log('Found subtopics for entry:', {
- threadId: entry.event.id.substring(0, 8),
- allTopics: entry.allTopics,
- entrySubtopics,
- dynamicTopics: dynamicTopics.allTopics.map(dt => ({ id: dt.id, isSubtopic: dt.isSubtopic }))
- })
- }
+ // if (entrySubtopics.length > 0) {
+ // console.log('Found subtopics for entry:', {
+ // threadId: entry.event.id.substring(0, 8),
+ // allTopics: entry.allTopics,
+ // entrySubtopics,
+ // dynamicTopics: dynamicTopics.allTopics.map(dt => ({ id: dt.id, isSubtopic: dt.isSubtopic }))
+ // })
+ // }
if (entrySubtopics.length > 0) {
// Group under the first subtopic found
diff --git a/src/providers/KindFilterProvider.tsx b/src/providers/KindFilterProvider.tsx
index 2ee80ad..27e8784 100644
--- a/src/providers/KindFilterProvider.tsx
+++ b/src/providers/KindFilterProvider.tsx
@@ -27,12 +27,12 @@ export function KindFilterProvider({ children }: { children: React.ReactNode })
)
// Debug logging
- console.log('KindFilterProvider initialized:', {
- defaultShowKinds,
- storedShowKinds,
- finalShowKinds: showKinds,
- showKindsLength: showKinds.length
- })
+ // console.log('KindFilterProvider initialized:', {
+ // defaultShowKinds,
+ // storedShowKinds,
+ // finalShowKinds: showKinds,
+ // showKindsLength: showKinds.length
+ // })
const updateShowKinds = (kinds: number[]) => {
storage.setShowKinds(kinds)
diff --git a/src/services/client.service.ts b/src/services/client.service.ts
index f019646..40211e0 100644
--- a/src/services/client.service.ts
+++ b/src/services/client.service.ts
@@ -59,13 +59,17 @@ class ClientService extends EventTarget {
)
private trendingNotesCache: NEvent[] | null = null
private requestThrottle = new Map() // Track request timestamps per relay
- private readonly REQUEST_COOLDOWN = 2000 // 2 second cooldown between requests to prevent "too many REQs"
+ private readonly REQUEST_COOLDOWN = 3000 // 3 second cooldown between requests to prevent "too many REQs"
private failureCount = new Map() // Track consecutive failures per relay
- private readonly MAX_FAILURES = 2 // Max failures before exponential backoff (reduced from 3)
+ private readonly MAX_FAILURES = 1 // Max failures before exponential backoff (reduced to 1 for faster circuit breaker activation)
private circuitBreaker = new Map() // Track when relays are temporarily disabled
- private readonly CIRCUIT_BREAKER_TIMEOUT = 120000 // 2 minute timeout for circuit breaker (increased)
+ private readonly CIRCUIT_BREAKER_TIMEOUT = 60000 // 60 second timeout for circuit breaker (reduced from 2 minutes so healthy relays recover faster)
private concurrentRequests = new Map() // Track concurrent requests per relay
- private readonly MAX_CONCURRENT_REQUESTS = 2 // Max concurrent requests per relay
+ private readonly MAX_CONCURRENT_REQUESTS = 1 // Max concurrent requests per relay (reduced to prevent "too many REQs")
+ private globalRequestThrottle = 0 // Global request throttle to prevent overwhelming all relays
+ private readonly GLOBAL_REQUEST_COOLDOWN = 1000 // 1 second global cooldown between any relay requests
+ private blacklistedRelays = new Map() // Temporarily blacklist problematic relays
+ private readonly BLACKLIST_TIMEOUT = 300000 // 5 minutes blacklist timeout
private userIndex = new FlexSearch.Index({
tokenize: 'forward'
@@ -75,6 +79,9 @@ class ClientService extends EventTarget {
super()
this.pool = new SimplePool()
this.pool.trackRelays = true
+
+ // Pre-blacklist known problematic relays
+ this.blacklistRelay('wss://freelay.sovbit.host/')
}
public static getInstance(): ClientService {
@@ -343,10 +350,21 @@ class ClientService extends EventTarget {
totalCount: uniqueRelayUrls.length
})
} else {
- reject(new Error('Publishing timeout - no relays responded in time'))
+ // Don't reject for notification updates - they're not critical
+ if (event.kind === 30078) { // Application-specific data (notifications)
+ logger.debug('Notification update timeout - non-critical, continuing')
+ resolve({
+ success: false,
+ relayStatuses,
+ successCount: 0,
+ totalCount: uniqueRelayUrls.length
+ })
+ } else {
+ reject(new Error('Publishing timeout - no relays responded in time'))
+ }
}
}
- }, 15_000) // 15 second overall timeout
+ }, 10_000) // Reduced to 10 second overall timeout
Promise.allSettled(
uniqueRelayUrls.map(async (url) => {
@@ -388,7 +406,9 @@ class ClientService extends EventTarget {
error instanceof Error &&
error.message.includes('too many concurrent REQs')
) {
- logger.debug(`⚠ Relay ${url} is overloaded, skipping retry`)
+ logger.debug(`⚠ Relay ${url} is overloaded, blacklisting temporarily`)
+ // Blacklist this relay for 5 minutes to prevent further overload
+ this.blacklistRelay(url)
errors.push({ url, error: new Error('Relay overloaded - too many concurrent requests') })
finishedCount++
@@ -555,8 +575,8 @@ class ClientService extends EventTarget {
) {
const newEventIdSet = new Set()
const requestCount = subRequests.length
- // More aggressive threshold for faster loading - respond when 1/3 of relays respond
- const threshold = Math.max(1, Math.floor(requestCount / 3))
+ // More conservative threshold - wait for 1/2 of relays to respond (raised from 1/3) to reduce premature/incomplete results
+ const threshold = Math.max(1, Math.floor(requestCount / 2))
let eventIdSet = new Set()
let events: NEvent[] = []
let eosedCount = 0
@@ -567,9 +587,9 @@ class ClientService extends EventTarget {
if (!hasCalledOnEvents && events.length === 0) {
hasCalledOnEvents = true
onEvents([], true) // Call with empty events to stop loading
- logger.debug('Global subscription timeout - stopping after 8 seconds')
+ logger.debug('Global subscription timeout - stopping after 12 seconds')
}
- }, 8000)
+ }, 12000) // Increased timeout to 12 seconds for better reliability
const subs = await Promise.all(
subRequests.map(async ({ urls, filter }) => {
@@ -1076,6 +1096,52 @@ class ClientService extends EventTarget {
return this._fetchEvent(id)
}
+ // Force clear relay connection state to allow fresh connections
+ clearRelayConnectionState(relayUrls?: string[]) {
+ if (relayUrls) {
+ // Clear state for specific relays
+ relayUrls.forEach(url => {
+ this.failureCount.delete(url)
+ this.circuitBreaker.delete(url)
+ this.requestThrottle.delete(url)
+ this.concurrentRequests.delete(url)
+ this.blacklistedRelays.delete(url) // Also clear blacklist
+ logger.debug(`Cleared connection state for relay: ${url}`)
+ })
+ } else {
+ // Clear all relay state
+ this.failureCount.clear()
+ this.circuitBreaker.clear()
+ this.requestThrottle.clear()
+ this.concurrentRequests.clear()
+ this.blacklistedRelays.clear() // Clear blacklist
+ this.globalRequestThrottle = 0 // Reset global throttle
+ logger.debug('Cleared all relay connection state')
+ }
+ }
+
+ // Blacklist a problematic relay temporarily
+ private blacklistRelay(relayUrl: string): void {
+ this.blacklistedRelays.set(relayUrl, Date.now())
+ logger.debug(`🚫 Blacklisted problematic relay: ${relayUrl}`)
+ }
+
+ // Check if a relay is blacklisted
+ private isRelayBlacklisted(relayUrl: string): boolean {
+ const blacklistTime = this.blacklistedRelays.get(relayUrl)
+ if (!blacklistTime) return false
+
+ const now = Date.now()
+ if (now - blacklistTime > this.BLACKLIST_TIMEOUT) {
+ // Blacklist expired, remove it
+ this.blacklistedRelays.delete(relayUrl)
+ logger.debug(`🟢 Blacklist expired for relay: ${relayUrl}`)
+ return false
+ }
+
+ return true
+ }
+
async fetchTrendingNotes() {
if (this.trendingNotesCache) {
return this.trendingNotesCache
@@ -1838,6 +1904,12 @@ class ClientService extends EventTarget {
// Skip empty or invalid URLs
if (!url || typeof url !== 'string') return false
+ // Skip blacklisted relays
+ if (this.isRelayBlacklisted(url)) {
+ logger.debug(`Skipping blacklisted relay: ${url}`)
+ return false
+ }
+
// Skip relays with open circuit breaker
if (this.isCircuitBreakerOpen(url)) {
logger.debug(`Skipping relay with open circuit breaker: ${url}`)
@@ -1858,8 +1930,26 @@ class ClientService extends EventTarget {
}
})
- // Limit to 3 relays to prevent "too many concurrent REQs" errors and improve speed
- // Reduced from 4 to 3 for faster response
+ // For profile feeds, prioritize write relays to ensure user's own responses are found
+ // Check if this looks like a profile feed (relays include write relays)
+ const hasWriteRelays = validRelays.some(url =>
+ FAST_WRITE_RELAY_URLS.some(writeRelay => normalizeUrl(writeRelay) === normalizeUrl(url))
+ )
+
+ if (hasWriteRelays) {
+ // For profile feeds: prioritize write relays and allow more relays
+ const writeRelays = validRelays.filter(url =>
+ FAST_WRITE_RELAY_URLS.some(writeRelay => normalizeUrl(writeRelay) === normalizeUrl(url))
+ )
+ const otherRelays = validRelays.filter(url =>
+ !FAST_WRITE_RELAY_URLS.some(writeRelay => normalizeUrl(writeRelay) === normalizeUrl(url))
+ )
+
+ // Return write relays first, then others (up to 6 total for profile feeds, vs. 3 for other feeds)
+ return [...writeRelays, ...otherRelays].slice(0, 6)
+ }
+
+ // For other feeds: keep the existing limit of 3 relays to prevent "too many concurrent REQs" errors
return validRelays.slice(0, 3)
}
@@ -1871,19 +1961,26 @@ class ClientService extends EventTarget {
const failures = this.failureCount.get(relayUrl) || 0
const concurrent = this.concurrentRequests.get(relayUrl) || 0
+ // Global throttling to prevent overwhelming all relays
+ const globalDelay = Math.max(0, this.GLOBAL_REQUEST_COOLDOWN - (now - this.globalRequestThrottle))
+ if (globalDelay > 0) {
+ await new Promise(resolve => setTimeout(resolve, globalDelay))
+ }
+ this.globalRequestThrottle = Date.now()
+
// Check concurrent request limit
if (concurrent >= this.MAX_CONCURRENT_REQUESTS) {
logger.debug(`Relay ${relayUrl} has ${concurrent} concurrent requests, waiting...`)
// Wait for a concurrent request to complete
while (this.concurrentRequests.get(relayUrl) || 0 >= this.MAX_CONCURRENT_REQUESTS) {
- await new Promise(resolve => setTimeout(resolve, 1000))
+ await new Promise(resolve => setTimeout(resolve, 2000)) // Increased wait time
}
}
// Calculate delay based on failures (exponential backoff)
let delay = this.REQUEST_COOLDOWN
if (failures >= this.MAX_FAILURES) {
- delay = Math.min(this.REQUEST_COOLDOWN * Math.pow(2, failures - this.MAX_FAILURES), 30000) // Max 30 seconds
+ delay = Math.min(this.REQUEST_COOLDOWN * Math.pow(2, failures - this.MAX_FAILURES), 60000) // Max 60 seconds
} else if (now - lastRequest < this.REQUEST_COOLDOWN) {
delay = this.REQUEST_COOLDOWN - (now - lastRequest)
}
@@ -1918,8 +2015,8 @@ class ClientService extends EventTarget {
this.concurrentRequests.set(relayUrl, current - 1)
}
- // Activate circuit breaker if too many failures
- if (newFailures >= 3) {
+ // Activate circuit breaker immediately on any failure to prevent "too many concurrent REQs"
+ if (newFailures >= this.MAX_FAILURES) {
this.circuitBreaker.set(relayUrl, Date.now())
logger.debug(`🔴 Circuit breaker activated for ${relayUrl} (${newFailures} failures)`)
}
diff --git a/src/services/note-stats.service.ts b/src/services/note-stats.service.ts
index 600823a..8110e54 100644
--- a/src/services/note-stats.service.ts
+++ b/src/services/note-stats.service.ts
@@ -322,7 +322,7 @@ class NoteStatsService {
// Skip self-interactions - don't count likes from the original event author
if (originalEventAuthor && originalEventAuthor === evt.pubkey) {
- console.log('[NoteStats] Skipping self-like from', evt.pubkey, 'to event', targetEventId)
+ // console.log('[NoteStats] Skipping self-like from', evt.pubkey, 'to event', targetEventId)
return
}
@@ -371,7 +371,7 @@ class NoteStatsService {
// Skip self-interactions - don't count reposts from the original event author
if (originalEventAuthor && originalEventAuthor === evt.pubkey) {
- console.log('[NoteStats] Skipping self-repost from', evt.pubkey, 'to event', eventId)
+ // console.log('[NoteStats] Skipping self-repost from', evt.pubkey, 'to event', eventId)
return
}
@@ -389,7 +389,7 @@ class NoteStatsService {
// Skip self-interactions - don't count zaps from the original event author
if (originalEventAuthor && originalEventAuthor === senderPubkey) {
- console.log('[NoteStats] Skipping self-zap from', senderPubkey, 'to event', originalEventId)
+ // console.log('[NoteStats] Skipping self-zap from', senderPubkey, 'to event', originalEventId)
return
}
@@ -490,7 +490,7 @@ class NoteStatsService {
// Skip self-interactions - don't count quotes from the original event author
if (originalEventAuthor && originalEventAuthor === evt.pubkey) {
- console.log('[NoteStats] Skipping self-quote from', evt.pubkey, 'to event', quotedEventId)
+ // console.log('[NoteStats] Skipping self-quote from', evt.pubkey, 'to event', quotedEventId)
return
}
@@ -513,7 +513,7 @@ class NoteStatsService {
// Skip self-interactions - don't count highlights from the original event author
if (originalEventAuthor && originalEventAuthor === evt.pubkey) {
- console.log('[NoteStats] Skipping self-highlight from', evt.pubkey, 'to event', highlightedEventId)
+ // console.log('[NoteStats] Skipping self-highlight from', evt.pubkey, 'to event', highlightedEventId)
return
}
diff --git a/vite.config.ts b/vite.config.ts
index 2c2b52d..4a45490 100644
--- a/vite.config.ts
+++ b/vite.config.ts
@@ -42,10 +42,50 @@ export default defineConfig({
globPatterns: ['**/*.{js,css,html,png,jpg,svg}'],
globDirectory: 'dist/',
maximumFileSizeToCacheInBytes: 5 * 1024 * 1024,
- cleanupOutdatedCaches: true
+ cleanupOutdatedCaches: true,
+ skipWaiting: true,
+ clientsClaim: true,
+ navigateFallback: '/index.html',
+ navigateFallbackDenylist: [/^\/api\//, /^\/_/, /^\/admin/],
+ runtimeCaching: [
+ {
+ urlPattern: /^https:\/\/image\.nostr\.build\/.*/i,
+ handler: 'CacheFirst',
+ options: {
+ cacheName: 'nostr-images',
+ expiration: {
+ maxEntries: 100,
+ maxAgeSeconds: 30 * 24 * 60 * 60 // 30 days
+ }
+ }
+ },
+ {
+ urlPattern: /^https:\/\/cdn\.satellite\.earth\/.*/i,
+ handler: 'CacheFirst',
+ options: {
+ cacheName: 'satellite-images',
+ expiration: {
+ maxEntries: 100,
+ maxAgeSeconds: 30 * 24 * 60 * 60 // 30 days
+ }
+ }
+ },
+ {
+ urlPattern: /^https:\/\/.*\.(?:png|jpg|jpeg|svg|gif|webp)$/i,
+ handler: 'CacheFirst',
+ options: {
+ cacheName: 'external-images',
+ expiration: {
+ maxEntries: 200,
+ maxAgeSeconds: 7 * 24 * 60 * 60 // 7 days
+ }
+ }
+ }
+ ]
},
devOptions: {
- enabled: true
+ enabled: true,
+ type: 'module'
},
manifest: {
name: 'Jumble',