diff --git a/package-lock.json b/package-lock.json
index 6726d14..af5ed65 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "jumble-imwald",
- "version": "13.0",
+ "version": "13.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "jumble-imwald",
- "version": "13.0",
+ "version": "13.1",
"license": "MIT",
"dependencies": {
"@asciidoctor/core": "^3.0.4",
diff --git a/package.json b/package.json
index 205196f..569c59a 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "jumble-imwald",
- "version": "13.0",
+ "version": "13.1",
"description": "A user-friendly Nostr client focused on relay feed browsing and relay discovery, forked from Jumble",
"private": true,
"type": "module",
diff --git a/src/components/CacheRelaysSetting/index.tsx b/src/components/CacheRelaysSetting/index.tsx
index 0518ec1..4a6a252 100644
--- a/src/components/CacheRelaysSetting/index.tsx
+++ b/src/components/CacheRelaysSetting/index.tsx
@@ -404,8 +404,22 @@ export default function CacheRelaysSetting() {
}
// Check if an event is invalid
- const isInvalidEvent = useCallback((item: { key: string; value: any; addedAt: number }): boolean => {
- if (!item || !item.value) return true
+ const isInvalidEvent = useCallback((item: { key: string; value: any; addedAt: number }, storeName?: string | null): boolean => {
+ if (!item) return true
+
+ // RSS feed items are not Nostr events, so skip validation for that store
+ // Handle both old format (with item property) and new format (with value property)
+ if (storeName === 'rssFeedItems') {
+ // Old format has item property, new format has value property - both are valid for RSS items
+ if (item.value || (item as any).item) {
+ return false
+ }
+ // If neither exists, it's invalid
+ return true
+ }
+
+ // For other stores, check if value exists
+ if (!item.value) return true
const event = item.value as Event
// Check for required Nostr event fields
@@ -690,7 +704,7 @@ export default function CacheRelaysSetting() {
) : (
filteredStoreItems.map((item, index) => {
const nestedCount = (item as any).nestedCount
- const invalid = isInvalidEvent(item)
+ const invalid = isInvalidEvent(item, selectedStore)
const invalidExplanation = invalid ? getInvalidEventExplanation(item) : ''
return (
@@ -870,7 +884,7 @@ export default function CacheRelaysSetting() {
) : (
filteredStoreItems.map((item, index) => {
const nestedCount = (item as any).nestedCount
- const invalid = isInvalidEvent(item)
+ const invalid = isInvalidEvent(item, selectedStore)
const invalidExplanation = invalid ? getInvalidEventExplanation(item) : ''
return (
diff --git a/src/components/NormalFeed/index.tsx b/src/components/NormalFeed/index.tsx
index 4c1b01f..412ff3c 100644
--- a/src/components/NormalFeed/index.tsx
+++ b/src/components/NormalFeed/index.tsx
@@ -10,6 +10,9 @@ import { forwardRef, useMemo, useRef, useState, useEffect } from 'react'
import KindFilter from '../KindFilter'
import { RefreshButton } from '../RefreshButton'
import RssFeedList from '../RssFeedList'
+import { useNostr } from '@/providers/NostrProvider'
+import rssFeedService from '@/services/rss-feed.service'
+import { DEFAULT_RSS_FEEDS } from '@/constants'
const NormalFeed = forwardRef
storage.getShowRssFeed())
const [activeTab, setActiveTab] = useState(listMode)
+ const [rssRefreshKey, setRssRefreshKey] = useState(0)
+ const { pubkey, rssFeedListEvent } = useNostr()
// Sync activeTab with listMode when listMode changes (but not when switching to RSS)
useEffect(() => {
@@ -121,11 +126,50 @@ const NormalFeed = forwardRef}
>
- ) : null
+ ) : (
+ <>
+ {!supportTouch && {
+ // Get feed URLs from event or use default
+ let feedUrls: string[] = DEFAULT_RSS_FEEDS
+ if (pubkey && rssFeedListEvent) {
+ try {
+ const urls = rssFeedListEvent.tags
+ .filter(tag => tag[0] === 'u' && tag[1])
+ .map(tag => tag[1] as string)
+ .filter((url): url is string => {
+ if (typeof url !== 'string') return false
+ const trimmed = url.trim()
+ return trimmed.length > 0
+ })
+ if (urls.length > 0) {
+ feedUrls = urls
+ }
+ } catch (e) {
+ // Use default feeds on error
+ }
+ }
+
+ // Trigger background refresh and UI update
+ logger.info('[NormalFeed] Manual refresh: triggering background refresh', { feedCount: feedUrls.length })
+ // Start background refresh (don't wait for it)
+ rssFeedService.backgroundRefreshFeeds(feedUrls).catch(err => {
+ logger.error('[NormalFeed] Manual refresh: background refresh failed', { error: err })
+ })
+ // Immediately trigger UI update (will show cached items, then update when background refresh completes)
+ if (pubkey) {
+ window.dispatchEvent(new CustomEvent('rssFeedListUpdated', {
+ detail: { pubkey, feedUrls, eventId: 'manual-refresh' }
+ }))
+ }
+ // Also force re-render by updating key
+ setRssRefreshKey(prev => prev + 1)
+ }} />}
+ >
+ )
}
/>
{activeTab === 'rss' ? (
-
+
) : (
+
{
@@ -3064,14 +3065,15 @@ export default function MarkdownArticle({
}
}
const displayUrl = thumbnailUrl || media.url
+ const hasThumbnail = !!thumbnailUrl
return (
-
+
{
diff --git a/src/components/RssFeedItem/index.tsx b/src/components/RssFeedItem/index.tsx
index f4facda..bc80a98 100644
--- a/src/components/RssFeedItem/index.tsx
+++ b/src/components/RssFeedItem/index.tsx
@@ -398,29 +398,33 @@ export default function RssFeedItem({ item, className }: { item: TRssFeedItem; c
{item.media
.filter(m => m.type?.startsWith('image/') || !m.type || m.type === 'image')
- .map((media, index) => (
-
-

{
- e.stopPropagation()
- // Open image in new tab
- window.open(media.url, '_blank', 'noopener,noreferrer')
- }}
- onError={(e) => {
- // Hide image on error
- e.currentTarget.style.display = 'none'
- }}
- />
- {media.credit && (
-
- {t('Photo')}: {media.credit}
-
- )}
-
- ))}
+ .map((media, index) => {
+ const hasThumbnail = !!media.thumbnail
+ const imageUrl = media.thumbnail || media.url
+ return (
+
+

{
+ e.stopPropagation()
+ // Open full image in new tab
+ window.open(media.url, '_blank', 'noopener,noreferrer')
+ }}
+ onError={(e) => {
+ // Hide image on error
+ e.currentTarget.style.display = 'none'
+ }}
+ />
+ {media.credit && (
+
+ {t('Photo')}: {media.credit}
+
+ )}
+
+ )
+ })}
)}
diff --git a/src/components/RssFeedList/index.tsx b/src/components/RssFeedList/index.tsx
index 27d05ab..98e6ba6 100644
--- a/src/components/RssFeedList/index.tsx
+++ b/src/components/RssFeedList/index.tsx
@@ -13,22 +13,52 @@ export default function RssFeedList() {
const [items, setItems] = useState([])
const [loading, setLoading] = useState(true)
const [error, setError] = useState(null)
+ const [refreshing, setRefreshing] = useState(false)
useEffect(() => {
// Create AbortController for this effect
- const abortController = new AbortController()
+ let abortController = new AbortController()
let isMounted = true
let isLoading = false
+ let timeoutId: NodeJS.Timeout | null = null
+
+ const loadRssFeeds = async (forceNewController = false) => {
+ // If forced, create a new controller (for manual refreshes)
+ if (forceNewController) {
+ abortController.abort() // Abort old one
+ abortController = new AbortController()
+ }
- const loadRssFeeds = async () => {
// Check if already aborted or if a load is already in progress
if (abortController.signal.aborted || isLoading) {
+ logger.debug('[RssFeedList] Skipping load - already aborted or loading', {
+ aborted: abortController.signal.aborted,
+ isLoading
+ })
return
}
+ // Clear any existing timeout
+ if (timeoutId) {
+ clearTimeout(timeoutId)
+ timeoutId = null
+ }
+
isLoading = true
setLoading(true)
setError(null)
+
+ // Set a timeout to prevent infinite loading (30 seconds)
+ timeoutId = setTimeout(() => {
+ if (isMounted && isLoading) {
+ logger.warn('[RssFeedList] Feed loading timeout - aborting and showing partial results')
+ abortController.abort()
+ isLoading = false
+ if (isMounted) {
+ setLoading(false)
+ }
+ }
+ }, 30000)
try {
// Get feed URLs from event or use default
@@ -72,6 +102,12 @@ export default function RssFeedList() {
}
} else if (pubkey) {
logger.info('[RssFeedList] No RSS feed list event in context, using default feeds')
+ // Trigger background refresh for default feeds when no event exists
+ rssFeedService.backgroundRefreshFeeds(feedUrls, abortController.signal).catch(err => {
+ if (!(err instanceof DOMException && err.name === 'AbortError')) {
+ logger.error('[RssFeedList] Background refresh of default feeds failed', { error: err })
+ }
+ })
}
// Check if aborted before fetching
@@ -79,11 +115,19 @@ export default function RssFeedList() {
return
}
- // Fetch and merge feeds (this handles errors gracefully and returns partial results)
+ // Fetch and merge feeds (cache-first: returns cached items immediately, background-refreshes)
+ // Show refreshing indicator (background refresh will run in background, or we'll wait if cache is empty)
+ if (isMounted) {
+ setRefreshing(true)
+ }
+
const fetchedItems = await rssFeedService.fetchMultipleFeeds(feedUrls, abortController.signal)
// Check if aborted after fetching
if (abortController.signal.aborted || !isMounted) {
+ if (isMounted) {
+ setRefreshing(false)
+ }
return
}
@@ -94,6 +138,40 @@ export default function RssFeedList() {
}
setItems(fetchedItems)
+
+ // Set up a listener for cache updates (background refresh may add new items)
+ // Re-check cache after a delay to see if background refresh added items
+ const checkForUpdates = async () => {
+ if (abortController.signal.aborted || !isMounted) {
+ if (isMounted) {
+ setRefreshing(false)
+ }
+ return
+ }
+
+ try {
+ const updatedItems = await rssFeedService.fetchMultipleFeeds(feedUrls, abortController.signal)
+ if (!abortController.signal.aborted && isMounted) {
+ setRefreshing(false)
+ if (updatedItems.length > fetchedItems.length) {
+ // New items were added by background refresh
+ setItems(updatedItems)
+ logger.info('[RssFeedList] Updated items from background refresh', {
+ previousCount: fetchedItems.length,
+ newCount: updatedItems.length
+ })
+ }
+ }
+ } catch (err) {
+ if (isMounted) {
+ setRefreshing(false)
+ }
+ // Ignore errors in update check
+ }
+ }
+
+ // Check for updates after 5 seconds (background refresh should be done by then)
+ setTimeout(checkForUpdates, 5000)
} catch (err) {
// Don't handle abort errors - they're expected during cleanup
if (err instanceof DOMException && err.name === 'AbortError') {
@@ -116,9 +194,17 @@ export default function RssFeedList() {
}
} finally {
isLoading = false
+ if (timeoutId) {
+ clearTimeout(timeoutId)
+ timeoutId = null
+ }
// Only update loading state if still mounted
if (isMounted) {
setLoading(false)
+ // If we had no cached items, background refresh was awaited, so stop refreshing indicator
+ if (items.length === 0) {
+ setRefreshing(false)
+ }
}
}
}
@@ -134,7 +220,16 @@ export default function RssFeedList() {
eventId: detail.eventId,
feedCount: detail.feedUrls.length
})
- loadRssFeeds()
+
+ // For manual refresh, show refreshing indicator
+ if (detail.eventId === 'manual-refresh' && isMounted) {
+ setRefreshing(true)
+ }
+
+ // For manual refresh, the background refresh is already triggered by the button
+ // Just reload to show updated items (background refresh will update cache in the background)
+ // For other updates (like event changes), also just reload
+ loadRssFeeds(true)
}
}
@@ -143,7 +238,11 @@ export default function RssFeedList() {
return () => {
isMounted = false
isLoading = false
- abortController.abort() // Cancel all in-flight requests
+ if (timeoutId) {
+ clearTimeout(timeoutId)
+ }
+ // Abort any in-flight requests
+ abortController.abort()
window.removeEventListener('rssFeedListUpdated', handleRssFeedListUpdate as EventListener)
}
}, [pubkey, rssFeedListEvent, t])
@@ -176,6 +275,12 @@ export default function RssFeedList() {
return (
+ {refreshing && (
+
+
+ {t('Refreshing feeds...')}
+
+ )}
{items.map((item) => (
))}
diff --git a/src/pages/secondary/RssFeedSettingsPage/index.tsx b/src/pages/secondary/RssFeedSettingsPage/index.tsx
index 17ec7ca..760c28b 100644
--- a/src/pages/secondary/RssFeedSettingsPage/index.tsx
+++ b/src/pages/secondary/RssFeedSettingsPage/index.tsx
@@ -13,10 +13,11 @@ import { CloudUpload, Loader, Trash2, Plus } from 'lucide-react'
import logger from '@/lib/logger'
import { ExtendedKind } from '@/constants'
import indexedDb from '@/services/indexed-db.service'
+import rssFeedService from '@/services/rss-feed.service'
const RssFeedSettingsPage = forwardRef(({ index, hideTitlebar = false }: { index?: number; hideTitlebar?: boolean }, ref) => {
const { t } = useTranslation()
- const { pubkey, publish, checkLogin, rssFeedListEvent } = useNostr()
+ const { pubkey, publish, checkLogin, rssFeedListEvent, updateRssFeedListEvent } = useNostr()
const [feedUrls, setFeedUrls] = useState
([])
const [newFeedUrl, setNewFeedUrl] = useState('')
const [showRssFeed, setShowRssFeed] = useState(true)
@@ -262,11 +263,20 @@ const RssFeedSettingsPage = forwardRef(({ index, hideTitlebar = false }: { index
})
}
+ // Update the context with the new event
+ await updateRssFeedListEvent(result)
+
// Dispatch custom event to notify other components (like RssFeedList) to refresh
window.dispatchEvent(new CustomEvent('rssFeedListUpdated', {
detail: { pubkey, feedUrls, eventId: result.id }
}))
+ // Trigger background refresh of feeds (don't wait for it)
+ logger.info('[RssFeedSettingsPage] Triggering background refresh of RSS feeds', { feedCount: feedUrls.length })
+ rssFeedService.backgroundRefreshFeeds(feedUrls).catch(err => {
+ logger.error('[RssFeedSettingsPage] Background refresh failed', { error: err })
+ })
+
// Read relayStatuses immediately before it might be deleted
const relayStatuses = (result as any).relayStatuses
logger.info('[RssFeedSettingsPage] Publishing complete', {
diff --git a/src/providers/NostrProvider/index.tsx b/src/providers/NostrProvider/index.tsx
index 128954d..45b0f58 100644
--- a/src/providers/NostrProvider/index.tsx
+++ b/src/providers/NostrProvider/index.tsx
@@ -94,6 +94,7 @@ type TNostrContext = {
updateInterestListEvent: (interestListEvent: Event) => Promise
updateFavoriteRelaysEvent: (favoriteRelaysEvent: Event) => Promise
updateBlockedRelaysEvent: (blockedRelaysEvent: Event) => Promise
+ updateRssFeedListEvent: (rssFeedListEvent: Event) => Promise
updateNotificationsSeenAt: (skipPublish?: boolean) => Promise
}
@@ -1094,6 +1095,13 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
setBlockedRelaysEvent(newBlockedRelaysEvent)
}
+ // Persist a freshly published RSS feed list event and push it into context
+ // state so consumers (e.g. RssFeedList) re-render with the new feed set.
+ const updateRssFeedListEvent = async (rssFeedListEvent: Event) => {
+ const newRssFeedListEvent = await indexedDb.putReplaceableEvent(rssFeedListEvent)
+ // putReplaceableEvent presumably returns the stored winner for this
+ // replaceable kind/author; a different id means a newer event already
+ // exists, so skip the state update to avoid clobbering it — TODO confirm
+ if (newRssFeedListEvent.id !== rssFeedListEvent.id) return
+
+ setRssFeedListEvent(newRssFeedListEvent)
+ }
+
const updateNotificationsSeenAt = async (skipPublish = false) => {
if (!account) return
@@ -1163,6 +1171,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
updateInterestListEvent,
updateFavoriteRelaysEvent,
updateBlockedRelaysEvent,
+ updateRssFeedListEvent,
updateNotificationsSeenAt
}}
>
diff --git a/src/services/indexed-db.service.ts b/src/services/indexed-db.service.ts
index e14033a..b23c262 100644
--- a/src/services/indexed-db.service.ts
+++ b/src/services/indexed-db.service.ts
@@ -28,6 +28,7 @@ const StoreNames = {
BLOCKED_RELAYS_EVENTS: 'blockedRelaysEvents',
CACHE_RELAYS_EVENTS: 'cacheRelaysEvents',
RSS_FEED_LIST_EVENTS: 'rssFeedListEvents',
+ RSS_FEED_ITEMS: 'rssFeedItems',
RELAY_SETS: 'relaySets',
FOLLOWING_FAVORITE_RELAYS: 'followingFavoriteRelays',
RELAY_INFOS: 'relayInfos',
@@ -51,7 +52,7 @@ class IndexedDbService {
init(): Promise {
if (!this.initPromise) {
this.initPromise = new Promise((resolve, reject) => {
- const request = window.indexedDB.open('jumble', 16)
+ const request = window.indexedDB.open('jumble', 17)
request.onerror = (event) => {
reject(event)
@@ -124,6 +125,11 @@ class IndexedDbService {
if (!db.objectStoreNames.contains(StoreNames.RSS_FEED_LIST_EVENTS)) {
db.createObjectStore(StoreNames.RSS_FEED_LIST_EVENTS, { keyPath: 'key' })
}
+ if (!db.objectStoreNames.contains(StoreNames.RSS_FEED_ITEMS)) {
+ const store = db.createObjectStore(StoreNames.RSS_FEED_ITEMS, { keyPath: 'key' })
+ store.createIndex('feedUrl', 'feedUrl', { unique: false })
+ store.createIndex('pubDate', 'pubDate', { unique: false })
+ }
}
})
setTimeout(() => this.cleanUp(), 1000 * 60) // 1 minute
@@ -1301,6 +1307,139 @@ class IndexedDbService {
})
)
}
+
+  /**
+   * Store RSS feed items in IndexedDB.
+   *
+   * Each item is keyed by `${feedUrl}:${guid}` and wrapped in the TValue
+   * shape ({ key, value, addedAt }) used by the other object stores.
+   * Individual write failures are logged and swallowed so that one bad
+   * record cannot reject the whole batch.
+   */
+  async putRssFeedItems(items: import('./rss-feed.service').RssFeedItem[]): Promise<void> {
+    await this.initPromise
+    const storeName = StoreNames.RSS_FEED_ITEMS
+
+    if (!this.db || !this.db.objectStoreNames.contains(storeName)) {
+      logger.warn('[IndexedDB] RSS feed items store not found', { storeName })
+      return
+    }
+
+    return new Promise<void>((resolve) => {
+      const transaction = this.db!.transaction(storeName, 'readwrite')
+      const store = transaction.objectStore(storeName)
+
+      let completed = 0
+      let errors = 0
+
+      items.forEach((item) => {
+        // Create a unique key from feedUrl and guid
+        const key = `${item.feedUrl}:${item.guid}`
+        // Store in TValue format for consistency with other stores
+        const value: TValue = {
+          key,
+          value: item,
+          addedAt: Date.now()
+        }
+
+        const request = store.put(value)
+        request.onsuccess = () => {
+          completed++
+          if (completed + errors === items.length) {
+            resolve()
+          }
+        }
+        request.onerror = () => {
+          errors++
+          // Don't reject the batch for a single failed record — log and move on
+          logger.warn('[IndexedDB] Failed to store RSS feed item', { key, error: request.error })
+          if (completed + errors === items.length) {
+            resolve()
+          }
+        }
+      })
+
+      // An empty batch fires no request callbacks, so resolve immediately
+      if (items.length === 0) {
+        resolve()
+      }
+    })
+  }
+
+
+  /**
+   * Get all RSS feed items from IndexedDB.
+   *
+   * Accepts both the current record shape ({ value }) and the legacy shape
+   * ({ item }); entries matching neither are dropped. pubDate values that
+   * were serialized as strings or epoch numbers are revived to Date objects.
+   */
+  async getRssFeedItems(): Promise<import('./rss-feed.service').RssFeedItem[]> {
+    await this.initPromise
+    const storeName = StoreNames.RSS_FEED_ITEMS
+
+    if (!this.db || !this.db.objectStoreNames.contains(storeName)) {
+      logger.warn('[IndexedDB] RSS feed items store not found', { storeName })
+      return []
+    }
+
+    return new Promise((resolve, reject) => {
+      const transaction = this.db!.transaction(storeName, 'readonly')
+      const store = transaction.objectStore(storeName)
+      const request = store.getAll()
+
+      request.onsuccess = () => {
+        const items = request.result.map((entry: TValue | any) => {
+          let item: import('./rss-feed.service').RssFeedItem | null = null
+
+          // Handle new format (with value property)
+          if (entry.value) {
+            item = entry.value
+          }
+          // Fallback for old format (with item property)
+          else if ((entry as any).item) {
+            item = (entry as any).item as import('./rss-feed.service').RssFeedItem
+          }
+
+          if (!item) {
+            return null
+          }
+
+          // IndexedDB may have serialized the Date as a string or a number —
+          // a single revival branch covers both cases identically
+          if (item.pubDate && (typeof item.pubDate === 'string' || typeof item.pubDate === 'number')) {
+            item.pubDate = new Date(item.pubDate)
+          }
+
+          return item
+        }).filter((item): item is import('./rss-feed.service').RssFeedItem => item !== null)
+
+        logger.debug('[IndexedDB] Retrieved RSS feed items', {
+          totalRetrieved: request.result.length,
+          validItems: items.length
+        })
+        resolve(items)
+      }
+
+      request.onerror = () => {
+        reject(request.error)
+      }
+    })
+  }
+
+
+  /**
+   * Remove every cached RSS feed item from the rssFeedItems object store.
+   * Resolves as a silent no-op when the database or store is unavailable.
+   */
+  async clearRssFeedItems(): Promise<void> {
+    await this.initPromise
+    const storeName = StoreNames.RSS_FEED_ITEMS
+
+    // Nothing to clear if the DB never opened or the store was never created
+    if (!this.db || !this.db.objectStoreNames.contains(storeName)) {
+      return
+    }
+
+    return new Promise<void>((resolve, reject) => {
+      const store = this.db!.transaction(storeName, 'readwrite').objectStore(storeName)
+      const request = store.clear()
+      request.onsuccess = () => resolve()
+      request.onerror = () => reject(request.error)
+    })
+  }
}
const instance = IndexedDbService.getInstance()
diff --git a/src/services/rss-feed.service.ts b/src/services/rss-feed.service.ts
index 43db843..44d2fb4 100644
--- a/src/services/rss-feed.service.ts
+++ b/src/services/rss-feed.service.ts
@@ -1,5 +1,6 @@
import { DEFAULT_RSS_FEEDS } from '@/constants'
import logger from '@/lib/logger'
+import indexedDb from '@/services/indexed-db.service'
export interface RssFeedItemMedia {
url: string
@@ -55,6 +56,7 @@ class RssFeedService {
static instance: RssFeedService
private feedCache: Map = new Map()
private readonly CACHE_DURATION = 5 * 60 * 1000 // 5 minutes
+ private backgroundRefreshController: AbortController | null = null
constructor() {
if (!RssFeedService.instance) {
@@ -925,7 +927,7 @@ class RssFeedService {
/**
* Fetch multiple feeds and merge items
- * This method gracefully handles failures - if some feeds fail, it returns items from successful feeds
+ * Cache-first: reads from IndexedDB, displays immediately, then background-refreshes to merge new items
*/
async fetchMultipleFeeds(feedUrls: string[], signal?: AbortSignal): Promise {
if (feedUrls.length === 0) {
@@ -937,70 +939,345 @@ class RssFeedService {
throw new DOMException('The operation was aborted.', 'AbortError')
}
- const results = await Promise.allSettled(
- feedUrls.map(url => this.fetchFeed(url, signal))
- )
-
- // Check if aborted after fetching
- if (signal?.aborted) {
- throw new DOMException('The operation was aborted.', 'AbortError')
+ // Step 1: Read from IndexedDB cache first (cache-first strategy)
+ let cachedItems: RssFeedItem[] = []
+ try {
+ const allCachedItems = await indexedDb.getRssFeedItems()
+ logger.info('[RssFeedService] Retrieved all cached items from IndexedDB', {
+ totalCached: allCachedItems.length
+ })
+
+ // Filter to only items from the requested feeds
+ // Normalize URLs for comparison (remove trailing slashes, ensure consistent format)
+ const normalizeUrl = (url: string) => url.trim().replace(/\/$/, '')
+ const normalizedRequestedUrls = new Set(feedUrls.map(normalizeUrl))
+
+ cachedItems = allCachedItems.filter(item => {
+ const normalizedItemUrl = normalizeUrl(item.feedUrl)
+ const matches = normalizedRequestedUrls.has(normalizedItemUrl)
+ if (!matches && allCachedItems.length > 0 && allCachedItems.length < 10) {
+ // Only log for small sets to avoid spam
+ logger.debug('[RssFeedService] Item filtered out (feed URL not in requested list)', {
+ itemFeedUrl: item.feedUrl,
+ normalizedItemUrl,
+ requestedFeeds: feedUrls,
+ normalizedRequestedUrls: Array.from(normalizedRequestedUrls),
+ itemGuid: item.guid?.substring(0, 20)
+ })
+ }
+ return matches
+ })
+
+ logger.info('[RssFeedService] Filtered cached items by feed URLs', {
+ beforeFilter: allCachedItems.length,
+ afterFilter: cachedItems.length,
+ requestedFeedCount: feedUrls.length,
+ uniqueCachedFeedUrls: [...new Set(allCachedItems.map(i => i.feedUrl))],
+ requestedFeedUrls: feedUrls
+ })
+
+ // Convert pubDate back to Date objects (handle both Date objects and timestamps/strings)
+ cachedItems = cachedItems.map(item => {
+ let pubDate: Date | null = null
+ if (item.pubDate) {
+ if (item.pubDate instanceof Date) {
+ pubDate = item.pubDate
+ } else if (typeof item.pubDate === 'number') {
+ pubDate = new Date(item.pubDate)
+ } else if (typeof item.pubDate === 'string') {
+ pubDate = new Date(item.pubDate)
+ }
+ }
+ return {
+ ...item,
+ pubDate
+ }
+ })
+
+ logger.info('[RssFeedService] Loaded cached items from IndexedDB', {
+ cachedCount: cachedItems.length,
+ feedCount: feedUrls.length,
+ filteredCount: cachedItems.length,
+ feedUrls: feedUrls
+ })
+ } catch (error) {
+ logger.warn('[RssFeedService] Failed to load cached items from IndexedDB', { error })
}
- const allItems: RssFeedItem[] = []
- let successCount = 0
- let failureCount = 0
- let abortCount = 0
+ const cacheWasEmpty = cachedItems.length === 0
- results.forEach((result, index) => {
- if (result.status === 'fulfilled') {
- allItems.push(...result.value.items)
- successCount++
- logger.debug('[RssFeedService] Successfully fetched feed', { url: feedUrls[index], itemCount: result.value.items.length })
- } else {
- failureCount++
- const error = result.reason
- // Don't log abort errors - they're expected during cleanup
- if (error instanceof DOMException && error.name === 'AbortError') {
- abortCount++
- // Silently skip aborted requests
+ // Step 2: Background refresh to merge new items
+ // If cache is empty, we'll wait a bit for the refresh to complete
+ const backgroundRefresh = async () => {
+ if (signal?.aborted) {
+ return
+ }
+
+ try {
+ const results = await Promise.allSettled(
+ feedUrls.map(url => this.fetchFeed(url, signal))
+ )
+
+ if (signal?.aborted) {
return
}
- // Log warning but don't throw - we want to return partial results
- const errorMessage = error instanceof Error ? error.message : String(error)
- logger.warn('[RssFeedService] Failed to fetch feed after trying all strategies', {
- url: feedUrls[index],
- error: errorMessage
+
+ const newItems: RssFeedItem[] = []
+ let successCount = 0
+ let failureCount = 0
+ let abortCount = 0
+
+ results.forEach((result, index) => {
+ if (result.status === 'fulfilled') {
+ newItems.push(...result.value.items)
+ successCount++
+ logger.debug('[RssFeedService] Successfully fetched feed', { url: feedUrls[index], itemCount: result.value.items.length })
+ } else {
+ failureCount++
+ const error = result.reason
+ if (error instanceof DOMException && error.name === 'AbortError') {
+ abortCount++
+ return
+ }
+ const errorMessage = error instanceof Error ? error.message : String(error)
+ logger.warn('[RssFeedService] Failed to fetch feed after trying all strategies', {
+ url: feedUrls[index],
+ error: errorMessage
+ })
+ }
})
+
+ if (!signal?.aborted && successCount > 0) {
+ // Merge new items with cached items (deduplicate by feedUrl:guid)
+ const itemMap = new Map()
+
+ // Add cached items first
+ cachedItems.forEach(item => {
+ const key = `${item.feedUrl}:${item.guid}`
+ itemMap.set(key, item)
+ })
+
+ // Add/update with new items (newer items replace older ones)
+ newItems.forEach(item => {
+ const key = `${item.feedUrl}:${item.guid}`
+ const existing = itemMap.get(key)
+ // Keep the newer item, or add if it doesn't exist
+ if (!existing || (item.pubDate && existing.pubDate && item.pubDate > existing.pubDate)) {
+ itemMap.set(key, item)
+ }
+ })
+
+ const mergedItems = Array.from(itemMap.values())
+
+ // Sort by publication date (newest first)
+ mergedItems.sort((a, b) => {
+ const dateA = a.pubDate?.getTime() || 0
+ const dateB = b.pubDate?.getTime() || 0
+ return dateB - dateA
+ })
+
+ // Write merged items back to IndexedDB
+ try {
+ await indexedDb.putRssFeedItems(mergedItems)
+ logger.info('[RssFeedService] Updated IndexedDB cache with merged items', {
+ totalItems: mergedItems.length,
+ newItems: newItems.length,
+ cachedItems: cachedItems.length
+ })
+ } catch (error) {
+ logger.error('[RssFeedService] Failed to update IndexedDB cache', { error })
+ }
+ }
+ } catch (error) {
+ if (!(error instanceof DOMException && error.name === 'AbortError')) {
+ logger.error('[RssFeedService] Background refresh failed', { error })
+ }
}
- })
+ }
- // Log summary (only if not aborted)
- if (!signal?.aborted) {
- if (successCount > 0) {
- logger.info('[RssFeedService] Feed fetch summary', {
- total: feedUrls.length,
- successful: successCount,
- failed: failureCount - abortCount, // Don't count aborts as failures
- aborted: abortCount,
- itemsFound: allItems.length
- })
- } else if (failureCount > abortCount) {
- // Only log error if there were actual failures (not just aborts)
- logger.error('[RssFeedService] All feeds failed to fetch', {
- total: feedUrls.length,
- urls: feedUrls
- })
+ // If cache is empty, wait a bit for background refresh to populate it
+ if (cacheWasEmpty) {
+ logger.info('[RssFeedService] Cache is empty, waiting for background refresh to complete', { feedCount: feedUrls.length })
+ try {
+ // Wait up to 10 seconds for background refresh to complete
+ await Promise.race([
+ backgroundRefresh(),
+ new Promise(resolve => setTimeout(resolve, 10000))
+ ])
+
+ // Re-read from cache after background refresh
+ try {
+ const refreshedItems = await indexedDb.getRssFeedItems()
+ const feedUrlSet = new Set(feedUrls)
+ cachedItems = refreshedItems
+ .filter(item => feedUrlSet.has(item.feedUrl))
+ .map(item => ({
+ ...item,
+ pubDate: item.pubDate ? new Date(item.pubDate) : null
+ }))
+
+ logger.info('[RssFeedService] Loaded items after background refresh', {
+ itemCount: cachedItems.length,
+ feedCount: feedUrls.length
+ })
+ } catch (error) {
+ logger.warn('[RssFeedService] Failed to reload cached items after background refresh', { error })
+ }
+ } catch (error) {
+ if (!(error instanceof DOMException && error.name === 'AbortError')) {
+ logger.error('[RssFeedService] Background refresh error during initial load', { error })
+ }
}
+ } else {
+ // Cache has items, start background refresh in background (don't wait)
+ backgroundRefresh().catch(err => {
+ if (!(err instanceof DOMException && err.name === 'AbortError')) {
+ logger.error('[RssFeedService] Background refresh error', { error: err })
+ }
+ })
}
+ // Return cached items (now potentially updated from background refresh)
// Sort by publication date (newest first)
- allItems.sort((a, b) => {
+ cachedItems.sort((a, b) => {
const dateA = a.pubDate?.getTime() || 0
const dateB = b.pubDate?.getTime() || 0
return dateB - dateA
})
- return allItems
+ return cachedItems
+ }
+
+
+  /**
+   * Trigger a background refresh for specific feed URLs (without returning cached items).
+   * Useful for forcing a refresh after the feed list changes. Any refresh already
+   * in flight is aborted before the new one starts, and the merged results are
+   * written back to the IndexedDB cache.
+   */
+  async backgroundRefreshFeeds(feedUrls: string[], signal?: AbortSignal): Promise<void> {
+    if (feedUrls.length === 0) {
+      return
+    }
+
+    // Abort any existing background refresh so two merges never race
+    if (this.backgroundRefreshController) {
+      logger.info('[RssFeedService] Aborting existing background refresh before starting new one')
+      this.backgroundRefreshController.abort()
+      this.backgroundRefreshController = null
+    }
+
+    // Create a new AbortController for this refresh
+    const controller = new AbortController()
+    this.backgroundRefreshController = controller
+
+    // Mirror an external abort onto our controller (one combined signal is
+    // enough — no need for a second hand-rolled controller)
+    if (signal) {
+      if (signal.aborted) {
+        controller.abort()
+        this.backgroundRefreshController = null
+        return
+      }
+      signal.addEventListener('abort', () => controller.abort(), { once: true })
+    }
+
+    const combinedSignal = controller.signal
+
+    try {
+      const results = await Promise.allSettled(
+        feedUrls.map(url => this.fetchFeed(url, combinedSignal))
+      )
+
+      if (combinedSignal.aborted) {
+        return
+      }
+
+      const newItems: RssFeedItem[] = []
+      let successCount = 0
+
+      results.forEach((result, index) => {
+        if (result.status === 'fulfilled') {
+          newItems.push(...result.value.items)
+          successCount++
+          logger.debug('[RssFeedService] Background refresh: successfully fetched feed', {
+            url: feedUrls[index],
+            itemCount: result.value.items.length
+          })
+        }
+      })
+
+      if (!combinedSignal.aborted && successCount > 0) {
+        // Load existing cached items for the requested feeds. Normalize URLs
+        // (trim + strip trailing slash) the same way fetchMultipleFeeds does,
+        // so items are not dropped over a cosmetic URL mismatch.
+        const normalizeUrl = (url: string) => url.trim().replace(/\/$/, '')
+        const requestedUrls = new Set(feedUrls.map(normalizeUrl))
+        let cachedItems: RssFeedItem[] = []
+        try {
+          const allCached = await indexedDb.getRssFeedItems()
+          cachedItems = allCached
+            .filter(item => requestedUrls.has(normalizeUrl(item.feedUrl)))
+            .map(item => ({
+              ...item,
+              pubDate: item.pubDate ? new Date(item.pubDate) : null
+            }))
+        } catch (error) {
+          logger.warn('[RssFeedService] Failed to load cached items for background refresh', { error })
+        }
+
+        // Merge new items with cached items (deduplicate by feedUrl:guid);
+        // a fetched item replaces a cached one only with a strictly newer pubDate
+        const itemMap = new Map<string, RssFeedItem>()
+        cachedItems.forEach(item => {
+          itemMap.set(`${item.feedUrl}:${item.guid}`, item)
+        })
+        newItems.forEach(item => {
+          const key = `${item.feedUrl}:${item.guid}`
+          const existing = itemMap.get(key)
+          if (!existing || (item.pubDate && existing.pubDate && item.pubDate > existing.pubDate)) {
+            itemMap.set(key, item)
+          }
+        })
+
+        // Sort by publication date (newest first)
+        const mergedItems = Array.from(itemMap.values()).sort((a, b) => {
+          const dateA = a.pubDate?.getTime() || 0
+          const dateB = b.pubDate?.getTime() || 0
+          return dateB - dateA
+        })
+
+        // Write merged items back to IndexedDB
+        try {
+          await indexedDb.putRssFeedItems(mergedItems)
+          logger.info('[RssFeedService] Background refresh: updated IndexedDB cache', {
+            totalItems: mergedItems.length,
+            newItems: newItems.length,
+            cachedItems: cachedItems.length
+          })
+        } catch (error) {
+          logger.error('[RssFeedService] Background refresh: failed to update IndexedDB cache', { error })
+        }
+      }
+    } catch (error) {
+      if (!(error instanceof DOMException && error.name === 'AbortError')) {
+        logger.error('[RssFeedService] Background refresh failed', { error })
+      }
+    } finally {
+      // Only clear the shared controller if a newer refresh has not replaced
+      // it — unconditionally nulling here would clobber a concurrent caller's
+      // controller and leave that refresh un-abortable
+      if (this.backgroundRefreshController === controller) {
+        this.backgroundRefreshController = null
+      }
+    }
+  }
/**
@@ -1009,8 +1286,21 @@ class RssFeedService {
clearCache(url?: string) {
if (url) {
this.feedCache.delete(url)
+ // Also clear from IndexedDB (filter by feedUrl)
+ indexedDb.getRssFeedItems().then(items => {
+ const filtered = items.filter(item => item.feedUrl !== url)
+ indexedDb.putRssFeedItems(filtered).catch(err => {
+ logger.error('[RssFeedService] Failed to clear feed from IndexedDB', { url, error: err })
+ })
+ }).catch(err => {
+ logger.error('[RssFeedService] Failed to get items for cache clear', { url, error: err })
+ })
} else {
this.feedCache.clear()
+ // Clear all from IndexedDB
+ indexedDb.clearRssFeedItems().catch(err => {
+ logger.error('[RssFeedService] Failed to clear IndexedDB cache', { error: err })
+ })
}
}
}