You can not select more than 25 topics
Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
502 lines
15 KiB
502 lines
15 KiB
<script lang="ts"> |
|
import { nostrClient } from '../../services/nostr/nostr-client.js'; |
|
import { relayManager } from '../../services/nostr/relay-manager.js'; |
|
import FeedPost from './FeedPost.svelte'; |
|
import ThreadDrawer from './ThreadDrawer.svelte'; |
|
import type { NostrEvent } from '../../types/nostr.js'; |
|
import { onMount, tick } from 'svelte'; |
|
import { KIND } from '../../types/kind-lookup.js'; |
|
|
|
// Feed posts, newest first; deduplicated by event id.
let posts = $state<NostrEvent[]>([]);
// True while the initial feed load is in flight.
let loading = $state(true);
// True while an infinite-scroll page fetch is in flight.
let loadingMore = $state(false);
// False once a page query returns no (new) events.
let hasMore = $state(true);
// created_at of the oldest loaded post; used as `until` for pagination.
let oldestTimestamp = $state<number | null>(null);

// Batch-loaded reactions: eventId -> reactions[]
let reactionsMap = $state<Map<string, NostrEvent[]>>(new Map());

// Drawer state for viewing parent/quoted events
let drawerOpen = $state(false);
let drawerEvent = $state<NostrEvent | null>(null);

// Debounce updates to prevent rapid re-renders
let updateTimeout: ReturnType<typeof setTimeout> | null = null;
let pendingUpdates: NostrEvent[] = [];
|
|
|
function openDrawer(event: NostrEvent) { |
|
drawerEvent = event; |
|
drawerOpen = true; |
|
} |
|
|
|
function closeDrawer() { |
|
drawerOpen = false; |
|
drawerEvent = null; |
|
} |
|
|
|
// Infinite-scroll sentinel element and the observer watching it.
let sentinelElement = $state<HTMLElement | null>(null);
let observer: IntersectionObserver | null = null;
// Id of the live relay subscription (null when not subscribed).
let subscriptionId: string | null = $state(null);
// Fallback polling timer that complements the live subscription.
let refreshInterval: ReturnType<typeof setInterval> | null = null;
let subscriptionSetup = $state(false); // Track if subscription is already set up
|
|
|
onMount(async () => { |
|
await nostrClient.initialize(); |
|
await loadFeed(); |
|
// Set up persistent subscription for new events (only once) |
|
if (!subscriptionSetup) { |
|
setupSubscription(); |
|
setupPeriodicRefresh(); |
|
subscriptionSetup = true; |
|
} |
|
}); |
|
|
|
// Cleanup subscription on unmount |
|
$effect(() => { |
|
return () => { |
|
if (subscriptionId) { |
|
nostrClient.unsubscribe(subscriptionId); |
|
subscriptionId = null; |
|
} |
|
if (refreshInterval) { |
|
clearInterval(refreshInterval); |
|
refreshInterval = null; |
|
} |
|
subscriptionSetup = false; |
|
}; |
|
}); |
|
|
|
// Listen for custom event from EmbeddedEvent components |
|
$effect(() => { |
|
const handleOpenEvent = (e: CustomEvent) => { |
|
if (e.detail?.event) { |
|
openDrawer(e.detail.event); |
|
} |
|
}; |
|
|
|
window.addEventListener('openEventInDrawer', handleOpenEvent as EventListener); |
|
|
|
return () => { |
|
window.removeEventListener('openEventInDrawer', handleOpenEvent as EventListener); |
|
}; |
|
}); |
|
|
|
// Cleanup on unmount |
|
$effect(() => { |
|
return () => { |
|
if (observer) { |
|
observer.disconnect(); |
|
} |
|
if (updateTimeout) { |
|
clearTimeout(updateTimeout); |
|
} |
|
if (subscriptionId) { |
|
nostrClient.unsubscribe(subscriptionId); |
|
subscriptionId = null; |
|
} |
|
if (refreshInterval) { |
|
clearInterval(refreshInterval); |
|
refreshInterval = null; |
|
} |
|
}; |
|
}); |
|
|
|
// Set up persistent subscription for real-time updates |
|
function setupSubscription() { |
|
if (subscriptionId) { |
|
// Already subscribed |
|
return; |
|
} |
|
|
|
const relays = relayManager.getFeedReadRelays(); |
|
const filters = [{ kinds: [KIND.SHORT_TEXT_NOTE], limit: 20 }]; |
|
|
|
// Subscribe to new kind 1 events |
|
subscriptionId = nostrClient.subscribe( |
|
filters, |
|
relays, |
|
(event: NostrEvent) => { |
|
// Only add events that are newer than what we already have |
|
const existingIds = new Set(posts.map(p => p.id)); |
|
if (!existingIds.has(event.id)) { |
|
handleUpdate([event]); |
|
} |
|
}, |
|
(relay: string) => { |
|
console.debug(`[FeedPage] Subscription EOSE from ${relay}`); |
|
} |
|
); |
|
|
|
console.log(`[FeedPage] Set up persistent subscription for new events (ID: ${subscriptionId})`); |
|
} |
|
|
|
// Set up periodic refresh to ensure we get new events even if subscription fails |
|
function setupPeriodicRefresh() { |
|
if (refreshInterval) { |
|
return; // Already set up |
|
} |
|
|
|
// Refresh every 30 seconds |
|
refreshInterval = setInterval(async () => { |
|
try { |
|
const relays = relayManager.getFeedReadRelays(); |
|
|
|
// Get the newest post's timestamp to only fetch newer events |
|
const newestTimestamp = posts.length > 0 |
|
? Math.max(...posts.map(p => p.created_at)) |
|
: Math.floor(Date.now() / 1000) - 60; // Last minute if no posts |
|
|
|
const filters = [{ |
|
kinds: [KIND.SHORT_TEXT_NOTE], |
|
limit: 50, |
|
since: newestTimestamp + 1 // Only get events newer than what we have |
|
}]; |
|
|
|
// Fetch new events (without cache to ensure we query relays) |
|
const events = await nostrClient.fetchEvents( |
|
filters, |
|
relays, |
|
{ |
|
useCache: false, // Don't use cache for refresh - always query relays |
|
cacheResults: true, |
|
timeout: 10000 |
|
} |
|
); |
|
|
|
// Check for new events |
|
const existingIds = new Set(posts.map(p => p.id)); |
|
const newEvents = events.filter(e => !existingIds.has(e.id)); |
|
|
|
if (newEvents.length > 0) { |
|
console.log(`[FeedPage] Periodic refresh found ${newEvents.length} new events`); |
|
handleUpdate(newEvents); |
|
} |
|
} catch (error) { |
|
console.debug('[FeedPage] Periodic refresh error:', error); |
|
} |
|
}, 30000); // 30 seconds |
|
|
|
console.log('[FeedPage] Set up periodic refresh (every 30 seconds)'); |
|
} |
|
|
|
// Set up observer when sentinel element is available |
|
// Create the IntersectionObserver once the sentinel exists and the
// initial load has finished. The effect reads `sentinelElement` and
// `loading` ($state), so it re-runs when either changes; `observer` is
// a plain (untracked) variable used as a "created once" latch.
$effect(() => {
  if (sentinelElement && !loading && !observer) {
    observer = new IntersectionObserver((entries) => {
      // Fire pagination when the sentinel scrolls into view and
      // another page can actually be loaded.
      if (entries[0].isIntersecting && hasMore && !loadingMore) {
        loadMore();
      }
    }, { threshold: 0.1 });

    observer.observe(sentinelElement);

    // Cleanup runs before each re-run and on unmount; nulling the
    // latch lets a later re-run recreate the observer.
    return () => {
      if (observer) {
        observer.disconnect();
        observer = null;
      }
    };
  }
});
|
|
|
async function loadFeed() { |
|
loading = true; |
|
try { |
|
const config = nostrClient.getConfig(); |
|
const relays = relayManager.getFeedReadRelays(); |
|
|
|
// Load initial feed - use cache for fast initial load |
|
const filters = [{ kinds: [KIND.SHORT_TEXT_NOTE], limit: 20 }]; |
|
const events = await nostrClient.fetchEvents( |
|
filters, |
|
relays, |
|
{ |
|
useCache: true, // Use cache for fast initial display |
|
cacheResults: true, |
|
// Don't use onUpdate here - subscriptions handle updates |
|
timeout: 10000 |
|
} |
|
); |
|
|
|
// Also immediately query relays to ensure we get fresh data in background |
|
// This runs in parallel but doesn't use onUpdate to avoid loops |
|
nostrClient.fetchEvents( |
|
filters, |
|
relays, |
|
{ |
|
useCache: false, // Force query relays |
|
cacheResults: true, |
|
// Don't use onUpdate - let subscriptions handle it |
|
timeout: 10000 |
|
} |
|
).then((newEvents) => { |
|
// Only update if we got new events that aren't already in posts |
|
if (newEvents.length > 0) { |
|
const existingIds = new Set(posts.map(p => p.id)); |
|
const trulyNew = newEvents.filter(e => !existingIds.has(e.id)); |
|
if (trulyNew.length > 0) { |
|
handleUpdate(trulyNew); |
|
} |
|
} |
|
}).catch(error => { |
|
console.debug('[FeedPage] Background relay query error:', error); |
|
}); |
|
|
|
// Sort by created_at descending and deduplicate |
|
const uniqueMap = new Map<string, NostrEvent>(); |
|
for (const event of events) { |
|
if (!uniqueMap.has(event.id)) { |
|
uniqueMap.set(event.id, event); |
|
} |
|
} |
|
const unique = Array.from(uniqueMap.values()); |
|
const sorted = unique.sort((a, b) => b.created_at - a.created_at); |
|
posts = sorted; |
|
|
|
if (sorted.length > 0) { |
|
oldestTimestamp = Math.min(...sorted.map(e => e.created_at)); |
|
// Batch load reactions for all posts |
|
await loadReactionsForPosts(sorted); |
|
} |
|
|
|
hasMore = events.length >= 20; |
|
} catch (error) { |
|
console.error('Error loading feed:', error); |
|
} finally { |
|
loading = false; |
|
} |
|
} |
|
|
|
async function loadMore() { |
|
if (loadingMore || !hasMore) return; |
|
|
|
loadingMore = true; |
|
try { |
|
const config = nostrClient.getConfig(); |
|
const relays = relayManager.getFeedReadRelays(); |
|
|
|
const filters = [{ |
|
kinds: [KIND.SHORT_TEXT_NOTE], |
|
limit: 20, |
|
until: oldestTimestamp || undefined |
|
}]; |
|
|
|
const events = await nostrClient.fetchEvents( |
|
filters, |
|
relays, |
|
{ |
|
useCache: true, |
|
cacheResults: true, |
|
timeout: 10000 |
|
} |
|
); |
|
|
|
if (events.length === 0) { |
|
hasMore = false; |
|
return; |
|
} |
|
|
|
// Filter out duplicates |
|
const existingIds = new Set(posts.map(p => p.id)); |
|
const newEvents = events.filter(e => !existingIds.has(e.id)); |
|
|
|
if (newEvents.length > 0) { |
|
const sorted = newEvents.sort((a, b) => b.created_at - a.created_at); |
|
posts = [...posts, ...sorted]; |
|
|
|
const oldest = Math.min(...newEvents.map(e => e.created_at)); |
|
if (oldest < (oldestTimestamp || Infinity)) { |
|
oldestTimestamp = oldest; |
|
} |
|
// Batch load reactions for new posts |
|
await loadReactionsForPosts(sorted); |
|
hasMore = events.length >= 20; |
|
} else if (events.length > 0) { |
|
// All events were duplicates, but we got some results |
|
// This might mean we've reached the end, or we need to adjust the timestamp |
|
if (oldestTimestamp) { |
|
// Try moving the timestamp forward slightly to avoid getting the same results |
|
oldestTimestamp = oldestTimestamp - 1; |
|
hasMore = events.length >= 20; |
|
} else { |
|
hasMore = false; |
|
} |
|
} else { |
|
// No events returned at all |
|
hasMore = false; |
|
} |
|
} catch (error) { |
|
console.error('Error loading more:', error); |
|
} finally { |
|
loadingMore = false; |
|
} |
|
} |
|
|
|
// Debounced update handler to prevent rapid re-renders and loops |
|
// Debounced update handler: batches incoming events for 500ms so a
// burst of subscription/refresh results triggers one re-render instead
// of many. Events are deduplicated three times: against current posts,
// against the pending batch, and again inside the timer (posts may have
// changed while the timer was pending).
function handleUpdate(updated: NostrEvent[]) {
  if (!updated || updated.length === 0) return;

  // Deduplicate incoming updates before adding to pending
  const existingIds = new Set(posts.map(p => p.id));
  const newUpdates = updated.filter(e => e && e.id && !existingIds.has(e.id));

  if (newUpdates.length === 0) {
    return; // All duplicates, skip silently
  }

  // Also deduplicate within pendingUpdates
  const pendingIds = new Set(pendingUpdates.map(e => e.id));
  const trulyNew = newUpdates.filter(e => !pendingIds.has(e.id));

  if (trulyNew.length === 0) {
    return; // Already in pending, skip silently
  }

  pendingUpdates.push(...trulyNew);

  // Restart the debounce window: each new batch pushes the flush out
  // another 500ms.
  if (updateTimeout) {
    clearTimeout(updateTimeout);
  }

  // Batch updates every 500ms to prevent rapid re-renders
  updateTimeout = setTimeout(() => {
    if (pendingUpdates.length === 0) {
      return;
    }

    // Final deduplication check against current posts (posts may have changed)
    const currentIds = new Set(posts.map(p => p.id));
    const newEvents = pendingUpdates.filter(e => e && e.id && !currentIds.has(e.id));

    if (newEvents.length === 0) {
      pendingUpdates = [];
      return;
    }

    console.log(`[FeedPage] Processing ${newEvents.length} new events, existing: ${posts.length}`);

    // Merge and sort, then deduplicate by ID
    const merged = [...posts, ...newEvents];
    // Deduplicate by ID (keep first occurrence)
    const uniqueMap = new Map<string, NostrEvent>();
    for (const event of merged) {
      if (event && event.id && !uniqueMap.has(event.id)) {
        uniqueMap.set(event.id, event);
      }
    }
    const unique = Array.from(uniqueMap.values());
    const sorted = unique.sort((a, b) => b.created_at - a.created_at);

    // Only reassign when the list actually changed, to avoid useless
    // re-renders (and potential update loops).
    if (sorted.length > posts.length || sorted.some((e, i) => e.id !== posts[i]?.id)) {
      posts = sorted;
      console.debug(`[FeedPage] Updated posts to ${sorted.length} events`);
    }

    pendingUpdates = [];
  }, 500);
}
|
|
|
// Batch load reactions for multiple posts at once |
|
async function loadReactionsForPosts(postsToLoad: NostrEvent[]) { |
|
if (postsToLoad.length === 0) return; |
|
|
|
try { |
|
const reactionRelays = relayManager.getProfileReadRelays(); |
|
const eventIds = postsToLoad.map(p => p.id); |
|
|
|
// Batch fetch all reactions for all posts in one query |
|
const allReactions = await nostrClient.fetchEvents( |
|
[ |
|
{ kinds: [KIND.REACTION], '#e': eventIds, limit: 1000 }, |
|
{ kinds: [KIND.REACTION], '#E': eventIds, limit: 1000 } |
|
], |
|
reactionRelays, |
|
{ useCache: true, cacheResults: true, timeout: 10000 } |
|
); |
|
|
|
// Group reactions by event ID |
|
const newReactionsMap = new Map<string, NostrEvent[]>(); |
|
for (const reaction of allReactions) { |
|
// Find which event(s) this reaction is for |
|
const eTags = reaction.tags.filter(t => (t[0] === 'e' || t[0] === 'E') && t[1]); |
|
for (const tag of eTags) { |
|
const eventId = tag[1]; |
|
if (eventIds.includes(eventId)) { |
|
if (!newReactionsMap.has(eventId)) { |
|
newReactionsMap.set(eventId, []); |
|
} |
|
newReactionsMap.get(eventId)!.push(reaction); |
|
} |
|
} |
|
} |
|
|
|
// Merge with existing reactions |
|
for (const [eventId, reactions] of newReactionsMap.entries()) { |
|
const existing = reactionsMap.get(eventId) || []; |
|
const combined = [...existing, ...reactions]; |
|
// Deduplicate by reaction ID |
|
const unique = Array.from(new Map(combined.map(r => [r.id, r])).values()); |
|
reactionsMap.set(eventId, unique); |
|
} |
|
} catch (error) { |
|
console.error('[FeedPage] Error batch loading reactions:', error); |
|
} |
|
} |
|
</script> |
|
|
|
<!-- Feed markup: loading / empty states, the post list, the thread
     drawer, and an infinite-scroll sentinel observed by the
     IntersectionObserver set up in the script block. -->
<div class="feed-page">
  {#if loading}
    <div class="loading-state">
      <p class="text-fog-text dark:text-fog-dark-text">Loading feed...</p>
    </div>
  {:else if posts.length === 0}
    <div class="empty-state">
      <p class="text-fog-text dark:text-fog-dark-text">No posts found. Check back later!</p>
    </div>
  {:else}
    <div class="feed-posts">
      {#each posts as post (post.id)}
        <FeedPost post={post} onOpenEvent={openDrawer} reactions={reactionsMap.get(post.id)} />
      {/each}
    </div>

    {#if drawerOpen && drawerEvent}
      <ThreadDrawer opEvent={drawerEvent} isOpen={drawerOpen} onClose={closeDrawer} />
    {/if}

    <!-- Sentinel: loadMore() fires when this element scrolls into view. -->
    <div id="feed-sentinel" class="feed-sentinel" bind:this={sentinelElement}>
      {#if loadingMore}
        <p class="text-fog-text-light dark:text-fog-dark-text-light">Loading more...</p>
      {:else if hasMore}
        <p class="text-fog-text-light dark:text-fog-dark-text-light">Scroll for more</p>
      {:else}
        <p class="text-fog-text-light dark:text-fog-dark-text-light">No more posts</p>
      {/if}
    </div>
  {/if}
</div>
|
|
|
<style>
  /* Component-scoped styles for the feed page. */
  .feed-page {
    max-width: 100%;
  }

  .loading-state,
  .empty-state {
    padding: 2rem;
    text-align: center;
  }

  .feed-posts {
    display: flex;
    flex-direction: column;
    gap: 1rem;
  }

  /* min-height keeps the sentinel observable slightly before the very
     bottom of the scroll area. */
  .feed-sentinel {
    padding: 2rem;
    text-align: center;
    min-height: 100px;
  }
</style>
|
|
|