Browse Source

reveal votes, even if the user is logged out

hide bookmark indicator unless an active bookmark is available
master
Silberengel 1 month ago
parent
commit
0d3017543c
  1. 2
      src/lib/components/content/EmojiPicker.svelte
  2. 212
      src/lib/modules/comments/CommentThread.svelte
  3. 17
      src/lib/modules/feed/FeedPost.svelte
  4. 17
      src/lib/modules/feed/HighlightCard.svelte
  5. 222
      src/lib/modules/reactions/FeedReactionButtons.svelte
  6. 35
      src/lib/modules/threads/ThreadList.svelte
  7. 10
      src/lib/services/nostr/nip30-emoji.ts
  8. 445
      src/lib/services/nostr/nostr-client.ts
  9. 9
      src/lib/services/user-data.ts

2
src/lib/components/content/EmojiPicker.svelte

@@ -61,7 +61,7 @@
try {
await loadAllEmojiPacks();
const allEmojis = getAllCustomEmojis();
console.debug(`[EmojiPicker] Loaded ${allEmojis.length} custom emojis`);
// Silently load emojis - no need to log
customEmojis = allEmojis;
} catch (error) {
console.error('Error loading custom emojis:', error);

212
src/lib/modules/comments/CommentThread.svelte

@@ -168,66 +168,93 @@
return;
}
// Batch updates to prevent flickering
requestAnimationFrame(() => {
isProcessingUpdate = true;
// Process immediately - don't batch with requestAnimationFrame for faster UI updates
isProcessingUpdate = true;
try {
let hasNewReplies = false;
const commentsMap = new Map(comments.map(c => [c.id, c]));
const kind1RepliesMap = new Map(kind1Replies.map(r => [r.id, r]));
const yakBacksMap = new Map(yakBacks.map(y => [y.id, y]));
const zapReceiptsMap = new Map(zapReceipts.map(z => [z.id, z]));
try {
let hasNewReplies = false;
const commentsMap = new Map(comments.map(c => [c.id, c]));
const kind1RepliesMap = new Map(kind1Replies.map(r => [r.id, r]));
const yakBacksMap = new Map(yakBacks.map(y => [y.id, y]));
const zapReceiptsMap = new Map(zapReceipts.map(z => [z.id, z]));
for (const reply of updated) {
// Skip if we already have this reply
if (commentsMap.has(reply.id) || kind1RepliesMap.has(reply.id) ||
yakBacksMap.has(reply.id) || zapReceiptsMap.has(reply.id)) {
continue;
}
for (const reply of updated) {
// Skip if we already have this reply
if (commentsMap.has(reply.id) || kind1RepliesMap.has(reply.id) ||
yakBacksMap.has(reply.id) || zapReceiptsMap.has(reply.id)) {
continue;
}
// Check if this reply references the root OR is a reply to any existing comment/reply
const parentId = getParentEventId(reply);
const isReplyToRoot = referencesRoot(reply);
const isReplyToExisting = parentId && (
parentId === threadId ||
commentsMap.has(parentId) ||
kind1RepliesMap.has(parentId) ||
yakBacksMap.has(parentId) ||
zapReceiptsMap.has(parentId)
);
if (!isReplyToRoot && !isReplyToExisting) {
continue;
}
// Add the reply to the appropriate map
if (reply.kind === KIND.COMMENT) {
commentsMap.set(reply.id, reply);
hasNewReplies = true;
} else if (reply.kind === KIND.SHORT_TEXT_NOTE) {
kind1RepliesMap.set(reply.id, reply);
hasNewReplies = true;
} else if (reply.kind === KIND.VOICE_REPLY) {
yakBacksMap.set(reply.id, reply);
hasNewReplies = true;
} else if (reply.kind === KIND.ZAP_RECEIPT) {
zapReceiptsMap.set(reply.id, reply);
hasNewReplies = true;
}
// Check if this reply references the root OR is a reply to any existing comment/reply
const parentId = getParentEventId(reply);
const isReplyToRoot = referencesRoot(reply);
const isReplyToExisting = parentId && (
parentId === threadId ||
commentsMap.has(parentId) ||
kind1RepliesMap.has(parentId) ||
yakBacksMap.has(parentId) ||
zapReceiptsMap.has(parentId)
);
if (!isReplyToRoot && !isReplyToExisting) {
continue;
}
// Only update state if we have new replies
if (hasNewReplies) {
comments = Array.from(commentsMap.values());
kind1Replies = Array.from(kind1RepliesMap.values());
yakBacks = Array.from(yakBacksMap.values());
zapReceipts = Array.from(zapReceiptsMap.values());
// Add the reply to the appropriate map
if (reply.kind === KIND.COMMENT) {
commentsMap.set(reply.id, reply);
hasNewReplies = true;
} else if (reply.kind === KIND.SHORT_TEXT_NOTE) {
kind1RepliesMap.set(reply.id, reply);
hasNewReplies = true;
} else if (reply.kind === KIND.VOICE_REPLY) {
yakBacksMap.set(reply.id, reply);
hasNewReplies = true;
} else if (reply.kind === KIND.ZAP_RECEIPT) {
zapReceiptsMap.set(reply.id, reply);
hasNewReplies = true;
}
} finally {
isProcessingUpdate = false;
}
});
// Update state immediately if we have new replies
if (hasNewReplies) {
const allComments = Array.from(commentsMap.values());
const allKind1Replies = Array.from(kind1RepliesMap.values());
const allYakBacks = Array.from(yakBacksMap.values());
const allZapReceipts = Array.from(zapReceiptsMap.values());
// Limit array sizes to prevent memory bloat (keep most recent 500 of each type)
const MAX_COMMENTS = 500;
const MAX_REPLIES = 500;
// Sort by created_at descending and take most recent
comments = allComments
.sort((a, b) => b.created_at - a.created_at)
.slice(0, MAX_COMMENTS);
kind1Replies = allKind1Replies
.sort((a, b) => b.created_at - a.created_at)
.slice(0, MAX_REPLIES);
yakBacks = allYakBacks
.sort((a, b) => b.created_at - a.created_at)
.slice(0, MAX_REPLIES);
zapReceipts = allZapReceipts
.sort((a, b) => b.created_at - a.created_at)
.slice(0, MAX_REPLIES);
// Clear loading flag as soon as we get the first results
// This allows comments to render immediately instead of waiting for all fetches
if (loading) {
loading = false;
}
} else if (updated.length > 0 && loading) {
// If we got events but they were all filtered out, still clear loading
// This prevents the UI from being stuck in loading state
// The events might be nested replies that will be processed later
loading = false;
}
} finally {
isProcessingUpdate = false;
}
}
async function loadComments() {
@@ -238,13 +265,13 @@
const allRelays = relayManager.getProfileReadRelays();
const replyFilters: any[] = [
{ kinds: [KIND.COMMENT], '#e': [threadId], limit: 500 },
{ kinds: [KIND.COMMENT], '#E': [threadId], limit: 500 },
{ kinds: [KIND.COMMENT], '#a': [threadId], limit: 500 },
{ kinds: [KIND.COMMENT], '#A': [threadId], limit: 500 },
{ kinds: [KIND.SHORT_TEXT_NOTE], '#e': [threadId], limit: 500 },
{ kinds: [KIND.VOICE_REPLY], '#e': [threadId], limit: 500 },
{ kinds: [KIND.ZAP_RECEIPT], '#e': [threadId], limit: 500 }
{ kinds: [KIND.COMMENT], '#e': [threadId], limit: 100 },
{ kinds: [KIND.COMMENT], '#E': [threadId], limit: 100 },
{ kinds: [KIND.COMMENT], '#a': [threadId], limit: 100 },
{ kinds: [KIND.COMMENT], '#A': [threadId], limit: 100 },
{ kinds: [KIND.SHORT_TEXT_NOTE], '#e': [threadId], limit: 100 },
{ kinds: [KIND.VOICE_REPLY], '#e': [threadId], limit: 100 },
{ kinds: [KIND.ZAP_RECEIPT], '#e': [threadId], limit: 100 }
];
// fetchEvents with useCache:true returns cached data immediately if available,
@@ -261,7 +288,9 @@
loading = true; // Only show loading if no cache
}
// Now fetch with full options - returns cached immediately, fetches fresh in background
// Now fetch with full options - returns relay results immediately, then enhances with cache
// onUpdate callback will be called as events arrive from relays, allowing immediate rendering
// Use high priority to ensure comments load before background fetches (reactions, profiles, etc.)
const allReplies = await nostrClient.fetchEvents(
replyFilters,
allRelays,
@@ -269,20 +298,43 @@
useCache: true,
cacheResults: true,
timeout: 10000,
onUpdate: handleReplyUpdate
onUpdate: handleReplyUpdate,
priority: 'high'
}
);
// Filter to only replies that reference the root
// Process initial results (from relays or cache)
// Note: onUpdate may have already updated the state and cleared loading
// But if onUpdate didn't process them (e.g., filtered out), we need to process them here
const rootReplies = allReplies.filter(reply => referencesRoot(reply));
// Separate by type
comments = rootReplies.filter(e => e.kind === KIND.COMMENT);
kind1Replies = rootReplies.filter(e => e.kind === KIND.SHORT_TEXT_NOTE);
yakBacks = rootReplies.filter(e => e.kind === KIND.VOICE_REPLY);
zapReceipts = rootReplies.filter(e => e.kind === KIND.ZAP_RECEIPT);
// Only update if we have new replies not already processed by onUpdate
const existingIds = new Set([
...comments.map(c => c.id),
...kind1Replies.map(r => r.id),
...yakBacks.map(y => y.id),
...zapReceipts.map(z => z.id)
]);
const newRootReplies = rootReplies.filter(r => !existingIds.has(r.id));
loading = false; // Hide loading now that we have data (cached or fresh)
if (newRootReplies.length > 0) {
// Merge with existing (onUpdate may have already added some)
const allComments = [...comments, ...newRootReplies.filter(e => e.kind === KIND.COMMENT)];
const allKind1Replies = [...kind1Replies, ...newRootReplies.filter(e => e.kind === KIND.SHORT_TEXT_NOTE)];
const allYakBacks = [...yakBacks, ...newRootReplies.filter(e => e.kind === KIND.VOICE_REPLY)];
const allZapReceipts = [...zapReceipts, ...newRootReplies.filter(e => e.kind === KIND.ZAP_RECEIPT)];
// Deduplicate
comments = Array.from(new Map(allComments.map(c => [c.id, c])).values());
kind1Replies = Array.from(new Map(allKind1Replies.map(r => [r.id, r])).values());
yakBacks = Array.from(new Map(allYakBacks.map(y => [y.id, y])).values());
zapReceipts = Array.from(new Map(allZapReceipts.map(z => [z.id, z])).values());
}
// ALWAYS clear loading flag after fetch completes, even if no events matched
// This prevents the UI from being stuck in loading state
loading = false;
// Recursively fetch all nested replies (non-blocking - let it run in background)
fetchNestedReplies().then(() => {
@@ -337,7 +389,8 @@
{
useCache: true,
cacheResults: true,
onUpdate: handleReplyUpdate
onUpdate: handleReplyUpdate,
priority: 'high'
}
).catch(error => {
console.error('Error subscribing to nested replies:', error);
@@ -544,27 +597,28 @@
// Always fetch kind 1111 comments - check both e and E tags, and a and A tags
replyFilters.push(
{ kinds: [KIND.COMMENT], '#e': [threadId], limit: 500 }, // Lowercase e tag
{ kinds: [KIND.COMMENT], '#E': [threadId], limit: 500 }, // Uppercase E tag (NIP-22)
{ kinds: [KIND.COMMENT], '#a': [threadId], limit: 500 }, // Lowercase a tag (some clients use wrong tags)
{ kinds: [KIND.COMMENT], '#A': [threadId], limit: 500 } // Uppercase A tag (NIP-22 for addressable events)
{ kinds: [KIND.COMMENT], '#e': [threadId], limit: 100 }, // Lowercase e tag
{ kinds: [KIND.COMMENT], '#E': [threadId], limit: 100 }, // Uppercase E tag (NIP-22)
{ kinds: [KIND.COMMENT], '#a': [threadId], limit: 100 }, // Lowercase a tag (some clients use wrong tags)
{ kinds: [KIND.COMMENT], '#A': [threadId], limit: 100 } // Uppercase A tag (NIP-22 for addressable events)
);
// For kind 1 events, fetch kind 1 replies
// Also fetch kind 1 replies for any event (some apps use kind 1 for everything)
replyFilters.push({ kinds: [KIND.SHORT_TEXT_NOTE], '#e': [threadId], limit: 500 });
replyFilters.push({ kinds: [KIND.SHORT_TEXT_NOTE], '#e': [threadId], limit: 100 });
// Fetch yak backs (kind 1244) - voice replies
replyFilters.push({ kinds: [KIND.VOICE_REPLY], '#e': [threadId], limit: 500 });
replyFilters.push({ kinds: [KIND.VOICE_REPLY], '#e': [threadId], limit: 100 });
// Fetch zap receipts (kind 9735)
replyFilters.push({ kinds: [KIND.ZAP_RECEIPT], '#e': [threadId], limit: 500 });
replyFilters.push({ kinds: [KIND.ZAP_RECEIPT], '#e': [threadId], limit: 100 });
// Don't use cache when reloading after publishing - we want fresh data
// Use high priority to ensure comments load before background fetches
const allReplies = await nostrClient.fetchEvents(
replyFilters,
allRelays,
{ useCache: false, cacheResults: true, timeout: 10000 }
{ useCache: false, cacheResults: true, timeout: 10000, priority: 'high' }
);
// Filter to only replies that reference the root

17
src/lib/modules/feed/FeedPost.svelte

@@ -14,6 +14,7 @@
import { getKindInfo, KIND } from '../../types/kind-lookup.js';
import { stripMarkdown } from '../../services/text-utils.js';
import { isBookmarked } from '../../services/user-actions.js';
import { sessionManager } from '../../services/auth/session-manager.js';
interface Props {
post: NostrEvent;
@@ -37,12 +38,18 @@
let zapCount = $state(0);
// Check if this event is bookmarked (async, so we use state)
// Only check if user is logged in
let bookmarked = $state(false);
const isLoggedIn = $derived(sessionManager.isLoggedIn());
$effect(() => {
isBookmarked(post.id).then(b => {
bookmarked = b;
});
if (isLoggedIn) {
isBookmarked(post.id).then(b => {
bookmarked = b;
});
} else {
bookmarked = false;
}
});
// Calculate votes as derived values to avoid infinite loops
@@ -465,7 +472,9 @@
{/if}
{/if}
<div class="ml-auto flex items-center gap-2">
<span class="bookmark-indicator" class:bookmarked={bookmarked} title={bookmarked ? "Bookmarked" : "Not bookmarked"}>🔖</span>
{#if isLoggedIn && bookmarked}
<span class="bookmark-indicator bookmarked" title="Bookmarked">🔖</span>
{/if}
<EventMenu event={post} showContentActions={true} />
</div>
</div>

17
src/lib/modules/feed/HighlightCard.svelte

@@ -9,6 +9,7 @@
import { getKindInfo, KIND } from '../../types/kind-lookup.js';
import { getHighlightsForEvent } from '../../services/nostr/highlight-service.js';
import { isBookmarked } from '../../services/user-actions.js';
import { sessionManager } from '../../services/auth/session-manager.js';
interface Props {
highlight: NostrEvent; // The highlight event (kind 9802)
@@ -21,12 +22,18 @@
let loadingSource = $state(false);
// Check if this event is bookmarked (async, so we use state)
// Only check if user is logged in
let bookmarked = $state(false);
const isLoggedIn = $derived(sessionManager.isLoggedIn());
$effect(() => {
isBookmarked(highlight.id).then(b => {
bookmarked = b;
});
if (isLoggedIn) {
isBookmarked(highlight.id).then(b => {
bookmarked = b;
});
} else {
bookmarked = false;
}
});
// Extract source event ID from e-tag or a-tag
@@ -313,7 +320,9 @@
<span class="text-xs text-fog-text-light dark:text-fog-dark-text-light flex-shrink-0">via {getClientName()}</span>
{/if}
<div class="ml-auto flex items-center gap-2">
<span class="bookmark-indicator" class:bookmarked={bookmarked} title={bookmarked ? "Bookmarked" : "Not bookmarked"}>🔖</span>
{#if isLoggedIn && bookmarked}
<span class="bookmark-indicator bookmarked" title="Bookmarked">🔖</span>
{/if}
<EventMenu event={highlight} showContentActions={true} />
</div>
</div>

222
src/lib/modules/reactions/FeedReactionButtons.svelte

@@ -33,6 +33,8 @@
let loadingReactions = $state(false);
let lastEventId = $state<string | null>(null);
let isMounted = $state(false);
let processingUpdate = $state(false);
let updateDebounceTimer: ReturnType<typeof setTimeout> | null = null;
onMount(() => {
// Set lastEventId immediately to prevent $effect from running during mount
@@ -82,26 +84,46 @@
});
// Handle real-time updates - process reactions when new ones arrive
// Debounced to prevent excessive processing
async function handleReactionUpdate(updated: NostrEvent[]) {
console.debug(`[FeedReactionButtons] Received reaction update for event ${event.id.substring(0, 16)}...:`, {
count: updated.length,
events: updated.map(r => ({
id: r.id.substring(0, 16) + '...',
pubkey: r.pubkey.substring(0, 16) + '...',
content: r.content,
fullEvent: r
}))
});
// Prevent concurrent processing
if (processingUpdate) {
return;
}
// Add new reactions to the map
let hasNewReactions = false;
for (const r of updated) {
allReactionsMap.set(r.id, r);
if (!allReactionsMap.has(r.id)) {
allReactionsMap.set(r.id, r);
hasNewReactions = true;
}
}
// Only process if we have new reactions
if (!hasNewReactions) {
return;
}
// Clear existing debounce timer
if (updateDebounceTimer) {
clearTimeout(updateDebounceTimer);
}
// Process all accumulated reactions
const allReactions = Array.from(allReactionsMap.values());
const filtered = await filterDeletedReactions(allReactions);
processReactions(filtered);
// Debounce processing to batch multiple rapid updates
updateDebounceTimer = setTimeout(async () => {
if (processingUpdate) return;
processingUpdate = true;
try {
// Process all accumulated reactions
const allReactions = Array.from(allReactionsMap.values());
const filtered = await filterDeletedReactions(allReactions);
processReactions(filtered);
} finally {
processingUpdate = false;
}
}, 300); // 300ms debounce
}
async function loadReactions() {
@@ -115,44 +137,22 @@
// Use getProfileReadRelays() to include defaultRelays + profileRelays + user inbox + localRelays
// This ensures we get all reactions from the complete relay set, matching ThreadList behavior
const reactionRelays = relayManager.getProfileReadRelays();
console.debug(`[FeedReactionButtons] Loading reactions for event ${event.id.substring(0, 16)}... (kind ${event.kind})`);
console.debug(`[FeedReactionButtons] Using relays:`, reactionRelays);
// Clear and rebuild reactions map for this event
allReactionsMap.clear();
// Use low priority for reactions - they're background data, comments should load first
const reactionsWithLowerE = await nostrClient.fetchEvents(
[{ kinds: [KIND.REACTION], '#e': [event.id], limit: 100 }],
reactionRelays,
{ useCache: true, cacheResults: true, onUpdate: handleReactionUpdate, timeout: 5000 }
{ useCache: true, cacheResults: true, onUpdate: handleReactionUpdate, timeout: 5000, priority: 'low' }
);
const reactionsWithUpperE = await nostrClient.fetchEvents(
[{ kinds: [KIND.REACTION], '#E': [event.id], limit: 100 }],
reactionRelays,
{ useCache: true, cacheResults: true, onUpdate: handleReactionUpdate, timeout: 5000 }
{ useCache: true, cacheResults: true, onUpdate: handleReactionUpdate, timeout: 5000, priority: 'low' }
);
console.debug(`[FeedReactionButtons] Reactions fetched:`, {
eventId: event.id.substring(0, 16) + '...',
kind: event.kind,
withLowerE: reactionsWithLowerE.length,
withUpperE: reactionsWithUpperE.length,
lowerE_events: reactionsWithLowerE.map(r => ({
id: r.id.substring(0, 16) + '...',
pubkey: r.pubkey.substring(0, 16) + '...',
content: r.content,
tags: r.tags.filter(t => t[0] === 'e' || t[0] === 'E'),
fullEvent: r
})),
upperE_events: reactionsWithUpperE.map(r => ({
id: r.id.substring(0, 16) + '...',
pubkey: r.pubkey.substring(0, 16) + '...',
content: r.content,
tags: r.tags.filter(t => t[0] === 'e' || t[0] === 'E'),
fullEvent: r
}))
});
// Combine and deduplicate by reaction ID
for (const r of reactionsWithLowerE) {
allReactionsMap.set(r.id, r);
@@ -162,31 +162,8 @@
}
const reactionEvents = Array.from(allReactionsMap.values());
console.debug(`[FeedReactionButtons] All reactions (deduplicated):`, {
total: reactionEvents.length,
events: reactionEvents.map(r => ({
id: r.id.substring(0, 16) + '...',
pubkey: r.pubkey.substring(0, 16) + '...',
content: r.content,
tags: r.tags.filter(t => t[0] === 'e' || t[0] === 'E'),
created_at: new Date(r.created_at * 1000).toISOString(),
fullEvent: r
}))
});
// Filter out deleted reactions (kind 5)
const filteredReactions = await filterDeletedReactions(reactionEvents);
console.debug(`[FeedReactionButtons] After filtering deleted reactions:`, {
before: reactionEvents.length,
after: filteredReactions.length,
filtered: reactionEvents.length - filteredReactions.length,
events: filteredReactions.map(r => ({
id: r.id.substring(0, 16) + '...',
pubkey: r.pubkey.substring(0, 16) + '...',
content: r.content,
fullEvent: r
}))
});
processReactions(filteredReactions);
} catch (error) {
@@ -211,22 +188,13 @@
// Fetch deletion events that reference these specific reaction IDs
// This is much more efficient than fetching all deletion events from all users
// Use low priority for deletion events - background data
const deletionEvents = await nostrClient.fetchEvents(
[{ kinds: [KIND.EVENT_DELETION], '#e': limitedReactionIds, limit: 100 }],
reactionRelays,
{ useCache: true, timeout: 5000 }
{ useCache: true, timeout: 5000, priority: 'low' }
);
console.debug(`[FeedReactionButtons] Deletion events fetched:`, {
count: deletionEvents.length,
events: deletionEvents.map(d => ({
id: d.id.substring(0, 16) + '...',
pubkey: d.pubkey.substring(0, 16) + '...',
deletedEventIds: d.tags.filter(t => t[0] === 'e').map(t => t[1]?.substring(0, 16) + '...'),
fullEvent: d
}))
});
// Build a set of deleted reaction event IDs (more efficient - just a Set)
const deletedReactionIds = new Set<string>();
for (const deletionEvent of deletionEvents) {
@@ -238,10 +206,6 @@
}
}
console.debug(`[FeedReactionButtons] Deleted reaction IDs by pubkey:`,
Array.from(deletedReactionIds).slice(0, 10).map(id => id.substring(0, 16) + '...')
);
// Filter out deleted reactions - much simpler now
const filtered = reactions.filter(reaction => {
const isDeleted = deletedReactionIds.has(reaction.id);
@@ -252,7 +216,10 @@
}
async function processReactions(reactionEvents: NostrEvent[]) {
console.debug(`[FeedReactionButtons] Processing ${reactionEvents.length} reactions for event ${event.id.substring(0, 16)}... (kind ${event.kind})`);
// Prevent duplicate processing - check if we're already processing the same set
if (processingUpdate) {
return;
}
const reactionMap = new Map<string, { content: string; pubkeys: Set<string>; eventIds: Map<string, string> }>();
const currentUser = sessionManager.getCurrentPubkey();
let skippedInvalid = 0;
@@ -270,12 +237,7 @@
content = '-';
} else if (content !== '+' && content !== '-') {
skippedInvalid++;
console.log(`[FeedReactionButtons] Skipping invalid reaction for kind 11:`, {
originalContent,
reactionId: reactionEvent.id.substring(0, 16) + '...',
pubkey: reactionEvent.pubkey.substring(0, 16) + '...',
fullEvent: reactionEvent
});
// Silently skip invalid reactions - no need to log every one
continue; // Skip invalid reactions for threads
}
}
@@ -292,26 +254,17 @@
}
}
console.debug(`[FeedReactionButtons] Processed reactions summary:`, {
totalReactions: reactionEvents.length,
skippedInvalid,
reactionCounts: Array.from(reactionMap.entries()).map(([content, data]) => ({
content,
count: data.pubkeys.size,
pubkeys: Array.from(data.pubkeys).map(p => p.substring(0, 16) + '...'),
eventIds: Array.from(data.eventIds.entries()).map(([pubkey, eventId]) => ({
pubkey: pubkey.substring(0, 16) + '...',
eventId: eventId.substring(0, 16) + '...'
// Only log in debug mode to reduce console noise
if (import.meta.env.DEV && false) { // Disable verbose logging
console.debug(`[FeedReactionButtons] Processed reactions summary:`, {
totalReactions: reactionEvents.length,
skippedInvalid,
reactionCounts: Array.from(reactionMap.entries()).map(([content, data]) => ({
content,
count: data.pubkeys.size
}))
})),
userReaction,
allReactionEvents: reactionEvents.map(r => ({
id: r.id.substring(0, 16) + '...',
pubkey: r.pubkey.substring(0, 16) + '...',
content: r.content,
fullEvent: r
}))
});
});
}
reactions = reactionMap;
@@ -639,48 +592,51 @@
const isLoggedIn = $derived(sessionManager.isLoggedIn());
</script>
{#if isLoggedIn}
<div class="Feed-reaction-buttons flex gap-2 items-center flex-wrap">
{#if event.kind === KIND.DISCUSSION_THREAD || forceUpvoteDownvote}
<!-- Kind 11 (Thread) or Kind 1111 (Reply to Thread): Only upvote and downvote buttons -->
<button
onclick={() => toggleReaction('+')}
class="reaction-btn vote-btn {userReaction === '+' ? 'active' : ''}"
title="Upvote"
disabled={!isLoggedIn}
class="reaction-btn vote-btn {userReaction === '+' ? 'active' : ''} {!isLoggedIn ? 'disabled' : ''}"
title={isLoggedIn ? "Upvote" : "Login to vote"}
aria-label="Upvote"
>
<span class="vote-count {getReactionCount('+') > 0 ? 'has-votes' : ''}">{getReactionCount('+')}</span>
</button>
<button
onclick={() => toggleReaction('-')}
class="reaction-btn vote-btn {userReaction === '-' ? 'active' : ''}"
title="Downvote"
disabled={!isLoggedIn}
class="reaction-btn vote-btn {userReaction === '-' ? 'active' : ''} {!isLoggedIn ? 'disabled' : ''}"
title={isLoggedIn ? "Downvote" : "Login to vote"}
aria-label="Downvote"
>
<span class="vote-count {getReactionCount('-') > 0 ? 'has-votes' : ''}">{getReactionCount('-')}</span>
</button>
{:else}
<!-- Kind 1 (Feed): Full reaction menu -->
<div class="reaction-wrapper">
<button
bind:this={menuButton}
onclick={handleHeartClick}
class="reaction-btn heart-btn {userReaction === '+' ? 'active' : ''}"
title="Like or choose reaction"
aria-label="Like or choose reaction"
>
</button>
<EmojiPicker
open={showMenu}
onSelect={(emoji) => {
toggleReaction(emoji);
showMenu = false;
}}
onClose={() => { showMenu = false; }}
/>
</div>
{#if isLoggedIn}
<div class="reaction-wrapper">
<button
bind:this={menuButton}
onclick={handleHeartClick}
class="reaction-btn heart-btn {userReaction === '+' ? 'active' : ''}"
title="Like or choose reaction"
aria-label="Like or choose reaction"
>
</button>
<EmojiPicker
open={showMenu}
onSelect={(emoji) => {
toggleReaction(emoji);
showMenu = false;
}}
onClose={() => { showMenu = false; }}
/>
</div>
{/if}
{#if event.kind !== KIND.DISCUSSION_THREAD}
{#each getAllReactions() as { content, count }}
@@ -711,7 +667,6 @@
{/if}
{/if}
</div>
{/if}
<style>
.Feed-reaction-buttons {
@@ -735,15 +690,20 @@
color: var(--fog-dark-text, #f9fafb);
}
.reaction-btn:hover {
.reaction-btn:hover:not(.disabled) {
background: var(--fog-highlight, #f3f4f6);
border-color: var(--fog-accent, #64748b);
}
:global(.dark) .reaction-btn:hover {
:global(.dark) .reaction-btn:hover:not(.disabled) {
background: var(--fog-dark-highlight, #374151);
}
.reaction-btn.disabled {
opacity: 0.6;
cursor: not-allowed;
}
.reaction-btn.active {
background: var(--fog-accent, #64748b);
color: var(--fog-text, #475569);

35
src/lib/modules/threads/ThreadList.svelte

@@ -72,7 +72,6 @@
: Math.floor(Date.now() / 1000) - config.threadTimeoutDays * 86400;
const threadRelays = relayManager.getThreadReadRelays();
const commentRelays = relayManager.getCommentReadRelays();
// Use getProfileReadRelays() for reactions to include defaultRelays + profileRelays + user inbox + localRelays
// This ensures we get all reactions from the complete relay set
const reactionRelays = relayManager.getProfileReadRelays();
@@ -121,12 +120,8 @@
const threadIds = Array.from(threadsMap.keys());
if (threadIds.length > 0) {
// Fetch all comments in parallel (relay-first for first-time users)
const allComments = await nostrClient.fetchEvents(
[{ kinds: [KIND.COMMENT], '#E': threadIds, '#K': ['11'] }],
commentRelays,
{ relayFirst: true, useCache: true, cacheResults: true, timeout: 3000 }
);
// Don't fetch comments - they're not displayed on the list page
// Only fetch reactions and zaps for sorting and display
// Fetch all reactions in parallel
// Note: Some relays reject '#E' filter, so we only use '#e' and handle both cases in grouping
@@ -244,7 +239,6 @@
);
// Build maps
const newCommentsMap = new Map<string, NostrEvent[]>();
let newReactionsMap = new Map<string, NostrEvent[]>();
const newZapReceiptsMap = new Map<string, NostrEvent[]>();
@@ -252,20 +246,6 @@
await processReactionUpdates();
newReactionsMap = reactionsMap; // Use the processed reactions map
// Group comments by thread ID
for (const comment of allComments) {
const threadId = comment.tags.find(t => t[0] === 'E' || t[0] === 'e')?.[1];
if (threadId && newThreadsMap.has(threadId)) {
if (!newCommentsMap.has(threadId)) {
newCommentsMap.set(threadId, []);
}
newCommentsMap.get(threadId)!.push(comment);
}
}
// Reactions are already processed by processReactionUpdates() above
// newReactionsMap is now set from reactionsMap
// Group zap receipts by thread ID
for (const zapReceipt of allZapReceipts) {
const threadId = zapReceipt.tags.find(t => t[0] === 'e')?.[1];
@@ -277,9 +257,10 @@
}
}
commentsMap = newCommentsMap;
reactionsMap = newReactionsMap;
zapReceiptsMap = newZapReceiptsMap;
// Clear comments map - we don't fetch comments for the list page
commentsMap = new Map();
} else {
// Clear maps if no threads
commentsMap = new Map();
@@ -303,16 +284,11 @@
case 'newest':
return [...events].sort((a, b) => b.created_at - a.created_at);
case 'active':
// Sort by most recent activity (comments, reactions, or zaps)
// Sort by most recent activity (reactions or zaps - comments not fetched for list page)
const activeSorted = events.map((event) => {
const comments = commentsMap.get(event.id) || [];
const reactions = reactionsMap.get(event.id) || [];
const zapReceipts = zapReceiptsMap.get(event.id) || [];
const lastCommentTime = comments.length > 0
? Math.max(...comments.map(c => c.created_at))
: 0;
const lastReactionTime = reactions.length > 0
? Math.max(...reactions.map(r => r.created_at))
: 0;
@@ -323,7 +299,6 @@
const lastActivity = Math.max(
event.created_at,
lastCommentTime,
lastReactionTime,
lastZapTime
);

10
src/lib/services/nostr/nip30-emoji.ts

@@ -175,7 +175,6 @@ export async function loadAllEmojiPacks(): Promise<void> {
try {
// Use profile relays to get emoji packs from more sources
const relays = relayManager.getProfileReadRelays();
console.debug('[nip30-emoji] Loading all emoji packs/sets...');
// Fetch all emoji sets (10030) and emoji packs (30030)
// Use a high limit to get all available packs - increase limit to get more
@@ -185,8 +184,6 @@ export async function loadAllEmojiPacks(): Promise<void> {
{ useCache: true, cacheResults: true, timeout: 15000 }
);
console.debug(`[nip30-emoji] Found ${events.length} emoji pack/set events`);
// Process and cache all emoji sets/packs
// Track shortcode -> (url, created_at) to prefer most recent
const shortcodeToUrlAndTime = new Map<string, { url: string; created_at: number }>();
@@ -226,12 +223,7 @@ export async function loadAllEmojiPacks(): Promise<void> {
shortcodeCache.set(shortcode, url);
}
// Only log if we actually found emojis, otherwise it's just noise
if (shortcodeCache.size > 0) {
console.log(`[nip30-emoji] Cached ${emojiSetsByPubkey.size} emoji sets with ${shortcodeCache.size} unique shortcodes`);
} else {
console.debug(`[nip30-emoji] Cached ${emojiSetsByPubkey.size} emoji sets with ${shortcodeCache.size} unique shortcodes`);
}
// Silently cache - no need to log unless there's an error
allEmojiPacksLoaded = true;
} catch (error) {
console.error('Error loading all emoji packs:', error);

445
src/lib/services/nostr/nostr-client.ts

@@ -24,6 +24,7 @@ interface FetchOptions {
onUpdate?: (events: NostrEvent[]) => void;
timeout?: number;
relayFirst?: boolean; // If true, query relays first with timeout, then fill from cache
priority?: 'high' | 'medium' | 'low'; // Priority level: high for critical UI (comments), low for background (reactions, profiles)
}
class NostrClient {
@@ -60,6 +61,55 @@ class NostrClient {
// Cache NIP-11 metadata to avoid repeated HTTP requests
private nip11MetadataCache: Map<string, { requiresAuth: boolean; cachedAt: number }> = new Map();
private readonly NIP11_CACHE_TTL = 300000; // 5 minutes
// Track fetch patterns to identify repeated fetches
private fetchPatterns: Map<string, { count: number; lastFetch: number; totalEvents: number }> = new Map();
// Cache empty results to prevent repeated fetches of non-existent data
// Also track pending fetches to prevent concurrent duplicate fetches
private emptyResultCache: Map<string, { cachedAt: number; pending?: boolean }> = new Map();
private readonly EMPTY_RESULT_CACHE_TTL = 30000; // 30 seconds - cache empty results briefly
private readonly PENDING_FETCH_TTL = 5000; // 5 seconds - how long to wait for a pending fetch
/**
 * Check whether a relay requires authentication, using its NIP-11 relay
 * information document (cached to avoid repeated HTTP requests).
 *
 * @param relayUrl - WebSocket relay URL (ws:// or wss://)
 * @returns true if the relay advertises `limitation.auth_required`,
 *          false if it advertises it is not required,
 *          null if unknown (fetch failed, timed out, or non-OK response).
 */
private async checkRelayRequiresAuth(relayUrl: string): Promise<boolean | null> {
  // Serve from cache while fresh to avoid hammering relays with HTTP probes
  const cached = this.nip11MetadataCache.get(relayUrl);
  if (cached && (Date.now() - cached.cachedAt) < this.NIP11_CACHE_TTL) {
    return cached.requiresAuth;
  }
  // Fetch NIP-11 metadata
  try {
    const httpUrl = relayUrl.replace(/^wss?:\/\//, (match) => {
      return match === 'wss://' ? 'https://' : 'http://';
    });
    // NIP-11: the relay information document is served at the relay's own
    // HTTP(S) URL when requested with Accept: application/nostr+json.
    // (/.well-known/nostr.json is NIP-05 name verification, not NIP-11 —
    // probing that path would 404 on virtually every relay.)
    const response = await fetch(httpUrl, {
      method: 'GET',
      headers: { 'Accept': 'application/nostr+json' },
      signal: AbortSignal.timeout(2000) // 2 second timeout — don't stall callers on slow relays
    });
    if (response.ok) {
      const metadata = await response.json();
      const requiresAuth = metadata?.limitation?.auth_required === true;
      // Cache the result
      this.nip11MetadataCache.set(relayUrl, {
        requiresAuth,
        cachedAt: Date.now()
      });
      return requiresAuth;
    }
  } catch (error) {
    // Metadata fetch failed (network error, timeout, bad JSON) - return null (unknown)
  }
  return null; // Unknown
}
async initialize(): Promise<void> {
if (this.initialized) return;
@ -83,8 +133,27 @@ class NostrClient { @@ -83,8 +133,27 @@ class NostrClient {
// Start subscription cleanup interval
this.startSubscriptionCleanup();
// Start periodic memory cleanup
this.startMemoryCleanup();
this.initialized = true;
}
/**
* Start periodic memory cleanup to prevent gradual memory growth
*/
private startMemoryCleanup(): void {
// Clean up memory every 30 seconds
setInterval(() => {
const stats = memoryManager.getStats();
if (stats.totalSizeMB > 50) {
const cleanedIds = memoryManager.cleanupOldEvents(25 * 1024 * 1024);
if (cleanedIds.length > 0) {
console.debug(`[nostr-client] Periodic cleanup: removed ${cleanedIds.length} tracked events (${stats.totalSizeMB.toFixed(2)}MB -> target 25MB)`);
}
}
}, 30000); // Every 30 seconds
}
/**
* Start periodic cleanup of inactive subscriptions
@ -567,9 +636,9 @@ class NostrClient { @@ -567,9 +636,9 @@ class NostrClient {
const limited = sorted.slice(0, Math.min(limit, MAX_EVENTS));
const filtered = filterEvents(limited);
// Only log cache queries at debug level to reduce console noise
// Only log if we got multiple events or if it's an interesting query
if (filtered.length > 5 || (filtered.length > 0 && limited.length > filtered.length * 2)) {
// Only log cache queries when significant filtering happens or large result sets
// This reduces noise from background cache enhancement queries
if (filtered.length > 10 || (filtered.length > 0 && limited.length > filtered.length * 3)) {
console.debug(`[nostr-client] Cache query: ${limited.length} events before filter, ${filtered.length} after filter`);
}
@ -866,7 +935,8 @@ class NostrClient { @@ -866,7 +935,8 @@ class NostrClient {
filters: Filter[],
events: Map<string, NostrEvent>,
timeout: number,
onUpdate?: (events: NostrEvent[]) => void
onUpdate?: (events: NostrEvent[]) => void,
priority: 'high' | 'medium' | 'low' = 'medium'
): Promise<void> {
return new Promise((resolve) => {
const makeRequest = () => {
@ -874,11 +944,17 @@ class NostrClient { @@ -874,11 +944,17 @@ class NostrClient {
const lastRequest = this.lastRequestTime.get(relayUrl) || 0;
const timeSinceLastRequest = now - lastRequest;
const activeForRelay = this.activeRequestsPerRelay.get(relayUrl) || 0;
// High priority requests get higher concurrent limits and can bypass some throttling
const maxConcurrentForPriority = priority === 'high' ? 2 : this.MAX_CONCURRENT_PER_RELAY;
const maxTotalForPriority = priority === 'high' ? 5 : this.MAX_CONCURRENT_TOTAL;
const minIntervalForPriority = priority === 'high' ? 50 : this.MIN_REQUEST_INTERVAL;
// Check if we can make the request now
if (timeSinceLastRequest >= this.MIN_REQUEST_INTERVAL &&
activeForRelay < this.MAX_CONCURRENT_PER_RELAY &&
this.totalActiveRequests < this.MAX_CONCURRENT_TOTAL) {
// High priority requests can bypass some throttling
if (timeSinceLastRequest >= minIntervalForPriority &&
activeForRelay < maxConcurrentForPriority &&
this.totalActiveRequests < maxTotalForPriority) {
// Update tracking
this.lastRequestTime.set(relayUrl, now);
@ -900,10 +976,10 @@ class NostrClient { @@ -900,10 +976,10 @@ class NostrClient {
});
} else {
// Wait and retry
const waitTime = Math.max(
this.MIN_REQUEST_INTERVAL - timeSinceLastRequest,
100
);
// High priority requests wait less
const waitTime = priority === 'high'
? Math.max(minIntervalForPriority - timeSinceLastRequest, 10)
: Math.max(this.MIN_REQUEST_INTERVAL - timeSinceLastRequest, 100);
setTimeout(makeRequest, waitTime);
}
};
@ -1032,13 +1108,29 @@ class NostrClient { @@ -1032,13 +1108,29 @@ class NostrClient {
// Cache the event
client.addToCache(event);
// Check memory usage and cleanup if needed (soft limits)
// Check memory usage and cleanup if needed (aggressive limits)
const stats = memoryManager.getStats();
if (stats.totalSizeMB > 200) {
// If over 200MB, cleanup oldest events to get back to 100MB
const cleanedIds = memoryManager.cleanupOldEvents(100 * 1024 * 1024);
// Note: We don't remove from events Map here as those are needed for return value
// The cleanup is just for tracking/monitoring purposes
if (stats.totalSizeMB > 50) {
// If over 50MB, cleanup oldest events to get back to 25MB
const cleanedIds = memoryManager.cleanupOldEvents(25 * 1024 * 1024);
// Actually remove cleaned events from the Map to free memory
if (cleanedIds.length > 0) {
for (const id of cleanedIds) {
events.delete(id);
}
console.warn(`[nostr-client] Memory cleanup: removed ${cleanedIds.length} events from Map (${stats.totalSizeMB.toFixed(2)}MB -> target 25MB)`);
}
}
// Also limit Map size per-request to prevent unbounded growth
if (events.size > 500) {
const sorted = Array.from(events.entries())
.sort((a, b) => a[1].created_at - b[1].created_at); // Oldest first
const toRemove = sorted.slice(0, events.size - 500);
for (const [id] of toRemove) {
events.delete(id);
memoryManager.untrackEvent(id);
}
}
// Stream event directly to onUpdate callback immediately
@ -1165,23 +1257,120 @@ class NostrClient { @@ -1165,23 +1257,120 @@ class NostrClient {
this.processingQueue = false;
}
/**
 * Build a compact, human-readable summary of a Nostr filter for log output.
 * Examples: "kinds:1,7 limit:50", "#e:3 authors:2", or "empty filter" when
 * the filter has no recognized constraints.
 */
private describeFilter(filter: Filter): string {
  const pieces: string[] = [];
  const { kinds, ids, authors, limit } = filter;
  const eTags = filter['#e'];
  const pTags = filter['#p'];

  // Kinds are short, so list the actual values; everything else logs counts only.
  if (kinds?.length) {
    pieces.push(`kind${kinds.length > 1 ? 's' : ''}:${kinds.join(',')}`);
  }
  if (ids?.length) {
    pieces.push(`id${ids.length > 1 ? 's' : ''}:${ids.length}`);
  }
  if (authors?.length) {
    pieces.push(`author${authors.length > 1 ? 's' : ''}:${authors.length}`);
  }
  if (eTags?.length) {
    pieces.push(`#e:${eTags.length}`);
  }
  if (pTags?.length) {
    pieces.push(`#p:${pTags.length}`);
  }
  if (limit) {
    pieces.push(`limit:${limit}`);
  }

  return pieces.length ? pieces.join(' ') : 'empty filter';
}
async fetchEvents(
filters: Filter[],
relays: string[],
options: FetchOptions = {}
): Promise<NostrEvent[]> {
const { useCache = true, cacheResults = true, onUpdate, timeout = 10000, relayFirst = false } = options;
const { useCache = true, cacheResults = true, onUpdate, timeout = 10000, relayFirst = false, priority = 'medium' } = options;
// Create a key for this fetch to prevent duplicates
const fetchKey = JSON.stringify({
filters,
relays: relays.sort()
});
// Create filter description for logging
const filterDesc = filters.length === 1
? this.describeFilter(filters[0])
: filters.map(f => this.describeFilter(f)).join(' | ');
// Create relay description for logging (show count and first few)
const relayDesc = relays.length <= 3
? relays.join(', ')
: `${relays.length} relays (${relays.slice(0, 2).join(', ')}, ...)`;
const activeFetch = this.activeFetches.get(fetchKey);
if (activeFetch) {
console.debug(`[nostr-client] Deduplicating fetch [${filterDesc}] from [${relayDesc}] - already in progress`);
return activeFetch;
}
// Check if we recently got an empty result for this exact fetch
// This prevents repeated fetches of non-existent data
// Use a key based on filters (with actual IDs) but not relays, so different relay sets can share cache
// Create a stable key from filters that includes actual event IDs, not just counts
const filterKey = JSON.stringify(filters.map(f => ({
kinds: f.kinds,
ids: f.ids,
authors: f.authors,
'#e': f['#e'],
'#p': f['#p'],
limit: f.limit
})));
const emptyCacheKey = filterKey;
// Check and set pending flag atomically to prevent race conditions
const emptyCacheEntry = this.emptyResultCache.get(emptyCacheKey);
if (emptyCacheEntry) {
const age = Date.now() - emptyCacheEntry.cachedAt;
if (emptyCacheEntry.pending && age < this.PENDING_FETCH_TTL) {
// Another fetch for this is in progress, wait for it
console.log(`[nostr-client] Waiting for pending fetch [${filterDesc}] from [${relayDesc}] - another fetch in progress`);
// Wait and check multiple times (up to 2 seconds)
for (let i = 0; i < 4; i++) {
await new Promise(resolve => setTimeout(resolve, 500));
const updatedEntry = this.emptyResultCache.get(emptyCacheKey);
if (updatedEntry && !updatedEntry.pending) {
if ((Date.now() - updatedEntry.cachedAt) < this.EMPTY_RESULT_CACHE_TTL) {
const finalAge = Math.round((Date.now() - updatedEntry.cachedAt) / 1000);
console.log(`[nostr-client] Skipping fetch [${filterDesc}] from [${relayDesc}] - empty result cached ${finalAge}s ago (waited for pending)`);
return [];
}
break; // No longer pending, but result expired or had data
}
if (!updatedEntry || !updatedEntry.pending) {
break; // No longer pending
}
}
// If still pending after waiting, proceed (might be a slow fetch)
} else if (!emptyCacheEntry.pending && age < this.EMPTY_RESULT_CACHE_TTL) {
const ageSeconds = Math.round(age / 1000);
console.log(`[nostr-client] Skipping fetch [${filterDesc}] from [${relayDesc}] - empty result cached ${ageSeconds}s ago`);
return Promise.resolve([]);
}
}
// Atomically check and set pending flag - if another fetch just set it, wait
const existingEntry = this.emptyResultCache.get(emptyCacheKey);
if (existingEntry?.pending && (Date.now() - existingEntry.cachedAt) < 1000) {
// Very recent pending entry, wait a bit
await new Promise(resolve => setTimeout(resolve, 200));
const recheck = this.emptyResultCache.get(emptyCacheKey);
if (recheck && !recheck.pending && (Date.now() - recheck.cachedAt) < this.EMPTY_RESULT_CACHE_TTL) {
const finalAge = Math.round((Date.now() - recheck.cachedAt) / 1000);
console.log(`[nostr-client] Skipping fetch [${filterDesc}] from [${relayDesc}] - empty result cached ${finalAge}s ago (waited for concurrent)`);
return [];
}
}
// Mark this fetch as pending
this.emptyResultCache.set(emptyCacheKey, { cachedAt: Date.now(), pending: true });
// Always use relay-first mode: query relays first with timeout, then fill from cache if needed
{
@ -1225,7 +1414,14 @@ class NostrClient { @@ -1225,7 +1414,14 @@ class NostrClient {
if (response.ok) {
const metadata = await response.json();
if (metadata?.limitation?.auth_required) {
const requiresAuth = metadata?.limitation?.auth_required === true;
// Cache the metadata
this.nip11MetadataCache.set(relayUrl, {
requiresAuth,
cachedAt: Date.now()
});
if (requiresAuth) {
console.debug(`[nostr-client] Relay ${relayUrl} requires authentication (from NIP-11), authenticating before subscription...`);
const session = sessionManager.getSession();
if (session) {
@ -1242,7 +1438,7 @@ class NostrClient { @@ -1242,7 +1438,7 @@ class NostrClient {
console.debug(`[nostr-client] Proactive auth attempt for ${relayUrl} failed (will try on challenge):`, error);
}
} else {
console.debug(`[nostr-client] Relay ${relayUrl} requires authentication but user is not logged in`);
// Don't log this - it's expected when not logged in and we'll skip these relays now
}
}
}
@ -1253,17 +1449,72 @@ class NostrClient { @@ -1253,17 +1449,72 @@ class NostrClient {
}
}
// Check empty result cache again (in case another concurrent fetch already completed)
const emptyCacheEntry2 = this.emptyResultCache.get(emptyCacheKey);
if (emptyCacheEntry2 && !emptyCacheEntry2.pending && (Date.now() - emptyCacheEntry2.cachedAt) < this.EMPTY_RESULT_CACHE_TTL) {
const age = Math.round((Date.now() - emptyCacheEntry2.cachedAt) / 1000);
console.log(`[nostr-client] Skipping fetch [${filterDesc}] from [${relayDesc}] - empty result cached ${age}s ago (checked during fetch)`);
// Clear pending flag if it was set
this.emptyResultCache.delete(emptyCacheKey);
return [];
}
// Query relays first with timeout
// Respect cacheResults option - don't cache if explicitly disabled
const relayEvents = await this.fetchFromRelays(filters, relays, {
cacheResults: cacheResults,
onUpdate,
timeout: relayTimeout
timeout: relayTimeout,
priority: options.priority
});
// If we got results from relays, return them immediately
// Track results for composite log
let finalEvents = relayEvents;
let cacheEnhancementCount = 0;
let usedCacheFallback = false;
// If we got results from relays, return them immediately for fast display
// Then enhance with cache delta and log summary
if (relayEvents.length > 0) {
// Got events from relays
// Start cache enhancement in background, but wait for it before logging
const cacheEnhancementPromise = (async () => {
if (useCache && onUpdate) {
try {
const cachedEvents = await this.getCachedEvents(filters);
if (cachedEvents.length > 0) {
// Find events in cache that aren't in relay results (delta)
const relayEventIds = new Set(relayEvents.map(e => e.id));
const cacheDelta = cachedEvents.filter(e => !relayEventIds.has(e.id));
if (cacheDelta.length > 0) {
cacheEnhancementCount = cacheDelta.length;
// Enhance results with cache delta via onUpdate callback
onUpdate(cacheDelta);
}
}
} catch (error) {
// Silently fail - cache enhancement is optional
}
}
return cacheEnhancementCount;
})();
// Wait for cache enhancement to complete, then log composite summary
cacheEnhancementPromise.then((enhancementCount) => {
// Track fetch patterns
const patternKey = `${filterDesc}`;
const pattern = this.fetchPatterns.get(patternKey) || { count: 0, lastFetch: 0, totalEvents: 0 };
pattern.count++;
pattern.lastFetch = Date.now();
pattern.totalEvents += relayEvents.length + enhancementCount;
this.fetchPatterns.set(patternKey, pattern);
const summary = enhancementCount > 0
? `[nostr-client] Fetch complete: ${relayEvents.length} from relays, ${enhancementCount} from cache (enhanced) [${filterDesc}] from [${relayDesc}]`
: `[nostr-client] Fetch complete: ${relayEvents.length} events from relays [${filterDesc}] from [${relayDesc}]`;
console.log(summary);
});
return relayEvents;
}
@ -1273,17 +1524,54 @@ class NostrClient { @@ -1273,17 +1524,54 @@ class NostrClient {
try {
const cachedEvents = await this.getCachedEvents(filters);
if (cachedEvents.length > 0) {
console.debug(`[nostr-client] Relay query returned 0 events, using ${cachedEvents.length} cached events`);
return cachedEvents;
usedCacheFallback = true;
finalEvents = cachedEvents;
console.debug(`[nostr-client] Using ${cachedEvents.length} cached events (relays returned 0)`);
} else {
console.debug(`[nostr-client] No cached events available, returning empty result`);
}
} catch (error) {
console.error('[nostr-client] Error querying cache:', error);
}
}
// Track fetch patterns for analysis
const patternKey = `${filterDesc}`;
const pattern = this.fetchPatterns.get(patternKey) || { count: 0, lastFetch: 0, totalEvents: 0 };
pattern.count++;
pattern.lastFetch = Date.now();
pattern.totalEvents += finalEvents.length;
this.fetchPatterns.set(patternKey, pattern);
// Cache empty results to prevent repeated fetches of non-existent data
if (finalEvents.length === 0 && !usedCacheFallback) {
const wasAlreadyCached = this.emptyResultCache.has(emptyCacheKey) && !this.emptyResultCache.get(emptyCacheKey)?.pending;
this.emptyResultCache.set(emptyCacheKey, { cachedAt: Date.now(), pending: false });
// Only log if this is a new cache entry (not updating an existing one)
if (!wasAlreadyCached) {
console.log(`[nostr-client] Cached empty result for [${filterDesc}] - will skip similar fetches for 30s`);
}
} else {
console.debug(`[nostr-client] No events from relays, useCache=false, returning empty array`);
// Clear pending flag if we got results
const currentEntry = this.emptyResultCache.get(emptyCacheKey);
if (currentEntry?.pending) {
this.emptyResultCache.delete(emptyCacheKey);
}
}
// Log composite summary
if (usedCacheFallback) {
console.log(`[nostr-client] Fetch complete: 0 from relays, ${finalEvents.length} from cache (fallback) [${filterDesc}] from [${relayDesc}]`);
} else if (finalEvents.length === 0) {
// Only log 0-event fetches if they're repeated many times (likely a problem)
if (pattern.count > 5 && pattern.totalEvents === 0) {
console.warn(`[nostr-client] Repeated empty fetch (${pattern.count}x): [${filterDesc}] from [${relayDesc}] - consider caching or skipping`);
} else {
console.log(`[nostr-client] Fetch complete: 0 events (relays returned 0, no cache available) [${filterDesc}] from [${relayDesc}]`);
}
}
return relayEvents; // Return empty array if both failed
return finalEvents;
})();
this.activeFetches.set(fetchKey, fetchPromise);
@ -1297,42 +1585,62 @@ class NostrClient { @@ -1297,42 +1585,62 @@ class NostrClient {
private async fetchFromRelays(
filters: Filter[],
relays: string[],
options: { cacheResults: boolean; onUpdate?: (events: NostrEvent[]) => void; timeout: number }
options: { cacheResults: boolean; onUpdate?: (events: NostrEvent[]) => void; timeout: number; priority?: 'high' | 'medium' | 'low' }
): Promise<NostrEvent[]> {
const timeout = options.timeout || config.relayTimeout;
// Check if user is logged in
const session = sessionManager.getSession();
const isLoggedIn = !!session;
// Filter out relays that have failed recently or permanently
// Also filter out auth-required relays if user is not logged in
const now = Date.now();
const availableRelays = relays.filter(url => {
if (this.relays.has(url)) return true; // Already connected
const failureInfo = this.failedRelays.get(url);
if (failureInfo) {
// Skip permanently failed relays
if (failureInfo.failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
return false; // Skip this relay, it has failed too many times
const availableRelays = await Promise.all(
relays.map(async (url) => {
if (this.relays.has(url)) return { url, available: true }; // Already connected
const failureInfo = this.failedRelays.get(url);
if (failureInfo) {
// Skip permanently failed relays
if (failureInfo.failureCount >= this.PERMANENT_FAILURE_THRESHOLD) {
return { url, available: false };
}
// Skip relays that failed recently (still in backoff period)
const timeSinceFailure = now - failureInfo.lastFailure;
if (timeSinceFailure < failureInfo.retryAfter) {
return { url, available: false };
}
}
// Skip relays that failed recently (still in backoff period)
const timeSinceFailure = now - failureInfo.lastFailure;
if (timeSinceFailure < failureInfo.retryAfter) {
return false; // Skip this relay, it failed recently
// If not logged in, check if relay requires auth and skip it
if (!isLoggedIn) {
const requiresAuth = await this.checkRelayRequiresAuth(url);
if (requiresAuth === true) {
return { url, available: false }; // Skip auth-required relay
}
}
}
return true; // Can try to connect
});
return { url, available: true };
})
);
const filteredRelays = availableRelays
.filter(r => r.available)
.map(r => r.url);
// Try to connect to relays that aren't already connected
// Like jumble, we gracefully handle failures - addRelay doesn't throw, it just doesn't add failed relays
const relaysToConnect = availableRelays.filter(url => !this.relays.has(url));
const relaysToConnect = filteredRelays.filter(url => !this.relays.has(url));
if (relaysToConnect.length > 0) {
if (relays.length === 1) {
if (filteredRelays.length === 1) {
console.log(`[nostr-client] Attempting to connect to relay ${relaysToConnect[0]}...`);
}
await Promise.allSettled(
relaysToConnect.map(url => this.addRelay(url))
);
// For single relay, wait for connection to actually establish
if (relays.length === 1 && relaysToConnect.length > 0) {
if (filteredRelays.length === 1 && relaysToConnect.length > 0) {
const relayUrl = relaysToConnect[0];
let attempts = 0;
const maxAttempts = 6; // Wait up to 3 seconds (6 * 500ms) for connection
@ -1355,15 +1663,17 @@ class NostrClient { @@ -1355,15 +1663,17 @@ class NostrClient {
}
// Get list of actually connected relays
const connectedRelays = availableRelays.filter(url => this.relays.has(url));
const connectedRelays = filteredRelays.filter(url => this.relays.has(url));
if (connectedRelays.length === 0) {
// Log at warn level for single relay queries (more important to know about failures)
const logLevel = relays.length === 1 ? 'warn' : 'debug';
const message = `[nostr-client] No connected relays available for fetch (${relays.length} requested: ${relays.join(', ')}, all failed or unavailable)`;
const logLevel = filteredRelays.length === 1 ? 'warn' : 'debug';
const skippedCount = relays.length - filteredRelays.length;
const skipReason = !isLoggedIn && skippedCount > 0 ? ` (${skippedCount} skipped: auth required)` : '';
const message = `[nostr-client] No connected relays available for fetch (${relays.length} requested, ${filteredRelays.length} available${skipReason}, all failed or unavailable)`;
if (logLevel === 'warn') {
console.warn(message);
// For single relay, also log which relays were attempted and failure info
if (relays.length === 1) {
if (filteredRelays.length === 1) {
const failureInfo = this.failedRelays.get(relays[0]);
if (failureInfo) {
console.warn(`[nostr-client] Relay ${relays[0]} failure info:`, failureInfo);
@ -1371,7 +1681,7 @@ class NostrClient { @@ -1371,7 +1681,7 @@ class NostrClient {
if (relaysToConnect.length > 0) {
console.warn(`[nostr-client] Attempted to connect to: ${relaysToConnect.join(', ')}`);
}
console.warn(`[nostr-client] Available relays (after filtering): ${availableRelays.join(', ')}`);
console.warn(`[nostr-client] Available relays (after filtering): ${filteredRelays.join(', ')}`);
console.warn(`[nostr-client] Currently connected relays: ${Array.from(this.relays.keys()).join(', ')}`);
}
} else {
@ -1383,8 +1693,10 @@ class NostrClient { @@ -1383,8 +1693,10 @@ class NostrClient {
// Log connection status for single relay queries
if (relays.length === 1 && connectedRelays.length === 1) {
console.log(`[nostr-client] Successfully connected to relay ${relays[0]}, fetching events...`);
} else if (connectedRelays.length < relays.length * 0.5) {
console.debug(`[nostr-client] Fetching from ${connectedRelays.length} connected relay(s) out of ${relays.length} requested`);
} else if (connectedRelays.length < filteredRelays.length * 0.5 || filteredRelays.length < relays.length) {
const skippedCount = relays.length - filteredRelays.length;
const skipReason = !isLoggedIn && skippedCount > 0 ? ` (${skippedCount} skipped: auth required)` : '';
console.debug(`[nostr-client] Fetching from ${connectedRelays.length} connected relay(s) out of ${filteredRelays.length} available${skipReason} (${relays.length} requested)`);
}
// Log connection status for single relay queries
@ -1393,17 +1705,38 @@ class NostrClient { @@ -1393,17 +1705,38 @@ class NostrClient {
}
// Process relays sequentially with throttling to avoid overload
// High priority requests get processed faster
const events: Map<string, NostrEvent> = new Map();
const priority = options.priority || 'medium';
const delayBetweenRelays = priority === 'high' ? 10 : 100;
// Limit events Map size to prevent memory bloat (keep only most recent 1000 events)
const MAX_EVENTS_IN_MAP = 1000;
for (const relayUrl of connectedRelays) {
await this.throttledRelayRequest(relayUrl, filters, events, timeout, options.onUpdate);
// Small delay between relays
await new Promise(resolve => setTimeout(resolve, 100));
await this.throttledRelayRequest(relayUrl, filters, events, timeout, options.onUpdate, priority);
// Limit Map size - remove oldest if over limit
if (events.size > MAX_EVENTS_IN_MAP) {
const sorted = Array.from(events.entries())
.sort((a, b) => a[1].created_at - b[1].created_at); // Oldest first
const toRemove = sorted.slice(0, events.size - MAX_EVENTS_IN_MAP);
for (const [id] of toRemove) {
events.delete(id);
memoryManager.untrackEvent(id);
}
}
// Small delay between relays (shorter for high priority)
await new Promise(resolve => setTimeout(resolve, delayBetweenRelays));
}
const eventArray = Array.from(events.values());
const filtered = filterEvents(eventArray);
const zapFiltered = filtered.filter(event => !this.shouldFilterZapReceipt(event));
// Clear events Map after processing to free memory
events.clear();
if (options.cacheResults && zapFiltered.length > 0) {
cacheEvents(zapFiltered).catch(() => {

9
src/lib/services/user-data.ts

@ -83,10 +83,11 @@ export async function fetchProfile( @@ -83,10 +83,11 @@ export async function fetchProfile(
];
// Background refresh - don't await, just fire and forget
// Use low priority - profiles are background data, comments should load first
nostrClient.fetchEvents(
[{ kinds: [KIND.METADATA], authors: [pubkey], limit: 1 }],
relayList,
{ useCache: false, cacheResults: true } // Don't use cache, but cache results
{ useCache: false, cacheResults: true, priority: 'low' } // Don't use cache, but cache results
).then((events) => {
if (events.length > 0) {
cacheProfile(events[0]).catch(() => {
@ -106,10 +107,11 @@ export async function fetchProfile( @@ -106,10 +107,11 @@ export async function fetchProfile(
...config.profileRelays
];
// Use low priority - profiles are background data, comments should load first
const events = await nostrClient.fetchEvents(
[{ kinds: [KIND.METADATA], authors: [pubkey], limit: 1 }],
relayList,
{ useCache: true, cacheResults: true }
{ useCache: true, cacheResults: true, priority: 'low' }
);
if (events.length === 0) return null;
@ -144,10 +146,11 @@ export async function fetchProfiles( @@ -144,10 +146,11 @@ export async function fetchProfiles(
...config.profileRelays
];
// Use low priority - profiles are background data, comments should load first
const events = await nostrClient.fetchEvents(
[{ kinds: [KIND.METADATA], authors: missing, limit: 1 }],
relayList,
{ useCache: true, cacheResults: true }
{ useCache: true, cacheResults: true, priority: 'low' }
);
for (const event of events) {

Loading…
Cancel
Save