Browse Source

implement discussion cache

imwald
Silberengel 4 months ago
parent
commit
d991739dd4
  1. 807
      src/components/Note/MarkdownArticle/MarkdownArticle.tsx
  2. 2
      src/components/Note/PublicationIndex/PublicationIndex.tsx
  3. 189
      src/components/ReplyNoteList/index.tsx
  4. 212
      src/pages/primary/DiscussionsPage/index.tsx
  5. 308
      src/services/discussion-feed-cache.service.ts

807
src/components/Note/MarkdownArticle/MarkdownArticle.tsx

@@ -44,10 +44,11 @@ function parseMarkdownContent(
navigateToHashtag: (href: string) => void
navigateToRelay: (url: string) => void
}
): { nodes: React.ReactNode[]; hashtagsInContent: Set<string> } {
): { nodes: React.ReactNode[]; hashtagsInContent: Set<string>; footnotes: Map<string, string> } {
const { eventPubkey, imageIndexMap, openLightbox, navigateToHashtag, navigateToRelay } = options
const parts: React.ReactNode[] = []
const hashtagsInContent = new Set<string>()
const footnotes = new Map<string, string>()
let lastIndex = 0
// Find all patterns: markdown images, markdown links, relay URLs, nostr addresses, hashtags, wikilinks
@@ -68,7 +69,7 @@ function parseMarkdownContent(
})
// Markdown links: [text](url) - but not images
const markdownLinkRegex = /\[([^\]]+)\]\(([^)]+)\)/g
const markdownLinkRegex = /\[([^\]]+)\]\(([^)]+)\)/g
const linkMatches = Array.from(content.matchAll(markdownLinkRegex))
linkMatches.forEach(match => {
if (match.index !== undefined) {
@@ -172,26 +173,253 @@ function parseMarkdownContent(
}
})
// Footnote references ([^1], [^note], etc.) - but not definitions
const footnoteRefRegex = /\[\^([^\]]+)\]/g
const footnoteRefMatches = Array.from(content.matchAll(footnoteRefRegex))
footnoteRefMatches.forEach(match => {
if (match.index !== undefined) {
// Skip if this is a footnote definition (has : after the closing bracket)
const afterMatch = content.substring(match.index + match[0].length, match.index + match[0].length + 2)
if (afterMatch.startsWith(']:')) {
return // This is a definition, not a reference
}
// Only add if not already covered by another pattern
const isInOther = patterns.some(p =>
match.index! >= p.index &&
match.index! < p.end
)
if (!isInOther) {
patterns.push({
index: match.index,
end: match.index + match[0].length,
type: 'footnote-ref',
data: match[1] // footnote ID
})
}
}
})
// Block-level patterns: headers, lists, horizontal rules, tables, footnotes - must be at start of line
// Process line by line to detect block-level elements
const lines = content.split('\n')
let currentIndex = 0
const blockPatterns: Array<{ index: number; end: number; type: string; data: any }> = []
// First pass: extract footnote definitions
lines.forEach((line) => {
const footnoteDefMatch = line.match(/^\[\^([^\]]+)\]:\s+(.+)$/)
if (footnoteDefMatch) {
const footnoteId = footnoteDefMatch[1]
const footnoteText = footnoteDefMatch[2]
footnotes.set(footnoteId, footnoteText)
}
})
// Second pass: detect tables and other block-level elements
let lineIdx = 0
while (lineIdx < lines.length) {
const line = lines[lineIdx]
const lineStartIndex = currentIndex
const lineEndIndex = currentIndex + line.length
// Tables: detect table rows (must have | characters)
// GitHub markdown table format: header row, separator row (|---|), data rows
if (line.includes('|') && line.trim().startsWith('|') && line.trim().endsWith('|')) {
// Check if this is a table by looking at the next line (separator)
if (lineIdx + 1 < lines.length) {
const nextLine = lines[lineIdx + 1]
const nextLineTrimmed = nextLine.trim()
// Table separator looks like: |---|---| or |:---|:---:|---:| or | -------- | ------- |
// Must start and end with |, and contain only spaces, dashes, colons, and pipes
const isSeparator = nextLineTrimmed.startsWith('|') &&
nextLineTrimmed.endsWith('|') &&
/^[\|\s\:\-]+$/.test(nextLineTrimmed) &&
nextLineTrimmed.includes('-')
if (isSeparator) {
// This is a table! Collect all table rows
const tableRows: string[] = []
const tableStartIndex = lineStartIndex
let tableEndIndex = lineEndIndex
let tableLineIdx = lineIdx
// Collect header row
tableRows.push(line)
tableLineIdx++
tableEndIndex += nextLine.length + 1
tableLineIdx++ // Skip separator
// Collect data rows until we hit a non-table line
while (tableLineIdx < lines.length) {
const tableLine = lines[tableLineIdx]
const tableLineTrimmed = tableLine.trim()
// Check if it's a table row (starts and ends with |)
if (tableLineTrimmed.startsWith('|') && tableLineTrimmed.endsWith('|')) {
// Check if it's another separator row (skip it)
const isAnotherSeparator = /^[\|\s\:\-]+$/.test(tableLineTrimmed) && tableLineTrimmed.includes('-')
if (!isAnotherSeparator) {
tableRows.push(tableLine)
tableEndIndex += tableLine.length + 1
}
tableLineIdx++
} else {
break
}
}
// Parse table rows into cells
const parsedRows: string[][] = []
tableRows.forEach((row) => {
// Split by |, trim each cell, filter out empty edge cells
const rawCells = row.split('|')
const cells = rawCells
.map(cell => cell.trim())
.filter((cell, idx) => {
// Remove empty cells at the very start and end (from leading/trailing |)
if (idx === 0 && cell === '') return false
if (idx === rawCells.length - 1 && cell === '') return false
return true
})
if (cells.length > 0) {
parsedRows.push(cells)
}
})
if (parsedRows.length > 0) {
blockPatterns.push({
index: tableStartIndex,
end: tableEndIndex,
type: 'table',
data: { rows: parsedRows, lineNum: lineIdx }
})
// Skip all table lines
currentIndex = tableEndIndex + 1
lineIdx = tableLineIdx
continue
}
}
}
}
// Headers (# Header, ## Header, etc.)
const headerMatch = line.match(/^(#{1,6})\s+(.+)$/)
if (headerMatch) {
const headerLevel = headerMatch[1].length
const headerText = headerMatch[2]
blockPatterns.push({
index: lineStartIndex,
end: lineEndIndex,
type: 'header',
data: { level: headerLevel, text: headerText, lineNum: lineIdx }
})
}
// Horizontal rule (---- or ====, at least 3 dashes/equals)
else if (line.match(/^[-=]{3,}\s*$/)) {
blockPatterns.push({
index: lineStartIndex,
end: lineEndIndex,
type: 'horizontal-rule',
data: { lineNum: lineIdx }
})
}
// Bullet list (* item or - item)
else if (line.match(/^[\*\-\+]\s+.+$/)) {
const listMatch = line.match(/^[\*\-\+]\s+(.+)$/)
if (listMatch) {
blockPatterns.push({
index: lineStartIndex,
end: lineEndIndex,
type: 'bullet-list-item',
data: { text: listMatch[1], lineNum: lineIdx }
})
}
}
// Numbered list (1. item, 2. item, etc.)
else if (line.match(/^\d+\.\s+.+$/)) {
const listMatch = line.match(/^\d+\.\s+(.+)$/)
if (listMatch) {
blockPatterns.push({
index: lineStartIndex,
end: lineEndIndex,
type: 'numbered-list-item',
data: { text: listMatch[1], lineNum: lineIdx, number: line.match(/^(\d+)/)?.[1] }
})
}
}
// Footnote definition (already extracted, but mark it so we don't render it in content)
else if (line.match(/^\[\^([^\]]+)\]:\s+.+$/)) {
blockPatterns.push({
index: lineStartIndex,
end: lineEndIndex,
type: 'footnote-definition',
data: { lineNum: lineIdx }
})
}
currentIndex += line.length + 1 // +1 for newline
lineIdx++
}
// Add block patterns to main patterns array
blockPatterns.forEach(pattern => {
patterns.push(pattern)
})
// Sort patterns by index
patterns.sort((a, b) => a.index - b.index)
// Remove overlapping patterns (keep the first one)
// Block-level patterns (headers, lists, horizontal rules, tables) take priority
const filteredPatterns: typeof patterns = []
let lastEnd = 0
patterns.forEach(pattern => {
if (pattern.index >= lastEnd) {
filteredPatterns.push(pattern)
lastEnd = pattern.end
const blockLevelTypes = ['header', 'horizontal-rule', 'bullet-list-item', 'numbered-list-item', 'table', 'footnote-definition']
const blockLevelPatterns = patterns.filter(p => blockLevelTypes.includes(p.type))
const otherPatterns = patterns.filter(p => !blockLevelTypes.includes(p.type))
// First add all block-level patterns
blockLevelPatterns.forEach(pattern => {
filteredPatterns.push(pattern)
})
// Then add other patterns that don't overlap with block-level patterns
otherPatterns.forEach(pattern => {
const overlapsWithBlock = blockLevelPatterns.some(blockPattern =>
(pattern.index >= blockPattern.index && pattern.index < blockPattern.end) ||
(pattern.end > blockPattern.index && pattern.end <= blockPattern.end) ||
(pattern.index <= blockPattern.index && pattern.end >= blockPattern.end)
)
if (!overlapsWithBlock) {
// Check for overlaps with existing filtered patterns
const overlaps = filteredPatterns.some(p =>
(pattern.index >= p.index && pattern.index < p.end) ||
(pattern.end > p.index && pattern.end <= p.end) ||
(pattern.index <= p.index && pattern.end >= p.end)
)
if (!overlaps) {
filteredPatterns.push(pattern)
}
}
})
// Re-sort by index
filteredPatterns.sort((a, b) => a.index - b.index)
// Build React nodes from patterns
filteredPatterns.forEach((pattern, i) => {
filteredPatterns.forEach((pattern, patternIdx) => {
// Add text before pattern
if (pattern.index > lastIndex) {
const text = content.slice(lastIndex, pattern.index)
if (text) {
parts.push(<span key={`text-${i}`}>{text}</span>)
// Process text for inline formatting (bold, italic, etc.)
// But skip if this text is part of a table (tables are handled as block patterns)
const isInTable = blockLevelPatterns.some(p =>
p.type === 'table' &&
lastIndex >= p.index &&
lastIndex < p.end
)
if (!isInTable) {
parts.push(...parseInlineMarkdown(text, `text-${patternIdx}`, footnotes))
}
}
}
@@ -202,7 +430,7 @@ function parseMarkdownContent(
const imageIndex = imageIndexMap.get(cleaned)
if (isImage(cleaned)) {
parts.push(
<div key={`img-${i}`} className="my-2 block">
<div key={`img-${patternIdx}`} className="my-2 block">
<Image
image={{ url, pubkey: eventPubkey }}
className="max-w-[400px] rounded-lg cursor-zoom-in"
@ -221,7 +449,7 @@ function parseMarkdownContent( @@ -221,7 +449,7 @@ function parseMarkdownContent(
)
} else if (isVideo(cleaned) || isAudio(cleaned)) {
parts.push(
<div key={`media-${i}`} className="my-2">
<div key={`media-${patternIdx}`} className="my-2">
<MediaPlayer
src={cleaned}
className="max-w-[400px]"
@ -238,7 +466,7 @@ function parseMarkdownContent( @@ -238,7 +466,7 @@ function parseMarkdownContent(
const relayPath = `/relays/${encodeURIComponent(url)}`
parts.push(
<a
key={`relay-${i}`}
key={`relay-${patternIdx}`}
href={relayPath}
className="inline text-green-600 dark:text-green-400 hover:text-green-700 dark:hover:text-green-300 hover:underline break-words cursor-pointer"
onClick={(e) => {
@ -255,7 +483,7 @@ function parseMarkdownContent( @@ -255,7 +483,7 @@ function parseMarkdownContent(
// Render as green link (will show WebPreview at bottom for HTTP/HTTPS)
parts.push(
<a
key={`link-${i}`}
key={`link-${patternIdx}`}
href={url}
target="_blank"
rel="noreferrer noopener"
@ -273,7 +501,7 @@ function parseMarkdownContent( @@ -273,7 +501,7 @@ function parseMarkdownContent(
const displayText = truncateLinkText(url)
parts.push(
<a
key={`relay-${i}`}
key={`relay-${patternIdx}`}
href={relayPath}
className="inline text-green-600 dark:text-green-400 hover:text-green-700 dark:hover:text-green-300 hover:underline break-words cursor-pointer"
onClick={(e) => {
@ -286,24 +514,131 @@ function parseMarkdownContent( @@ -286,24 +514,131 @@ function parseMarkdownContent(
{displayText}
</a>
)
} else if (pattern.type === 'header') {
const { level, text } = pattern.data
// Parse the header text for inline formatting (but not nested headers)
const headerContent = parseInlineMarkdown(text, `header-${patternIdx}`, footnotes)
const HeaderTag = `h${Math.min(level, 6)}` as keyof JSX.IntrinsicElements
parts.push(
<HeaderTag
key={`header-${patternIdx}`}
className={`font-bold break-words block mt-4 mb-2 ${
level === 1 ? 'text-3xl' :
level === 2 ? 'text-2xl' :
level === 3 ? 'text-xl' :
level === 4 ? 'text-lg' :
level === 5 ? 'text-base' :
'text-sm'
}`}
>
{headerContent}
</HeaderTag>
)
} else if (pattern.type === 'horizontal-rule') {
parts.push(
<hr key={`hr-${patternIdx}`} className="my-4 border-t border-gray-300 dark:border-gray-700" />
)
} else if (pattern.type === 'bullet-list-item') {
const { text } = pattern.data
const listContent = parseInlineMarkdown(text, `bullet-${patternIdx}`, footnotes)
parts.push(
<li key={`bullet-${patternIdx}`} className="list-disc list-inside my-1">
{listContent}
</li>
)
} else if (pattern.type === 'numbered-list-item') {
const { text } = pattern.data
const listContent = parseInlineMarkdown(text, `numbered-${patternIdx}`, footnotes)
parts.push(
<li key={`numbered-${patternIdx}`} className="list-decimal list-inside my-1">
{listContent}
</li>
)
} else if (pattern.type === 'table') {
const { rows } = pattern.data
if (rows.length > 0) {
const headerRow = rows[0]
const dataRows = rows.slice(1)
parts.push(
<div key={`table-${patternIdx}`} className="my-4 overflow-x-auto">
<table className="min-w-full border-collapse border border-gray-300 dark:border-gray-700">
<thead>
<tr>
{headerRow.map((cell: string, cellIdx: number) => (
<th
key={`th-${patternIdx}-${cellIdx}`}
className="border border-gray-300 dark:border-gray-700 px-4 py-2 bg-gray-100 dark:bg-gray-800 font-semibold text-left"
>
{parseInlineMarkdown(cell, `table-header-${patternIdx}-${cellIdx}`, footnotes)}
</th>
))}
</tr>
</thead>
<tbody>
{dataRows.map((row: string[], rowIdx: number) => (
<tr key={`tr-${patternIdx}-${rowIdx}`}>
{row.map((cell: string, cellIdx: number) => (
<td
key={`td-${patternIdx}-${rowIdx}-${cellIdx}`}
className="border border-gray-300 dark:border-gray-700 px-4 py-2"
>
{parseInlineMarkdown(cell, `table-cell-${patternIdx}-${rowIdx}-${cellIdx}`, footnotes)}
</td>
))}
</tr>
))}
</tbody>
</table>
</div>
)
}
} else if (pattern.type === 'footnote-definition') {
// Don't render footnote definitions in the main content - they'll be rendered at the bottom
// Just skip this pattern
} else if (pattern.type === 'footnote-ref') {
const footnoteId = pattern.data
const footnoteText = footnotes.get(footnoteId)
if (footnoteText) {
parts.push(
<sup key={`footnote-ref-${patternIdx}`} className="footnote-ref">
<a
href={`#footnote-${footnoteId}`}
id={`footnote-ref-${footnoteId}`}
className="text-blue-600 dark:text-blue-400 hover:underline no-underline"
onClick={(e) => {
e.preventDefault()
const footnoteElement = document.getElementById(`footnote-${footnoteId}`)
if (footnoteElement) {
footnoteElement.scrollIntoView({ behavior: 'smooth', block: 'center' })
}
}}
>
[{footnoteId}]
</a>
</sup>
)
} else {
// Footnote not found, just render the reference as-is
parts.push(<span key={`footnote-ref-${patternIdx}`}>[^{footnoteId}]</span>)
}
} else if (pattern.type === 'nostr') {
const bech32Id = pattern.data
// Check if it's a profile type (mentions/handles should be inline)
if (bech32Id.startsWith('npub') || bech32Id.startsWith('nprofile')) {
parts.push(
<span key={`nostr-${i}`} className="inline-block">
<span key={`nostr-${patternIdx}`} className="inline-block">
<EmbeddedMention userId={bech32Id} />
</span>
)
} else if (bech32Id.startsWith('note') || bech32Id.startsWith('nevent') || bech32Id.startsWith('naddr')) {
// Embedded events should be block-level and fill width
parts.push(
<div key={`nostr-${i}`} className="w-full my-2">
<div key={`nostr-${patternIdx}`} className="w-full my-2">
<EmbeddedNote noteId={bech32Id} />
</div>
)
} else {
parts.push(<span key={`nostr-${i}`}>nostr:{bech32Id}</span>)
parts.push(<span key={`nostr-${patternIdx}`}>nostr:{bech32Id}</span>)
}
} else if (pattern.type === 'hashtag') {
const tag = pattern.data
@ -311,7 +646,7 @@ function parseMarkdownContent( @@ -311,7 +646,7 @@ function parseMarkdownContent(
hashtagsInContent.add(tagLower) // Track hashtags rendered inline
parts.push(
<a
key={`hashtag-${i}`}
key={`hashtag-${patternIdx}`}
href={`/notes?t=${tagLower}`}
className="inline text-green-600 dark:text-green-400 hover:text-green-700 dark:hover:text-green-300 hover:underline cursor-pointer"
onClick={(e) => {
@ -335,7 +670,7 @@ function parseMarkdownContent( @@ -335,7 +670,7 @@ function parseMarkdownContent(
const dtag = target.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/^-+|-+$/g, '')
parts.push(
<Wikilink key={`wikilink-${i}`} dTag={dtag} displayText={displayText} />
<Wikilink key={`wikilink-${patternIdx}`} dTag={dtag} displayText={displayText} />
)
}
@@ -346,16 +681,362 @@ function parseMarkdownContent(
if (lastIndex < content.length) {
const text = content.slice(lastIndex)
if (text) {
parts.push(<span key="text-end">{text}</span>)
// Process text for inline formatting
// But skip if this text is part of a table
const isInTable = blockLevelPatterns.some(p =>
p.type === 'table' &&
lastIndex >= p.index &&
lastIndex < p.end
)
if (!isInTable) {
parts.push(...parseInlineMarkdown(text, 'text-end', footnotes))
}
}
}
// If no patterns, just return the content as text (with inline formatting)
if (parts.length === 0) {
const formattedContent = parseInlineMarkdown(content, 'text-only', footnotes)
return { nodes: formattedContent, hashtagsInContent, footnotes }
}
// Wrap list items in <ul> or <ol> tags
const wrappedParts: React.ReactNode[] = []
let partIdx = 0
while (partIdx < parts.length) {
const part = parts[partIdx]
// Check if this is a list item
if (React.isValidElement(part) && part.type === 'li') {
// Determine if it's a bullet or numbered list
const isBullet = part.key && part.key.toString().startsWith('bullet-')
const isNumbered = part.key && part.key.toString().startsWith('numbered-')
if (isBullet || isNumbered) {
// Collect consecutive list items of the same type
const listItems: React.ReactNode[] = [part]
partIdx++
while (partIdx < parts.length) {
const nextPart = parts[partIdx]
if (React.isValidElement(nextPart) && nextPart.type === 'li') {
const nextIsBullet = nextPart.key && nextPart.key.toString().startsWith('bullet-')
const nextIsNumbered = nextPart.key && nextPart.key.toString().startsWith('numbered-')
if ((isBullet && nextIsBullet) || (isNumbered && nextIsNumbered)) {
listItems.push(nextPart)
partIdx++
} else {
break
}
} else {
break
}
}
// Wrap in <ul> or <ol>
if (isBullet) {
wrappedParts.push(
<ul key={`ul-${partIdx}`} className="list-disc list-inside my-2 space-y-1">
{listItems}
</ul>
)
} else {
wrappedParts.push(
<ol key={`ol-${partIdx}`} className="list-decimal list-inside my-2 space-y-1">
{listItems}
</ol>
)
}
continue
}
}
wrappedParts.push(part)
partIdx++
}
// Add footnotes section at the end if there are any footnotes
if (footnotes.size > 0) {
wrappedParts.push(
<div key="footnotes-section" className="mt-8 pt-4 border-t border-gray-300 dark:border-gray-700">
<h3 className="text-lg font-semibold mb-4">Footnotes</h3>
<ol className="list-decimal list-inside space-y-2">
{Array.from(footnotes.entries()).map(([id, text]) => (
<li
key={`footnote-${id}`}
id={`footnote-${id}`}
className="text-sm text-gray-700 dark:text-gray-300"
>
<span className="font-semibold">[{id}]:</span>{' '}
<span>{parseInlineMarkdown(text, `footnote-${id}`, footnotes)}</span>
{' '}
<a
href={`#footnote-ref-${id}`}
className="text-blue-600 dark:text-blue-400 hover:underline text-xs"
onClick={(e) => {
e.preventDefault()
const refElement = document.getElementById(`footnote-ref-${id}`)
if (refElement) {
refElement.scrollIntoView({ behavior: 'smooth', block: 'center' })
}
}}
>
</a>
</li>
))}
</ol>
</div>
)
}
return { nodes: wrappedParts, hashtagsInContent, footnotes }
}
/**
 * Parse inline markdown formatting (bold, italic, strikethrough, inline code).
 *
 * Supports:
 * - Bold: **text** or __text__ (double) or *text* (single asterisk — a
 *   deliberate deviation from CommonMark, where a single asterisk is italic)
 * - Italic: _text_ (single underscore; __text__ is claimed by bold first)
 * - Strikethrough: ~~text~~ (double tilde) or ~text~ (single tilde)
 * - Inline code: ``code`` (double backtick) or `code` (single backtick)
 *
 * Nesting (e.g. bold inside italic) is intentionally NOT supported: a
 * pattern's captured text is rendered verbatim.
 *
 * @param text       the raw text of a single inline run (no block structure)
 * @param keyPrefix  unique prefix for React keys so sibling runs don't collide
 * @param _footnotes footnote map, kept for signature compatibility with
 *                   callers; footnote references are handled at block level
 * @returns an array of React nodes; plain text is wrapped in <span>
 */
function parseInlineMarkdown(text: string, keyPrefix: string, _footnotes: Map<string, string> = new Map()): React.ReactNode[] {
  const parts: React.ReactNode[] = []
  let lastIndex = 0
  const inlinePatterns: Array<{ index: number; end: number; type: string; data: any }> = []

  // Run one regex pass over `text`, recording a pattern for every match whose
  // START index is not already inside a previously collected pattern of one
  // of the `conflictTypes`. Pass order encodes precedence: code beats bold,
  // bold beats strikethrough, strikethrough beats italic. (This replaces six
  // copy-pasted loops that differed only in regex, type tag and conflict set.)
  const collect = (regex: RegExp, type: string, conflictTypes: string[]) => {
    for (const match of text.matchAll(regex)) {
      if (match.index === undefined) continue
      const conflicts = inlinePatterns.some(p =>
        conflictTypes.includes(p.type) &&
        match.index! >= p.index &&
        match.index! < p.end
      )
      if (!conflicts) {
        inlinePatterns.push({
          index: match.index,
          end: match.index + match[0].length,
          type,
          data: match[1]
        })
      }
    }
  }

  // Inline code first so nothing inside backticks gets re-styled.
  collect(/``([^`\n]+?)``/g, 'code', [])
  collect(/`([^`\n]+?)`/g, 'code', ['code'])
  // Bold: **text**, __text__, and (by design in this renderer) *text*.
  // Lookarounds on the single-asterisk form keep it from eating ** pairs.
  collect(/\*\*(.+?)\*\*/g, 'bold', ['code'])
  collect(/__(.+?)__/g, 'bold', ['code', 'bold'])
  collect(/(?<!\*)\*([^*\n]+?)\*(?!\*)/g, 'bold', ['code', 'bold', 'strikethrough'])
  // Strikethrough: ~~text~~ and ~text~.
  collect(/~~(.+?)~~/g, 'strikethrough', ['code', 'bold'])
  collect(/(?<!~)~([^~\n]+?)~(?!~)/g, 'strikethrough', ['code', 'bold', 'strikethrough'])
  // Italic: _text_ only; __text__ was already consumed as bold above.
  collect(/(?<!_)_([^_\n]+?)_(?!_)/g, 'italic', ['code', 'bold', 'strikethrough'])

  // Sort by position, then drop any pattern that overlaps an earlier one
  // (first-come wins), so the emitted nodes tile the string left to right.
  inlinePatterns.sort((a, b) => a.index - b.index)
  const filtered: typeof inlinePatterns = []
  let lastEnd = 0
  inlinePatterns.forEach(pattern => {
    if (pattern.index >= lastEnd) {
      filtered.push(pattern)
      lastEnd = pattern.end
    }
  })

  // Emit React nodes: plain-text spans between patterns, styled elements for
  // the patterns themselves.
  filtered.forEach((pattern, i) => {
    // Plain text between the previous pattern and this one.
    if (pattern.index > lastIndex) {
      const textBefore = text.slice(lastIndex, pattern.index)
      if (textBefore) {
        parts.push(<span key={`${keyPrefix}-inline-text-${i}`}>{textBefore}</span>)
      }
    }
    if (pattern.type === 'bold') {
      parts.push(<strong key={`${keyPrefix}-bold-${i}`}>{pattern.data}</strong>)
    } else if (pattern.type === 'italic') {
      parts.push(<em key={`${keyPrefix}-italic-${i}`}>{pattern.data}</em>)
    } else if (pattern.type === 'strikethrough') {
      parts.push(<del key={`${keyPrefix}-strikethrough-${i}`} className="line-through">{pattern.data}</del>)
    } else if (pattern.type === 'code') {
      parts.push(
        <code key={`${keyPrefix}-code-${i}`} className="bg-muted px-1 py-0.5 rounded text-sm font-mono">
          {pattern.data}
        </code>
      )
    }
    lastIndex = pattern.end
  })

  // Trailing plain text after the last pattern.
  if (lastIndex < text.length) {
    const remaining = text.slice(lastIndex)
    if (remaining) {
      parts.push(<span key={`${keyPrefix}-inline-text-final`}>{remaining}</span>)
    }
  }

  // No formatting found: return the whole run as one plain span.
  if (parts.length === 0) {
    return [<span key={`${keyPrefix}-plain`}>{text}</span>]
  }
  return parts
}
export default function MarkdownArticle({
@@ -556,13 +1237,15 @@ export default function MarkdownArticle({
// Parse markdown content with post-processing for nostr: links and hashtags
const { nodes: parsedContent, hashtagsInContent } = useMemo(() => {
return parseMarkdownContent(preprocessedContent, {
const result = parseMarkdownContent(preprocessedContent, {
eventPubkey: event.pubkey,
imageIndexMap,
openLightbox,
navigateToHashtag,
navigateToRelay
})
// Return nodes and hashtags (footnotes are already included in nodes)
return { nodes: result.nodes, hashtagsInContent: result.hashtagsInContent }
}, [preprocessedContent, event.pubkey, imageIndexMap, openLightbox, navigateToHashtag, navigateToRelay])
// Filter metadata tags to only show what's not already in content
@ -574,43 +1257,43 @@ export default function MarkdownArticle({ @@ -574,43 +1257,43 @@ export default function MarkdownArticle({
<>
<div className={`prose prose-zinc max-w-none dark:prose-invert break-words overflow-wrap-anywhere ${className || ''}`}>
{/* Metadata */}
{!hideMetadata && metadata.title && <h1 className="break-words">{metadata.title}</h1>}
{!hideMetadata && metadata.summary && (
<blockquote>
<p className="break-words">{metadata.summary}</p>
</blockquote>
)}
{hideMetadata && metadata.title && event.kind !== ExtendedKind.DISCUSSION && (
<h2 className="text-2xl font-bold mb-4 leading-tight break-words">{metadata.title}</h2>
)}
{!hideMetadata && metadata.title && <h1 className="break-words">{metadata.title}</h1>}
{!hideMetadata && metadata.summary && (
<blockquote>
<p className="break-words">{metadata.summary}</p>
</blockquote>
)}
{hideMetadata && metadata.title && event.kind !== ExtendedKind.DISCUSSION && (
<h2 className="text-2xl font-bold mb-4 leading-tight break-words">{metadata.title}</h2>
)}
{/* Metadata image */}
{!hideMetadata && metadata.image && (() => {
const cleanedMetadataImage = cleanUrl(metadata.image)
{!hideMetadata && metadata.image && (() => {
const cleanedMetadataImage = cleanUrl(metadata.image)
// Don't show if already in content
if (cleanedMetadataImage && mediaUrlsInContent.has(cleanedMetadataImage)) {
return null
}
const metadataImageIndex = imageIndexMap.get(cleanedMetadataImage)
return (
<Image
image={{ url: metadata.image, pubkey: event.pubkey }}
className="max-w-[400px] w-full h-auto my-0 cursor-zoom-in"
classNames={{
wrapper: 'rounded-lg',
errorPlaceholder: 'aspect-square h-[30vh]'
}}
onClick={(e) => {
e.stopPropagation()
return (
<Image
image={{ url: metadata.image, pubkey: event.pubkey }}
className="max-w-[400px] w-full h-auto my-0 cursor-zoom-in"
classNames={{
wrapper: 'rounded-lg',
errorPlaceholder: 'aspect-square h-[30vh]'
}}
onClick={(e) => {
e.stopPropagation()
if (metadataImageIndex !== undefined) {
openLightbox(metadataImageIndex)
}
}}
/>
)
})()}
}
}}
/>
)
})()}
{/* Media from tags (only if not in content) */}
{leftoverTagMedia.length > 0 && (
@ -636,7 +1319,7 @@ export default function MarkdownArticle({ @@ -636,7 +1319,7 @@ export default function MarkdownArticle({
}
}}
/>
</div>
</div>
)
} else if (media.type === 'video' || media.type === 'audio') {
return (
@ -651,9 +1334,9 @@ export default function MarkdownArticle({ @@ -651,9 +1334,9 @@ export default function MarkdownArticle({
}
return null
})}
</div>
)}
</div>
)}
{/* Parsed content */}
<div className="break-words whitespace-pre-wrap">
{parsedContent}
@ -661,7 +1344,7 @@ export default function MarkdownArticle({ @@ -661,7 +1344,7 @@ export default function MarkdownArticle({
{/* Hashtags from metadata (only if not already in content) */}
{leftoverMetadataTags.length > 0 && (
<div className="flex gap-2 flex-wrap pb-2 mt-4">
<div className="flex gap-2 flex-wrap pb-2 mt-4">
{leftoverMetadataTags.map((tag) => (
<div
key={tag}
@ -675,18 +1358,18 @@ export default function MarkdownArticle({ @@ -675,18 +1358,18 @@ export default function MarkdownArticle({
#<span className="truncate">{tag}</span>
</div>
))}
</div>
)}
</div>
)}
{/* WebPreview cards for links from tags (only if not already in content) */}
{/* Note: Links in content are already rendered as green hyperlinks above, so we don't show WebPreview for them */}
{leftoverTagLinks.length > 0 && (
<div className="space-y-3 mt-6">
{leftoverTagLinks.map((url, index) => (
<WebPreview key={`tag-${index}-${url}`} url={url} className="w-full" />
))}
</div>
)}
<WebPreview key={`tag-${index}-${url}`} url={url} className="w-full" />
))}
</div>
)}
</div>
{/* Image gallery lightbox */}

2
src/components/Note/PublicationIndex/PublicationIndex.tsx

@@ -1225,7 +1225,7 @@ export default function PublicationIndex({
ToC
</Button>
)}
<MarkdownArticle event={ref.event} showImageGallery={false} hideMetadata={true} />
<MarkdownArticle event={ref.event} hideMetadata={true} />
</div>
)
} else {

189
src/components/ReplyNoteList/index.tsx

@ -21,6 +21,7 @@ import { useReply } from '@/providers/ReplyProvider' @@ -21,6 +21,7 @@ import { useReply } from '@/providers/ReplyProvider'
import { useUserTrust } from '@/providers/UserTrustProvider'
import client from '@/services/client.service'
import noteStatsService from '@/services/note-stats.service'
import discussionFeedCache from '@/services/discussion-feed-cache.service'
import { Filter, Event as NEvent, kinds } from 'nostr-tools'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
@ -255,81 +256,145 @@ function ReplyNoteList({ index, event, sort = 'oldest' }: { index?: number; even @@ -255,81 +256,145 @@ function ReplyNoteList({ index, event, sort = 'oldest' }: { index?: number; even
if (!rootInfo || currentIndex !== index) return
const init = async () => {
setLoading(true)
logger.debug('[ReplyNoteList] Fetching replies for root:', rootInfo)
try {
// Privacy: Only use user's own relays + defaults, never connect to other users' relays
const userReadRelays = userRelayList?.read || []
const userWriteRelays = userRelayList?.write || []
const finalRelayUrls = Array.from(new Set([
...FAST_READ_RELAY_URLS.map(url => normalizeUrl(url) || url), // Fast, well-connected relays
...userReadRelays.map(url => normalizeUrl(url) || url), // User's read relays
...userWriteRelays.map(url => normalizeUrl(url) || url) // User's write relays
]))
const filters: Filter[] = []
if (rootInfo.type === 'E') {
// Fetch all reply types for event-based replies
filters.push({
'#e': [rootInfo.id],
kinds: [kinds.ShortTextNote, ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: LIMIT
})
// Also fetch with uppercase E tag for replaceable events
filters.push({
'#E': [rootInfo.id],
kinds: [ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: LIMIT
})
// For public messages (kind 24), also look for replies using 'q' tags
if (event.kind === ExtendedKind.PUBLIC_MESSAGE) {
// Check cache first - get cached data even if stale (for instant display)
const cachedData = discussionFeedCache.getCachedReplies(rootInfo)
const hasFreshCache = discussionFeedCache.hasFreshCache(rootInfo)
const hasCache = cachedData !== null
if (hasCache) {
logger.debug('[ReplyNoteList] Found cached replies:', cachedData.length, 'replies', hasFreshCache ? '(fresh)' : '(stale)')
// Display cached data immediately (even if stale) for instant switching
addReplies(cachedData)
setLoading(false)
} else {
// No cache at all, show loading while fetching
logger.debug('[ReplyNoteList] No cache found, fetching from relays')
setLoading(true)
}
// Always fetch fresh data from relays to update cache
// If we have fresh cache, we can skip fetching (but still do it in background after a delay)
// If we have stale cache or no cache, fetch immediately
if (hasFreshCache) {
// Fresh cache: fetch in background after a short delay to avoid unnecessary requests
setTimeout(() => {
fetchFromRelays()
}, 2000) // Wait 2 seconds before background refresh
} else {
// Stale or no cache: fetch immediately
fetchFromRelays()
}
async function fetchFromRelays() {
if (!rootInfo) return // Type guard
try {
// Privacy: Only use user's own relays + defaults, never connect to other users' relays
const userReadRelays = userRelayList?.read || []
const userWriteRelays = userRelayList?.write || []
const finalRelayUrls = Array.from(new Set([
...FAST_READ_RELAY_URLS.map(url => normalizeUrl(url) || url), // Fast, well-connected relays
...userReadRelays.map(url => normalizeUrl(url) || url), // User's read relays
...userWriteRelays.map(url => normalizeUrl(url) || url) // User's write relays
]))
const filters: Filter[] = []
if (rootInfo.type === 'E') {
// Fetch all reply types for event-based replies
filters.push({
'#q': [rootInfo.id],
kinds: [ExtendedKind.PUBLIC_MESSAGE],
limit: LIMIT
})
}
} else if (rootInfo.type === 'A') {
// Fetch all reply types for replaceable event-based replies
filters.push(
{
'#a': [rootInfo.id],
'#e': [rootInfo.id],
kinds: [kinds.ShortTextNote, ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: LIMIT
},
{
'#A': [rootInfo.id],
})
// Also fetch with uppercase E tag for replaceable events
filters.push({
'#E': [rootInfo.id],
kinds: [ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: LIMIT
})
// For public messages (kind 24), also look for replies using 'q' tags
if (event.kind === ExtendedKind.PUBLIC_MESSAGE) {
filters.push({
'#q': [rootInfo.id],
kinds: [ExtendedKind.PUBLIC_MESSAGE],
limit: LIMIT
})
}
} else if (rootInfo.type === 'A') {
// Fetch all reply types for replaceable event-based replies
filters.push(
{
'#a': [rootInfo.id],
kinds: [kinds.ShortTextNote, ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: LIMIT
},
{
'#A': [rootInfo.id],
kinds: [ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: LIMIT
}
)
if (rootInfo.relay) {
finalRelayUrls.push(rootInfo.relay)
}
)
if (rootInfo.relay) {
finalRelayUrls.push(rootInfo.relay)
}
}
logger.debug('[ReplyNoteList] Using filters:', filters)
logger.debug('[ReplyNoteList] Using relays:', finalRelayUrls.length)
logger.debug('[ReplyNoteList] Using filters:', filters)
logger.debug('[ReplyNoteList] Using relays:', finalRelayUrls.length)
// Use fetchEvents instead of subscribeTimeline for one-time fetching
const allReplies = await client.fetchEvents(finalRelayUrls, filters)
logger.debug('[ReplyNoteList] Fetched', allReplies.length, 'replies')
// Filter and add replies
const regularReplies = allReplies.filter((evt) => isReplyNoteEvent(evt))
addReplies(regularReplies)
setLoading(false)
} catch (error) {
logger.error('[ReplyNoteList] Error fetching replies:', error)
setLoading(false)
// Use fetchEvents instead of subscribeTimeline for one-time fetching
const allReplies = await client.fetchEvents(finalRelayUrls, filters)
logger.debug('[ReplyNoteList] Fetched', allReplies.length, 'replies')
// Filter and add replies
const regularReplies = allReplies.filter((evt) => isReplyNoteEvent(evt))
// Store in cache (this merges with existing cached replies)
// After this call, the cache contains ALL replies we've ever seen for this thread
discussionFeedCache.setCachedReplies(rootInfo, regularReplies)
// Get the merged cache (which includes all replies we've ever seen, including new ones)
const mergedCachedReplies = discussionFeedCache.getCachedReplies(rootInfo)
// Always add all merged cached replies to UI
// This ensures we keep all previously seen replies and add any new ones
// addReplies will deduplicate, so it's safe to call even if some replies are already displayed
if (mergedCachedReplies) {
logger.debug('[ReplyNoteList] Adding merged cached replies to UI:', mergedCachedReplies.length, 'total replies')
addReplies(mergedCachedReplies)
} else {
// Fallback: if cache somehow failed, at least add the fetched replies
logger.warn('[ReplyNoteList] Cache returned null after store, using fetched replies only')
addReplies(regularReplies)
}
if (!hasCache) {
// No cache: stop loading after adding replies
setLoading(false)
} else {
// Background refresh: check if we got new replies
const cachedReplyIds = new Set(cachedData!.map(r => r.id))
const hasNewReplies = regularReplies.some(r => !cachedReplyIds.has(r.id))
if (hasNewReplies) {
logger.debug('[ReplyNoteList] Background refresh found new replies, UI updated')
} else {
logger.debug('[ReplyNoteList] Background refresh: no new replies, existing replies preserved')
}
}
} catch (error) {
logger.error('[ReplyNoteList] Error fetching replies:', error)
if (!hasCache) {
// Only set loading to false if we don't have cache to fall back on
setLoading(false)
}
}
}
}
init()
}, [rootInfo, currentIndex, index])
}, [rootInfo, currentIndex, index, userRelayList, event.kind, addReplies])
useEffect(() => {
if (replies.length === 0 && !loading && timelineKey) {

212
src/pages/primary/DiscussionsPage/index.tsx

@ -1,4 +1,4 @@ @@ -1,4 +1,4 @@
import { forwardRef, useEffect, useState, useMemo, useCallback } from 'react'
import { forwardRef, useEffect, useState, useMemo, useCallback, useRef } from 'react'
import { useTranslation } from 'react-i18next'
import { RefreshCw, Search } from 'lucide-react'
import { useNostr } from '@/providers/NostrProvider'
@ -11,6 +11,7 @@ import { kinds } from 'nostr-tools' @@ -11,6 +11,7 @@ import { kinds } from 'nostr-tools'
import { normalizeUrl } from '@/lib/url'
import { BIG_RELAY_URLS, FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants'
import client from '@/services/client.service'
import discussionFeedCache from '@/services/discussion-feed-cache.service'
import { DISCUSSION_TOPICS } from './CreateThreadDialog'
import ThreadCard from './ThreadCard'
import CreateThreadDialog from './CreateThreadDialog'
@ -352,6 +353,10 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -352,6 +353,10 @@ const DiscussionsPage = forwardRef((_, ref) => {
allTopics: DynamicTopic[]
}>({ mainTopics: [], subtopics: [], allTopics: [] })
// Track if we've initialized to prevent re-fetching on re-renders
const hasInitializedRef = useRef(false)
const isFetchingRef = useRef(false)
// Build comprehensive relay list (same as pins)
const buildComprehensiveRelayList = useCallback(async () => {
const myRelayList = pubkey ? await client.fetchRelayList(pubkey) : { write: [], read: [] }
@ -379,13 +384,32 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -379,13 +384,32 @@ const DiscussionsPage = forwardRef((_, ref) => {
}, []) // No dependencies - will be called fresh each time from fetchAllEvents
// Fetch all events
const fetchAllEvents = useCallback(async () => {
if (loading) return
setLoading(true)
setIsRefreshing(true)
const fetchAllEvents = useCallback(async (forceRefresh = false) => {
if (isFetchingRef.current && !forceRefresh) return
isFetchingRef.current = true
// Check cache first (unless forcing refresh)
let hasCachedData = false
if (!forceRefresh) {
const cachedData = discussionFeedCache.getCachedDiscussionsList()
if (cachedData) {
logger.debug('[DiscussionsPage] Using cached discussions list:', cachedData.eventMap.size, 'threads')
setAllEventMap(cachedData.eventMap)
setDynamicTopics(cachedData.dynamicTopics)
setLoading(false) // Display cached data immediately
hasCachedData = true
// Still fetch in background to update cache (but don't show loading or refreshing)
} else {
setLoading(true)
setIsRefreshing(true)
}
} else {
setLoading(true)
setIsRefreshing(true)
}
try {
logger.debug('[DiscussionsPage] Fetching all discussion threads...')
logger.debug('[DiscussionsPage] Fetching all discussion threads...', forceRefresh ? '(forced refresh)' : '')
// Get comprehensive relay list
const allRelays = await buildComprehensiveRelayList()
@ -409,27 +433,41 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -409,27 +433,41 @@ const DiscussionsPage = forwardRef((_, ref) => {
})))
}
// Step 2: Get thread IDs and fetch related comments and reactions
// Step 2: Get thread IDs for comment/reaction fetching
// Get cached data first to include cached thread IDs in the fetch
// We ALWAYS include cached thread IDs to get updated counts for all threads we know about
const cachedDataBeforeFetch = discussionFeedCache.getCachedDiscussionsList()
const threadIds = discussionThreads.map((thread: NostrEvent) => thread.id)
const allThreadIds = new Set(threadIds)
// Add cached thread IDs to fetch comments/reactions for all threads we know about
// This ensures we get updated counts for cached threads too, regardless of whether we're refreshing
if (cachedDataBeforeFetch) {
cachedDataBeforeFetch.eventMap.forEach((_entry, threadId) => {
allThreadIds.add(threadId)
})
}
const allThreadIdsArray = Array.from(allThreadIds)
const [comments, reactions] = await Promise.all([
threadIds.length > 0 ? client.fetchEvents(allRelays, [
allThreadIdsArray.length > 0 ? client.fetchEvents(allRelays, [
{
kinds: [1111], // ExtendedKind.COMMENT
'#e': threadIds,
'#e': allThreadIdsArray,
limit: 100
}
]) : Promise.resolve([]),
threadIds.length > 0 ? client.fetchEvents(allRelays, [
allThreadIdsArray.length > 0 ? client.fetchEvents(allRelays, [
{
kinds: [kinds.Reaction],
'#e': threadIds,
'#e': allThreadIdsArray,
limit: 100
}
]) : Promise.resolve([])
])
logger.debug('[DiscussionsPage] Fetched', comments.length, 'comments and', reactions.length, 'reactions')
logger.debug('[DiscussionsPage] Fetched', comments.length, 'comments and', reactions.length, 'reactions for', allThreadIdsArray.length, 'threads (', threadIds.length, 'new,', (cachedDataBeforeFetch?.eventMap.size || 0), 'cached)')
// Debug: Log some reaction details
if (reactions.length > 0) {
@ -441,14 +479,14 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -441,14 +479,14 @@ const DiscussionsPage = forwardRef((_, ref) => {
})))
}
// Step 3: Build event map with vote and comment counts
// Step 3: Build event map with vote and comment counts for newly fetched threads
const newEventMap = new Map<string, EventMapEntry>()
discussionThreads.forEach((thread: NostrEvent) => {
const threadId = thread.id
const threadAuthor = thread.pubkey
// Count votes and comments
// Count votes and comments for this thread
const voteStats = countVotesForThread(threadId, reactions, threadAuthor)
const commentStats = countCommentsForThread(threadId, comments, threadAuthor)
@ -498,7 +536,7 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -498,7 +536,7 @@ const DiscussionsPage = forwardRef((_, ref) => {
})
})
logger.debug('[DiscussionsPage] Built event map with', newEventMap.size, 'threads')
logger.debug('[DiscussionsPage] Built event map with', newEventMap.size, 'new threads')
// Log vote counts for debugging
newEventMap.forEach((entry, threadId) => {
@ -507,34 +545,112 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -507,34 +545,112 @@ const DiscussionsPage = forwardRef((_, ref) => {
}
})
// Analyze dynamic topics only if we have new data
let dynamicTopicsAnalysis: { mainTopics: DynamicTopic[]; subtopics: DynamicTopic[]; allTopics: DynamicTopic[] } = { mainTopics: [], subtopics: [], allTopics: [] }
if (newEventMap.size > 0) {
dynamicTopicsAnalysis = analyzeDynamicTopics(Array.from(newEventMap.values()))
setDynamicTopics(dynamicTopicsAnalysis)
// Start with cached threads (if any) to preserve all threads we've ever seen
// This ensures thread counts and topic counts don't go down when different relays return different subsets
const allThreadsMap = new Map<string, EventMapEntry>()
// First, add all cached threads to preserve them
// CRITICAL: Always preserve cached threads, even if they're not in the new fetch
if (cachedDataBeforeFetch) {
logger.debug('[DiscussionsPage] Preserving', cachedDataBeforeFetch.eventMap.size, 'cached threads')
cachedDataBeforeFetch.eventMap.forEach((entry, threadId) => {
allThreadsMap.set(threadId, { ...entry }) // Create a copy to avoid mutations
})
}
// Update event map with enhanced topic categorization
const updatedEventMap = new Map<string, EventMapEntry>()
// Then, add or update with newly fetched threads
// New threads will be added, existing threads will be updated with fresh data
newEventMap.forEach((entry, threadId) => {
allThreadsMap.set(threadId, { ...entry }) // Always use the fresh data from new fetch
})
const threadsBeforeCountUpdate = allThreadsMap.size
logger.debug('[DiscussionsPage] Total threads after merge:', threadsBeforeCountUpdate, '(cached:', cachedDataBeforeFetch?.eventMap.size || 0, '+ new:', newEventMap.size, ', overlaps:', (cachedDataBeforeFetch?.eventMap.size || 0) + newEventMap.size - threadsBeforeCountUpdate, ')')
// Now update comment/vote counts for ALL threads using fresh comments/reactions
// This ensures cached threads get updated counts from the latest fetch
const finalEventMap = new Map<string, EventMapEntry>()
allThreadsMap.forEach((entry, threadId) => {
const thread = entry.event
const threadAuthor = thread.pubkey
// Count votes and comments for this thread (using all fetched comments/reactions)
const voteStats = countVotesForThread(threadId, reactions, threadAuthor)
const commentStats = countCommentsForThread(threadId, comments, threadAuthor)
// Update the entry with latest counts, but preserve all other data
finalEventMap.set(threadId, {
...entry,
commentCount: commentStats.commentCount,
lastCommentTime: commentStats.lastCommentTime,
lastVoteTime: voteStats.lastVoteTime,
upVotes: voteStats.upVotes,
downVotes: voteStats.downVotes
})
})
logger.debug('[DiscussionsPage] Final event map has', finalEventMap.size, 'threads after count update')
// Analyze dynamic topics from ALL threads (cached + new)
const dynamicTopicsAnalysis = analyzeDynamicTopics(Array.from(finalEventMap.values()))
// Update event map with enhanced topic categorization for all threads
const categorizedEventMap = new Map<string, EventMapEntry>()
finalEventMap.forEach((entry, threadId) => {
const predefinedTopicIds = DISCUSSION_TOPICS.map((t: any) => t.id)
const enhancedTopic = getEnhancedTopicFromTags(entry.allTopics, predefinedTopicIds, dynamicTopicsAnalysis.allTopics, entry.isGroupDiscussion)
updatedEventMap.set(threadId, {
categorizedEventMap.set(threadId, {
...entry,
categorizedTopic: enhancedTopic
})
})
setAllEventMap(updatedEventMap)
logger.debug('[DiscussionsPage] Categorized event map has', categorizedEventMap.size, 'threads')
// Store final merged and categorized event map in cache
// IMPORTANT: We've already manually merged all cached + new threads above
// So categorizedEventMap contains ALL threads we want to preserve
// We store with merge=false because we've already done the merge manually
// This ensures we don't lose threads due to the cache service's merge logic
const expectedThreadCount = categorizedEventMap.size
discussionFeedCache.setCachedDiscussionsList(categorizedEventMap, dynamicTopicsAnalysis, false)
// Verify the cache has all our threads (immediately after storing)
const cachedAfterStore = discussionFeedCache.getCachedDiscussionsList()
if (cachedAfterStore) {
const actualThreadCount = cachedAfterStore.eventMap.size
logger.debug('[DiscussionsPage] Cache verification - stored:', expectedThreadCount, 'threads, cache has:', actualThreadCount, 'threads')
if (actualThreadCount !== expectedThreadCount) {
logger.error('[DiscussionsPage] ERROR: Thread count mismatch! Expected', expectedThreadCount, 'but cache has', actualThreadCount)
// If we lost threads, try to recover by storing again with the categorized map
// This shouldn't happen, but if it does, at least log it
}
} else {
logger.error('[DiscussionsPage] ERROR: Cache returned null after storing!')
}
// Always update state with the merged and categorized event map
// This ensures we show all threads we've ever seen, with updated counts
setAllEventMap(categorizedEventMap)
setDynamicTopics(dynamicTopicsAnalysis)
logger.debug('[DiscussionsPage] Updated UI with', categorizedEventMap.size, 'threads (merged from cache and new fetch)')
} catch (error) {
logger.error('[DiscussionsPage] Error fetching events:', error)
// If we had cached data and fetch failed, at least we have something to show
if (!hasCachedData) {
setLoading(false)
}
} finally {
setLoading(false)
if (!hasCachedData || forceRefresh) {
setLoading(false)
}
setIsRefreshing(false)
isFetchingRef.current = false
}
}, []) // Only run when explicitly called (mount or refresh button)
}, [buildComprehensiveRelayList]) // Only depend on buildComprehensiveRelayList
// Calculate time span counts
const calculateTimeSpanCounts = useCallback(() => {
@ -619,8 +735,15 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -619,8 +735,15 @@ const DiscussionsPage = forwardRef((_, ref) => {
// Effects
useEffect(() => {
fetchAllEvents()
}, []) // Only run once on mount
// Only initialize once
if (hasInitializedRef.current) {
logger.debug('[DiscussionsPage] Already initialized, skipping fetch')
return
}
hasInitializedRef.current = true
fetchAllEvents(false) // Don't force refresh on mount - use cache if available
}, [fetchAllEvents])
useEffect(() => {
if (allEventMap.size > 0) {
@ -635,10 +758,37 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -635,10 +758,37 @@ const DiscussionsPage = forwardRef((_, ref) => {
}, [allEventMap, timeSpan, selectedTopic]) // Run when allEventMap, timeSpan, or selectedTopic changes
// Get available topics sorted by most recent activity (including dynamic topics)
// Topic counts are calculated based on the current time span filter
const availableTopics = useMemo(() => {
const topicMap = new Map<string, { count: number, lastActivity: number, isDynamic: boolean, isMainTopic: boolean, isSubtopic: boolean }>()
// Calculate time span filter
const now = Date.now()
const timeSpanAgo = timeSpan === '30days' ? now - (30 * 24 * 60 * 60 * 1000) :
timeSpan === '90days' ? now - (90 * 24 * 60 * 60 * 1000) : 0
allEventMap.forEach((entry) => {
// Filter by time span - only count topics for threads that match the time filter
let passesTimeFilter = false
if (timeSpan === 'all') {
passesTimeFilter = true
} else {
const threadTime = entry.event.created_at * 1000
const lastCommentTime = entry.lastCommentTime > 0 ? entry.lastCommentTime * 1000 : 0
const lastVoteTime = entry.lastVoteTime > 0 ? entry.lastVoteTime * 1000 : 0
const mostRecentActivity = Math.max(
threadTime,
lastCommentTime,
lastVoteTime
)
passesTimeFilter = mostRecentActivity > timeSpanAgo
}
// Only count topics for threads that pass the time filter
if (!passesTimeFilter) return
const topic = entry.categorizedTopic
const lastActivity = Math.max(
entry.event.created_at * 1000,
@ -666,7 +816,7 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -666,7 +816,7 @@ const DiscussionsPage = forwardRef((_, ref) => {
return Array.from(topicMap.entries())
.map(([topic, data]) => ({ topic, ...data }))
.sort((a, b) => b.lastActivity - a.lastActivity)
}, [allEventMap, dynamicTopics])
}, [allEventMap, dynamicTopics, timeSpan]) // Include timeSpan in dependencies
// State for search results
const [searchedEntries, setSearchedEntries] = useState<EventMapEntry[]>([])
@ -804,7 +954,7 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -804,7 +954,7 @@ const DiscussionsPage = forwardRef((_, ref) => {
// Handle refresh
const handleRefresh = () => {
fetchAllEvents()
fetchAllEvents(true) // Force refresh when user clicks refresh button
}
// Handle create thread
@ -903,7 +1053,7 @@ const DiscussionsPage = forwardRef((_, ref) => { @@ -903,7 +1053,7 @@ const DiscussionsPage = forwardRef((_, ref) => {
onChange={(e) => setSelectedTopic(e.target.value)}
className="w-full sm:w-auto px-3 py-2 bg-white dark:bg-gray-800 text-black dark:text-white border border-gray-300 dark:border-gray-600 rounded-md shadow-sm focus:ring-2 focus:ring-blue-500 focus:border-blue-500"
>
<option value="all">All Topics ({allEventMap.size})</option>
<option value="all">All Topics ({timeSpanCounts[timeSpan]})</option>
{availableTopics.map(({ topic, count, isDynamic, isMainTopic, isSubtopic }) => {
const isGroupsTopic = topic === 'groups'
return (

308
src/services/discussion-feed-cache.service.ts

@ -0,0 +1,308 @@ @@ -0,0 +1,308 @@
import { Event as NEvent } from 'nostr-tools'
import logger from '@/lib/logger'
// Cached replies for one discussion thread.
interface CachedThreadData {
  // All unique replies seen so far for this thread (merged across fetches by event id).
  replies: NEvent[]
  // Epoch ms when this entry was last written; drives TTL staleness checks.
  timestamp: number
  // Descriptor of the thread root this entry was stored under; re-checked on
  // read so a key collision or changed thread structure evicts the entry.
  rootInfo: {
    // 'E' = plain event root, 'A' = replaceable-address root (see the '#e'/'#a'
    // filters in ReplyNoteList); 'I' presumably an external-id root — TODO confirm.
    type: 'E' | 'A' | 'I'
    id: string
    pubkey?: string
    eventId?: string
    relay?: string
  }
}

// Cached snapshot of the discussions-list page data.
interface CachedDiscussionsListData {
  // Thread id -> event-map entry. Entry shape is owned by DiscussionsPage
  // (EventMapEntry), hence the deliberately loose `any` here.
  eventMap: Map<string, any>
  // Topic analysis computed from the event map (shapes owned by the caller).
  dynamicTopics: {
    mainTopics: any[]
    subtopics: any[]
    allTopics: any[]
  }
  // Epoch ms when this snapshot was written; drives TTL staleness checks.
  timestamp: number
}
/**
 * Cache service for discussion feed data: per-thread replies/comments plus a
 * single-slot cache for the discussions-list page.
 *
 * Both caches live in memory only (lost on reload) and use timestamp-based
 * expiration. Stale entries are deliberately still returned to callers so the
 * UI can render instantly while a background refresh runs; callers check
 * freshness via hasFreshCache() / hasFreshDiscussionsListCache().
 */
class DiscussionFeedCacheService {
  private static instance: DiscussionFeedCacheService

  // Per-thread reply cache, keyed by `thread:<type>:<id>` (see getCacheKey).
  private cache: Map<string, CachedThreadData> = new Map()
  // Single-slot cache for the discussions-list page.
  private discussionsListCache: CachedDiscussionsListData | null = null
  // Write counter driving the deterministic periodic cleanup in setCachedReplies.
  private setOpCount = 0

  private readonly CACHE_TTL_MS = 5 * 60 * 1000 // 5 minutes
  private readonly DISCUSSIONS_LIST_CACHE_TTL_MS = 2 * 60 * 1000 // 2 minutes for discussions list

  /** Lazily create and return the app-wide singleton. */
  static getInstance(): DiscussionFeedCacheService {
    if (!DiscussionFeedCacheService.instance) {
      DiscussionFeedCacheService.instance = new DiscussionFeedCacheService()
    }
    return DiscussionFeedCacheService.instance
  }

  /**
   * Build the cache key for a thread. The key embeds the root type so an 'E'
   * (event) root and an 'A' (replaceable address) root with the same id never
   * collide. rootInfo.type is a closed union ('E' | 'A' | 'I'), so direct
   * interpolation covers every case — the previous if/else chain's
   * 'thread:unknown:' fallback was unreachable.
   */
  private getCacheKey(rootInfo: CachedThreadData['rootInfo']): string {
    return `thread:${rootInfo.type}:${rootInfo.id}`
  }

  /**
   * True when a cached entry was stored for the same root the caller is asking
   * about (guards against reuse after the thread structure changed).
   */
  private rootInfoMatches(
    cachedData: CachedThreadData,
    rootInfo: CachedThreadData['rootInfo']
  ): boolean {
    return cachedData.rootInfo.type === rootInfo.type && cachedData.rootInfo.id === rootInfo.id
  }

  /** True when a thread entry is older than CACHE_TTL_MS. */
  private isStale(cachedData: CachedThreadData): boolean {
    return Date.now() - cachedData.timestamp > this.CACHE_TTL_MS
  }

  /**
   * Get cached replies for a thread.
   *
   * Returns null on a miss (or on a rootInfo mismatch, which also evicts the
   * bad entry). On a hit, returns the replies even when the entry is stale so
   * the caller can display instantly and refresh in the background. Returns a
   * defensive copy so callers cannot mutate the cache's internal array.
   */
  getCachedReplies(rootInfo: CachedThreadData['rootInfo']): NEvent[] | null {
    const cacheKey = this.getCacheKey(rootInfo)
    const cachedData = this.cache.get(cacheKey)
    if (!cachedData) {
      logger.debug('[DiscussionFeedCache] Cache miss for thread:', cacheKey)
      return null
    }
    if (!this.rootInfoMatches(cachedData, rootInfo)) {
      logger.debug('[DiscussionFeedCache] Cache rootInfo mismatch for thread:', cacheKey)
      this.cache.delete(cacheKey)
      return null
    }
    if (this.isStale(cachedData)) {
      logger.debug('[DiscussionFeedCache] Cache hit (stale) for thread:', cacheKey, 'age:', Date.now() - cachedData.timestamp, 'ms', 'replies:', cachedData.replies.length)
    } else {
      logger.debug('[DiscussionFeedCache] Cache hit (fresh) for thread:', cacheKey, 'replies:', cachedData.replies.length)
    }
    return [...cachedData.replies]
  }

  /** True when a matching cache entry exists AND is within its TTL. */
  hasFreshCache(rootInfo: CachedThreadData['rootInfo']): boolean {
    const cachedData = this.cache.get(this.getCacheKey(rootInfo))
    if (!cachedData || !this.rootInfoMatches(cachedData, rootInfo)) {
      return false
    }
    return !this.isStale(cachedData)
  }

  /**
   * Store replies in the cache.
   *
   * Merges with any existing entry for the same root (union by event id) so a
   * later fetch that happens to return fewer events never shrinks the cached
   * reply set. The entry's timestamp is always reset to "now".
   */
  setCachedReplies(rootInfo: CachedThreadData['rootInfo'], replies: NEvent[]): void {
    const cacheKey = this.getCacheKey(rootInfo)
    const existingData = this.cache.get(cacheKey)

    let mergedReplies: NEvent[]
    if (existingData && this.rootInfoMatches(existingData, rootInfo)) {
      // Merge: keep every reply we have ever seen, append only unseen ids.
      const existingReplyIds = new Set(existingData.replies.map((r) => r.id))
      const newReplies = replies.filter((r) => !existingReplyIds.has(r.id))
      mergedReplies = [...existingData.replies, ...newReplies]
      logger.debug('[DiscussionFeedCache] Merged replies for thread:', cacheKey, 'existing:', existingData.replies.length, 'new:', newReplies.length, 'total:', mergedReplies.length)
    } else {
      // No existing cache (or rootInfo mismatch): start fresh from a copy.
      mergedReplies = [...replies]
      logger.debug('[DiscussionFeedCache] Cached new replies for thread:', cacheKey, 'replies:', replies.length)
    }

    this.cache.set(cacheKey, {
      replies: mergedReplies,
      timestamp: Date.now(),
      rootInfo: { ...rootInfo } // copy so later caller mutations can't corrupt the entry
    })

    // Deterministic periodic cleanup: once the cache holds more than 50
    // threads, sweep stale entries on every 10th write. (The previous
    // `Math.random() < 0.1` check only matched its "every 10th set operation"
    // comment probabilistically.)
    this.setOpCount++
    if (this.cache.size > 50 && this.setOpCount % 10 === 0) {
      this.cleanupStaleEntries()
    }
  }

  /** Drop the cached entry for a single thread. */
  clearCache(rootInfo: CachedThreadData['rootInfo']): void {
    const cacheKey = this.getCacheKey(rootInfo)
    this.cache.delete(cacheKey)
    logger.debug('[DiscussionFeedCache] Cleared cache for thread:', cacheKey)
  }

  /** Drop every cached thread entry (the discussions-list cache is untouched). */
  clearAllCache(): void {
    this.cache.clear()
    logger.debug('[DiscussionFeedCache] Cleared all cache')
  }

  /** Remove all thread entries older than CACHE_TTL_MS. */
  private cleanupStaleEntries(): void {
    let cleaned = 0
    for (const [key, data] of this.cache.entries()) {
      if (this.isStale(data)) {
        this.cache.delete(key)
        cleaned++
      }
    }
    if (cleaned > 0) {
      logger.debug('[DiscussionFeedCache] Cleaned up', cleaned, 'stale entries')
    }
  }

  /** Snapshot of the thread cache (key, age in ms, reply count) for debugging. */
  getCacheStats(): { size: number; entries: Array<{ key: string; age: number; replyCount: number }> } {
    const entries = Array.from(this.cache.entries()).map(([key, data]) => ({
      key,
      age: Date.now() - data.timestamp,
      replyCount: data.replies.length
    }))
    return {
      size: this.cache.size,
      entries
    }
  }

  /**
   * Get the cached discussions-list data.
   * Returns null when empty; otherwise returns the entry even when stale so
   * the caller can merge fresh results into it.
   */
  getCachedDiscussionsList(): CachedDiscussionsListData | null {
    if (!this.discussionsListCache) {
      logger.debug('[DiscussionFeedCache] Discussions list cache miss')
      return null
    }
    const age = Date.now() - this.discussionsListCache.timestamp
    if (age > this.DISCUSSIONS_LIST_CACHE_TTL_MS) {
      logger.debug('[DiscussionFeedCache] Discussions list cache hit (stale), age:', age, 'ms')
    } else {
      logger.debug('[DiscussionFeedCache] Discussions list cache hit (fresh), age:', age, 'ms')
    }
    return this.discussionsListCache
  }

  /** True when the discussions-list cache exists and is within its TTL. */
  hasFreshDiscussionsListCache(): boolean {
    if (!this.discussionsListCache) {
      return false
    }
    return Date.now() - this.discussionsListCache.timestamp <= this.DISCUSSIONS_LIST_CACHE_TTL_MS
  }

  /**
   * Store discussions-list data.
   *
   * With merge=true (default), every thread already in the cache is preserved
   * and entries from the new map overwrite/extend it per thread id (new data
   * wins, since it carries fresher counts). With merge=false the new map
   * replaces the cache wholesale — used when the caller has already merged.
   */
  setCachedDiscussionsList(eventMap: Map<string, any>, dynamicTopics: { mainTopics: any[]; subtopics: any[]; allTopics: any[] }, merge = true): void {
    let mergedEventMap: Map<string, any>
    const existingCacheSize = this.discussionsListCache?.eventMap.size || 0
    const newDataSize = eventMap.size

    if (merge && this.discussionsListCache) {
      // Union of old and new: start from every thread we've ever seen, then
      // let the fresh fetch overwrite per-thread entries.
      mergedEventMap = new Map(this.discussionsListCache.eventMap)
      eventMap.forEach((entry, threadId) => {
        mergedEventMap.set(threadId, entry)
      })
      const finalSize = mergedEventMap.size
      logger.debug('[DiscussionFeedCache] Merged discussions list: existing:', existingCacheSize, 'new:', newDataSize, 'total:', finalSize, '(expected at least:', Math.max(existingCacheSize, newDataSize), ')')
      // A union can never be smaller than either input; if it is, something
      // upstream handed us a corrupted map.
      if (finalSize < Math.max(existingCacheSize, newDataSize)) {
        logger.warn('[DiscussionFeedCache] WARNING: Merge resulted in fewer threads! Existing:', existingCacheSize, 'New:', newDataSize, 'Final:', finalSize)
      }
    } else {
      // No existing cache or merge=false: copy so caller mutations can't
      // corrupt the cache.
      mergedEventMap = new Map(eventMap)
      logger.debug('[DiscussionFeedCache] Cached new discussions list (no merge):', eventMap.size, 'threads')
    }

    this.discussionsListCache = {
      eventMap: mergedEventMap,
      dynamicTopics: {
        mainTopics: [...dynamicTopics.mainTopics],
        subtopics: [...dynamicTopics.subtopics],
        allTopics: [...dynamicTopics.allTopics]
      },
      timestamp: Date.now()
    }
    // Note: the original code re-compared the stored map's size against
    // mergedEventMap.size here, but both names referred to the same Map
    // object, so the check could never fire; it has been removed.
  }

  /** Drop the discussions-list cache. */
  clearDiscussionsListCache(): void {
    this.discussionsListCache = null
    logger.debug('[DiscussionFeedCache] Cleared discussions list cache')
  }
}
// App-wide shared singleton; consumers import this default export, never the class.
export default DiscussionFeedCacheService.getInstance()
Loading…
Cancel
Save