Browse Source

bug-fixes

imwald
Silberengel 1 month ago
parent
commit
948344941d
  1. 12
      src/PageManager.tsx
  2. 89
      src/components/Note/Highlight/index.tsx
  3. 64
      src/components/Note/MarkdownArticle/MarkdownArticle.tsx
  4. 2
      src/components/NoteOptions/useMenuActions.tsx
  5. 6
      src/components/NoteStats/index.tsx
  6. 11
      src/components/ReplyNote/index.tsx
  7. 110
      src/components/ReplyNoteList/index.tsx
  8. 92
      src/components/RssFeedList/index.tsx
  9. 87
      src/components/RssUrlThreadEventsPreview/index.tsx
  10. 5
      src/components/RssWebFeedCard/index.tsx
  11. 4
      src/i18n/locales/en.ts
  12. 82
      src/lib/nip84-highlight-display.ts
  13. 19
      src/lib/rss-article.ts
  14. 113
      src/lib/rss-web-feed.ts
  15. 15
      src/lib/thread-reply-root-match.ts
  16. 4
      src/pages/secondary/RssArticlePage/index.tsx
  17. 11
      src/providers/ReplyProvider.tsx
  18. 1
      src/routes.tsx
  19. 10
      src/services/note-stats.service.ts

12
src/PageManager.tsx

@ -322,7 +322,7 @@ function restoredPrimaryBrowserUrl(pathname: string, fullUrlForQuery: string): s @@ -322,7 +322,7 @@ function restoredPrimaryBrowserUrl(pathname: string, fullUrlForQuery: string): s
function parseNoteUrl(url: string): { noteId: string; context?: string } {
// Match patterns like /discussions/notes/{noteId} or /notes/{noteId}
const contextualMatch = url.match(
/\/(discussions|search|profile|home|feed|spells|explore)\/notes\/(.+)$/
/\/(discussions|search|profile|home|feed|spells|explore|rss)\/notes\/(.+)$/
)
if (contextualMatch) {
return { noteId: contextualMatch[2], context: contextualMatch[1] }
@ -1012,7 +1012,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) { @@ -1012,7 +1012,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) {
const pathname = window.location.pathname
// Check if this is a note URL - handle both /notes/{id} and /{context}/notes/{id}
const contextualNoteMatch = pathname.match(/\/(discussions|search|profile|home|feed|spells|explore)\/notes\/(.+)$/)
const contextualNoteMatch = pathname.match(/\/(discussions|search|profile|home|feed|spells|explore|rss)\/notes\/(.+)$/)
const standardNoteMatch = pathname.match(/\/notes\/(.+)$/)
const noteUrlMatch = contextualNoteMatch || standardNoteMatch
@ -1216,7 +1216,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) { @@ -1216,7 +1216,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) {
// Check if pathname matches a primary page name
// First, check if it's a contextual note URL (e.g., /discussions/notes/...)
const contextualNoteMatch = pathname.match(
/^\/(discussions|search|profile|home|feed|spells|explore)\/notes\//
/^\/(discussions|search|profile|home|feed|spells|explore|rss)\/notes\//
)
if (contextualNoteMatch) {
const pageContext = contextualNoteMatch[1]
@ -1281,7 +1281,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) { @@ -1281,7 +1281,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) {
const urlToCheck = state?.url || window.location.pathname
// Check if it's a note URL (we'll update drawer after stack is synced)
const noteUrlMatch = urlToCheck.match(/\/(discussions|search|profile|home|feed|spells|explore)\/notes\/(.+)$/) ||
const noteUrlMatch = urlToCheck.match(/\/(discussions|search|profile|home|feed|spells|explore|rss)\/notes\/(.+)$/) ||
urlToCheck.match(/\/notes\/(.+)$/)
const noteIdToShow = noteUrlMatch ? noteUrlMatch[noteUrlMatch.length - 1].split('?')[0].split('#')[0] : null
@ -1394,7 +1394,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) { @@ -1394,7 +1394,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) {
}
// Check if navigating to a note URL (supports both /notes/{id} and /{context}/notes/{id})
const noteUrlMatch = state.url.match(/\/(discussions|search|profile|home|feed|spells|explore)\/notes\/(.+)$/) ||
const noteUrlMatch = state.url.match(/\/(discussions|search|profile|home|feed|spells|explore|rss)\/notes\/(.+)$/) ||
state.url.match(/\/notes\/(.+)$/)
if (noteUrlMatch) {
const noteId = noteUrlMatch[noteUrlMatch.length - 1].split('?')[0].split('#')[0]
@ -1445,7 +1445,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) { @@ -1445,7 +1445,7 @@ export function PageManager({ maxStackSize = 5 }: { maxStackSize?: number }) {
// Extract noteId from top item's URL or from state.url
const topItemUrl = newStack[newStack.length - 1]?.url || state?.url
if (topItemUrl) {
const topNoteUrlMatch = topItemUrl.match(/\/(discussions|search|profile|home|feed|spells|explore)\/notes\/(.+)$/) ||
const topNoteUrlMatch = topItemUrl.match(/\/(discussions|search|profile|home|feed|spells|explore|rss)\/notes\/(.+)$/) ||
topItemUrl.match(/\/notes\/(.+)$/)
if (topNoteUrlMatch) {
const topNoteId = topNoteUrlMatch[topNoteUrlMatch.length - 1].split('?')[0].split('#')[0]

89
src/components/Note/Highlight/index.tsx

@ -10,6 +10,15 @@ import { toNote } from '@/lib/link' @@ -10,6 +10,15 @@ import { toNote } from '@/lib/link'
import { useFetchEvent } from '@/hooks'
import { useEffect, useState, useMemo } from 'react'
import { ExtendedKind } from '@/constants'
import { resolveNip84HighlightDisplay } from '@/lib/nip84-highlight-display'
function stripOuterQuotes(s: string): string {
let t = s.trim()
if (t.startsWith('"') && t.endsWith('"')) {
t = t.slice(1, -1).trim()
}
return t
}
/**
* Check if a string is a URL or Nostr address
@ -295,58 +304,52 @@ export default function Highlight({ @@ -295,58 +304,52 @@ export default function Highlight({
}
}, [sourceTag, referencedEventAuthor, hasSpecialCard])
// Extract the context (the main quote/full text being highlighted from)
const contextTag = event.tags.find(tag => tag[0] === 'context')
const context = contextTag?.[1] || event.content // Default to content if no context
const { fullText, markedSpan } = useMemo(
() => resolveNip84HighlightDisplay(event),
[event.id, event.content, event.tags]
)
// The event.content is the highlighted portion
const highlightedText = event.content
const markClassName =
'bg-green-200 dark:bg-green-600 dark:text-white px-1 rounded font-medium'
const quotedBody = useMemo(() => {
const cleanFull = stripOuterQuotes(fullText)
const cleanMark = stripOuterQuotes(markedSpan)
if (!cleanFull) return null
if (!cleanMark || cleanFull === cleanMark) {
return (
<div className={`bg-white dark:bg-gray-900 rounded-lg border border-gray-200 dark:border-gray-700 p-4 ${className || ''}`}>
<div className="flex-1 min-w-0">
{/* Full quoted text with highlighted portion */}
{context && (
<div className="note-content text-base font-normal mb-4 whitespace-pre-wrap break-words border-l-4 border-green-500 pl-5 py-4 leading-relaxed bg-green-50/30 dark:bg-green-950/20 rounded-r-lg">
{contextTag && highlightedText ? (
// If we have both context and highlighted text, show the highlight within the context
<div>
{(() => {
// Strip outer quotation marks if present
let cleanContext = context.trim()
if (cleanContext.startsWith('"') && cleanContext.endsWith('"')) {
cleanContext = cleanContext.slice(1, -1).trim()
}
// Strip outer quotation marks from highlighted text if present
let cleanHighlightedText = highlightedText.trim()
if (cleanHighlightedText.startsWith('"') && cleanHighlightedText.endsWith('"')) {
cleanHighlightedText = cleanHighlightedText.slice(1, -1).trim()
}
return cleanContext.split(cleanHighlightedText).map((part, index) => (
<mark className={markClassName} data-nip84-highlight="span">
{cleanFull}
</mark>
)
}
const pieces = cleanFull.split(cleanMark)
if (pieces.length === 1) {
return (
<mark className={markClassName} data-nip84-highlight="span">
{cleanFull}
</mark>
)
}
return pieces.map((part, index) => (
<span key={index}>
{part}
{index < cleanContext.split(cleanHighlightedText).length - 1 && (
<mark className="bg-green-200 dark:bg-green-600 dark:text-white px-1 rounded font-medium">
{cleanHighlightedText}
{index < pieces.length - 1 && (
<mark className={markClassName} data-nip84-highlight="span">
{cleanMark}
</mark>
)}
</span>
))
})()}
</div>
) : (
// If no context tag, just show the content as a regular quote
<div>
{(() => {
// Strip outer quotation marks if present
let cleanContext = context.trim()
if (cleanContext.startsWith('"') && cleanContext.endsWith('"')) {
cleanContext = cleanContext.slice(1, -1).trim()
}
return cleanContext
})()}
</div>
)}
}, [fullText, markedSpan])
return (
<div className={`bg-white dark:bg-gray-900 rounded-lg border border-gray-200 dark:border-gray-700 p-4 ${className || ''}`}>
<div className="flex-1 min-w-0">
{/* Full quoted text with highlighted portion (context, textquoteselector, or textpositionselector) */}
{quotedBody && (
<div className="note-content text-base font-normal mb-4 whitespace-pre-wrap break-words border-l-4 border-green-500 pl-5 py-4 leading-relaxed bg-green-50/30 dark:bg-green-950/20 rounded-r-lg">
<div>{quotedBody}</div>
</div>
)}

64
src/components/Note/MarkdownArticle/MarkdownArticle.tsx

@ -10,6 +10,7 @@ import { toNoteList } from '@/lib/link' @@ -10,6 +10,7 @@ import { toNoteList } from '@/lib/link'
import { useMediaExtraction } from '@/hooks'
import { cleanUrl, isImage, isMedia, isVideo, isAudio, isWebsocketUrl } from '@/lib/url'
import { getHttpUrlFromITags, getImetaInfosFromEvent } from '@/lib/event'
import { canonicalizeRssArticleUrl } from '@/lib/rss-article'
import { Event, kinds } from 'nostr-tools'
import Emoji from '@/components/Emoji'
import { ExtendedKind, WS_URL_REGEX, YOUTUBE_URL_REGEX } from '@/constants'
@ -428,8 +429,8 @@ function parseMarkdownContent( @@ -428,8 +429,8 @@ function parseMarkdownContent(
emojiInfos?: TEmoji[]
/** When viewing a kind-24 invite, render full calendar card with RSVP instead of EmbeddedNote for this naddr */
fullCalendarInvite?: { naddr: string; event: Event }
/** If set, a standalone markdown link to this cleaned URL renders as inline link (OG shown separately). */
suppressStandaloneWebPreviewForCleanedUrl?: string
/** Cleaned URL variants: standalone markdown links matching any render as inline (OG elsewhere). */
suppressStandaloneWebPreviewCleanedUrls?: ReadonlySet<string>
}
): { nodes: React.ReactNode[]; hashtagsInContent: Set<string>; footnotes: Map<string, string>; citations: Array<{ id: string; type: string; citationId: string }> } {
const {
@ -443,7 +444,7 @@ function parseMarkdownContent( @@ -443,7 +444,7 @@ function parseMarkdownContent(
getImageIdentifier,
emojiInfos = [],
fullCalendarInvite,
suppressStandaloneWebPreviewForCleanedUrl
suppressStandaloneWebPreviewCleanedUrls
} = options
const parts: React.ReactNode[] = []
const hashtagsInContent = new Set<string>()
@ -1833,8 +1834,8 @@ function parseMarkdownContent( @@ -1833,8 +1834,8 @@ function parseMarkdownContent(
const { url } = pattern.data
const cleanedStandalone = cleanUrl(url) || url
if (
suppressStandaloneWebPreviewForCleanedUrl &&
cleanedStandalone === suppressStandaloneWebPreviewForCleanedUrl
suppressStandaloneWebPreviewCleanedUrls &&
suppressStandaloneWebPreviewCleanedUrls.has(cleanedStandalone)
) {
parts.push(
<a
@ -3216,7 +3217,8 @@ export default function MarkdownArticle({ @@ -3216,7 +3217,8 @@ export default function MarkdownArticle({
className,
hideMetadata = false,
parentImageUrl,
fullCalendarInvite
fullCalendarInvite,
duplicateWebPreviewCleanedUrlHints
}: {
event: Event
className?: string
@ -3224,6 +3226,8 @@ export default function MarkdownArticle({ @@ -3224,6 +3226,8 @@ export default function MarkdownArticle({
parentImageUrl?: string
/** When viewing a kind-24 invite, render full calendar card with RSVP in place of the naddr embed */
fullCalendarInvite?: { naddr: string; event: Event }
/** e.g. RSS/article URL-thread root: suppress duplicate WebPreview for the same page already shown as OP */
duplicateWebPreviewCleanedUrlHints?: string[]
}) {
const secondaryPage = useSecondaryPageOptional()
const push = secondaryPage?.push ?? ((url: string) => { window.location.href = url })
@ -3231,10 +3235,33 @@ export default function MarkdownArticle({ @@ -3231,10 +3235,33 @@ export default function MarkdownArticle({
const { navigateToRelay } = useSmartRelayNavigationOptional()
const metadata = useMemo(() => getLongFormArticleMetadataFromEvent(event), [event])
const iArticleUrl = useMemo(() => getHttpUrlFromITags(event), [event])
const iArticleCleaned = useMemo(
() => (iArticleUrl ? cleanUrl(iArticleUrl) || iArticleUrl : ''),
[iArticleUrl]
)
const webPreviewSuppressCleanedSet = useMemo(() => {
const s = new Set<string>()
const addHint = (raw: string) => {
const t = raw.trim()
if (!t) return
const c = cleanUrl(t)
if (c) s.add(c)
else s.add(t)
if (t.startsWith('http://') || t.startsWith('https://')) {
const canon = canonicalizeRssArticleUrl(t)
if (canon) s.add(canon)
}
}
if (iArticleUrl) addHint(iArticleUrl)
for (const h of duplicateWebPreviewCleanedUrlHints ?? []) addHint(h)
return s
}, [iArticleUrl, duplicateWebPreviewCleanedUrlHints])
/** URL-thread OP already shows this link; hide the embedded i-tag card on kind 1111 / scoped replies */
const suppressITagArticleWebPreview = useMemo(() => {
if (!iArticleUrl || !duplicateWebPreviewCleanedUrlHints?.length) return false
const canon = canonicalizeRssArticleUrl(iArticleUrl)
return duplicateWebPreviewCleanedUrlHints.some(
(h) => canonicalizeRssArticleUrl(h) === canon
)
}, [iArticleUrl, duplicateWebPreviewCleanedUrlHints])
// Extract all media from event
const extractedMedia = useMediaExtraction(event, event.content)
@ -3511,10 +3538,16 @@ export default function MarkdownArticle({ @@ -3511,10 +3538,16 @@ export default function MarkdownArticle({
return tagLinks.filter((link) => {
const cleaned = cleanUrl(link)
if (!cleaned) return false
if (iArticleCleaned && cleaned === iArticleCleaned) return false
if (webPreviewSuppressCleanedSet.has(cleaned)) return false
if (
(link.startsWith('http://') || link.startsWith('https://')) &&
webPreviewSuppressCleanedSet.has(canonicalizeRssArticleUrl(link))
) {
return false
}
return !contentLinksSet.has(cleaned)
})
}, [tagLinks, contentLinks, iArticleCleaned])
}, [tagLinks, contentLinks, webPreviewSuppressCleanedSet])
// Preprocess content to convert URLs to markdown syntax
const preprocessedContent = useMemo(() => {
@ -3586,7 +3619,8 @@ export default function MarkdownArticle({ @@ -3586,7 +3619,8 @@ export default function MarkdownArticle({
getImageIdentifier,
emojiInfos,
fullCalendarInvite,
suppressStandaloneWebPreviewForCleanedUrl: iArticleCleaned || undefined
suppressStandaloneWebPreviewCleanedUrls:
webPreviewSuppressCleanedSet.size > 0 ? webPreviewSuppressCleanedSet : undefined
})
// Return nodes and hashtags (footnotes are already included in nodes)
return { nodes: result.nodes, hashtagsInContent: result.hashtagsInContent }
@ -3602,7 +3636,7 @@ export default function MarkdownArticle({ @@ -3602,7 +3636,7 @@ export default function MarkdownArticle({
getImageIdentifier,
emojiInfos,
fullCalendarInvite,
iArticleCleaned
webPreviewSuppressCleanedSet
])
// Filter metadata tags to only show what's not already in content
@ -3698,7 +3732,7 @@ export default function MarkdownArticle({ @@ -3698,7 +3732,7 @@ export default function MarkdownArticle({
}
`}</style>
<div className={`prose prose-zinc max-w-none dark:prose-invert break-words overflow-wrap-anywhere ${className || ''}`}>
{iArticleUrl && (
{iArticleUrl && !suppressITagArticleWebPreview && (
<div className="not-prose mb-4 max-w-full">
<WebPreview url={iArticleUrl} className="w-full" />
</div>

2
src/components/NoteOptions/useMenuActions.tsx

@ -638,6 +638,8 @@ export function useMenuActions({ @@ -638,6 +638,8 @@ export function useMenuActions({
const path =
currentPrimaryPage === 'spells'
? `/spells/notes/${noteId}`
: currentPrimaryPage === 'rss'
? `/rss/notes/${noteId}`
: `/notes/${noteId}`
const jumbleUrl = `https://jumble.imwald.eu${path}`
navigator.clipboard.writeText(jumbleUrl)

6
src/components/NoteStats/index.tsx

@ -43,11 +43,11 @@ export default function NoteStats({ @@ -43,11 +43,11 @@ export default function NoteStats({
// Hide interaction counts if event is in quiet mode
const hideInteractions = shouldHideInteractions(event)
/** Synthetic RSS article root: only reply + reactions (no boost/quote/zap). */
/** Synthetic RSS article root: no boost/quote/zap; still show reaction breakdown (NIP-25 + kind-17 web). */
const isRssArticleRoot = event.kind === ExtendedKind.RSS_THREAD_ROOT
/** Kind 11 / kind 1111 under kind 11: LikeButton already shows ⬆/⬇ counts — skip duplicate pill row. */
const showLikesPills = !isDiscussion && !isReplyToDiscussion && !isRssArticleRoot
/** Emoji reaction pills (aggregated likes). Shown for RSS/Web URL threads so the side panel matches feed rows. */
const showLikesPills = !isDiscussion && !isReplyToDiscussion
useEffect(() => {
if (!fetchIfNotExisting) return

11
src/components/ReplyNote/index.tsx

@ -37,13 +37,15 @@ export default function ReplyNote({ @@ -37,13 +37,15 @@ export default function ReplyNote({
parentEventId,
onClickParent = () => {},
onClickReply,
highlight = false
highlight = false,
duplicateWebPreviewCleanedUrlHints
}: {
event: Event
parentEventId?: string
onClickParent?: () => void
onClickReply?: (event: Event) => void
highlight?: boolean
duplicateWebPreviewCleanedUrlHints?: string[]
}) {
const { t } = useTranslation()
const { isSmallScreen } = useScreenSize()
@ -147,7 +149,12 @@ export default function ReplyNote({ @@ -147,7 +149,12 @@ export default function ReplyNote({
<span>{t(notificationReactionSummaryKey(reactionDisplay))}</span>
</div>
) : (
<MarkdownArticle className="mt-2" event={event} hideMetadata={true} />
<MarkdownArticle
className="mt-2"
event={event}
hideMetadata={true}
duplicateWebPreviewCleanedUrlHints={duplicateWebPreviewCleanedUrlHints}
/>
)
) : (
<Button

110
src/components/ReplyNoteList/index.tsx

@ -1,6 +1,10 @@ @@ -1,6 +1,10 @@
import { E_TAG_FILTER_BLOCKED_RELAY_URLS, ExtendedKind } from '@/constants'
import { isDiscussionDownvoteEmoji, isDiscussionUpvoteEmoji } from '@/lib/discussion-votes'
import { canonicalizeRssArticleUrl, getArticleUrlFromCommentITags } from '@/lib/rss-article'
import {
canonicalizeRssArticleUrl,
getArticleUrlFromCommentITags,
getHighlightSourceHttpUrl
} from '@/lib/rss-article'
import {
eventReferencesEventId,
getParentETag,
@ -32,6 +36,10 @@ import noteStatsService from '@/services/note-stats.service' @@ -32,6 +36,10 @@ import noteStatsService from '@/services/note-stats.service'
import discussionFeedCache from '@/services/discussion-feed-cache.service'
import { buildReplyReadRelayList, relayHintsFromEventTags } from '@/lib/relay-list-builder'
import { eventReplyMatchesThreadRoot } from '@/lib/thread-reply-root-match'
import {
buildRssArticleUrlThreadInteractionFilters,
isRssArticleUrlThreadInteraction
} from '@/lib/rss-web-feed'
import { Filter, Event as NEvent, kinds } from 'nostr-tools'
import { useNoteStatsById } from '@/hooks/useNoteStatsById'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
@ -55,13 +63,16 @@ function ReplyNoteList({ @@ -55,13 +63,16 @@ function ReplyNoteList({
index,
event,
sort = 'oldest',
showQuotes = true
showQuotes = true,
duplicateWebPreviewCleanedUrlHints
}: {
index?: number
event: NEvent
sort?: 'newest' | 'oldest' | 'top' | 'controversial' | 'most-zapped'
/** When false, omit the quotes section (e.g. discussion threads). */
showQuotes?: boolean
/** Suppress WebPreview for these URLs in replies (e.g. article URL already shown as OP). */
duplicateWebPreviewCleanedUrlHints?: string[]
}) {
const { t } = useTranslation()
const { navigateToNote } = useSmartNoteNavigation()
@ -82,6 +93,12 @@ function ReplyNoteList({ @@ -82,6 +93,12 @@ function ReplyNoteList({
const isDiscussionRoot = event.kind === ExtendedKind.DISCUSSION
const replyDuplicateWebPreviewHints = useMemo(() => {
const out: string[] = [...(duplicateWebPreviewCleanedUrlHints ?? [])]
if (rootInfo?.type === 'I') out.push(rootInfo.id)
return out.length ? out : undefined
}, [duplicateWebPreviewCleanedUrlHints, rootInfo])
// Helper function to get vote score for a reply
const getReplyVoteScore = (reply: NEvent) => {
const stats = noteStatsService.getNoteStats(reply.id)
@ -345,6 +362,59 @@ function ReplyNoteList({ @@ -345,6 +362,59 @@ function ReplyNoteList({
fetchRootEvent()
}, [event])
/** When stats saw a URL-thread reply on relays we didn't REQ in the reply list, fetch by id so count matches list. */
const rssStatsHydratedReplyIdsRef = useRef<Set<string>>(new Set())
useEffect(() => {
rssStatsHydratedReplyIdsRef.current.clear()
}, [event.id])
useEffect(() => {
if (event.kind !== ExtendedKind.RSS_THREAD_ROOT || rootInfo?.type !== 'I') return
const fromStats = noteStats?.replies
if (!fromStats?.length) return
const urlKey = canonicalizeRssArticleUrl(rootInfo.id)
const inBucket = new Set((repliesMap.get(urlKey)?.events ?? []).map((e) => e.id))
const candidates = fromStats.filter(
(r) => !inBucket.has(r.id) && !rssStatsHydratedReplyIdsRef.current.has(r.id)
)
if (candidates.length === 0) return
let cancelled = false
;(async () => {
const batch: NEvent[] = []
for (const { id } of candidates) {
rssStatsHydratedReplyIdsRef.current.add(id)
try {
const ev = await eventService.fetchEvent(id)
if (cancelled) return
if (ev && isRssArticleUrlThreadInteraction(ev, rootInfo.id)) {
batch.push(ev)
} else {
rssStatsHydratedReplyIdsRef.current.delete(id)
}
} catch {
rssStatsHydratedReplyIdsRef.current.delete(id)
}
}
if (!cancelled && batch.length > 0) addReplies(batch)
})()
return () => {
cancelled = true
}
}, [
event.kind,
event.id,
rootInfo,
noteStats?.replies,
noteStats?.updatedAt,
repliesMap,
addReplies
])
const onNewReply = useCallback((evt: NEvent) => {
addReplies([evt])
if (rootInfo) {
@ -374,7 +444,10 @@ function ReplyNoteList({ @@ -374,7 +444,10 @@ function ReplyNoteList({
const replyFetchGenRef = useRef(0)
useEffect(() => {
if (!rootInfo || currentIndex !== index) return
if (!rootInfo) return
// Hidden stack pages pass a numeric index that differs from the top panel's currentIndex.
// When index is omitted (edge routes), still fetch so replies are not stuck empty.
if (index !== undefined && currentIndex !== index) return
const fetchGeneration = ++replyFetchGenRef.current
@ -474,16 +547,7 @@ function ReplyNoteList({ @@ -474,16 +547,7 @@ function ReplyNoteList({
finalRelayUrls.push(rootInfo.relay)
}
} else if (rootInfo.type === 'I') {
filters.push({
'#i': [rootInfo.id],
kinds: [ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: LIMIT
})
filters.push({
'#I': [rootInfo.id],
kinds: [ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: LIMIT
})
filters.push(...buildRssArticleUrlThreadInteractionFilters(rootInfo.id, LIMIT))
}
// Use fetchEvents instead of subscribeTimeline for one-time fetching
@ -491,8 +555,12 @@ function ReplyNoteList({ @@ -491,8 +555,12 @@ function ReplyNoteList({
if (fetchGeneration !== replyFetchGenRef.current) return
// Filter and add replies
const regularReplies = allReplies.filter((evt) => isReplyNoteEvent(evt))
// Filter and add replies (URL threads include kind 9802 highlights of this page)
const regularReplies = allReplies.filter((evt) =>
rootInfo.type === 'I'
? isRssArticleUrlThreadInteraction(evt, rootInfo.id)
: isReplyNoteEvent(evt)
)
// Store in cache (this merges with existing cached replies)
// After this call, the cache contains ALL replies we've ever seen for this thread
@ -622,6 +690,10 @@ function ReplyNoteList({ @@ -622,6 +690,10 @@ function ReplyNoteList({
// vanishing when wotSet is still empty (all non-self appear untrusted)
if (isTrustLoaded && hideUntrustedInteractions && !isUserTrusted(item.pubkey)) {
if (isQuote) return null
// URL-scoped comments (NIP-22 / kind 1111) are keyed under the article URL in ReplyProvider,
// not under each note id — repliesMap.get(item.id) is usually empty. Skipping the "trusted
// children" rule avoids hiding every untrusted URL-thread note.
if (rootInfo?.type !== 'I') {
const repliesForThisReply = repliesMap.get(item.id)
if (
!repliesForThisReply ||
@ -630,6 +702,7 @@ function ReplyNoteList({ @@ -630,6 +702,7 @@ function ReplyNoteList({
return null
}
}
}
if (isQuote) {
const quoteLabel =
@ -671,7 +744,11 @@ function ReplyNoteList({ @@ -671,7 +744,11 @@ function ReplyNoteList({
const replyRootId = getRootEventHexId(reply)
const replyUrlForIThread =
rootInfo?.type === 'I' ? getArticleUrlFromCommentITags(reply) : undefined
rootInfo?.type === 'I'
? reply.kind === kinds.Highlights
? getHighlightSourceHttpUrl(reply)
: getArticleUrlFromCommentITags(reply)
: undefined
const belongsToSameThread = rootInfo && (
(rootInfo.type === 'E' && replyRootId === rootInfo.id) ||
(rootInfo.type === 'A' && getRootATag(reply)?.[1] === rootInfo.id) ||
@ -689,6 +766,7 @@ function ReplyNoteList({ @@ -689,6 +766,7 @@ function ReplyNoteList({
<ReplyNote
event={reply}
parentEventId={event.id !== parentEventHexId ? parentEventId : undefined}
duplicateWebPreviewCleanedUrlHints={replyDuplicateWebPreviewHints}
onClickParent={() => {
if (!parentEventHexId) return
if (replies.every((r) => r.id !== parentEventHexId)) {

92
src/components/RssFeedList/index.tsx

@ -8,15 +8,20 @@ import RssFeedItem from '../RssFeedItem' @@ -8,15 +8,20 @@ import RssFeedItem from '../RssFeedItem'
import RssWebFeedCard from '../RssWebFeedCard'
import { ArticleUrlsSection } from './ArticleUrlsSection'
import { RssEntriesSection } from './RssEntriesSection'
import { canonicalizeRssArticleUrl, isClawstrDotComHttpUrl } from '@/lib/rss-article'
import {
addManualRssWebUrl,
fetchDiscoveredWebUrlsFromRelays,
loadManualRssWebUrls,
loadRssWebFeedScopePreference,
loadRssWebHideUnifiedClutterPreference,
loadRssWebSuppressClawstrPreference,
buildArticleUrlFeedRows,
isHttpArticleUrl,
isRssWebUnifiedClutterUrl,
mergeDiscoveredRssWebUrls,
saveRssWebFeedScopePreference,
saveRssWebHideUnifiedClutterPreference,
saveRssWebSuppressClawstrPreference,
WEB_EXTERNAL_REACTION_PUBLISHED_EVENT,
type ManualRssWebUrlEntry,
@ -131,6 +136,12 @@ function ManualRssUrlAddRow({ @@ -131,6 +136,12 @@ function ManualRssUrlAddRow({
)
}
function rssFeedItemArticleIsClawstrHost(item: TRssFeedItem): boolean {
const l = item.link?.trim()
if (!l || (!l.startsWith('http://') && !l.startsWith('https://'))) return false
return isClawstrDotComHttpUrl(l) || isClawstrDotComHttpUrl(canonicalizeRssArticleUrl(l))
}
export default function RssFeedList() {
const { t } = useTranslation()
const { pubkey, rssFeedListEvent } = useNostr()
@ -156,6 +167,8 @@ export default function RssFeedList() { @@ -156,6 +167,8 @@ export default function RssFeedList() {
const [manualWebEntries, setManualWebEntries] = useState<ManualRssWebUrlEntry[]>([])
/** Latest relay discovery (in-memory); URLs appear as faux cards even before IndexedDB merge. */
const [relayDiscoveredUrls, setRelayDiscoveredUrls] = useState<ManualRssWebUrlEntry[]>([])
const [suppressClawstrLinks, setSuppressClawstrLinks] = useState(true)
const [hideUnifiedClutter, setHideUnifiedClutter] = useState(true)
const refreshManualWebUrls = useCallback(() => {
void loadManualRssWebUrls().then(setManualWebEntries)
@ -515,6 +528,16 @@ export default function RssFeedList() { @@ -515,6 +528,16 @@ export default function RssFeedList() {
return filtered
}, [items, selectedFeeds, timeFilter])
/** When “hide clutter” is on, drop those entries from the feed (not only from URL cards). */
const rssWebItemsRespectingClutterPref = useMemo(() => {
if (!hideUnifiedClutter) return baseFilteredItems
return baseFilteredItems.filter((item) => {
const link = item.link?.trim()
if (!link || !isHttpArticleUrl(link)) return true
return !isRssWebUnifiedClutterUrl(link)
})
}, [baseFilteredItems, hideUnifiedClutter])
const rssItemMatchesSearch = useCallback((item: TRssFeedItem, q: string) => {
const query = q.toLowerCase().trim()
if (!query) return true
@ -530,14 +553,17 @@ export default function RssFeedList() { @@ -530,14 +553,17 @@ export default function RssFeedList() {
/** RSS-only view: flat timeline with full-text search. */
const rssScopeItems = useMemo(() => {
const q = searchQuery.trim()
let list = baseFilteredItems
let list = rssWebItemsRespectingClutterPref
if (q) {
list = list.filter((item) => rssItemMatchesSearch(item, q))
}
if (suppressClawstrLinks) {
list = list.filter((item) => !rssFeedItemArticleIsClawstrHost(item))
}
return [...list].sort(
(a, b) => (b.pubDate?.getTime() ?? 0) - (a.pubDate?.getTime() ?? 0)
)
}, [baseFilteredItems, searchQuery, rssItemMatchesSearch])
}, [rssWebItemsRespectingClutterPref, searchQuery, rssItemMatchesSearch, suppressClawstrLinks])
type CombinedFeedRow =
| { kind: 'web'; canonicalUrl: string; rssItems: TRssFeedItem[]; latestPub: number }
@ -563,7 +589,8 @@ export default function RssFeedList() { @@ -563,7 +589,8 @@ export default function RssFeedList() {
const discovered = await fetchDiscoveredWebUrlsFromRelays({
accountPubkey: pubkey,
favoriteRelays: favoriteRelays ?? [],
blockedRelays: blockedRelays ?? []
blockedRelays: blockedRelays ?? [],
excludeClutterUrls: hideUnifiedClutter
})
if (cancelled) return
setRelayDiscoveredUrls(discovered)
@ -576,24 +603,44 @@ export default function RssFeedList() { @@ -576,24 +603,44 @@ export default function RssFeedList() {
return () => {
cancelled = true
}
}, [feedScope, pubkey, favoriteRelays, blockedRelays, refreshManualWebUrls, relayDiscoveryTick])
}, [
feedScope,
pubkey,
favoriteRelays,
blockedRelays,
refreshManualWebUrls,
relayDiscoveryTick,
hideUnifiedClutter
])
const combinedFeedRows = useMemo((): CombinedFeedRow[] => {
const { webRows, nonHttpItems } = buildArticleUrlFeedRows(
baseFilteredItems,
rssWebItemsRespectingClutterPref,
manualWebEntries,
relayDiscoveredUrls
relayDiscoveredUrls,
{ excludeClutterLinks: hideUnifiedClutter }
)
const rest: CombinedFeedRow[] = nonHttpItems.map((item) => ({
kind: 'rss' as const,
item
}))
return [...webRows, ...rest].sort((a, b) => {
const merged = [...webRows, ...rest].sort((a, b) => {
const ta = a.kind === 'web' ? a.latestPub : (a.item.pubDate?.getTime() ?? 0)
const tb = b.kind === 'web' ? b.latestPub : (b.item.pubDate?.getTime() ?? 0)
return tb - ta
})
}, [baseFilteredItems, manualWebEntries, relayDiscoveredUrls])
if (!suppressClawstrLinks) return merged
return merged.filter((row) => {
if (row.kind === 'web') return !isClawstrDotComHttpUrl(row.canonicalUrl)
return !rssFeedItemArticleIsClawstrHost(row.item)
})
}, [
rssWebItemsRespectingClutterPref,
manualWebEntries,
relayDiscoveredUrls,
suppressClawstrLinks,
hideUnifiedClutter
])
const combinedFeedRowsForSearch = useMemo((): CombinedFeedRow[] => {
const q = searchQuery.trim()
@ -652,14 +699,18 @@ export default function RssFeedList() { @@ -652,14 +699,18 @@ export default function RssFeedList() {
return { view: 'unified', rows }
}, [feedScope, rssScopeItems, combinedFeedRowsForSearch, urlKeysWithNostrFootprint])
const [suppressClawstrLinks, setSuppressClawstrLinks] = useState(true)
const persistSuppressClawstr = useCallback((checked: boolean) => {
rssWebPrefsUserTouchedRef.current = true
setSuppressClawstrLinks(checked)
void saveRssWebSuppressClawstrPreference(checked)
}, [])
const persistHideUnifiedClutter = useCallback((checked: boolean) => {
rssWebPrefsUserTouchedRef.current = true
setHideUnifiedClutter(checked)
void saveRssWebHideUnifiedClutterPreference(checked)
}, [])
const persistFeedScope = useCallback((scope: RssWebFeedScope) => {
rssWebPrefsUserTouchedRef.current = true
setFeedScope(scope)
@ -669,12 +720,14 @@ export default function RssFeedList() { @@ -669,12 +720,14 @@ export default function RssFeedList() {
useEffect(() => {
let cancelled = false
void (async () => {
const [suppressClawstr, scope] = await Promise.all([
const [suppressClawstr, hideClutter, scope] = await Promise.all([
loadRssWebSuppressClawstrPreference(),
loadRssWebHideUnifiedClutterPreference(),
loadRssWebFeedScopePreference()
])
if (cancelled || rssWebPrefsUserTouchedRef.current) return
setSuppressClawstrLinks(suppressClawstr)
setHideUnifiedClutter(hideClutter)
setFeedScope(scope)
})()
return () => {
@ -690,7 +743,7 @@ export default function RssFeedList() { @@ -690,7 +743,7 @@ export default function RssFeedList() {
// Reset pagination when filters change
useEffect(() => {
setShowRowCount(20)
}, [selectedFeeds, timeFilter, searchQuery, feedScope, suppressClawstrLinks])
}, [selectedFeeds, timeFilter, searchQuery, feedScope, suppressClawstrLinks, hideUnifiedClutter])
const displayedFeed = useMemo(():
| { view: 'rss'; items: TRssFeedItem[] }
@ -811,6 +864,7 @@ export default function RssFeedList() { @@ -811,6 +864,7 @@ export default function RssFeedList() {
{t('RSS')}
</Button>
</div>
<div className="flex flex-wrap items-center gap-x-4 gap-y-2">
<div className="flex items-center gap-2">
<Checkbox
id="suppress-clawstr-links"
@ -824,6 +878,20 @@ export default function RssFeedList() { @@ -824,6 +878,20 @@ export default function RssFeedList() {
{t('Suppress Clawstr links in RSS previews')}
</Label>
</div>
<div className="flex items-center gap-2">
<Checkbox
id="hide-unified-clutter"
checked={hideUnifiedClutter}
onCheckedChange={(c) => persistHideUnifiedClutter(c === true)}
/>
<Label
htmlFor="hide-unified-clutter"
className="cursor-pointer text-xs text-muted-foreground"
>
{t('Hide local, media & feed URLs from URL cards')}
</Label>
</div>
</div>
</div>
<p className="text-xs text-muted-foreground sm:text-right">
{t('Showing {{filtered}} of {{total}} entries', {

87
src/components/RssUrlThreadEventsPreview/index.tsx

@ -0,0 +1,87 @@ @@ -0,0 +1,87 @@
import NoteCard from '@/components/NoteCard'
import { Skeleton } from '@/components/ui/skeleton'
import { FAST_READ_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants'
import { useNoteStatsRelayHints } from '@/hooks/useNoteStatsRelayHints'
import {
buildRssArticleUrlThreadInteractionFilters,
isRssArticleUrlThreadInteraction
} from '@/lib/rss-web-feed'
import { queryService } from '@/services/client.service'
import type { Event } from 'nostr-tools'
import { useEffect, useMemo, useState } from 'react'
// Number of thread rows actually rendered in the preview.
const PREVIEW_LIMIT = 5
// Over-fetch so enough rows survive dedupe + URL-match filtering.
const FETCH_LIMIT = 24
/**
 * Compact Nostr thread rows (comments + highlights) for an article URL card in the RSS+Web feed.
 *
 * Fetches interaction events for `canonicalUrl` from the searchable/fast-read relay sets plus
 * note-stats relay hints, dedupes by event id, keeps only events that belong to this URL's
 * thread, and renders the newest PREVIEW_LIMIT as compact note cards.
 * Renders skeletons while loading and `null` when nothing matches.
 */
export default function RssUrlThreadEventsPreview({ canonicalUrl }: { canonicalUrl: string }) {
  const { relays, key: relayHintsKey } = useNoteStatsRelayHints()
  // Union of static relay sets and per-note hint relays, deduped via Set.
  const relayUrls = useMemo(
    () => [...new Set([...SEARCHABLE_RELAY_URLS, ...FAST_READ_RELAY_URLS, ...relays])],
    [relays]
  )
  const [events, setEvents] = useState<Event[]>([])
  const [loading, setLoading] = useState(true)
  useEffect(() => {
    // Guards against setState after unmount or after canonicalUrl changes mid-flight.
    let cancelled = false
    setLoading(true)
    const filters = buildRssArticleUrlThreadInteractionFilters(canonicalUrl, FETCH_LIMIT)
    void queryService
      .fetchEvents(relayUrls, filters)
      .then((all) => {
        if (cancelled) return
        // Newest first; drop duplicate ids and events that target a different URL.
        const seen = new Set<string>()
        const merged: Event[] = []
        for (const e of [...all].sort((a, b) => b.created_at - a.created_at)) {
          if (seen.has(e.id)) continue
          if (!isRssArticleUrlThreadInteraction(e, canonicalUrl)) continue
          seen.add(e.id)
          merged.push(e)
        }
        setEvents(merged.slice(0, PREVIEW_LIMIT))
      })
      .catch(() => {
        // Best-effort preview: on fetch failure just render nothing.
        if (!cancelled) setEvents([])
      })
      .finally(() => {
        if (!cancelled) setLoading(false)
      })
    return () => {
      cancelled = true
    }
  }, [canonicalUrl, relayHintsKey, relayUrls])
  if (loading) {
    return (
      <div
        className="border-t border-border/50 bg-muted/10 px-3 py-2 pointer-events-auto space-y-2"
        onClick={(e) => e.stopPropagation()}
        onKeyDown={(e) => e.stopPropagation()}
      >
        <Skeleton className="h-14 w-full rounded-md" />
        <Skeleton className="h-14 w-full rounded-md" />
      </div>
    )
  }
  if (events.length === 0) return null
  return (
    <div
      className="border-t border-border/50 bg-muted/10 pointer-events-auto max-h-72 overflow-y-auto"
      onClick={(e) => e.stopPropagation()}
      onKeyDown={(e) => e.stopPropagation()}
    >
      <div className="divide-y divide-border/40">
        {events.map((evt) => (
          <div key={evt.id} className="px-2 py-1.5">
            <NoteCard event={evt} className="border-0 bg-transparent shadow-none" hideParentNotePreview />
          </div>
        ))}
      </div>
    </div>
  )
}

5
src/components/RssWebFeedCard/index.tsx

@ -1,4 +1,5 @@ @@ -1,4 +1,5 @@
import RssFeedItem from '@/components/RssFeedItem'
import RssUrlThreadEventsPreview from '@/components/RssUrlThreadEventsPreview'
import RssUrlThreadStatsBar from '@/components/RssUrlThreadStatsBar'
import WebPreview from '@/components/WebPreview'
import { cn } from '@/lib/utils'
@ -100,6 +101,10 @@ export default function RssWebFeedCard({ @@ -100,6 +101,10 @@ export default function RssWebFeedCard({
</p>
) : null}
{isHttpArticleUrl(canonicalUrl) ? (
<RssUrlThreadEventsPreview canonicalUrl={canonicalUrl} />
) : null}
<RssUrlThreadStatsBar event={syntheticRoot} />
</div>
)

4
src/i18n/locales/en.ts

@ -1268,7 +1268,9 @@ export default { @@ -1268,7 +1268,9 @@ export default {
'Web URL item label': 'Web URL',
'URL thread activity': 'URL thread activity',
'Suppress Clawstr links in RSS previews':
'Hide links to clawstr.com in RSS previews',
'Hide clawstr.com in RSS and URL feed',
'Hide local, media & feed URLs from URL cards':
'Hide local, media, feed, document & XML links from the feed',
'RSS articles': 'RSS articles',
'Web comments': 'Web comments',
'Web highlights': 'Web highlights',

82
src/lib/nip84-highlight-display.ts

@ -0,0 +1,82 @@ @@ -0,0 +1,82 @@
import type { Event } from 'nostr-tools'
/**
 * NIP-84 / Web Annotation style `textquoteselector` (prefix + exact + suffix).
 * `exact` is always {@link Event.content}; prefix/suffix are adjacent source text.
 *
 * Common shapes:
 * - `["textquoteselector", prefix, suffix]` (3 items)
 * - `["textquoteselector", "-", prefix, suffix]` leading "-" = empty slot (Hypothesis-style)
 */
export function parseTextQuoteSelectorParts(tag: readonly string[]): { prefix: string; suffix: string } {
  const none = { prefix: '', suffix: '' }
  if (tag[0] !== 'textquoteselector' || tag.length < 3) return none
  // Hypothesis-style tags carry a literal "-" placeholder in slot 1, shifting
  // prefix/suffix one position to the right.
  const base = tag.length >= 4 && tag[1] === '-' ? 2 : 1
  return {
    prefix: (tag[base] ?? '').trim(),
    suffix: (tag[base + 1] ?? '').trim()
  }
}
/** `["textpositionselector", start, end]` — character offsets into a full document string. */
export function parseTextPositionSelector(tag: readonly string[]): { start: number; end: number } | null {
  if (tag[0] !== 'textpositionselector' || tag.length < 3) return null
  const start = parseInt(tag[1] ?? '', 10)
  const end = parseInt(tag[2] ?? '', 10)
  // Reject non-numeric, negative, and empty/inverted ranges.
  const valid = !Number.isNaN(start) && !Number.isNaN(end) && start >= 0 && end > start
  return valid ? { start, end } : null
}
export type Nip84HighlightDisplay = {
  /** Full passage to show in the quote box */
  fullText: string
  /** Substring of fullText to wrap in <mark> */
  markedSpan: string
}
/**
 * Resolve which span to mark inside which full text, using `context`, `textquoteselector`,
 * and optionally `textpositionselector` (only when offsets fit the base string).
 */
export function resolveNip84HighlightDisplay(event: Pick<Event, 'content' | 'tags'>): Nip84HighlightDisplay {
  const exact = event.content ?? ''
  // `context` (if non-blank) is the preferred full passage.
  const contextRaw = event.tags.find((t) => t[0] === 'context')?.[1]
  const context = contextRaw?.trim() ? contextRaw : undefined
  // Explicit character offsets win, but only when they fit inside the context string
  // and select a non-empty span.
  const positionTag = event.tags.find((t) => t[0] === 'textpositionselector')
  const position = positionTag ? parseTextPositionSelector(positionTag) : null
  if (context && position && position.end <= context.length) {
    const span = context.slice(position.start, position.end)
    if (span.length > 0) return { fullText: context, markedSpan: span }
  }
  // Context without usable offsets: mark the event content inside it.
  if (context) return { fullText: context, markedSpan: exact }
  // No context: rebuild the passage from quote-selector prefix/suffix.
  const quoteTag = event.tags.find((t) => t[0] === 'textquoteselector')
  if (quoteTag) {
    const { prefix, suffix } = parseTextQuoteSelectorParts(quoteTag)
    return { fullText: `${prefix}${exact}${suffix}`, markedSpan: exact }
  }
  // Fallback: the highlighted text alone.
  return { fullText: exact, markedSpan: exact }
}

19
src/lib/rss-article.ts

@ -99,16 +99,16 @@ export function getWebBookmarkArticleUrl(event: Pick<Event, 'kind' | 'tags'>): s @@ -99,16 +99,16 @@ export function getWebBookmarkArticleUrl(event: Pick<Event, 'kind' | 'tags'>): s
return undefined
}
/** HTTP(S) page URL from kind 9802 `r` tags (`source` marker or bare `r`). */
/** HTTP(S) page URL from kind 9802 `r` tags. */
export function getHighlightSourceHttpUrl(event: Pick<Event, 'tags'>): string | undefined {
for (const t of event.tags) {
if (t[0] !== 'r' || !t[1]) continue
if (!t[0] || String(t[0]).toLowerCase() !== 'r' || !t[1]) continue
const u = t[1].trim()
if (!u.startsWith('http://') && !u.startsWith('https://')) continue
const marker = (t[2] ?? '').trim().toLowerCase()
// NIP-84: non-source URL refs use `mention`; only `source` (any casing) or legacy bare `r` is the page.
// NIP-84: only `mention` marks a non-source URL; everything else (bare `r`, `source`, `-`, unknown) is the page.
if (marker === 'mention') continue
if (marker === 'source' || marker === '') return canonicalizeRssArticleUrl(u)
return canonicalizeRssArticleUrl(u)
}
return undefined
}
@ -138,9 +138,9 @@ export function computeRTagFilterValuesForArticleThread(canonicalUrl: string): s @@ -138,9 +138,9 @@ export function computeRTagFilterValuesForArticleThread(canonicalUrl: string): s
return [...out]
}
/** Strip anchors whose href targets https://clawstr.com/… (incl. subdomains, http(s), protocol-relative). */
export function isClawstrDotComHttpHref(href: string): boolean {
const t = href.trim()
/** True for http(s) URLs whose host is clawstr.com (incl. subdomains; supports protocol-relative `//…`). */
export function isClawstrDotComHttpUrl(url: string): boolean {
const t = url.trim()
if (!t) return false
try {
const u = t.startsWith('//') ? new URL(`https:${t}`) : new URL(t)
@ -152,6 +152,11 @@ export function isClawstrDotComHttpHref(href: string): boolean { @@ -152,6 +152,11 @@ export function isClawstrDotComHttpHref(href: string): boolean {
}
}
/** Same as {@link isClawstrDotComHttpUrl} — use for `href` attributes in HTML. */
export function isClawstrDotComHttpHref(href: string): boolean {
  // Thin alias kept for call sites that deal in anchor hrefs rather than page URLs.
  return isClawstrDotComHttpUrl(href)
}
/**
* NIP-25 kind 17 + NIP-73: resolve http(s) target URL for a `k: web` external reaction.
* Stops at the next `k` tag so podcast-style multi-scope reactions are not mis-parsed as web.

113
src/lib/rss-web-feed.ts

@ -1,24 +1,28 @@ @@ -1,24 +1,28 @@
import { ExtendedKind, FAST_READ_RELAY_URLS } from '@/constants'
import { buildAccountListRelayUrlsForMerge } from '@/lib/account-list-relay-urls'
import { getFavoritesFeedRelayUrls } from '@/lib/favorites-feed-relays'
import { isReplyNoteEvent } from '@/lib/event'
import {
canonicalizeRssArticleUrl,
computeRTagFilterValuesForArticleThread,
getArticleUrlFromCommentITags,
getHighlightSourceHttpUrl,
getWebBookmarkArticleUrl,
getWebExternalReactionTargetUrl
} from '@/lib/rss-article'
import logger from '@/lib/logger'
import { normalizeUrl } from '@/lib/url'
import { isImage, isLocalNetworkUrl, isMedia, isVideo, normalizeUrl } from '@/lib/url'
import { queryService } from '@/services/client.service'
import indexedDb from '@/services/indexed-db.service'
import type { RssFeedItem } from '@/services/rss-feed.service'
import type { Event } from 'nostr-tools'
import { kinds } from 'nostr-tools'
import { kinds, type Event, type Filter } from 'nostr-tools'
/** IndexedDB: `'1'` (default) = strip &lt;a href&gt; to clawstr.com from RSS HTML in the feed list. */
/** IndexedDB: `'1'` (default) = hide clawstr.com (strip preview links + drop URL/RSS rows for that host). */
export const RSS_WEB_SUPPRESS_CLAWSTR_SETTING = 'rssWebSuppressClawstrLinks'
/** IndexedDB: `'1'` (default) = keep local/media/feed XML links as plain RSS rows, not URL cards. */
export const RSS_WEB_HIDE_UNIFIED_CLUTTER_SETTING = 'rssWebHideUnifiedClutter'
/** IndexedDB: feed view — article URL cards, flat RSS timeline, or both interleaved. */
export const RSS_WEB_FEED_SCOPE_SETTING = 'rssWebFeedScope'
@ -132,19 +136,89 @@ export function isHttpArticleUrl(url: string): boolean { @@ -132,19 +136,89 @@ export function isHttpArticleUrl(url: string): boolean {
return t.startsWith('http://') || t.startsWith('https://')
}
/**
 * URLs that make poor article URL cards: localhost/LAN hosts, direct media files,
 * documents, and common RSS/Atom feed paths.
 * When filtering is on, these stay as normal RSS timeline rows instead of Web URL cards.
 *
 * @param url Candidate article URL (leading/trailing whitespace tolerated).
 * @returns true when the URL should be kept off URL cards; false for normal http(s) pages
 *          and for anything that is not an http(s) URL at all.
 */
export function isRssWebUnifiedClutterUrl(url: string): boolean {
  const t = url.trim()
  if (!isHttpArticleUrl(t)) return false
  let parsed: URL
  try {
    parsed = new URL(t)
  } catch {
    return false
  }
  const host = parsed.hostname.toLowerCase()
  // Local / non-routable hosts never resolve for other readers.
  if (host.endsWith('.local')) return true
  if (isLocalNetworkUrl(t)) return true
  // Loopback addresses (127.0.0.0/8) — presumably not covered by isLocalNetworkUrl; TODO confirm.
  const ipv4 = host.match(/^(\d+)\.(\d+)\.(\d+)\.(\d+)$/)
  if (ipv4 && Number(ipv4[1]) === 127) return true
  // Direct media files render better as plain RSS rows than as article cards.
  if (isMedia(t) || isVideo(t) || isImage(t)) return true
  const path = parsed.pathname.toLowerCase()
  const segments = path.split('/').filter(Boolean)
  const last = segments[segments.length - 1] || ''
  // Documents — not article pages. (URL.pathname never contains a query string,
  // so no "?…" allowance is needed here.)
  if (/\.(pdf|epub|mobi|azw3|doc|docx|xls|xlsx|ppt|pptx|ods|odt|rtf)$/i.test(path)) {
    return true
  }
  // Feed documents: *.rss, *.atom, any *.xml (covers feed.xml / rss.xml / atom.xml),
  // or a bare trailing /feed, /rss, /atom segment.
  if (/\.(rss|atom|xml)$/i.test(last)) return true
  if (last === 'feed' || last === 'rss' || last === 'atom') return true
  return false
}
/** REQ filters for Nostr comments, voice comments, and highlights on one article URL (synthetic RSS thread). */
export function buildRssArticleUrlThreadInteractionFilters(
  canonicalArticleUrl: string,
  limit: number
): Filter[] {
  const canonical = canonicalizeRssArticleUrl(canonicalArticleUrl)
  const commentKinds = [ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT]
  // Comments/voice comments tag the URL with either lowercase #i or uppercase #I.
  const filters: Filter[] = [
    { '#i': [canonical], kinds: [...commentKinds], limit },
    { '#I': [canonical], kinds: [...commentKinds], limit }
  ]
  // Highlights reference the page via r tags; only add the filter when there are values to match.
  const rValues = computeRTagFilterValuesForArticleThread(canonical)
  if (rValues.length > 0) {
    filters.push({ '#r': rValues, kinds: [kinds.Highlights], limit })
  }
  return filters
}
/** Whether `evt` belongs to the URL-scoped article thread (comments / voice / highlight of this page). */
export function isRssArticleUrlThreadInteraction(evt: Event, canonicalArticleUrl: string): boolean {
  const key = canonicalizeRssArticleUrl(canonicalArticleUrl)
  const targetsThisPage = (u: string | undefined): boolean =>
    !!u && canonicalizeRssArticleUrl(u) === key
  // Highlights point at the page via their r-tag source URL.
  if (evt.kind === kinds.Highlights) {
    return targetsThisPage(getHighlightSourceHttpUrl(evt))
  }
  // Everything else must be a reply-shaped event whose i-tags name this URL.
  return isReplyNoteEvent(evt) && targetsThisPage(getArticleUrlFromCommentITags(evt))
}
/**
 * Group RSS entries by canonical article URL (NIP-22 / web thread key).
 * Always excludes clutter links (local/media/feed-document URLs); callers that
 * need those should use `partitionRssItemsForWebFeed` directly.
 */
export function groupRssItemsByCanonicalUrl(items: RssFeedItem[]): RssUrlGroup[] {
  const { groups } = partitionRssItemsForWebFeed(items, { excludeClutterLinks: true })
  return groups
}
/** HTTP(S) article groups for combined cards; everything else stays as plain RSS rows. */
export function partitionRssItemsForWebFeed(items: RssFeedItem[]): {
export function partitionRssItemsForWebFeed(
items: RssFeedItem[],
options?: { excludeClutterLinks?: boolean }
): {
groups: RssUrlGroup[]
nonHttpItems: RssFeedItem[]
} {
const excludeClutter = options?.excludeClutterLinks !== false
const map = new Map<string, RssFeedItem[]>()
const nonHttpItems: RssFeedItem[] = []
for (const item of items) {
@ -153,6 +227,10 @@ export function partitionRssItemsForWebFeed(items: RssFeedItem[]): { @@ -153,6 +227,10 @@ export function partitionRssItemsForWebFeed(items: RssFeedItem[]): {
nonHttpItems.push(item)
continue
}
if (excludeClutter && isRssWebUnifiedClutterUrl(link)) {
nonHttpItems.push(item)
continue
}
const key = canonicalizeRssArticleUrl(link)
const list = map.get(key)
if (list) list.push(item)
@ -191,9 +269,11 @@ export type ArticleUrlFeedWebRow = { @@ -191,9 +269,11 @@ export type ArticleUrlFeedWebRow = {
export function buildArticleUrlFeedRows(
filteredItems: RssFeedItem[],
manualEntries: ManualRssWebUrlEntry[],
relayDiscoveredEntries: ManualRssWebUrlEntry[]
relayDiscoveredEntries: ManualRssWebUrlEntry[],
options?: { excludeClutterLinks?: boolean }
): { webRows: ArticleUrlFeedWebRow[]; nonHttpItems: RssFeedItem[] } {
const { groups, nonHttpItems } = partitionRssItemsForWebFeed(filteredItems)
const { groups, nonHttpItems } = partitionRssItemsForWebFeed(filteredItems, options)
const excludeClutter = options?.excludeClutterLinks !== false
const webByUrl = new Map<string, { rssItems: RssFeedItem[]; latestPub: number }>()
for (const g of groups) {
@ -214,10 +294,12 @@ export function buildArticleUrlFeedRows( @@ -214,10 +294,12 @@ export function buildArticleUrlFeedRows(
for (const { url, addedAt } of manualEntries) {
if (!isHttpArticleUrl(url)) continue
if (excludeClutter && isRssWebUnifiedClutterUrl(url)) continue
mergeNostrTimestamp(canonicalizeRssArticleUrl(url), addedAt)
}
for (const { url, addedAt } of relayDiscoveredEntries) {
if (!isHttpArticleUrl(url)) continue
if (excludeClutter && isRssWebUnifiedClutterUrl(url)) continue
mergeNostrTimestamp(canonicalizeRssArticleUrl(url), addedAt)
}
@ -306,7 +388,10 @@ export async function fetchDiscoveredWebUrlsFromRelays(options: { @@ -306,7 +388,10 @@ export async function fetchDiscoveredWebUrlsFromRelays(options: {
accountPubkey: string | null
favoriteRelays: string[]
blockedRelays: string[]
/** When true (default), omit localhost, media files, and feed-document URLs from discovery. */
excludeClutterUrls?: boolean
}): Promise<ManualRssWebUrlEntry[]> {
const excludeClutter = options.excludeClutterUrls !== false
const relayUrls = await buildRssWebNostrQueryRelayUrls(options)
if (relayUrls.length === 0) {
logger.info('[RssWebFeed] Relay URL discovery skipped (no relays)')
@ -323,6 +408,7 @@ export async function fetchDiscoveredWebUrlsFromRelays(options: { @@ -323,6 +408,7 @@ export async function fetchDiscoveredWebUrlsFromRelays(options: {
const onEvent = (evt: Event) => {
const url = extractArticleUrlFromWebActivityEvent(evt)
if (!url) return
if (excludeClutter && isRssWebUnifiedClutterUrl(url)) return
const key = canonicalizeRssArticleUrl(url)
const prev = latestByUrl.get(key) ?? 0
if (evt.created_at > prev) latestByUrl.set(key, evt.created_at)
@ -370,6 +456,17 @@ export async function saveRssWebSuppressClawstrPreference(suppress: boolean): Pr @@ -370,6 +456,17 @@ export async function saveRssWebSuppressClawstrPreference(suppress: boolean): Pr
await indexedDb.setSetting(RSS_WEB_SUPPRESS_CLAWSTR_SETTING, suppress ? '1' : '0')
}
/** Read the "hide unified clutter" preference; unset or unrecognized values default to true. */
export async function loadRssWebHideUnifiedClutterPreference(): Promise<boolean> {
  const stored = await indexedDb.getSetting(RSS_WEB_HIDE_UNIFIED_CLUTTER_SETTING)
  // Only an explicit opt-out ('0' / 'false') disables the filter.
  return stored !== '0' && stored !== 'false'
}
/** Persist the "hide unified clutter" preference ('1' = hide, '0' = show). */
export async function saveRssWebHideUnifiedClutterPreference(hide: boolean): Promise<void> {
  await indexedDb.setSetting(RSS_WEB_HIDE_UNIFIED_CLUTTER_SETTING, hide ? '1' : '0')
}
export async function loadRssWebFeedScopePreference(): Promise<RssWebFeedScope> {
const v = await indexedDb.getSetting(RSS_WEB_FEED_SCOPE_SETTING)
return parseRssWebFeedScope(v)

15
src/lib/thread-reply-root-match.ts

@ -1,6 +1,11 @@ @@ -1,6 +1,11 @@
import { getRootATag, getRootEventHexId } from '@/lib/event'
import { canonicalizeRssArticleUrl, getArticleUrlFromCommentITags } from '@/lib/rss-article'
import {
canonicalizeRssArticleUrl,
getArticleUrlFromCommentITags,
getHighlightSourceHttpUrl
} from '@/lib/rss-article'
import type { Event } from 'nostr-tools'
import { kinds } from 'nostr-tools'
/** Matches `ReplyNoteList` / discussion thread root shapes. */
export type TThreadRootRef =
@ -12,8 +17,12 @@ export type TThreadRootRef = @@ -12,8 +17,12 @@ export type TThreadRootRef =
export function eventReplyMatchesThreadRoot(evt: Event, root: TThreadRootRef): boolean {
if (root.type === 'I') {
const u = getArticleUrlFromCommentITags(evt)
if (!u) return false
return canonicalizeRssArticleUrl(u) === canonicalizeRssArticleUrl(root.id)
if (u && canonicalizeRssArticleUrl(u) === canonicalizeRssArticleUrl(root.id)) return true
if (evt.kind === kinds.Highlights) {
const hu = getHighlightSourceHttpUrl(evt)
return !!hu && canonicalizeRssArticleUrl(hu) === canonicalizeRssArticleUrl(root.id)
}
return false
}
if (root.type === 'A') {
const coord = getRootATag(evt)?.[1]

4
src/pages/secondary/RssArticlePage/index.tsx

@ -232,7 +232,7 @@ const RssArticlePage = forwardRef( @@ -232,7 +232,7 @@ const RssArticlePage = forwardRef(
</p>
{syntheticRoot && (
<div className="px-0 w-full">
<NoteStats className="mt-2" event={syntheticRoot} fetchIfNotExisting displayTopZapsAndLikes={false} />
<NoteStats className="mt-2" event={syntheticRoot} fetchIfNotExisting displayTopZapsAndLikes />
</div>
)}
<Separator />
@ -301,7 +301,7 @@ const RssArticlePage = forwardRef( @@ -301,7 +301,7 @@ const RssArticlePage = forwardRef(
</div>
{syntheticRoot && (
<div className="px-4 w-full">
<NoteStats className="mt-3" event={syntheticRoot} fetchIfNotExisting displayTopZapsAndLikes={false} />
<NoteStats className="mt-3" event={syntheticRoot} fetchIfNotExisting displayTopZapsAndLikes />
</div>
)}
<Separator className="mt-4" />

11
src/providers/ReplyProvider.tsx

@ -1,4 +1,8 @@ @@ -1,4 +1,8 @@
import { canonicalizeRssArticleUrl, getArticleUrlFromCommentITags } from '@/lib/rss-article'
import {
canonicalizeRssArticleUrl,
getArticleUrlFromCommentITags,
getHighlightSourceHttpUrl
} from '@/lib/rss-article'
import {
getParentATag,
getParentETag,
@ -7,7 +11,7 @@ import { @@ -7,7 +11,7 @@ import {
getRootETag,
isNip25ReactionKind
} from '@/lib/event'
import { Event } from 'nostr-tools'
import { Event, kinds } from 'nostr-tools'
import { createContext, useCallback, useContext, useState } from 'react'
type TReplyContext = {
@ -50,6 +54,9 @@ export function ReplyProvider({ children }: { children: React.ReactNode }) { @@ -50,6 +54,9 @@ export function ReplyProvider({ children }: { children: React.ReactNode }) {
const articleUrl = getArticleUrlFromCommentITags(reply)
if (articleUrl) {
rootId = canonicalizeRssArticleUrl(articleUrl)
} else if (reply.kind === kinds.Highlights) {
const hu = getHighlightSourceHttpUrl(reply)
if (hu) rootId = canonicalizeRssArticleUrl(hu)
}
}
}

1
src/routes.tsx

@ -51,6 +51,7 @@ const ROUTES = [ @@ -51,6 +51,7 @@ const ROUTES = [
{ path: '/home/notes/:id', element: SR(NotePageLazy) },
{ path: '/feed/notes/:id', element: SR(NotePageLazy) },
{ path: '/spells/notes/:id', element: SR(NotePageLazy) },
{ path: '/rss/notes/:id', element: SR(NotePageLazy) },
{ path: '/rss-item/:articleKey', element: SR(RssArticlePageLazy) },
{ path: '/rss/rss-item/:articleKey', element: SR(RssArticlePageLazy) },
{ path: '/feed/rss-item/:articleKey', element: SR(RssArticlePageLazy) },

10
src/services/note-stats.service.ts

@ -303,11 +303,21 @@ class NoteStatsService { @@ -303,11 +303,21 @@ class NoteStatsService {
kinds: [ExtendedKind.EXTERNAL_REACTION],
limit: reactionLimit
},
{
'#I': [canonical],
kinds: [ExtendedKind.EXTERNAL_REACTION],
limit: reactionLimit
},
{
'#i': [canonical],
kinds: [ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: interactionLimit
},
{
'#I': [canonical],
kinds: [ExtendedKind.COMMENT, ExtendedKind.VOICE_COMMENT],
limit: interactionLimit
},
{
'#r': computeRTagFilterValuesForArticleThread(canonical),
kinds: [kinds.Highlights],

Loading…
Cancel
Save