Browse Source

increase bookstr performance

imwald
Silberengel 4 months ago
parent
commit
06c09bd55b
  1. 663
      src/components/Bookstr/BookstrContent.tsx
  2. 64
      src/lib/bookstr-parser.ts
  3. 774
      src/services/client.service.ts

663
src/components/Bookstr/BookstrContent.tsx

@@ -1,9 +1,9 @@
import { useState, useEffect, useMemo } from 'react'
import { useState, useEffect, useMemo, useRef } from 'react'
import { Event } from 'nostr-tools'
import { parseBookWikilink, extractBookMetadata, BookReference } from '@/lib/bookstr-parser'
import client from '@/services/client.service'
import { ExtendedKind } from '@/constants'
import { Loader2, AlertCircle, ChevronDown, ChevronUp } from 'lucide-react'
import { Loader2, AlertCircle, ChevronDown, ChevronUp, ExternalLink } from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
Select,
@@ -29,12 +29,43 @@ interface BookSection {
originalChapter?: number
}
/**
 * Construct a Bible Gateway passage URL for a book reference.
 *
 * The passage is rendered in the familiar human-readable form, e.g.
 * "Psalm 23:4-7", "Genesis 1:4", or "1 John 3:16", then URL-encoded.
 *
 * @param reference - the parsed book reference (book, optional chapter/verse)
 * @param version - optional translation code; mapped to Bible Gateway's
 *                  code where they differ (e.g. DRB -> DRA), defaults to DRA
 * @returns a fully-formed biblegateway.com passage URL
 */
function buildBibleGatewayUrl(reference: BookReference, version?: string): string {
  // Chapter is appended with a space, verse with a colon, only when present.
  const chapterPart = reference.chapter !== undefined ? ` ${reference.chapter}` : ''
  const versePart = reference.verse ? `:${reference.verse}` : ''
  const passage = `${reference.book}${chapterPart}${versePart}`

  // Translation-code aliases where our codes differ from Bible Gateway's.
  // DRB (Douay-Rheims Bible) -> DRA (Douay-Rheims 1899 American Edition).
  const versionAliases: Record<string, string> = {
    'DRB': 'DRA',
    'DRA': 'DRA',
  }

  // Unknown codes pass through uppercased; no version at all falls back to DRA.
  let bgVersion = 'DRA'
  if (version) {
    const upper = version.toUpperCase()
    bgVersion = versionAliases[upper] ?? upper
  }

  return `https://www.biblegateway.com/passage/?search=${encodeURIComponent(passage)}&version=${bgVersion}`
}
export function BookstrContent({ wikilink, className }: BookstrContentProps) {
const [sections, setSections] = useState<BookSection[]>([])
const [isLoading, setIsLoading] = useState(true)
const [error, setError] = useState<string | null>(null)
const [expandedSections, setExpandedSections] = useState<Set<number>>(new Set())
const [selectedVersions, setSelectedVersions] = useState<Map<number, string>>(new Map())
const [collapsedCards, setCollapsedCards] = useState<Set<number>>(new Set())
const [cardHeights, setCardHeights] = useState<Map<number, number>>(new Map())
const cardRefs = useRef<Map<number, HTMLDivElement>>(new Map())
// Parse the wikilink
const parsed = useMemo(() => {
@@ -60,6 +91,21 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
}
const result = parseBookWikilink(`[[book:${bookType}:${content}]]`, bookType)
if (result) {
logger.debug('BookstrContent: Parsed wikilink', {
wikilink,
content,
bookType,
referenceCount: result.references.length,
references: result.references.map(r => ({
book: r.book,
chapter: r.chapter,
verse: r.verse,
version: r.version
})),
versions: result.versions
})
}
return result ? { ...result, bookType } : null
} catch (err) {
logger.error('Error parsing bookstr wikilink', { error: err, wikilink })
@ -87,18 +133,230 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -87,18 +133,230 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
setError(null)
try {
logger.debug('BookstrContent: Processing references', {
totalReferences: parsed.references.length,
references: parsed.references.map(r => ({
book: r.book,
chapter: r.chapter,
verse: r.verse
}))
})
// Step 0: Create placeholder sections immediately so links don't disappear
const placeholderSections: BookSection[] = parsed.references.map(ref => ({
reference: ref,
events: [],
versions: [],
originalVerses: ref.verse,
originalChapter: ref.chapter
}))
setSections(placeholderSections)
setIsLoading(false) // Show placeholders immediately
const newSections: BookSection[] = []
for (const ref of parsed.references) {
// Normalize book name (lowercase, hyphenated)
const normalizedBook = ref.book.toLowerCase().replace(/\s+/g, '-')
// Step 1: Check cache for ALL references first (in parallel)
const bookType = (parsed as any).bookType || 'bible'
// Determine which versions to fetch
const cacheChecks = parsed.references.map(async (ref) => {
const normalizedBook = ref.book.toLowerCase().replace(/\s+/g, '-')
const versionsToFetch = parsed.versions || (ref.version ? [ref.version] : [])
// If no versions specified, try to find available versions
if (versionsToFetch.length === 0) {
// Check cache for each version (or without version if none specified)
const cachePromises = versionsToFetch.length > 0
? versionsToFetch.map(version =>
client.getCachedBookstrEvents({
type: bookType,
book: normalizedBook,
chapter: ref.chapter,
verse: ref.verse,
version: version.toLowerCase()
})
)
: [
client.getCachedBookstrEvents({
type: bookType,
book: normalizedBook,
chapter: ref.chapter,
verse: ref.verse
})
]
const cachedResults = await Promise.all(cachePromises)
const allCachedEvents = cachedResults.flat()
return { ref, cachedEvents: allCachedEvents, versionsToFetch }
})
const cacheResults = await Promise.all(cacheChecks)
// Step 2: Display cached results IMMEDIATELY
for (const { ref, cachedEvents } of cacheResults) {
if (cachedEvents.length > 0) {
const allVersions = new Set<string>()
cachedEvents.forEach(event => {
const metadata = extractBookMetadata(event)
if (metadata.version) {
allVersions.add(metadata.version.toUpperCase())
}
})
// Filter events based on what was requested
let filteredEvents = cachedEvents
// Filter by chapter if specified
if (ref.chapter !== undefined) {
filteredEvents = filteredEvents.filter(event => {
const metadata = extractBookMetadata(event)
const eventChapter = parseInt(metadata.chapter || '0')
return eventChapter === ref.chapter
})
}
// Filter by verse if specified
if (ref.verse) {
const verseNumbers = new Set<number>()
const verseSpecs = ref.verse.split(',').map(v => v.trim()).filter(v => v)
for (const spec of verseSpecs) {
if (spec.includes('-')) {
const [startStr, endStr] = spec.split('-').map(v => v.trim())
const start = parseInt(startStr)
const end = parseInt(endStr)
if (!isNaN(start) && !isNaN(end) && start <= end) {
for (let v = start; v <= end; v++) {
verseNumbers.add(v)
}
}
} else {
const verseNum = parseInt(spec)
if (!isNaN(verseNum)) {
verseNumbers.add(verseNum)
}
}
}
filteredEvents = filteredEvents.filter(event => {
const metadata = extractBookMetadata(event)
const eventVerse = metadata.verse
if (!eventVerse) return false
const eventVerseNum = parseInt(eventVerse)
return !isNaN(eventVerseNum) && verseNumbers.has(eventVerseNum)
})
}
// Sort events by verse number
filteredEvents.sort((a, b) => {
const aMeta = extractBookMetadata(a)
const bMeta = extractBookMetadata(b)
const aVerse = parseInt(aMeta.verse || '0')
const bVerse = parseInt(bMeta.verse || '0')
return aVerse - bVerse
})
newSections.push({
reference: ref,
events: filteredEvents,
versions: Array.from(allVersions),
originalVerses: ref.verse,
originalChapter: ref.chapter
})
}
}
// Display cached results immediately (merge with placeholders)
if (!isCancelled) {
// Create a map of sections by reference key for easy lookup
const sectionsByRef = new Map<string, BookSection>()
newSections.forEach(section => {
const key = `${section.reference.book}-${section.reference.chapter}-${section.reference.verse}`
sectionsByRef.set(key, section)
})
// Update placeholders with cached results, keep placeholders for missing ones
const updatedSections = placeholderSections.map(placeholder => {
const key = `${placeholder.reference.book}-${placeholder.reference.chapter}-${placeholder.reference.verse}`
const cachedSection = sectionsByRef.get(key)
return cachedSection || placeholder
})
setSections(updatedSections)
// Set initial selected versions
const initialVersions = new Map<number, string>()
updatedSections.forEach((section, index) => {
if (section.versions.length > 0) {
initialVersions.set(index, section.versions[0])
}
})
setSelectedVersions(initialVersions)
}
// Step 3: Fetch missing events from network in the background
for (const { ref, cachedEvents, versionsToFetch } of cacheResults) {
if (isCancelled) break
// If we already have cached events for this reference, skip or do background refresh
if (cachedEvents.length > 0) {
// Still fetch in background to get updates
const normalizedBook = ref.book.toLowerCase().replace(/\s+/g, '-')
const fetchPromises = versionsToFetch.length > 0
? versionsToFetch.map(version =>
client.fetchBookstrEvents({
type: bookType,
book: normalizedBook,
chapter: ref.chapter,
verse: ref.verse,
version: version.toLowerCase()
})
)
: [
client.fetchBookstrEvents({
type: bookType,
book: normalizedBook,
chapter: ref.chapter,
verse: ref.verse
})
]
Promise.all(fetchPromises).then(fetchedResults => {
if (isCancelled) return
const allFetchedEvents = fetchedResults.flat()
if (allFetchedEvents.length > 0) {
// Update the section with fresh data
setSections(prevSections => {
const updated = [...prevSections]
const sectionIndex = updated.findIndex(s =>
s.reference.book === ref.book &&
s.reference.chapter === ref.chapter &&
s.reference.verse === ref.verse
)
if (sectionIndex >= 0) {
// Merge with existing events (deduplicate by event id)
const existingIds = new Set(updated[sectionIndex].events.map(e => e.id))
const newEvents = allFetchedEvents.filter(e => !existingIds.has(e.id))
updated[sectionIndex] = {
...updated[sectionIndex],
events: [...updated[sectionIndex].events, ...newEvents]
}
}
return updated
})
}
}).catch(err => {
logger.warn('BookstrContent: Background fetch failed', { error: err, ref })
})
continue
}
// No cached events, fetch from network
const normalizedBook = ref.book.toLowerCase().replace(/\s+/g, '-')
// Determine which versions to fetch
let versionsToFetchFinal = versionsToFetch
if (versionsToFetchFinal.length === 0) {
// First, try to find any version for this book/chapter/verse
const allEvents = await client.fetchBookstrEvents({
type: bookType,
@ -117,22 +375,22 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -117,22 +375,22 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
})
if (availableVersions.size > 0) {
versionsToFetch.push(Array.from(availableVersions)[0]) // Use first available
versionsToFetchFinal = [Array.from(availableVersions)[0]] // Use first available
} else {
// No versions found, try without version filter
const eventsWithoutVersion = await client.fetchBookstrEvents({
type: bookType,
book: normalizedBook,
chapter: ref.chapter,
verse: ref.verse
if (allEvents.length > 0) {
// Use events without version filter
const allVersions = new Set<string>()
allEvents.forEach(event => {
const metadata = extractBookMetadata(event)
if (metadata.version) {
allVersions.add(metadata.version.toUpperCase())
}
})
if (eventsWithoutVersion.length > 0) {
// Use events without version filter
newSections.push({
reference: ref,
events: eventsWithoutVersion,
versions: [],
events: allEvents,
versions: Array.from(allVersions),
originalVerses: ref.verse,
originalChapter: ref.chapter
})
@ -145,22 +403,13 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -145,22 +403,13 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
const allEvents: Event[] = []
const allVersions = new Set<string>()
for (const version of versionsToFetch) {
// Fetch entire chapter if verse is specified, entire book if only chapter is specified
for (const version of versionsToFetchFinal) {
const events = await client.fetchBookstrEvents({
type: bookType,
book: normalizedBook,
chapter: ref.chapter,
verse: ref.verse, // Pass verse for context, but we'll fetch entire chapter
version: version.toLowerCase()
})
logger.debug('BookstrContent: Fetched events', {
book: normalizedBook,
chapter: ref.chapter,
verse: ref.verse,
version,
eventCount: events.length
version: version.toLowerCase()
})
events.forEach(event => {
@ -172,10 +421,7 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -172,10 +421,7 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
})
}
// Filter events based on what was requested:
// - Book only: Show all events (all chapters)
// - Chapter only: Show all events for that chapter (all verses)
// - Verses: Show only the requested verses (but we have all verses cached for expansion)
// Filter events based on what was requested
let filteredEvents = allEvents
// Filter by chapter if specified
@ -187,25 +433,35 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -187,25 +433,35 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
})
}
// Filter by verse if specified (for verse-level queries)
// Filter by verse if specified
if (ref.verse) {
const verseParts = ref.verse.split(/[,\s-]+/).map(v => v.trim()).filter(v => v)
const verseNumbers = new Set<number>()
const verseSpecs = ref.verse.split(',').map(v => v.trim()).filter(v => v)
for (const spec of verseSpecs) {
if (spec.includes('-')) {
const [startStr, endStr] = spec.split('-').map(v => v.trim())
const start = parseInt(startStr)
const end = parseInt(endStr)
if (!isNaN(start) && !isNaN(end) && start <= end) {
for (let v = start; v <= end; v++) {
verseNumbers.add(v)
}
}
} else {
const verseNum = parseInt(spec)
if (!isNaN(verseNum)) {
verseNumbers.add(verseNum)
}
}
}
filteredEvents = filteredEvents.filter(event => {
const metadata = extractBookMetadata(event)
const eventVerse = metadata.verse
if (!eventVerse) return false
// Check if this verse matches any of the requested verses
const verseNum = parseInt(eventVerse)
return verseParts.some(part => {
if (part.includes('-')) {
const [start, end] = part.split('-').map(v => parseInt(v.trim()))
return !isNaN(start) && !isNaN(end) && verseNum >= start && verseNum <= end
} else {
const partNum = parseInt(part)
return !isNaN(partNum) && partNum === verseNum
}
})
const eventVerseNum = parseInt(eventVerse)
return !isNaN(eventVerseNum) && verseNumbers.has(eventVerseNum)
})
}
@ -218,14 +474,6 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -218,14 +474,6 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
return aVerse - bVerse
})
logger.debug('BookstrContent: Filtered events', {
book: normalizedBook,
chapter: ref.chapter,
verse: ref.verse,
totalFetched: allEvents.length,
filteredCount: filteredEvents.length
})
newSections.push({
reference: ref,
events: filteredEvents,
@ -235,29 +483,55 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -235,29 +483,55 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
})
}
logger.debug('BookstrContent: Setting sections', {
sectionCount: newSections.length,
sections: newSections.map(s => ({
book: s.reference.book,
chapter: s.reference.chapter,
verse: s.reference.verse,
eventCount: s.events.length,
versions: s.versions
}))
if (isCancelled) return
// Merge network results with existing sections (replace placeholders or update with new data)
setSections(prevSections => {
const sectionsByRef = new Map<string, BookSection>()
newSections.forEach(section => {
const key = `${section.reference.book}-${section.reference.chapter}-${section.reference.verse}`
sectionsByRef.set(key, section)
})
if (isCancelled) return
// Update existing sections with network results, or add new ones
const updated = prevSections.map(section => {
const key = `${section.reference.book}-${section.reference.chapter}-${section.reference.verse}`
const networkSection = sectionsByRef.get(key)
if (networkSection) {
// Merge events (deduplicate by event id)
const existingIds = new Set(section.events.map(e => e.id))
const newEvents = networkSection.events.filter(e => !existingIds.has(e.id))
return {
...networkSection,
events: [...section.events, ...newEvents]
}
}
return section
})
setSections(newSections)
// Add any new sections that weren't in placeholders
newSections.forEach(section => {
const key = `${section.reference.book}-${section.reference.chapter}-${section.reference.verse}`
if (!prevSections.some(s =>
`${s.reference.book}-${s.reference.chapter}-${s.reference.verse}` === key
)) {
updated.push(section)
}
})
// Set initial selected versions
const initialVersions = new Map<number, string>()
return updated
})
// Update selected versions
setSelectedVersions(prevVersions => {
const updated = new Map(prevVersions)
newSections.forEach((section, index) => {
if (section.versions.length > 0) {
initialVersions.set(index, section.versions[0])
if (section.versions.length > 0 && !updated.has(index)) {
updated.set(index, section.versions[0])
}
})
setSelectedVersions(initialVersions)
return updated
})
} catch (err) {
if (isCancelled) return
logger.error('Error fetching bookstr events', { error: err, wikilink })
@ -277,6 +551,69 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -277,6 +551,69 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [wikilink]) // Only depend on wikilink - parsed is derived from it via useMemo
// Measure card heights - measure BEFORE applying collapse
useEffect(() => {
const timeoutId = setTimeout(() => {
cardRefs.current.forEach((element, index) => {
if (element) {
// IMPORTANT: Temporarily remove ALL constraints to get true height
// This must happen BEFORE any collapse is applied
const originalMaxHeight = element.style.maxHeight
const originalOverflow = element.style.overflow
const originalHeight = element.style.height
// Remove all constraints
element.style.maxHeight = 'none'
element.style.overflow = 'visible'
element.style.height = 'auto'
// Force a reflow to ensure we get the true height
void element.offsetHeight
const height = element.scrollHeight
// Restore original styles
element.style.maxHeight = originalMaxHeight
element.style.overflow = originalOverflow
element.style.height = originalHeight
// Store the TRUE height (before collapse)
setCardHeights(prev => {
const currentHeight = prev.get(index)
if (currentHeight !== height && height > 0) {
const newMap = new Map(prev)
newMap.set(index, height)
logger.debug('BookstrContent: Measured card height', {
sectionIndex: index,
height,
needsCollapse: height > 500,
wasCollapsed: collapsedCards.has(index)
})
// Only auto-collapse if height > 500px and not already manually toggled
if (height > 500) {
setCollapsedCards(prevCollapsed => {
// Only auto-collapse if user hasn't manually expanded it
if (!prevCollapsed.has(index)) {
logger.debug('BookstrContent: Auto-collapsing card', { sectionIndex: index, height })
return new Set(prevCollapsed).add(index)
}
return prevCollapsed
})
}
return newMap
}
return prev
})
}
})
}, 500) // Wait longer for content to fully render
return () => clearTimeout(timeoutId)
}, [sections, collapsedCards])
if (isLoading) {
return (
<span className={cn('inline-flex items-center gap-1', className)}>
@ -318,25 +655,60 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -318,25 +655,60 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
const isExpanded = expandedSections.has(sectionIndex)
const hasVerses = section.originalVerses !== undefined && section.originalVerses.length > 0
const hasChapter = section.originalChapter !== undefined && !hasVerses
const isLast = sectionIndex === sections.length - 1
const cardHeight = cardHeights.get(sectionIndex) || 0
const isCardCollapsed = collapsedCards.has(sectionIndex)
const needsCollapse = cardHeight > 500
// Only show button if card is actually tall (needs collapse) or is currently collapsed
const shouldShowButton = filteredEvents.length > 0 && (needsCollapse || isCardCollapsed)
// Debug logging
if (filteredEvents.length > 0) {
logger.debug('BookstrContent: Card collapse check', {
sectionIndex,
eventCount: filteredEvents.length,
cardHeight,
isCardCollapsed,
needsCollapse,
shouldShowButton
})
}
return (
<>
<div
key={sectionIndex}
ref={(el) => {
if (el) {
cardRefs.current.set(sectionIndex, el)
} else {
cardRefs.current.delete(sectionIndex)
}
}}
className={cn(
'p-3',
!isLast && 'border-b'
!isLast && 'border-b',
needsCollapse && isCardCollapsed && 'overflow-hidden'
)}
style={needsCollapse && isCardCollapsed ? {
maxHeight: '500px',
transition: 'max-height 0.3s ease-out'
} : undefined}
>
{/* Header */}
<div className="flex items-center gap-2 mb-2">
<div className="flex items-center justify-between gap-2 mb-2">
<div className="flex items-center gap-2 flex-1 min-w-0">
<h4 className="font-semibold text-sm">
{section.reference.book}
{section.reference.chapter && ` ${section.reference.chapter}`}
{section.reference.verse && `:${section.reference.verse}`}
{selectedVersion && ` (${selectedVersion})`}
</h4>
{filteredEvents.length === 0 && (
<Loader2 className="h-3 w-3 animate-spin text-muted-foreground" />
)}
<VersionSelector
section={section}
sectionIndex={sectionIndex}
@ -348,45 +720,68 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -348,45 +720,68 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
}}
/>
</div>
<Button
variant="ghost"
size="sm"
className="h-6 w-6 p-0 shrink-0"
asChild
>
<a
href={buildBibleGatewayUrl(section.reference, selectedVersion)}
target="_blank"
rel="noopener noreferrer"
title="View on Bible Gateway"
>
<ExternalLink className="h-3 w-3" />
</a>
</Button>
</div>
{/* Verses */}
{filteredEvents.length > 0 && (
<VerseContent
events={filteredEvents}
hasVerses={hasVerses}
originalVerses={section.originalVerses}
isExpanded={isExpanded}
/>
)}
</div>
{/* Expand/Collapse buttons - only show if events were found */}
{hasVerses && filteredEvents.length > 0 && (
{/* Show more/less button for tall cards - OUTSIDE collapsed div so it's always visible */}
{shouldShowButton ? (
<div className="px-3 pb-3 border-t pt-2">
<Button
variant="ghost"
size="sm"
className="mt-2 h-6 text-xs"
className="h-6 text-xs w-full"
onClick={() => {
const newExpanded = new Set(expandedSections)
if (newExpanded.has(sectionIndex)) {
newExpanded.delete(sectionIndex)
setCollapsedCards(prev => {
const newSet = new Set(prev)
if (newSet.has(sectionIndex)) {
newSet.delete(sectionIndex)
} else {
newExpanded.add(sectionIndex)
newSet.add(sectionIndex)
}
setExpandedSections(newExpanded)
return newSet
})
}}
>
{isExpanded ? (
{isCardCollapsed ? (
<>
<ChevronUp className="h-3 w-3 mr-1" />
Collapse chapter
<ChevronDown className="h-3 w-3 mr-1" />
Show more
</>
) : (
<>
<ChevronDown className="h-3 w-3 mr-1" />
Read full chapter
<ChevronUp className="h-3 w-3 mr-1" />
Show less
</>
)}
</Button>
)}
{hasChapter && !hasVerses && filteredEvents.length > 0 && (
</div>
) : null}
{/* Expand/Collapse buttons - only show if events were found */}
{hasVerses && filteredEvents.length > 0 && (
<div className="px-3 pb-3">
<Button
variant="ghost"
size="sm"
@ -404,30 +799,31 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -404,30 +799,31 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
{isExpanded ? (
<>
<ChevronUp className="h-3 w-3 mr-1" />
Collapse book
Collapse chapter
</>
) : (
<>
<ChevronDown className="h-3 w-3 mr-1" />
Read full book
Read full chapter
</>
)}
</Button>
</div>
)}
{/* Expanded content */}
{isExpanded && (
<div className="mt-3 pt-3 border-t">
<div className="px-3 pb-3 mt-3 pt-3 border-t">
{/* Fetch and display full chapter/book */}
<ExpandedContent
section={section}
selectedVersion={selectedVersion}
originalVerses={section.originalVerses}
originalChapter={section.originalChapter}
originalVerses={section.originalVerses}
/>
</div>
)}
</div>
</>
)
})}
</div>
@ -438,11 +834,11 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) { @@ -438,11 +834,11 @@ export function BookstrContent({ wikilink, className }: BookstrContentProps) {
interface ExpandedContentProps {
section: BookSection
selectedVersion: string
originalVerses?: string
originalChapter?: number
originalVerses?: string
}
function ExpandedContent({ section, selectedVersion, originalVerses, originalChapter }: ExpandedContentProps) {
function ExpandedContent({ section, selectedVersion, originalChapter, originalVerses }: ExpandedContentProps) {
const [expandedEvents, setExpandedEvents] = useState<Event[]>([])
const [isLoading, setIsLoading] = useState(true)
@ -499,26 +895,43 @@ function ExpandedContent({ section, selectedVersion, originalVerses, originalCha @@ -499,26 +895,43 @@ function ExpandedContent({ section, selectedVersion, originalVerses, originalCha
return <div className="text-xs text-muted-foreground">Loading...</div>
}
// Parse original verses to determine which ones should have a border
const originalVerseNumbers = new Set<number>()
if (originalVerses) {
const verseSpecs = originalVerses.split(',').map(v => v.trim()).filter(v => v)
for (const spec of verseSpecs) {
if (spec.includes('-')) {
const [startStr, endStr] = spec.split('-').map(v => v.trim())
const start = parseInt(startStr)
const end = parseInt(endStr)
if (!isNaN(start) && !isNaN(end) && start <= end) {
for (let v = start; v <= end; v++) {
originalVerseNumbers.add(v)
}
}
} else {
const verseNum = parseInt(spec)
if (!isNaN(verseNum)) {
originalVerseNumbers.add(verseNum)
}
}
}
}
return (
<VerseContent
events={expandedEvents}
hasVerses={!!originalVerses}
originalVerses={originalVerses}
isExpanded={true}
originalChapter={originalChapter}
originalVerseNumbers={originalVerseNumbers}
/>
)
}
interface VerseContentProps {
events: Event[]
hasVerses: boolean
originalVerses?: string
isExpanded: boolean
originalChapter?: number
originalVerseNumbers?: Set<number>
}
function VerseContent({ events, hasVerses, originalVerses, isExpanded, originalChapter }: VerseContentProps) {
function VerseContent({ events, originalVerseNumbers }: VerseContentProps) {
const [parsedContents, setParsedContents] = useState<Map<string, string>>(new Map())
useEffect(() => {
@ -552,38 +965,16 @@ function VerseContent({ events, hasVerses, originalVerses, isExpanded, originalC @@ -552,38 +965,16 @@ function VerseContent({ events, hasVerses, originalVerses, isExpanded, originalC
{events.map((event) => {
const metadata = extractBookMetadata(event)
const verseNum = metadata.verse
const chapterNum = metadata.chapter
// Check if this verse is in the original verses list
const isOriginalVerse = hasVerses && originalVerses && verseNum && (() => {
const verseParts = originalVerses.split(/[,\s-]+/).map(v => v.trim())
const verseNumInt = parseInt(verseNum)
// Check exact match or range
for (const part of verseParts) {
if (part.includes('-')) {
const [start, end] = part.split('-').map(v => parseInt(v.trim()))
if (!isNaN(start) && !isNaN(end) && verseNumInt >= start && verseNumInt <= end) {
return true
}
} else {
const partNum = parseInt(part)
if (!isNaN(partNum) && partNum === verseNumInt) {
return true
}
}
}
return false
})()
const isOriginalChapter = originalChapter !== undefined &&
chapterNum && parseInt(chapterNum) === originalChapter
const verseNumInt = verseNum ? parseInt(verseNum) : null
const isOriginalVerse = originalVerseNumbers && verseNumInt !== null && originalVerseNumbers.has(verseNumInt)
const content = parsedContents.get(event.id) || event.content
return (
<div
key={event.id}
className={cn(
'flex gap-2 text-sm leading-relaxed items-baseline',
isExpanded && (isOriginalVerse || isOriginalChapter) && 'border-l-2 border-gray-400 pl-2'
"flex gap-2 text-sm leading-relaxed items-baseline",
isOriginalVerse && "border-l-2 border-muted-foreground/30 pl-2 py-1"
)}
>
{/* Verse number on the left - only show verse number, not chapter:verse */}

64
src/lib/bookstr-parser.ts

@ -33,42 +33,38 @@ export function parseBookNotation(notation: string, bookType: string = 'bible'): @@ -33,42 +33,38 @@ export function parseBookNotation(notation: string, bookType: string = 'bible'):
const references: BookReference[] = []
// Split by comma or semicolon to handle multiple references
// Use a regex to split on commas/semicolons, but be careful with verse ranges like "1-3"
// We'll split on commas/semicolons that are followed by a space and a capital letter (new book name)
// or split on commas/semicolons that are not part of a verse range
// Strategy:
// 1. First, try to intelligently split on commas/semicolons that are followed by a capital letter (new book)
// 2. If that doesn't work, check if all parts start with capital letters (multiple references)
// 3. Otherwise, treat as a single reference with verse lists
// Step 1: Try intelligent splitting
const parts: string[] = []
let currentPart = ''
let inVerseRange = false
for (let i = 0; i < notation.length; i++) {
const char = notation[i]
const nextChar = notation[i + 1]
if (char === '-' && /^\d/.test(currentPart.slice(-1))) {
// This is part of a verse range (e.g., "1-3")
inVerseRange = true
currentPart += char
} else if (char === ',' || char === ';') {
// Check if this comma/semicolon is separating references
// If the next non-whitespace character is a capital letter, it's likely a new book
const rest = notation.substring(i + 1).trim()
if (rest.length > 0 && /^[A-Z]/.test(rest)) {
// This is separating references - save current part and start new one
if (char === ',' || char === ';') {
// Look ahead to see if this is separating references
// Check if there's whitespace followed by a capital letter or number after this comma/semicolon
// (Numbers handle cases like "1 John", "2 Corinthians")
const afterComma = notation.substring(i + 1)
const trimmedAfter = afterComma.trim()
// If the next non-whitespace character is a capital letter or number, it's likely a new book reference
if (trimmedAfter.length > 0 && /^[A-Z0-9]/.test(trimmedAfter)) {
// This comma/semicolon is separating references
if (currentPart.trim()) {
parts.push(currentPart.trim())
}
currentPart = ''
inVerseRange = false
} else {
// This is part of the current reference (e.g., verse list "1,3,5")
// This comma/semicolon is part of the current reference (e.g., verse list "1,3,5")
currentPart += char
inVerseRange = false
}
} else {
currentPart += char
if (char === ' ' && inVerseRange) {
inVerseRange = false
}
}
}
@ -77,26 +73,28 @@ export function parseBookNotation(notation: string, bookType: string = 'bible'): @@ -77,26 +73,28 @@ export function parseBookNotation(notation: string, bookType: string = 'bible'):
parts.push(currentPart.trim())
}
// If no splitting occurred, try simple split as fallback
if (parts.length === 0) {
parts.push(notation.trim())
} else if (parts.length === 1 && (notation.includes(',') || notation.includes(';'))) {
// Fallback: if we didn't split but there are commas/semicolons, try simple split
// This handles cases like "Genesis 1:1,2,3" (verse list, not multiple references)
const simpleParts = notation.split(/[,;]/).map(p => p.trim())
// Step 2: If we only got one part but there are commas/semicolons, try simple split
if (parts.length === 1 && (notation.includes(',') || notation.includes(';'))) {
const simpleParts = notation.split(/[,;]/).map(p => p.trim()).filter(p => p.length > 0)
if (simpleParts.length > 1) {
// Check if these look like separate references (each has a book name)
const looksLikeMultipleRefs = simpleParts.every(part => {
// Check if part starts with a capital letter (likely a book name)
return /^[A-Z]/.test(part.trim())
// Check if these look like separate references (each starts with a capital letter or number)
// Numbers handle cases like "1 John", "2 Corinthians"
const allStartWithCapitalOrNumber = simpleParts.every(part => {
const trimmed = part.trim()
return trimmed.length > 0 && /^[A-Z0-9]/.test(trimmed)
})
if (looksLikeMultipleRefs) {
if (allStartWithCapitalOrNumber) {
// These are multiple references
parts.length = 0
parts.push(...simpleParts)
}
// Otherwise, treat as a single reference with verse lists (e.g., "Genesis 1:1,2,3")
}
}
// Step 3: Parse each part
for (const part of parts) {
const normalizedPart = normalizeBookReferenceWhitespace(part)
const ref = parseSingleBookReference(normalizedPart, bookType)

774
src/services/client.service.ts

@ -2098,6 +2098,40 @@ class ClientService extends EventTarget { @@ -2098,6 +2098,40 @@ class ClientService extends EventTarget {
}))
}
/**
 * Expand a verse specification string into a sorted list of individual verse numbers.
 * Examples: "4-5" -> [4, 5], "4,5,6" -> [4, 5, 6], "4-7,10" -> [4, 5, 6, 7, 10]
 */
private expandVerseRange(verse: string): number[] {
  // Set deduplicates overlapping specs like "4-6,5"
  const expanded = new Set<number>()
  // Comma separates independent specs; each spec is either a range or a single verse
  for (const token of verse.split(',')) {
    const spec = token.trim()
    if (!spec) continue
    if (spec.includes('-')) {
      // Range form, e.g. "4-7": include every verse from start to end inclusive.
      // Malformed or reversed ranges (NaN bounds, start > end) are silently dropped.
      const [lowRaw, highRaw] = spec.split('-').map(s => s.trim())
      const low = parseInt(lowRaw)
      const high = parseInt(highRaw)
      if (!isNaN(low) && !isNaN(high) && low <= high) {
        for (let n = low; n <= high; n++) {
          expanded.add(n)
        }
      }
    } else {
      // Single verse form, e.g. "10"; non-numeric specs are ignored
      const n = parseInt(spec)
      if (!isNaN(n)) {
        expanded.add(n)
      }
    }
  }
  return [...expanded].sort((a, b) => a - b)
}
/**
* Fetch bookstr events by tag filters
* Strategy:
@ -2105,6 +2139,9 @@ class ClientService extends EventTarget { @@ -2105,6 +2139,9 @@ class ClientService extends EventTarget {
* 2. Use tag filters with composite bookstr index on orly relay (most efficient)
* 3. Fall back to other relays if needed
* 4. Save fetched events to cache
*
* Note: If verse is a range (e.g., "4-5"), we expand it and fetch each verse individually
* since each verse is a separate event.
*/
async fetchBookstrEvents(filters: {
type?: string
@ -2115,25 +2152,131 @@ class ClientService extends EventTarget { @@ -2115,25 +2152,131 @@ class ClientService extends EventTarget {
}): Promise<NEvent[]> {
logger.info('fetchBookstrEvents: Called', { filters })
try {
// Step 1: Check cache first
// Step 1: Check cache FIRST before any network requests
// This is critical for performance - we should always check cache before making network calls
const cachedEvents = await this.getCachedBookstrEvents(filters)
if (cachedEvents.length > 0) {
logger.info('fetchBookstrEvents: Found cached events', {
logger.info('fetchBookstrEvents: Found cached events (before verse expansion)', {
count: cachedEvents.length,
filters
})
// Still fetch in background to get updates, but return cached immediately
// Background fetch failures are logged and otherwise ignored
this.fetchBookstrEventsFromRelays(filters, { skipOrly: true }).catch(err => {
this.fetchBookstrEventsFromRelays(filters).catch(err => {
logger.warn('fetchBookstrEvents: Background fetch failed', { error: err })
})
return cachedEvents
}
// Step 2: Fetch from relays
const events = await this.fetchBookstrEventsFromRelays(filters)
// Step 2: If verse is specified and contains a range, expand it and fetch each verse individually
// Each verse is a separate event, so we need to fetch them separately
// BUT: Check cache for each verse FIRST before making network requests
if (filters.verse) {
const verseNumbers = this.expandVerseRange(filters.verse)
// If we expanded to multiple verses, fetch each one separately and combine results
if (verseNumbers.length > 1) {
logger.info('fetchBookstrEvents: Expanding verse range', {
originalVerse: filters.verse,
expandedVerses: verseNumbers
})
const allEvents: NEvent[] = []
const seenEventIds = new Set<string>()
// Check cache for each verse FIRST before making network requests
for (const verseNum of verseNumbers) {
const verseFilter = { ...filters, verse: verseNum.toString() }
// Check cache first for this specific verse
const verseCachedEvents = await this.getCachedBookstrEvents(verseFilter)
if (verseCachedEvents.length > 0) {
logger.info('fetchBookstrEvents: Found cached events for verse', {
verse: verseNum,
count: verseCachedEvents.length
})
for (const event of verseCachedEvents) {
if (!seenEventIds.has(event.id)) {
seenEventIds.add(event.id)
allEvents.push(event)
}
}
// Still fetch in background for this verse
this.fetchBookstrEventsFromRelays(verseFilter).catch(err => {
logger.warn('fetchBookstrEvents: Background fetch failed for verse', { verse: verseNum, error: err })
})
} else {
// No cache hit, fetch from network
const verseEvents = await this.fetchBookstrEvents(verseFilter)
for (const event of verseEvents) {
if (!seenEventIds.has(event.id)) {
seenEventIds.add(event.id)
allEvents.push(event)
}
}
}
}
logger.info('fetchBookstrEvents: Combined results from verse range', {
originalVerse: filters.verse,
expandedVerses: verseNumbers,
totalEvents: allEvents.length
})
return allEvents
}
// If only one verse after expansion, continue with normal flow
}
// Step 3: Check cache again (in case verse expansion didn't happen or only one verse)
// This is redundant but ensures we always check cache
const finalCachedEvents = await this.getCachedBookstrEvents(filters)
if (finalCachedEvents.length > 0) {
logger.info('fetchBookstrEvents: Found cached events (final check)', {
count: finalCachedEvents.length,
filters
})
// Still fetch in background to get updates, but return cached immediately
// Background fetch failures are logged and otherwise ignored
this.fetchBookstrEventsFromRelays(filters).catch(err => {
logger.warn('fetchBookstrEvents: Background fetch failed', { error: err })
})
return finalCachedEvents
}
// Next, try the known book publishing pubkey (most efficient)
const bookstrPublisherPubkey = '3e1ad0f3a5d3c12245db7788546c43ade3d97c6e046c594f6017cd6cd4164690'
let events: NEvent[] = []
try {
logger.info('fetchBookstrEvents: Querying known book publishing pubkey first', {
pubkey: bookstrPublisherPubkey,
filters: JSON.stringify(filters)
})
events = await this.fetchBookstrEventsFromPublicationPubkey(bookstrPublisherPubkey, filters)
if (events.length > 0) {
logger.info('fetchBookstrEvents: Successfully fetched from known publisher', {
eventCount: events.length,
filters: JSON.stringify(filters)
})
}
} catch (error) {
logger.warn('fetchBookstrEvents: Error fetching from known publisher', {
error,
filters: JSON.stringify(filters)
})
}
// If no results from known publisher, try fallback relays
if (events.length === 0) {
logger.info('fetchBookstrEvents: No results from known publisher, trying fallback relays', {
filters: JSON.stringify(filters)
})
events = await this.fetchBookstrEventsFromRelays(filters)
}
// Step 3: Save events to cache
// Step 4: Save events to cache
if (events.length > 0) {
try {
// Group events by publication (master event)
@ -2182,7 +2325,7 @@ class ClientService extends EventTarget { @@ -2182,7 +2325,7 @@ class ClientService extends EventTarget {
/**
* Get cached bookstr events from IndexedDB
*/
private async getCachedBookstrEvents(filters: {
async getCachedBookstrEvents(filters: {
type?: string
book?: string
chapter?: number
@ -2192,26 +2335,107 @@ class ClientService extends EventTarget { @@ -2192,26 +2335,107 @@ class ClientService extends EventTarget {
try {
const allCached = await indexedDb.getStoreItems(StoreNames.PUBLICATION_EVENTS)
const cachedEvents: NEvent[] = []
let checkedCount = 0
let skippedCount = 0
logger.debug('getCachedBookstrEvents: Checking cache', {
logger.info('getCachedBookstrEvents: Checking cache', {
totalCached: allCached.length,
filters
filters: JSON.stringify(filters)
})
// If verse is specified, expand it to individual verse numbers
// Each verse is a separate event, so we need to check each one
const verseNumbers = filters.verse ? this.expandVerseRange(filters.verse) : null
// Sample a few events to see what's in the cache
const sampleEvents: any[] = []
let sampleCount = 0
for (const item of allCached) {
if (!item?.value || item.value.kind !== ExtendedKind.PUBLICATION_CONTENT) {
if (!item?.value) {
skippedCount++
continue
}
const event = item.value as NEvent
if (this.eventMatchesBookstrFilters(event, filters)) {
// Sample first few 30041 events to see what metadata they have
if (event.kind === ExtendedKind.PUBLICATION_CONTENT && sampleCount < 5) {
const metadata = this.extractBookMetadataFromEvent(event)
sampleEvents.push({
id: event.id.substring(0, 8),
kind: event.kind,
metadata: {
type: metadata.type,
book: metadata.book,
chapter: metadata.chapter,
verse: metadata.verse,
version: metadata.version
}
})
sampleCount++
}
// Check both 30040 (publications) and 30041 (content)
// For 30040s, we want to find matching publications, then we can fetch their content
// For 30041s, we want to return matching content directly
if (event.kind === ExtendedKind.PUBLICATION_CONTENT) {
checkedCount++
// If verse range was expanded, check each verse individually
if (verseNumbers && verseNumbers.length > 0) {
const matchesAnyVerse = verseNumbers.some(verseNum => {
const verseFilter = { ...filters, verse: verseNum.toString() }
const matches = this.eventMatchesBookstrFilters(event, verseFilter)
if (matches) {
logger.debug('getCachedBookstrEvents: Event matches verse filter', {
eventId: event.id.substring(0, 8),
eventVerse: this.extractBookMetadataFromEvent(event).verse,
verseFilter: verseNum.toString(),
filters: JSON.stringify(verseFilter)
})
}
return matches
})
if (matchesAnyVerse) {
cachedEvents.push(event)
}
} else {
// No verse expansion needed, use original filter
const matches = this.eventMatchesBookstrFilters(event, filters)
if (matches) {
logger.debug('getCachedBookstrEvents: Event matches filter', {
eventId: event.id.substring(0, 8),
filters: JSON.stringify(filters)
})
cachedEvents.push(event)
}
}
} else if (event.kind === ExtendedKind.PUBLICATION) {
// For 30040s, we check if they match (without verse filtering)
// If they match, we could potentially return them, but for now we only return 30041s
// This is because we want to return the actual content, not just the publication index
checkedCount++
} else {
skippedCount++
}
}
logger.debug('getCachedBookstrEvents: Found matching events', {
// Log sample events to help diagnose why nothing matches
if (sampleEvents.length > 0 && cachedEvents.length === 0) {
logger.warn('getCachedBookstrEvents: No matches found, showing sample cached events', {
filters: JSON.stringify(filters),
sampleEvents,
totalChecked: checkedCount
})
}
logger.info('getCachedBookstrEvents: Cache check complete', {
totalCached: allCached.length,
checked: checkedCount,
skipped: skippedCount,
matched: cachedEvents.length,
filters
filters: JSON.stringify(filters)
})
return cachedEvents
@ -2222,129 +2446,220 @@ class ClientService extends EventTarget { @@ -2222,129 +2446,220 @@ class ClientService extends EventTarget {
}
/**
* Fetch bookstr events from relays
* Query orly and thecitadel relays using publication pubkey
* This is the optimized path when we have a matching publication
* Always queries 30040s first, then fetches 30041s from those publications
*/
private async fetchBookstrEventsFromRelays(filters: {
private async fetchBookstrEventsFromPublicationPubkey(
publicationPubkey: string,
filters: {
type?: string
book?: string
chapter?: number
verse?: string
version?: string
}, options: { skipOrly?: boolean } = {}): Promise<NEvent[]> {
// Strategy:
// 1. First try to find the 30040 publication that matches (it has the bookstr metadata)
// 2. Then fetch all a-tagged 30041 events from that publication
// 3. Also query for 30041 events directly (in case they're not nested)
// Build tag filter for publication (30040) queries
const publicationTagFilter: Filter = {
kinds: [ExtendedKind.PUBLICATION]
}
): Promise<NEvent[]> {
const thecitadelRelay = 'wss://thecitadel.nostr1.com'
const prioritizedFallbackRelays = BIG_RELAY_URLS.filter(url => !BOOKSTR_RELAY_URLS.includes(url))
const prioritizedFallbackRelaysWithCitadel = prioritizedFallbackRelays.includes(thecitadelRelay)
? [thecitadelRelay, ...prioritizedFallbackRelays.filter(url => url !== thecitadelRelay)]
: prioritizedFallbackRelays
logger.info('fetchBookstrEventsFromPublicationPubkey: Querying for 30040 publications by pubkey', {
pubkey: publicationPubkey,
filters: JSON.stringify(filters)
})
// Build tag filter for bookstr queries (30041)
const bookstrTagFilter: Filter = {
kinds: [ExtendedKind.PUBLICATION_CONTENT]
let events: NEvent[] = []
try {
// Query ONLY 30040s (publications/indexes) by pubkey and kind
const publicationFilter: Filter = {
authors: [publicationPubkey],
kinds: [ExtendedKind.PUBLICATION],
limit: 500
}
// Add bookstr tags to both filters
// For publications (30040), we include chapter filter to find the right publication
// For content (30041), we don't filter by chapter/verse here - we fetch all from the publication
const addBookstrTags = (filter: Filter, includeChapter: boolean = true) => {
if (filters.type) {
filter['#type'] = [filters.type.toLowerCase()]
const allPublications = await this.fetchEvents(prioritizedFallbackRelaysWithCitadel, publicationFilter, {
eoseTimeout: 5000,
globalTimeout: 8000
})
logger.info('fetchBookstrEventsFromPublicationPubkey: Fetched 30040 publications', {
total: allPublications.length,
filters: JSON.stringify(filters)
})
// Filter 30040s client-side to find matching book/chapter
const matchingPublications = allPublications.filter(pub => {
return this.eventMatchesBookstrFilters(pub, filters)
})
logger.info('fetchBookstrEventsFromPublicationPubkey: Filtered 30040 publications', {
total: allPublications.length,
matching: matchingPublications.length,
filters: JSON.stringify(filters)
})
// For each matching 30040, fetch its a-tagged 30041 events (content)
for (const publication of matchingPublications) {
const aTags = publication.tags
.filter(tag => tag[0] === 'a' && tag[1])
.map(tag => tag[1])
logger.info('fetchBookstrEventsFromPublicationPubkey: Fetching 30041s from matching publication', {
publicationId: publication.id.substring(0, 8),
aTagCount: aTags.length,
filters: JSON.stringify(filters)
})
// Fetch all a-tagged 30041 events in parallel
const aTagPromises = aTags.map(async (aTag) => {
const parts = aTag.split(':')
if (parts.length < 2) return null
const kind = parseInt(parts[0])
const pubkey = parts[1]
const d = parts[2] || ''
// Only fetch 30041 events (content events)
if (kind !== ExtendedKind.PUBLICATION_CONTENT) {
return null
}
if (filters.book) {
// Normalize book name (slugify)
const normalizedBook = filters.book.toLowerCase().replace(/\s+/g, '-')
filter['#book'] = [normalizedBook]
const aTagFilter: Filter = {
authors: [pubkey],
kinds: [ExtendedKind.PUBLICATION_CONTENT],
limit: 1
}
// Only include chapter in publication filter (to find the right publication)
// Don't include chapter/verse in content filter - we fetch all from the publication
if (includeChapter && filters.chapter !== undefined) {
filter['#chapter'] = [filters.chapter.toString()]
if (d) {
aTagFilter['#d'] = [d]
}
// Never include verse in filters - we fetch all events and filter in BookstrContent
if (filters.version) {
filter['#version'] = [filters.version.toLowerCase()]
try {
const aTagEvents = await this.fetchEvents(prioritizedFallbackRelaysWithCitadel, aTagFilter, {
eoseTimeout: 3000,
globalTimeout: 5000
})
// Filter 30041s client-side by book, type, version, chapter, verse
return aTagEvents.filter(event => {
return this.eventMatchesBookstrFilters(event, filters)
})
} catch (err) {
logger.debug('fetchBookstrEventsFromPublicationPubkey: Error fetching a-tag event', {
aTag,
error: err
})
return []
}
})
const aTagResults = await Promise.all(aTagPromises)
const aTagEvents = aTagResults.flat().filter((e): e is NEvent => e !== null)
logger.info('fetchBookstrEventsFromPublicationPubkey: Fetched 30041s from publication', {
publicationId: publication.id.substring(0, 8),
fetched: aTagEvents.length,
totalSoFar: events.length + aTagEvents.length
})
events.push(...aTagEvents)
}
// Publication filter: include chapter to find the right publication
addBookstrTags(publicationTagFilter, true)
// Content filter: don't include chapter/verse - we'll fetch all from the publication
addBookstrTags(bookstrTagFilter, false)
if (events.length > 0) {
logger.info('fetchBookstrEventsFromPublicationPubkey: Successfully fetched content events', {
publicationCount: matchingPublications.length,
eventCount: events.length,
filters: JSON.stringify(filters)
})
}
} catch (error) {
logger.warn('fetchBookstrEventsFromPublicationPubkey: Error fetching from relays', {
error,
filters: JSON.stringify(filters)
})
}
const orlyRelays = BOOKSTR_RELAY_URLS
// Prioritize thecitadel relay for bookstr events since user confirmed events are there
return events
}
/**
* Fetch bookstr events from relays
* Strategy: Query ONLY 30040s (indexes) by type and kind, filter client-side, then fetch 30041s
*/
private async fetchBookstrEventsFromRelays(filters: {
type?: string
book?: string
chapter?: number
verse?: string
version?: string
}): Promise<NEvent[]> {
const thecitadelRelay = 'wss://thecitadel.nostr1.com'
const fallbackRelays = BIG_RELAY_URLS.filter(url => !BOOKSTR_RELAY_URLS.includes(url))
// Put thecitadel first in fallback list if it's there
const prioritizedFallbackRelays = fallbackRelays.includes(thecitadelRelay)
? [thecitadelRelay, ...fallbackRelays.filter(url => url !== thecitadelRelay)]
: fallbackRelays
logger.info('fetchBookstrEventsFromRelays: Querying with tag filters', {
logger.info('fetchBookstrEventsFromRelays: Querying for 30040 publications (indexes only)', {
filters: JSON.stringify(filters),
publicationTagFilter: JSON.stringify(publicationTagFilter),
bookstrTagFilter: JSON.stringify(bookstrTagFilter),
orlyRelays: orlyRelays.length,
fallbackRelays: fallbackRelays.length
relayCount: prioritizedFallbackRelays.length
})
let events: NEvent[] = []
// Step 1: Try to find the 30040 publication(s) first
// Strategy:
// - Book-level query (no chapter): Find all chapter-level 30040 publications for that book
// - Chapter-level query: Find the specific 30040 publication for that chapter
// - Verse-level query: Find the chapter 30040, fetch all a-tags (filtering happens in BookstrContent)
// Note: Only orly has bookstr tag indexes. For fallback relays, we query by kind only and filter client-side.
try {
// For fallback relays, we can't use bookstr tag filters - query by kind only
const fallbackPublicationFilter: Filter = {
kinds: [ExtendedKind.PUBLICATION]
const bookstrPublisherPubkey = '3e1ad0f3a5d3c12245db7788546c43ade3d97c6e046c594f6017cd6cd4164690'
// Query ONLY 30040s (publications/indexes) with just type and kind filters
const publicationFilter: Filter = {
kinds: [ExtendedKind.PUBLICATION],
authors: [bookstrPublisherPubkey],
limit: 500
}
// Only add #type filter if we have a type
if (filters.type) {
publicationFilter['#type'] = [filters.type.toLowerCase()]
}
const publications = await this.fetchEvents(prioritizedFallbackRelays, fallbackPublicationFilter, {
const publisherPublications = await this.fetchEvents(prioritizedFallbackRelays, publicationFilter, {
eoseTimeout: 5000,
globalTimeout: 8000
})
logger.info('fetchBookstrEventsFromRelays: Found publications (before filtering)', {
count: publications.length,
filters: JSON.stringify(filters),
queryType: filters.chapter === undefined ? 'book-level' : 'chapter-level'
logger.info('fetchBookstrEventsFromRelays: Fetched 30040 publications', {
count: publisherPublications.length,
filters: JSON.stringify(filters)
})
// Filter publications client-side to match bookstr criteria
const matchingPublications = publications.filter(pub => {
// Filter 30040s client-side to find matching book/chapter
// Note: Don't filter by verse for 30040s - verses are in 30041s
const matchingPublications = publisherPublications.filter(pub => {
return this.eventMatchesBookstrFilters(pub, filters)
})
logger.info('fetchBookstrEventsFromRelays: Found matching publications (after filtering)', {
total: publications.length,
logger.info('fetchBookstrEventsFromRelays: Filtered 30040 publications', {
total: publisherPublications.length,
matching: matchingPublications.length,
filters: JSON.stringify(filters)
})
// For each matching publication, fetch ALL a-tagged 30041 events
// We fetch all of them because:
// - For book-level queries, we want all chapters
// - For chapter-level queries, we want all verses in that chapter
// - For verse-level queries, we fetch all verses but filter in BookstrContent
// For each matching 30040, fetch its a-tagged 30041 events (content)
for (const publication of matchingPublications) {
const aTags = publication.tags
.filter(tag => tag[0] === 'a' && tag[1])
.map(tag => tag[1])
logger.debug('fetchBookstrEventsFromRelays: Fetching from publication', {
logger.info('fetchBookstrEventsFromRelays: Fetching 30041s from matching publication', {
publicationId: publication.id.substring(0, 8),
aTagCount: aTags.length
aTagCount: aTags.length,
filters: JSON.stringify(filters)
})
// Fetch all a-tagged events in parallel batches
// Fetch all a-tagged 30041 events in parallel
const aTagPromises = aTags.map(async (aTag) => {
// Parse a tag: "kind:pubkey:d"
const parts = aTag.split(':')
if (parts.length < 2) return null
@ -2354,8 +2669,6 @@ class ClientService extends EventTarget { @@ -2354,8 +2669,6 @@ class ClientService extends EventTarget {
// Only fetch 30041 events (content events)
if (kind !== ExtendedKind.PUBLICATION_CONTENT) {
// If it's a nested 30040 publication, we could recursively fetch from it
// But for now, we'll skip nested publications
return null
}
@ -2374,40 +2687,10 @@ class ClientService extends EventTarget { @@ -2374,40 +2687,10 @@ class ClientService extends EventTarget {
globalTimeout: 5000
})
// For verse-level queries, we still fetch all events but will filter in BookstrContent
// For book/chapter queries, we fetch all matching events
// Only filter by book/type/version here - chapter/verse filtering happens in BookstrContent
const matchingEvents = aTagEvents.filter(event => {
const metadata = this.extractBookMetadataFromEvent(event)
// Must match type if specified
if (filters.type && metadata.type?.toLowerCase() !== filters.type.toLowerCase()) {
return false
}
// Must match book if specified
if (filters.book) {
const normalizedBook = filters.book.toLowerCase().replace(/\s+/g, '-')
const eventBookTags = event.tags
.filter(tag => tag[0] === 'book' && tag[1])
.map(tag => tag[1].toLowerCase())
const hasMatchingBook = eventBookTags.some(eventBook =>
this.bookNamesMatch(eventBook, normalizedBook)
)
if (!hasMatchingBook) return false
}
// Must match version if specified
if (filters.version && metadata.version?.toLowerCase() !== filters.version.toLowerCase()) {
return false
}
// Chapter and verse filtering happens in BookstrContent for display
// We fetch all events from the publication here
return true
// Filter 30041s client-side by book, type, version, chapter, verse
return aTagEvents.filter(event => {
return this.eventMatchesBookstrFilters(event, filters)
})
return matchingEvents
} catch (err) {
logger.debug('fetchBookstrEventsFromRelays: Error fetching a-tag event', {
aTag,
@ -2418,12 +2701,19 @@ class ClientService extends EventTarget { @@ -2418,12 +2701,19 @@ class ClientService extends EventTarget {
})
const aTagResults = await Promise.all(aTagPromises)
const fetchedEvents = aTagResults.flat().filter((e): e is NEvent => e !== null)
events.push(...fetchedEvents)
const aTagEvents = aTagResults.flat().filter((e): e is NEvent => e !== null)
logger.info('fetchBookstrEventsFromRelays: Fetched 30041s from publication', {
publicationId: publication.id.substring(0, 8),
fetched: aTagEvents.length,
totalSoFar: events.length + aTagEvents.length
})
events.push(...aTagEvents)
}
if (events.length > 0) {
logger.info('fetchBookstrEventsFromRelays: Fetched from publications', {
logger.info('fetchBookstrEventsFromRelays: Successfully fetched content events', {
publicationCount: matchingPublications.length,
eventCount: events.length,
filters: JSON.stringify(filters)
@ -2437,91 +2727,113 @@ class ClientService extends EventTarget { @@ -2437,91 +2727,113 @@ class ClientService extends EventTarget {
})
}
// Try orly relay first (supports composite bookstr index)
// Skip if explicitly requested or if it's consistently failing
if (!options.skipOrly && orlyRelays.length > 0) {
try {
events = await this.fetchEvents(orlyRelays, bookstrTagFilter, {
eoseTimeout: 5000, // Shorter timeout since it often fails
globalTimeout: 8000
})
logger.info('fetchBookstrEventsFromRelays: Fetched from orly relay', {
count: events.length,
filters
})
} catch (orlyError) {
logger.warn('fetchBookstrEventsFromRelays: Error querying orly relay (will try fallback)', {
error: orlyError,
filters
})
// Continue to fallback relays
}
} else if (options.skipOrly) {
logger.debug('fetchBookstrEventsFromRelays: Skipping orly relay (background fetch)', { filters })
}
// If no results from publications approach, try fallback relays directly
// If no results from publications approach, try fallback relays for 30040s
// (This is a fallback in case the publication approach didn't work)
// BUT: Only query from the known publisher's pubkey to avoid fetching all events
if (events.length === 0 && prioritizedFallbackRelays.length > 0) {
logger.info('fetchBookstrEventsFromRelays: Trying fallback relays (direct content query)', {
logger.info('fetchBookstrEventsFromRelays: Trying fallback relays (30040 query from known publisher)', {
fallbackRelays: prioritizedFallbackRelays.length,
prioritized: prioritizedFallbackRelays[0] === thecitadelRelay ? 'thecitadel first' : 'normal order'
})
try {
// For fallback relays, we need to fetch all and filter client-side
// (they don't have multi-letter tag indexes)
// Query by kind only - no bookstr tag filters
// Query only 30040s from the known bookstr publisher to avoid fetching all events
// Do NOT include bookstr tags - these relays don't support them
// Query by kind and author only, then filter client-side
const bookstrPublisherPubkey = '3e1ad0f3a5d3c12245db7788546c43ade3d97c6e046c594f6017cd6cd4164690'
const fallbackFilter: Filter = {
kinds: [ExtendedKind.PUBLICATION_CONTENT]
kinds: [ExtendedKind.PUBLICATION],
authors: [bookstrPublisherPubkey],
limit: 500 // Limit to avoid fetching too many
}
const fallbackEvents = await this.fetchEvents(prioritizedFallbackRelays, fallbackFilter, {
const fallbackPublications = await this.fetchEvents(prioritizedFallbackRelays, fallbackFilter, {
eoseTimeout: 5000,
globalTimeout: 10000
})
// Filter client-side (this will check all book tags)
let matchedCount = 0
let rejectedCount = 0
const rejectionReasons: Record<string, number> = {}
const sampleRejections: any[] = []
// Filter client-side to match bookstr criteria
const matchingPublications = fallbackPublications.filter(pub =>
this.eventMatchesBookstrFilters(pub, filters)
)
events = fallbackEvents.filter(event => {
const matches = this.eventMatchesBookstrFilters(event, filters)
if (!matches) {
rejectedCount++
// Sample rejections to understand why (up to 10 samples)
if (sampleRejections.length < 10) {
const metadata = this.extractBookMetadataFromEvent(event)
const reason = this.getFilterRejectionReason(event, filters, metadata)
rejectionReasons[reason] = (rejectionReasons[reason] || 0) + 1
sampleRejections.push({
reason,
eventBook: metadata.book,
eventChapter: metadata.chapter,
eventVerse: metadata.verse,
eventVersion: metadata.version,
hasBookTag: !!metadata.book,
eventId: event.id.substring(0, 8)
// Fetch a-tagged 30041 events from matching publications
for (const publication of matchingPublications) {
const aTags = publication.tags
.filter(tag => tag[0] === 'a' && tag[1])
.map(tag => tag[1])
const aTagPromises = aTags.map(async (aTag) => {
const parts = aTag.split(':')
if (parts.length < 2) return null
const kind = parseInt(parts[0])
const pubkey = parts[1]
const d = parts[2] || ''
if (kind !== ExtendedKind.PUBLICATION_CONTENT) return null
const aTagFilter: Filter = {
authors: [pubkey],
kinds: [ExtendedKind.PUBLICATION_CONTENT],
limit: 1
}
if (d) {
aTagFilter['#d'] = [d]
}
try {
const aTagEvents = await this.fetchEvents(prioritizedFallbackRelays, aTagFilter, {
eoseTimeout: 3000,
globalTimeout: 5000
})
} else {
// Still count reasons even if we don't log details
// Filter client-side for type, book, and version
return aTagEvents.filter(event => {
const metadata = this.extractBookMetadataFromEvent(event)
const reason = this.getFilterRejectionReason(event, filters, metadata)
rejectionReasons[reason] = (rejectionReasons[reason] || 0) + 1
if (filters.type && metadata.type?.toLowerCase() !== filters.type.toLowerCase()) {
return false
}
} else {
matchedCount++
if (filters.book) {
const normalizedBook = filters.book.toLowerCase().replace(/\s+/g, '-')
const eventBookTags = event.tags
.filter(tag => tag[0] === 'book' && tag[1])
.map(tag => tag[1].toLowerCase())
const hasMatchingBook = eventBookTags.some(eventBook =>
this.bookNamesMatch(eventBook, normalizedBook)
)
if (!hasMatchingBook) return false
}
if (filters.version && metadata.version?.toLowerCase() !== filters.version.toLowerCase()) {
return false
}
return true
})
} catch (error) {
logger.debug('fetchBookstrEventsFromRelays: Error fetching a-tag event from fallback', {
aTag,
error
})
return []
}
return matches
})
logger.info('fetchBookstrEventsFromRelays: Fetched from fallback relays', {
totalFetched: fallbackEvents.length,
filtered: events.length,
filters: JSON.stringify(filters),
rejectionReasons: Object.keys(rejectionReasons).length > 0 ? rejectionReasons : undefined,
sampleRejections: sampleRejections.length > 0 ? sampleRejections : undefined
const aTagResults = await Promise.all(aTagPromises)
const aTagEvents = aTagResults.flat().filter((e): e is NEvent => e !== null)
events.push(...aTagEvents)
}
if (events.length > 0) {
logger.info('fetchBookstrEventsFromRelays: Fetched 30041s from fallback 30040s', {
publicationCount: matchingPublications.length,
eventCount: events.length,
filters: JSON.stringify(filters)
})
return events
}
} catch (fallbackError) {
logger.warn('fetchBookstrEventsFromRelays: Error querying fallback relays', {
error: fallbackError,
@ -2535,6 +2847,7 @@ class ClientService extends EventTarget { @@ -2535,6 +2847,7 @@ class ClientService extends EventTarget {
/**
* Check if event matches bookstr filters (for client-side filtering)
* Note: For 30040 publications, we filter by chapter but NOT verse (verses are in 30041 content events)
*/
private eventMatchesBookstrFilters(event: NEvent, filters: {
type?: string
@ -2544,6 +2857,7 @@ class ClientService extends EventTarget { @@ -2544,6 +2857,7 @@ class ClientService extends EventTarget {
version?: string
}): boolean {
const metadata = this.extractBookMetadataFromEvent(event)
const isPublication = event.kind === ExtendedKind.PUBLICATION
if (filters.type && metadata.type?.toLowerCase() !== filters.type.toLowerCase()) {
return false
@ -2561,21 +2875,30 @@ class ClientService extends EventTarget { @@ -2561,21 +2875,30 @@ class ClientService extends EventTarget {
)
if (!hasMatchingBook) {
// Only log debug for first few mismatches to avoid spam
if (eventBookTags.length > 0) {
logger.debug('eventMatchesBookstrFilters: Book mismatch', {
normalizedBook,
eventBookTags,
eventId: event.id.substring(0, 8)
eventId: event.id.substring(0, 8),
matches: eventBookTags.map(tag => ({
tag,
matches: this.bookNamesMatch(tag, normalizedBook)
}))
})
}
return false
}
}
// Chapter filtering applies to both 30040 and 30041
if (filters.chapter !== undefined) {
const eventChapter = parseInt(metadata.chapter || '0')
if (eventChapter !== filters.chapter) {
return false
}
}
if (filters.verse) {
// Verse filtering only applies to 30041 content events (not 30040 publications)
if (filters.verse && !isPublication) {
const eventVerse = metadata.verse
if (!eventVerse) return false
@ -2600,71 +2923,10 @@ class ClientService extends EventTarget { @@ -2600,71 +2923,10 @@ class ClientService extends EventTarget {
return true
}
/**
* Get the reason why an event was rejected by filters (for debugging)
*/
private getFilterRejectionReason(event: NEvent, filters: {
type?: string
book?: string
chapter?: number
verse?: string
version?: string
}, metadata: {
type?: string
book?: string
chapter?: string
verse?: string
version?: string
}): string {
if (filters.type && metadata.type?.toLowerCase() !== filters.type.toLowerCase()) {
return `type mismatch: ${metadata.type} != ${filters.type}`
}
if (filters.book) {
const normalizedBook = filters.book.toLowerCase().replace(/\s+/g, '-')
const eventBookTags = event.tags
.filter(tag => tag[0] === 'book' && tag[1])
.map(tag => tag[1].toLowerCase())
const hasMatchingBook = eventBookTags.some(eventBook =>
this.bookNamesMatch(eventBook, normalizedBook)
)
if (!hasMatchingBook) {
return `book mismatch: [${eventBookTags.join(', ')}] != ${normalizedBook}`
}
}
if (filters.chapter !== undefined) {
const eventChapter = parseInt(metadata.chapter || '0')
if (eventChapter !== filters.chapter) {
return `chapter mismatch: ${eventChapter} != ${filters.chapter}`
}
}
if (filters.verse) {
const eventVerse = metadata.verse
if (!eventVerse) {
return `no verse tag in event`
}
const verseParts = filters.verse.split(/[,\s-]+/).map(v => v.trim()).filter(v => v)
const verseNum = parseInt(eventVerse)
const matches = verseParts.some(part => {
if (part.includes('-')) {
const [start, end] = part.split('-').map(v => parseInt(v.trim()))
return !isNaN(start) && !isNaN(end) && verseNum >= start && verseNum <= end
} else {
const partNum = parseInt(part)
return !isNaN(partNum) && partNum === verseNum
}
})
if (!matches) {
return `verse mismatch: ${verseNum} not in [${verseParts.join(', ')}]`
}
}
if (filters.version && metadata.version?.toLowerCase() !== filters.version.toLowerCase()) {
return `version mismatch: ${metadata.version} != ${filters.version}`
}
return 'unknown'
}
/**
* Match book names with fuzzy matching
* Handles variations like "psalm" vs "psalms", "genesis" vs "the-book-of-genesis", etc.
*/
private bookNamesMatch(book1: string, book2: string): boolean {
const normalized1 = book1.toLowerCase().replace(/\s+/g, '-')
@ -2673,6 +2935,12 @@ class ClientService extends EventTarget { @@ -2673,6 +2935,12 @@ class ClientService extends EventTarget {
// Exact match
if (normalized1 === normalized2) return true
// Remove common suffixes for comparison (e.g., "psalm" vs "psalms")
const removeSuffix = (str: string) => str.replace(/s$/, '').replace(/s-$/, '-')
const base1 = removeSuffix(normalized1)
const base2 = removeSuffix(normalized2)
if (base1 === base2) return true
// One contains the other
if (normalized1.includes(normalized2) || normalized2.includes(normalized1)) return true
@ -2680,7 +2948,9 @@ class ClientService extends EventTarget { @@ -2680,7 +2948,9 @@ class ClientService extends EventTarget {
const parts1 = normalized1.split('-')
const parts2 = normalized2.split('-')
if (parts1.length > 0 && parts2.length > 0) {
if (parts1[parts1.length - 1] === parts2[parts2.length - 1]) return true
const last1 = removeSuffix(parts1[parts1.length - 1])
const last2 = removeSuffix(parts2[parts2.length - 1])
if (last1 === last2) return true
}
return false

Loading…
Cancel
Save