8 changed files with 583 additions and 4 deletions
@ -0,0 +1,71 @@
@@ -0,0 +1,71 @@
|
||||
import { |
||||
BOOKSTR_RELAY_URLS, |
||||
DOCUMENT_RELAY_URLS, |
||||
FAST_READ_RELAY_URLS, |
||||
NIP66_DISCOVERY_RELAY_URLS, |
||||
SEARCHABLE_RELAY_URLS |
||||
} from '@/constants' |
||||
import { mergeRelayUrlLayers } from '@/lib/favorites-feed-relays' |
||||
import { buildComprehensiveRelayList } from '@/lib/relay-list-builder' |
||||
import { normalizeUrl } from '@/lib/url' |
||||
import client from '@/services/client.service' |
||||
import nip66Service from '@/services/nip66.service' |
||||
|
||||
/**
 * Broad NIP-50 / index relays that are not always present in
 * {@link SEARCHABLE_RELAY_URLS}; merged in as an extra static layer.
 */
const CITATION_SEARCH_EXTRA_INDEX_RELAYS = ['wss://relay.nostr.band'] as const

/**
 * Cap on NIP-66 "supports search" relays taken from discovery, so a single
 * citation search does not open hundreds of sockets.
 */
const CITATION_SEARCH_NIP66_NIP50_CAP = 42

/**
 * Final cap applied after all layers are merged. Priority = earlier layers in
 * {@link mergeRelayUrlLayers}, so user-centric relays survive the cut first.
 */
const CITATION_SEARCH_MAX_RELAYS = 56
||||
|
||||
function normList(urls: readonly string[]): string[] { |
||||
return urls.map((u) => normalizeUrl(u) || u.trim()).filter(Boolean) |
||||
} |
||||
|
||||
/** |
||||
* Relay stack for NIP-32 citation (kinds 30–33) NIP-50 search: user NIP-65 / favorites / local / profile, |
||||
* static searchable + document + discovery pools, NIP-66 search-capable relays, then fast read. |
||||
*/ |
||||
export async function buildCitationPickerSearchRelayUrls(): Promise<string[]> { |
||||
const viewer = client.pubkey?.trim() || undefined |
||||
|
||||
let userCentric: string[] = [] |
||||
try { |
||||
userCentric = await buildComprehensiveRelayList({ |
||||
userPubkey: viewer, |
||||
includeUserOwnRelays: !!viewer, |
||||
includeProfileFetchRelays: true, |
||||
includeFastReadRelays: true, |
||||
includeFastWriteRelays: true, |
||||
includeSearchableRelays: true, |
||||
includeLocalRelays: !!viewer, |
||||
includeFavoriteRelays: !!viewer |
||||
}) |
||||
} catch { |
||||
/* continue with static layers */ |
||||
} |
||||
|
||||
const nip66Search = nip66Service |
||||
.getSearchableRelayUrls() |
||||
.map((u) => normalizeUrl(u) || u.trim()) |
||||
.filter(Boolean) |
||||
.slice(0, CITATION_SEARCH_NIP66_NIP50_CAP) |
||||
|
||||
const merged = mergeRelayUrlLayers( |
||||
[ |
||||
userCentric, |
||||
normList(SEARCHABLE_RELAY_URLS), |
||||
normList(DOCUMENT_RELAY_URLS), |
||||
normList(NIP66_DISCOVERY_RELAY_URLS), |
||||
normList(BOOKSTR_RELAY_URLS), |
||||
normList([...CITATION_SEARCH_EXTRA_INDEX_RELAYS]), |
||||
nip66Search, |
||||
normList(FAST_READ_RELAY_URLS) |
||||
], |
||||
[] |
||||
) |
||||
|
||||
return merged.slice(0, CITATION_SEARCH_MAX_RELAYS) |
||||
} |
||||
@ -0,0 +1,65 @@
@@ -0,0 +1,65 @@
|
||||
import type { Event } from 'nostr-tools' |
||||
import { nip19 } from 'nostr-tools' |
||||
|
||||
/** Tag values joined for tags whose first letter is `name` (e.g. `title`, `summary`). */ |
||||
function citationTagLine(ev: Event, name: string): string { |
||||
const parts: string[] = [] |
||||
for (const row of ev.tags ?? []) { |
||||
if (row[0] !== name) continue |
||||
const rest = row.slice(1).filter(Boolean) |
||||
if (rest.length) parts.push(rest.join(' ')) |
||||
} |
||||
return parts.join(' ') |
||||
} |
||||
|
||||
/** |
||||
* Lowercased haystack for NIP-32 citation kinds: body plus common metadata tags |
||||
* (title, summary, author, identifiers, etc.). Used for client-side matching when |
||||
* relays do not index these fields for NIP-50. |
||||
*/ |
||||
export function citationPickerHaystack(ev: Event): string { |
||||
const chunks = [ |
||||
ev.content ?? '', |
||||
citationTagLine(ev, 'title'), |
||||
citationTagLine(ev, 'summary'), |
||||
citationTagLine(ev, 'author'), |
||||
citationTagLine(ev, 'chapter_title'), |
||||
citationTagLine(ev, 'published_in'), |
||||
citationTagLine(ev, 'published_by'), |
||||
citationTagLine(ev, 'published_on'), |
||||
citationTagLine(ev, 'accessed_on'), |
||||
citationTagLine(ev, 'location'), |
||||
citationTagLine(ev, 'u'), |
||||
citationTagLine(ev, 'doi'), |
||||
citationTagLine(ev, 'c'), |
||||
citationTagLine(ev, 'llm'), |
||||
citationTagLine(ev, 'page_range'), |
||||
citationTagLine(ev, 'editor'), |
||||
citationTagLine(ev, 'version') |
||||
] |
||||
return chunks.join('\n').toLowerCase() |
||||
} |
||||
|
||||
export function citationPickerMatchesQuery(ev: Event, query: string): boolean { |
||||
const q = query.trim().toLowerCase() |
||||
if (!q) return true |
||||
const h = citationPickerHaystack(ev) |
||||
if (h.includes(q)) return true |
||||
const words = q.split(/\s+/).filter((w) => w.length > 1) |
||||
if (words.length >= 2 && words.every((w) => h.includes(w))) return true |
||||
return false |
||||
} |
||||
|
||||
/** Hex id, `note1…`, or `nevent1…` for direct citation lookup. */ |
||||
export function tryParseCitationEventIdFromQuery(query: string): string | null { |
||||
const t = query.trim() |
||||
if (/^[0-9a-f]{64}$/i.test(t)) return t.toLowerCase() |
||||
try { |
||||
const d = nip19.decode(t) |
||||
if (d.type === 'note') return d.data as string |
||||
if (d.type === 'nevent') return d.data.id |
||||
} catch { |
||||
/* ignore */ |
||||
} |
||||
return null |
||||
} |
||||
Loading…
Reference in new issue