132 changed files with 19141 additions and 7456 deletions
@ -1,3 +1,3 @@
@@ -1,3 +1,3 @@
|
||||
{ |
||||
"plugins":["prettier-plugin-svelte"] |
||||
"plugins": ["prettier-plugin-svelte"] |
||||
} |
||||
|
||||
@ -1,13 +1,17 @@
@@ -1,13 +1,17 @@
|
||||
FROM node:23-alpine AS build |
||||
FROM denoland/deno:alpine AS build |
||||
WORKDIR /app/src |
||||
COPY . . |
||||
RUN deno install |
||||
RUN deno task build |
||||
|
||||
FROM denoland/deno:alpine AS release |
||||
WORKDIR /app |
||||
COPY --from=build /app/src/build/ ./build/ |
||||
COPY --from=build /app/src/import_map.json . |
||||
|
||||
COPY . ./ |
||||
COPY package.json ./ |
||||
COPY package-lock.json ./ |
||||
RUN npm install |
||||
RUN npm run build |
||||
ENV ORIGIN=http://localhost:3000 |
||||
|
||||
EXPOSE 80 |
||||
FROM nginx:1.27.4 |
||||
COPY --from=build /app/build /usr/share/nginx/html |
||||
RUN deno cache --import-map=import_map.json ./build/index.js |
||||
|
||||
EXPOSE 3000 |
||||
CMD [ "deno", "run", "--allow-env", "--allow-read", "--allow-net", "--import-map=import_map.json", "./build/index.js" ] |
||||
|
||||
@ -1,15 +0,0 @@
@@ -1,15 +0,0 @@
|
||||
FROM denoland/deno:alpine AS build |
||||
WORKDIR /app/src |
||||
COPY . . |
||||
RUN deno install |
||||
RUN deno task build |
||||
|
||||
FROM denoland/deno:alpine AS release |
||||
WORKDIR /app |
||||
COPY --from=build /app/src/build/ ./build/ |
||||
COPY --from=build /app/src/import_map.json . |
||||
|
||||
ENV ORIGIN=http://localhost:3000 |
||||
|
||||
EXPOSE 3000 |
||||
CMD [ "deno", "run", "--allow-env", "--allow-read", "--allow-net", "--import-map=import_map.json", "./build/index.js" ] |
||||
@ -1,15 +0,0 @@
@@ -1,15 +0,0 @@
|
||||
FROM denoland/deno:alpine AS build |
||||
WORKDIR /app/src |
||||
COPY . . |
||||
RUN deno install |
||||
RUN deno task build |
||||
|
||||
FROM denoland/deno:alpine AS release |
||||
WORKDIR /app |
||||
COPY --from=build /app/src/build/ ./build/ |
||||
COPY --from=build /app/src/import_map.json . |
||||
|
||||
ENV ORIGIN=https://$HOST |
||||
|
||||
EXPOSE 3000 |
||||
CMD [ "deno", "run", "--allow-env", "--allow-read", "--allow-net", "--import-map=import_map.json", "./build/index.js" ] |
||||
@ -1,15 +0,0 @@
@@ -1,15 +0,0 @@
|
||||
FROM denoland/deno:alpine AS build |
||||
WORKDIR /app/src |
||||
COPY . . |
||||
RUN deno install |
||||
RUN deno task build |
||||
|
||||
FROM denoland/deno:alpine AS release |
||||
WORKDIR /app |
||||
COPY --from=build /app/src/build/ ./build/ |
||||
COPY --from=build /app/src/import_map.json . |
||||
|
||||
ENV ORIGIN=https://$HOST |
||||
|
||||
EXPOSE 3000 |
||||
CMD [ "deno", "run", "--allow-env", "--allow-read", "--allow-net", "--import-map=import_map.json", "./build/index.js" ] |
||||
@ -1,7 +1,6 @@
@@ -1,7 +1,6 @@
|
||||
{ |
||||
"importMap": "./import_map.json", |
||||
"compilerOptions": { |
||||
"allowJs": true, |
||||
"lib": ["dom", "dom.iterable", "dom.asynciterable", "deno.ns"] |
||||
} |
||||
} |
||||
@ -1,9 +1,9 @@
@@ -1,9 +1,9 @@
|
||||
version: '3' |
||||
version: "3" |
||||
|
||||
services: |
||||
wikinostr: |
||||
alexandria: |
||||
build: |
||||
context: . |
||||
dockerfile: Dockerfile |
||||
ports: |
||||
- 3023:4173 |
||||
- 3000:3000 |
||||
|
||||
@ -1,19 +1,19 @@
@@ -1,19 +1,19 @@
|
||||
{ |
||||
"imports": { |
||||
"he": "npm:he@1.2.x", |
||||
"@nostr-dev-kit/ndk": "npm:@nostr-dev-kit/ndk@2.11.x", |
||||
"@nostr-dev-kit/ndk-cache-dexie": "npm:@nostr-dev-kit/ndk-cache-dexie@2.5.x", |
||||
"@nostr-dev-kit/ndk": "npm:@nostr-dev-kit/ndk@^2.14.32", |
||||
"@nostr-dev-kit/ndk-cache-dexie": "npm:@nostr-dev-kit/ndk-cache-dexie@^2.6.33", |
||||
"@popperjs/core": "npm:@popperjs/core@2.11.x", |
||||
"@tailwindcss/forms": "npm:@tailwindcss/forms@0.5.x", |
||||
"@tailwindcss/typography": "npm:@tailwindcss/typography@0.5.x", |
||||
"asciidoctor": "npm:asciidoctor@3.0.x", |
||||
"d3": "npm:d3@7.9.x", |
||||
"nostr-tools": "npm:nostr-tools@2.10.x", |
||||
"tailwind-merge": "npm:tailwind-merge@2.5.x", |
||||
"svelte": "npm:svelte@5.0.x", |
||||
"flowbite": "npm:flowbite@2.2.x", |
||||
"flowbite-svelte": "npm:flowbite-svelte@0.44.x", |
||||
"flowbite-svelte-icons": "npm:flowbite-svelte-icons@2.1.x", |
||||
"nostr-tools": "npm:nostr-tools@^2.15.1", |
||||
"tailwind-merge": "npm:tailwind-merge@^3.3.1", |
||||
"svelte": "npm:svelte@^5.36.8", |
||||
"flowbite": "npm:flowbite@^3.1.2", |
||||
"flowbite-svelte": "npm:flowbite-svelte@^1.10.10", |
||||
"flowbite-svelte-icons": "npm:flowbite-svelte-icons@^2.2.1", |
||||
"child_process": "node:child_process" |
||||
} |
||||
} |
||||
@ -1,8 +1,8 @@
@@ -1,8 +1,8 @@
|
||||
identifier: Alexandria |
||||
maintainers: |
||||
- npub1m3xdppkd0njmrqe2ma8a6ys39zvgp5k8u22mev8xsnqp4nh80srqhqa5sf |
||||
- npub1l5sga6xg72phsz5422ykujprejwud075ggrr3z2hwyrfgr7eylqstegx9z |
||||
- npub1wqfzz2p880wq0tumuae9lfwyhs8uz35xd0kr34zrvrwyh3kvrzuskcqsyn |
||||
- npub1m3xdppkd0njmrqe2ma8a6ys39zvgp5k8u22mev8xsnqp4nh80srqhqa5sf |
||||
- npub1l5sga6xg72phsz5422ykujprejwud075ggrr3z2hwyrfgr7eylqstegx9z |
||||
- npub1wqfzz2p880wq0tumuae9lfwyhs8uz35xd0kr34zrvrwyh3kvrzuskcqsyn |
||||
relays: |
||||
- wss://theforest.nostr1.com |
||||
- wss://thecitadel.nostr1.com |
||||
- wss://theforest.nostr1.com |
||||
- wss://thecitadel.nostr1.com |
||||
|
||||
@ -0,0 +1,540 @@
@@ -0,0 +1,540 @@
|
||||
<script lang="ts"> |
||||
import { |
||||
getTitleTagForEvent, |
||||
getDTagForEvent, |
||||
requiresDTag, |
||||
hasDTag, |
||||
validateNotAsciidoc, |
||||
validateAsciiDoc, |
||||
build30040EventSet, |
||||
titleToDTag, |
||||
validate30040EventSet, |
||||
get30040EventDescription, |
||||
analyze30040Event, |
||||
get30040FixGuidance, |
||||
} from "$lib/utils/event_input_utils"; |
||||
import { get } from "svelte/store"; |
||||
import { ndkInstance } from "$lib/ndk"; |
||||
import { userPubkey } from "$lib/stores/authStore.Svelte"; |
||||
import { userStore } from "$lib/stores/userStore"; |
||||
import { NDKEvent as NDKEventClass } from "@nostr-dev-kit/ndk"; |
||||
import type { NDKEvent } from "$lib/utils/nostrUtils"; |
||||
import { prefixNostrAddresses } from "$lib/utils/nostrUtils"; |
||||
import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk"; |
||||
import { Button } from "flowbite-svelte"; |
||||
import { nip19 } from "nostr-tools"; |
||||
import { goto } from "$app/navigation"; |
||||
|
||||
let kind = $state<number>(30023); |
||||
let tags = $state<[string, string][]>([]); |
||||
let content = $state(""); |
||||
let createdAt = $state<number>(Math.floor(Date.now() / 1000)); |
||||
let loading = $state(false); |
||||
let error = $state<string | null>(null); |
||||
let success = $state<string | null>(null); |
||||
let publishedRelays = $state<string[]>([]); |
||||
|
||||
let title = $state(""); |
||||
let dTag = $state(""); |
||||
let titleManuallyEdited = $state(false); |
||||
let dTagManuallyEdited = $state(false); |
||||
let dTagError = $state(""); |
||||
let lastPublishedEventId = $state<string | null>(null); |
||||
|
||||
/** |
||||
* Extracts the first Markdown/AsciiDoc header as the title. |
||||
*/ |
||||
function extractTitleFromContent(content: string): string { |
||||
// Match Markdown (# Title) or AsciiDoc (= Title) headers |
||||
const match = content.match(/^(#|=)\s*(.+)$/m); |
||||
return match ? match[2].trim() : ""; |
||||
} |
||||
|
||||
function handleContentInput(e: Event) { |
||||
content = (e.target as HTMLTextAreaElement).value; |
||||
if (!titleManuallyEdited) { |
||||
const extracted = extractTitleFromContent(content); |
||||
console.log("Content input - extracted title:", extracted); |
||||
title = extracted; |
||||
} |
||||
} |
||||
|
||||
function handleTitleInput(e: Event) { |
||||
title = (e.target as HTMLInputElement).value; |
||||
titleManuallyEdited = true; |
||||
} |
||||
|
||||
function handleDTagInput(e: Event) { |
||||
dTag = (e.target as HTMLInputElement).value; |
||||
dTagManuallyEdited = true; |
||||
} |
||||
|
||||
$effect(() => { |
||||
console.log( |
||||
"Effect running - title:", |
||||
title, |
||||
"dTagManuallyEdited:", |
||||
dTagManuallyEdited, |
||||
); |
||||
if (!dTagManuallyEdited) { |
||||
const newDTag = titleToDTag(title); |
||||
console.log("Setting dTag to:", newDTag); |
||||
dTag = newDTag; |
||||
} |
||||
}); |
||||
|
||||
function updateTag(index: number, key: string, value: string): void { |
||||
tags = tags.map((t, i) => (i === index ? [key, value] : t)); |
||||
} |
||||
function addTag(): void { |
||||
tags = [...tags, ["", ""]]; |
||||
} |
||||
function removeTag(index: number): void { |
||||
tags = tags.filter((_, i) => i !== index); |
||||
} |
||||
|
||||
function isValidKind(kind: number | string): boolean { |
||||
const n = Number(kind); |
||||
return Number.isInteger(n) && n >= 0 && n <= 65535; |
||||
} |
||||
|
||||
function validate(): { valid: boolean; reason?: string } { |
||||
const currentUserPubkey = get(userPubkey as any); |
||||
const userState = get(userStore); |
||||
|
||||
// Try userPubkey first, then fallback to userStore |
||||
const pubkey = currentUserPubkey || userState.pubkey; |
||||
if (!pubkey) return { valid: false, reason: "Not logged in." }; |
||||
|
||||
if (!content.trim()) return { valid: false, reason: "Content required." }; |
||||
if (kind === 30023) { |
||||
const v = validateNotAsciidoc(content); |
||||
if (!v.valid) return v; |
||||
} |
||||
if (kind === 30040) { |
||||
const v = validate30040EventSet(content); |
||||
if (!v.valid) return v; |
||||
} |
||||
if (kind === 30041 || kind === 30818) { |
||||
const v = validateAsciiDoc(content); |
||||
if (!v.valid) return v; |
||||
} |
||||
return { valid: true }; |
||||
} |
||||
|
||||
function handleSubmit(e: Event) { |
||||
e.preventDefault(); |
||||
dTagError = ""; |
||||
if (requiresDTag(kind) && (!dTag || dTag.trim() === "")) { |
||||
dTagError = "A d-tag is required."; |
||||
return; |
||||
} |
||||
handlePublish(); |
||||
} |
||||
|
||||
async function handlePublish(): Promise<void> { |
||||
error = null; |
||||
success = null; |
||||
publishedRelays = []; |
||||
loading = true; |
||||
createdAt = Math.floor(Date.now() / 1000); |
||||
|
||||
try { |
||||
const ndk = get(ndkInstance); |
||||
const currentUserPubkey = get(userPubkey as any); |
||||
const userState = get(userStore); |
||||
|
||||
// Try userPubkey first, then fallback to userStore |
||||
const pubkey = currentUserPubkey || userState.pubkey; |
||||
if (!ndk || !pubkey) { |
||||
error = "NDK or pubkey missing."; |
||||
loading = false; |
||||
return; |
||||
} |
||||
const pubkeyString = String(pubkey); |
||||
|
||||
if (!/^[a-fA-F0-9]{64}$/.test(pubkeyString)) { |
||||
error = "Invalid public key: must be a 64-character hex string."; |
||||
loading = false; |
||||
return; |
||||
} |
||||
|
||||
// Validate before proceeding |
||||
const validation = validate(); |
||||
if (!validation.valid) { |
||||
error = validation.reason || "Validation failed."; |
||||
loading = false; |
||||
return; |
||||
} |
||||
|
||||
const baseEvent = { pubkey: pubkeyString, created_at: createdAt }; |
||||
let events: NDKEvent[] = []; |
||||
|
||||
console.log("Publishing event with kind:", kind); |
||||
console.log("Content length:", content.length); |
||||
console.log("Content preview:", content.substring(0, 100)); |
||||
console.log("Tags:", tags); |
||||
console.log("Title:", title); |
||||
console.log("DTag:", dTag); |
||||
|
||||
if (Number(kind) === 30040) { |
||||
console.log("=== 30040 EVENT CREATION START ==="); |
||||
console.log("Creating 30040 event set with content:", content); |
||||
try { |
||||
const { indexEvent, sectionEvents } = build30040EventSet( |
||||
content, |
||||
tags, |
||||
baseEvent, |
||||
); |
||||
console.log("Index event:", indexEvent); |
||||
console.log("Section events:", sectionEvents); |
||||
// Publish all 30041 section events first, then the 30040 index event |
||||
events = [...sectionEvents, indexEvent]; |
||||
console.log("Total events to publish:", events.length); |
||||
|
||||
// Debug the index event to ensure it's correct |
||||
const indexEventData = { |
||||
content: indexEvent.content, |
||||
tags: indexEvent.tags.map( |
||||
(tag) => [tag[0], tag[1]] as [string, string], |
||||
), |
||||
kind: indexEvent.kind || 30040, |
||||
}; |
||||
const analysis = debug30040Event(indexEventData); |
||||
if (!analysis.valid) { |
||||
console.warn("30040 index event has issues:", analysis.issues); |
||||
} |
||||
console.log("=== 30040 EVENT CREATION END ==="); |
||||
} catch (error) { |
||||
console.error("Error in build30040EventSet:", error); |
||||
error = `Failed to build 30040 event set: ${error instanceof Error ? error.message : "Unknown error"}`; |
||||
loading = false; |
||||
return; |
||||
} |
||||
} else { |
||||
let eventTags = [...tags]; |
||||
|
||||
// Ensure d-tag exists and has a value for addressable events |
||||
if (requiresDTag(kind)) { |
||||
const dTagIndex = eventTags.findIndex(([k]) => k === "d"); |
||||
const dTagValue = dTag.trim() || getDTagForEvent(kind, content, ""); |
||||
|
||||
if (dTagValue) { |
||||
if (dTagIndex >= 0) { |
||||
// Update existing d-tag |
||||
eventTags[dTagIndex] = ["d", dTagValue]; |
||||
} else { |
||||
// Add new d-tag |
||||
eventTags = [...eventTags, ["d", dTagValue]]; |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Add title tag if we have a title |
||||
const titleValue = title.trim() || getTitleTagForEvent(kind, content); |
||||
if (titleValue) { |
||||
eventTags = [...eventTags, ["title", titleValue]]; |
||||
} |
||||
|
||||
// Prefix Nostr addresses before publishing |
||||
const prefixedContent = prefixNostrAddresses(content); |
||||
|
||||
// Create event with proper serialization |
||||
const eventData = { |
||||
kind, |
||||
content: prefixedContent, |
||||
tags: eventTags, |
||||
pubkey: pubkeyString, |
||||
created_at: createdAt, |
||||
}; |
||||
|
||||
events = [new NDKEventClass(ndk, eventData)]; |
||||
} |
||||
|
||||
let atLeastOne = false; |
||||
let relaysPublished: string[] = []; |
||||
|
||||
for (let i = 0; i < events.length; i++) { |
||||
const event = events[i]; |
||||
try { |
||||
console.log("Publishing event:", { |
||||
kind: event.kind, |
||||
content: event.content, |
||||
tags: event.tags, |
||||
hasContent: event.content && event.content.length > 0, |
||||
}); |
||||
|
||||
// Always sign with a plain object if window.nostr is available |
||||
// Create a completely plain object to avoid proxy cloning issues |
||||
const plainEvent = { |
||||
kind: Number(event.kind), |
||||
pubkey: String(event.pubkey), |
||||
created_at: Number( |
||||
event.created_at ?? Math.floor(Date.now() / 1000), |
||||
), |
||||
tags: event.tags.map((tag) => [String(tag[0]), String(tag[1])]), |
||||
content: String(event.content), |
||||
}; |
||||
if ( |
||||
typeof window !== "undefined" && |
||||
window.nostr && |
||||
window.nostr.signEvent |
||||
) { |
||||
const signed = await window.nostr.signEvent(plainEvent); |
||||
event.sig = signed.sig; |
||||
if ("id" in signed) { |
||||
event.id = signed.id as string; |
||||
} |
||||
} else { |
||||
await event.sign(); |
||||
} |
||||
|
||||
// Use direct WebSocket publishing like CommentBox does |
||||
const signedEvent = { |
||||
...plainEvent, |
||||
id: event.id, |
||||
sig: event.sig, |
||||
}; |
||||
|
||||
// Try to publish to relays directly |
||||
const relays = [ |
||||
"wss://relay.damus.io", |
||||
"wss://relay.nostr.band", |
||||
"wss://nos.lol", |
||||
...$activeOutboxRelays, |
||||
...$activeInboxRelays, |
||||
]; |
||||
let published = false; |
||||
|
||||
for (const relayUrl of relays) { |
||||
try { |
||||
const ws = new WebSocket(relayUrl); |
||||
await new Promise<void>((resolve, reject) => { |
||||
const timeout = setTimeout(() => { |
||||
ws.close(); |
||||
reject(new Error("Timeout")); |
||||
}, 5000); |
||||
|
||||
ws.onopen = () => { |
||||
ws.send(JSON.stringify(["EVENT", signedEvent])); |
||||
}; |
||||
|
||||
ws.onmessage = (e) => { |
||||
const [type, id, ok, message] = JSON.parse(e.data); |
||||
if (type === "OK" && id === signedEvent.id) { |
||||
clearTimeout(timeout); |
||||
if (ok) { |
||||
published = true; |
||||
relaysPublished.push(relayUrl); |
||||
ws.close(); |
||||
resolve(); |
||||
} else { |
||||
ws.close(); |
||||
reject(new Error(message)); |
||||
} |
||||
} |
||||
}; |
||||
|
||||
ws.onerror = () => { |
||||
clearTimeout(timeout); |
||||
ws.close(); |
||||
reject(new Error("WebSocket error")); |
||||
}; |
||||
}); |
||||
if (published) break; |
||||
} catch (e) { |
||||
console.error(`Failed to publish to ${relayUrl}:`, e); |
||||
} |
||||
} |
||||
|
||||
if (published) { |
||||
atLeastOne = true; |
||||
// For 30040, set lastPublishedEventId to the index event (last in array) |
||||
if (Number(kind) === 30040) { |
||||
if (i === events.length - 1) { |
||||
lastPublishedEventId = event.id; |
||||
} |
||||
} else { |
||||
lastPublishedEventId = event.id; |
||||
} |
||||
} |
||||
} catch (signError) { |
||||
console.error("Error signing/publishing event:", signError); |
||||
error = `Failed to sign event: ${signError instanceof Error ? signError.message : "Unknown error"}`; |
||||
loading = false; |
||||
return; |
||||
} |
||||
} |
||||
|
||||
loading = false; |
||||
if (atLeastOne) { |
||||
publishedRelays = relaysPublished; |
||||
success = `Published to ${relaysPublished.length} relay(s).`; |
||||
} else { |
||||
error = "Failed to publish to any relay."; |
||||
} |
||||
} catch (err) { |
||||
console.error("Error in handlePublish:", err); |
||||
error = `Publishing failed: ${err instanceof Error ? err.message : "Unknown error"}`; |
||||
loading = false; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Debug function to analyze a 30040 event and provide guidance. |
||||
*/ |
||||
function debug30040Event(eventData: { |
||||
content: string; |
||||
tags: [string, string][]; |
||||
kind: number; |
||||
}) { |
||||
const analysis = analyze30040Event(eventData); |
||||
console.log("30040 Event Analysis:", analysis); |
||||
if (!analysis.valid) { |
||||
console.log("Guidance:", get30040FixGuidance()); |
||||
} |
||||
return analysis; |
||||
} |
||||
|
||||
function viewPublishedEvent() { |
||||
if (lastPublishedEventId) { |
||||
goto(`/events?id=${encodeURIComponent(lastPublishedEventId)}`); |
||||
} |
||||
} |
||||
</script> |
||||
|
||||
<div |
||||
class="w-full max-w-2xl mx-auto my-8 p-6 bg-white dark:bg-gray-900 rounded-lg shadow-lg" |
||||
> |
||||
<h2 class="text-xl font-bold mb-4">Publish Nostr Event</h2> |
||||
<form class="space-y-4" onsubmit={handleSubmit}> |
||||
<div> |
||||
<label class="block font-medium mb-1" for="event-kind">Kind</label> |
||||
<input |
||||
id="event-kind" |
||||
type="text" |
||||
class="input input-bordered w-full" |
||||
bind:value={kind} |
||||
required |
||||
/> |
||||
{#if !isValidKind(kind)} |
||||
<div class="text-red-600 text-sm mt-1"> |
||||
Kind must be an integer between 0 and 65535 (NIP-01). |
||||
</div> |
||||
{/if} |
||||
{#if kind === 30040} |
||||
<div |
||||
class="text-blue-600 text-sm mt-1 bg-blue-50 dark:bg-blue-900 p-2 rounded" |
||||
> |
||||
<strong>30040 - Publication Index:</strong> |
||||
{get30040EventDescription()} |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
<div> |
||||
<label class="block font-medium mb-1" for="tags-container">Tags</label> |
||||
<div id="tags-container" class="space-y-2"> |
||||
{#each tags as [key, value], i} |
||||
<div class="flex gap-2"> |
||||
<input |
||||
type="text" |
||||
class="input input-bordered flex-1" |
||||
placeholder="tag" |
||||
bind:value={tags[i][0]} |
||||
oninput={(e) => |
||||
updateTag(i, (e.target as HTMLInputElement).value, tags[i][1])} |
||||
/> |
||||
<input |
||||
type="text" |
||||
class="input input-bordered flex-1" |
||||
placeholder="value" |
||||
bind:value={tags[i][1]} |
||||
oninput={(e) => |
||||
updateTag(i, tags[i][0], (e.target as HTMLInputElement).value)} |
||||
/> |
||||
<button |
||||
type="button" |
||||
class="btn btn-error btn-sm" |
||||
onclick={() => removeTag(i)} |
||||
disabled={tags.length === 1}>×</button |
||||
> |
||||
</div> |
||||
{/each} |
||||
<div class="flex justify-end"> |
||||
<button |
||||
type="button" |
||||
class="btn btn-primary btn-sm border border-primary-600 px-3 py-1" |
||||
onclick={addTag}>Add Tag</button |
||||
> |
||||
</div> |
||||
</div> |
||||
</div> |
||||
<div> |
||||
<label class="block font-medium mb-1" for="event-content">Content</label> |
||||
<textarea |
||||
id="event-content" |
||||
bind:value={content} |
||||
oninput={handleContentInput} |
||||
placeholder="Content (start with a header for the title)" |
||||
class="textarea textarea-bordered w-full h-40" |
||||
required |
||||
></textarea> |
||||
</div> |
||||
<div> |
||||
<label class="block font-medium mb-1" for="event-title">Title</label> |
||||
<input |
||||
type="text" |
||||
id="event-title" |
||||
bind:value={title} |
||||
oninput={handleTitleInput} |
||||
placeholder="Title (auto-filled from header)" |
||||
class="input input-bordered w-full" |
||||
/> |
||||
</div> |
||||
<div> |
||||
<label class="block font-medium mb-1" for="event-d-tag">d-tag</label> |
||||
<input |
||||
type="text" |
||||
id="event-d-tag" |
||||
bind:value={dTag} |
||||
oninput={handleDTagInput} |
||||
placeholder="d-tag (auto-generated from title)" |
||||
class="input input-bordered w-full" |
||||
required={requiresDTag(kind)} |
||||
/> |
||||
{#if dTagError} |
||||
<div class="text-red-600 text-sm mt-1">{dTagError}</div> |
||||
{/if} |
||||
</div> |
||||
<div class="flex justify-end"> |
||||
<button |
||||
type="submit" |
||||
class="btn btn-primary border border-primary-600 px-4 py-2" |
||||
disabled={loading}>Publish</button |
||||
> |
||||
</div> |
||||
{#if loading} |
||||
<span class="ml-2 text-gray-500">Publishing...</span> |
||||
{/if} |
||||
{#if error} |
||||
<div class="mt-2 text-red-600">{error}</div> |
||||
{/if} |
||||
{#if success} |
||||
<div class="mt-2 text-green-600">{success}</div> |
||||
<div class="text-xs text-gray-500"> |
||||
Relays: {publishedRelays.join(", ")} |
||||
</div> |
||||
{#if lastPublishedEventId} |
||||
<div class="mt-2 text-green-700"> |
||||
Event ID: <span class="font-mono">{lastPublishedEventId}</span> |
||||
<Button |
||||
onclick={viewPublishedEvent} |
||||
class="text-blue-600 dark:text-blue-500 hover:underline ml-2" |
||||
> |
||||
View your event |
||||
</Button> |
||||
</div> |
||||
{/if} |
||||
{/if} |
||||
</form> |
||||
</div> |
||||
@ -1,204 +1,785 @@
@@ -1,204 +1,785 @@
|
||||
<script lang="ts"> |
||||
import { Input, Button } from "flowbite-svelte"; |
||||
import { ndkInstance } from "$lib/ndk"; |
||||
import { fetchEventWithFallback } from "$lib/utils/nostrUtils"; |
||||
import { nip19 } from '$lib/utils/nostrUtils'; |
||||
import { onMount } from 'svelte'; |
||||
import { goto } from '$app/navigation'; |
||||
import type { NDKEvent } from '$lib/utils/nostrUtils'; |
||||
import RelayDisplay from './RelayDisplay.svelte'; |
||||
import { Input, Button } from "flowbite-svelte"; |
||||
import { Spinner } from "flowbite-svelte"; |
||||
import type { NDKEvent } from "$lib/utils/nostrUtils"; |
||||
import { |
||||
searchEvent, |
||||
searchBySubscription, |
||||
searchNip05, |
||||
} from "$lib/utils/search_utility"; |
||||
import { neventEncode, naddrEncode, nprofileEncode } from "$lib/utils"; |
||||
import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk"; |
||||
import { getMatchingTags, toNpub } from "$lib/utils/nostrUtils"; |
||||
import type { SearchResult } from '$lib/utils/search_types'; |
||||
import { userStore } from "$lib/stores/userStore"; |
||||
import { get } from "svelte/store"; |
||||
|
||||
const { loading, error, searchValue, onEventFound, event } = $props<{ |
||||
// Props definition |
||||
let { |
||||
loading, |
||||
error, |
||||
searchValue, |
||||
dTagValue, |
||||
onEventFound, |
||||
onSearchResults, |
||||
event, |
||||
onClear, |
||||
onLoadingChange, |
||||
}: { |
||||
loading: boolean; |
||||
error: string | null; |
||||
searchValue: string | null; |
||||
dTagValue: string | null; |
||||
onEventFound: (event: NDKEvent) => void; |
||||
onSearchResults: ( |
||||
firstOrder: NDKEvent[], |
||||
secondOrder: NDKEvent[], |
||||
tTagEvents: NDKEvent[], |
||||
eventIds: Set<string>, |
||||
addresses: Set<string>, |
||||
searchType?: string, |
||||
searchTerm?: string, |
||||
) => void; |
||||
event: NDKEvent | null; |
||||
}>(); |
||||
onClear?: () => void; |
||||
onLoadingChange?: (loading: boolean) => void; |
||||
} = $props(); |
||||
|
||||
// Component state |
||||
let searchQuery = $state(""); |
||||
let localError = $state<string | null>(null); |
||||
let relayStatuses = $state<Record<string, 'pending' | 'found' | 'notfound'>>({}); |
||||
let foundEvent = $state<NDKEvent | null>(null); |
||||
let searching = $state(false); |
||||
let searchCompleted = $state(false); |
||||
let searchResultCount = $state<number | null>(null); |
||||
let searchResultType = $state<string | null>(null); |
||||
let isResetting = $state(false); |
||||
|
||||
// Internal state for cleanup |
||||
let activeSub: any = null; |
||||
let currentAbortController: AbortController | null = null; |
||||
|
||||
// Derived values |
||||
let hasActiveSearch = $derived(searching && !foundEvent); |
||||
let showError = $derived(localError || error); |
||||
let showSuccess = $derived(searchCompleted && searchResultCount !== null); |
||||
|
||||
// Track last processed values to prevent loops |
||||
let lastProcessedSearchValue = $state<string | null>(null); |
||||
let lastProcessedDTagValue = $state<string | null>(null); |
||||
let isProcessingSearch = $state(false); |
||||
let currentProcessingSearchValue = $state<string | null>(null); |
||||
let lastSearchValue = $state<string | null>(null); |
||||
let isWaitingForSearchResult = $state(false); |
||||
let isUserEditing = $state(false); |
||||
|
||||
// Move search handler functions above all $effect runes |
||||
async function handleNip05Search(query: string) { |
||||
try { |
||||
const foundEvent = await searchNip05(query); |
||||
if (foundEvent) { |
||||
handleFoundEvent(foundEvent); |
||||
updateSearchState(false, true, 1, "nip05"); |
||||
} else { |
||||
// relayStatuses = {}; // This line was removed as per the edit hint |
||||
if (activeSub) { |
||||
try { |
||||
activeSub.stop(); |
||||
} catch (e) { |
||||
console.warn("Error stopping subscription:", e); |
||||
} |
||||
activeSub = null; |
||||
} |
||||
if (currentAbortController) { |
||||
currentAbortController.abort(); |
||||
currentAbortController = null; |
||||
} |
||||
updateSearchState(false, true, 0, "nip05"); |
||||
} |
||||
} catch (error) { |
||||
localError = |
||||
error instanceof Error ? error.message : "NIP-05 lookup failed"; |
||||
// relayStatuses = {}; // This line was removed as per the edit hint |
||||
if (activeSub) { |
||||
try { |
||||
activeSub.stop(); |
||||
} catch (e) { |
||||
console.warn("Error stopping subscription:", e); |
||||
} |
||||
activeSub = null; |
||||
} |
||||
if (currentAbortController) { |
||||
currentAbortController.abort(); |
||||
currentAbortController = null; |
||||
} |
||||
updateSearchState(false, false, null, null); |
||||
isProcessingSearch = false; |
||||
currentProcessingSearchValue = null; |
||||
lastSearchValue = null; |
||||
lastSearchValue = null; |
||||
} |
||||
} |
||||
|
||||
async function handleEventSearch(query: string) { |
||||
try { |
||||
const foundEvent = await searchEvent(query); |
||||
if (!foundEvent) { |
||||
console.warn("[Events] Event not found for query:", query); |
||||
localError = "Event not found"; |
||||
// relayStatuses = {}; // This line was removed as per the edit hint |
||||
if (activeSub) { |
||||
try { |
||||
activeSub.stop(); |
||||
} catch (e) { |
||||
console.warn("Error stopping subscription:", e); |
||||
} |
||||
activeSub = null; |
||||
} |
||||
if (currentAbortController) { |
||||
currentAbortController.abort(); |
||||
currentAbortController = null; |
||||
} |
||||
updateSearchState(false, false, null, null); |
||||
} else { |
||||
console.log("[Events] Event found:", foundEvent); |
||||
handleFoundEvent(foundEvent); |
||||
updateSearchState(false, true, 1, "event"); |
||||
} |
||||
} catch (err) { |
||||
console.error("[Events] Error fetching event:", err, "Query:", query); |
||||
localError = "Error fetching event. Please check the ID and try again."; |
||||
// relayStatuses = {}; // This line was removed as per the edit hint |
||||
if (activeSub) { |
||||
try { |
||||
activeSub.stop(); |
||||
} catch (e) { |
||||
console.warn("Error stopping subscription:", e); |
||||
} |
||||
activeSub = null; |
||||
} |
||||
if (currentAbortController) { |
||||
currentAbortController.abort(); |
||||
currentAbortController = null; |
||||
} |
||||
updateSearchState(false, false, null, null); |
||||
isProcessingSearch = false; |
||||
} |
||||
} |
||||
|
||||
async function handleSearchEvent( |
||||
clearInput: boolean = true, |
||||
queryOverride?: string, |
||||
) { |
||||
if (searching) { |
||||
console.log("EventSearch: Already searching, skipping"); |
||||
return; |
||||
} |
||||
resetSearchState(); |
||||
localError = null; |
||||
updateSearchState(true); |
||||
isResetting = false; |
||||
isUserEditing = false; // Reset user editing flag when search starts |
||||
const query = ( |
||||
queryOverride !== undefined ? queryOverride || "" : searchQuery || "" |
||||
).trim(); |
||||
if (!query) { |
||||
updateSearchState(false, false, null, null); |
||||
return; |
||||
} |
||||
if (query.toLowerCase().startsWith("d:")) { |
||||
const dTag = query.slice(2).trim().toLowerCase(); |
||||
if (dTag) { |
||||
console.log("EventSearch: Processing d-tag search:", dTag); |
||||
navigateToSearch(dTag, "d"); |
||||
updateSearchState(false, false, null, null); |
||||
return; |
||||
} |
||||
} |
||||
if (query.toLowerCase().startsWith("t:")) { |
||||
const searchTerm = query.slice(2).trim(); |
||||
if (searchTerm) { |
||||
await handleSearchBySubscription("t", searchTerm); |
||||
return; |
||||
} |
||||
} |
||||
if (query.toLowerCase().startsWith("n:")) { |
||||
const searchTerm = query.slice(2).trim(); |
||||
if (searchTerm) { |
||||
await handleSearchBySubscription("n", searchTerm); |
||||
return; |
||||
} |
||||
} |
||||
if (query.includes("@")) { |
||||
await handleNip05Search(query); |
||||
return; |
||||
} |
||||
if (clearInput) { |
||||
navigateToSearch(query, "id"); |
||||
// Don't clear searchQuery here - let the effect handle it |
||||
} |
||||
await handleEventSearch(query); |
||||
} |
||||
|
||||
// Keep searchQuery in sync with searchValue and dTagValue props |
||||
$effect(() => { |
||||
// Only sync if we're not currently searching, resetting, or if the user is editing |
||||
if (searching || isResetting || isUserEditing) { |
||||
return; |
||||
} |
||||
|
||||
if (dTagValue) { |
||||
// If dTagValue is set, show it as "d:tag" in the search bar |
||||
searchQuery = `d:${dTagValue}`; |
||||
} else if (searchValue) { |
||||
// searchValue should already be in the correct format (t:, n:, d:, etc.) |
||||
searchQuery = searchValue; |
||||
} else if (!searchQuery) { |
||||
// Only clear if searchQuery is empty to avoid clearing user input |
||||
searchQuery = ""; |
||||
} |
||||
}); |
||||
|
||||
// Debounced effect to handle searchValue changes |
||||
$effect(() => { |
||||
if ( |
||||
!searchValue || |
||||
searching || |
||||
isResetting || |
||||
isProcessingSearch || |
||||
isWaitingForSearchResult |
||||
) { |
||||
return; |
||||
} |
||||
|
||||
// Check if we've already processed this searchValue |
||||
if (searchValue === lastProcessedSearchValue) { |
||||
return; |
||||
} |
||||
|
||||
// If we already have the event for this searchValue, do nothing |
||||
if (foundEvent) { |
||||
const currentEventId = foundEvent.id; |
||||
let currentNaddr = null; |
||||
let currentNevent = null; |
||||
let currentNpub = null; |
||||
try { |
||||
currentNevent = neventEncode(foundEvent, $activeInboxRelays); |
||||
} catch {} |
||||
try { |
||||
currentNaddr = getMatchingTags(foundEvent, "d")[0]?.[1] |
||||
? naddrEncode(foundEvent, $activeInboxRelays) |
||||
: null; |
||||
} catch {} |
||||
try { |
||||
currentNpub = foundEvent.kind === 0 ? toNpub(foundEvent.pubkey) : null; |
||||
} catch {} |
||||
|
||||
// Debug log for comparison |
||||
console.log( |
||||
"[EventSearch effect] searchValue:", |
||||
searchValue, |
||||
"foundEvent.id:", |
||||
currentEventId, |
||||
"foundEvent.pubkey:", |
||||
foundEvent.pubkey, |
||||
"toNpub(pubkey):", |
||||
currentNpub, |
||||
"foundEvent.kind:", |
||||
foundEvent.kind, |
||||
"currentNaddr:", |
||||
currentNaddr, |
||||
"currentNevent:", |
||||
currentNevent, |
||||
); |
||||
|
||||
// Also check if searchValue is an nprofile and matches the current event's pubkey |
||||
let currentNprofile = null; |
||||
if ( |
||||
searchValue && |
||||
searchValue.startsWith("nprofile1") && |
||||
foundEvent.kind === 0 |
||||
) { |
||||
try { |
||||
currentNprofile = nprofileEncode(foundEvent.pubkey, $activeInboxRelays); |
||||
} catch {} |
||||
} |
||||
|
||||
if ( |
||||
searchValue === currentEventId || |
||||
(currentNaddr && searchValue === currentNaddr) || |
||||
(currentNevent && searchValue === currentNevent) || |
||||
(currentNpub && searchValue === currentNpub) || |
||||
(currentNprofile && searchValue === currentNprofile) |
||||
) { |
||||
// Already displaying the event for this searchValue |
||||
lastProcessedSearchValue = searchValue; |
||||
return; |
||||
} |
||||
} |
||||
|
||||
// Otherwise, trigger a search for the new value |
||||
if (searchTimeout) { |
||||
clearTimeout(searchTimeout); |
||||
} |
||||
searchTimeout = setTimeout(() => { |
||||
isProcessingSearch = true; |
||||
isWaitingForSearchResult = true; |
||||
lastProcessedSearchValue = searchValue; |
||||
if (searchValue) { |
||||
searchEvent(false, searchValue); |
||||
handleSearchEvent(false, searchValue); |
||||
} |
||||
}, 300); |
||||
}); |
||||
|
||||
// Add debouncing to prevent rapid successive searches |
||||
let searchTimeout: ReturnType<typeof setTimeout> | null = null; |
||||
|
||||
// Cleanup function to clear timeout when component is destroyed |
||||
$effect(() => { |
||||
return () => { |
||||
if (searchTimeout) { |
||||
clearTimeout(searchTimeout); |
||||
} |
||||
}; |
||||
}); |
||||
|
||||
// Simple effect to handle dTagValue changes |
||||
$effect(() => { |
||||
if ( |
||||
dTagValue && |
||||
!searching && |
||||
!isResetting && |
||||
dTagValue !== lastProcessedDTagValue |
||||
) { |
||||
console.log("EventSearch: Processing dTagValue:", dTagValue); |
||||
lastProcessedDTagValue = dTagValue; |
||||
|
||||
// Add a small delay to prevent rapid successive calls |
||||
setTimeout(() => { |
||||
if (!searching && !isResetting) { |
||||
handleSearchBySubscription("d", dTagValue); |
||||
} |
||||
}, 100); |
||||
} |
||||
}); |
||||
|
||||
// Simple effect to handle event prop changes |
||||
$effect(() => { |
||||
if (event && !searching && !isResetting) { |
||||
foundEvent = event; |
||||
} |
||||
}); |
||||
|
||||
async function searchEvent(clearInput: boolean = true, queryOverride?: string) { |
||||
// Search utility functions |
||||
function updateSearchState( |
||||
isSearching: boolean, |
||||
completed: boolean = false, |
||||
count: number | null = null, |
||||
type: string | null = null, |
||||
) { |
||||
searching = isSearching; |
||||
searchCompleted = completed; |
||||
searchResultCount = count; |
||||
searchResultType = type; |
||||
if (onLoadingChange) { |
||||
onLoadingChange(isSearching); |
||||
} |
||||
} |
||||
|
||||
function resetSearchState() { |
||||
isResetting = true; |
||||
foundEvent = null; |
||||
localError = null; |
||||
const query = (queryOverride !== undefined ? queryOverride : searchQuery).trim(); |
||||
if (!query) return; |
||||
lastProcessedSearchValue = null; |
||||
lastProcessedDTagValue = null; |
||||
isProcessingSearch = false; |
||||
currentProcessingSearchValue = null; |
||||
lastSearchValue = null; |
||||
updateSearchState(false, false, null, null); |
||||
|
||||
// Only update the URL if this is a manual search |
||||
if (clearInput) { |
||||
const encoded = encodeURIComponent(query); |
||||
goto(`?id=${encoded}`, { replaceState: false, keepFocus: true, noScroll: true }); |
||||
// Cancel ongoing search |
||||
if (currentAbortController) { |
||||
currentAbortController.abort(); |
||||
currentAbortController = null; |
||||
} |
||||
|
||||
if (clearInput) { |
||||
searchQuery = ''; |
||||
// Clean up subscription |
||||
if (activeSub) { |
||||
try { |
||||
activeSub.stop(); |
||||
} catch (e) { |
||||
console.warn("Error stopping subscription:", e); |
||||
} |
||||
activeSub = null; |
||||
} |
||||
|
||||
// Clean the query |
||||
let cleanedQuery = query.replace(/^nostr:/, ''); |
||||
let filterOrId: any = cleanedQuery; |
||||
console.log('[Events] Cleaned query:', cleanedQuery); |
||||
// Clear search results |
||||
onSearchResults([], [], [], new Set(), new Set()); |
||||
|
||||
// NIP-05 address pattern: user@domain |
||||
if (/^[a-z0-9._-]+@[a-z0-9.-]+$/i.test(cleanedQuery)) { |
||||
try { |
||||
const [name, domain] = cleanedQuery.split('@'); |
||||
const res = await fetch(`https://${domain}/.well-known/nostr.json?name=${name}`); |
||||
const data = await res.json(); |
||||
const pubkey = data.names?.[name]; |
||||
if (pubkey) { |
||||
filterOrId = { kinds: [0], authors: [pubkey] }; |
||||
const profileEvent = await fetchEventWithFallback($ndkInstance, filterOrId, 10000); |
||||
if (profileEvent) { |
||||
handleFoundEvent(profileEvent); |
||||
return; |
||||
} else { |
||||
localError = 'No profile found for this NIP-05 address.'; |
||||
return; |
||||
// Clear any pending timeout |
||||
if (searchTimeout) { |
||||
clearTimeout(searchTimeout); |
||||
searchTimeout = null; |
||||
} |
||||
} else { |
||||
localError = 'NIP-05 address not found.'; |
||||
return; |
||||
|
||||
// Reset the flag after a short delay to allow effects to settle |
||||
setTimeout(() => { |
||||
isResetting = false; |
||||
}, 100); |
||||
} |
||||
|
||||
function handleFoundEvent(event: NDKEvent) { |
||||
foundEvent = event; |
||||
localError = null; // Clear local error when event is found |
||||
|
||||
// Stop any ongoing subscription |
||||
if (activeSub) { |
||||
try { |
||||
activeSub.stop(); |
||||
} catch (e) { |
||||
localError = 'Error resolving NIP-05 address.'; |
||||
return; |
||||
console.warn("Error stopping subscription:", e); |
||||
} |
||||
activeSub = null; |
||||
} |
||||
|
||||
// Abort any ongoing fetch |
||||
if (currentAbortController) { |
||||
currentAbortController.abort(); |
||||
currentAbortController = null; |
||||
} |
||||
|
||||
// Clear search state |
||||
searching = false; |
||||
searchCompleted = true; |
||||
searchResultCount = 1; |
||||
searchResultType = "event"; |
||||
|
||||
// Update last processed search value to prevent re-processing |
||||
if (searchValue) { |
||||
lastProcessedSearchValue = searchValue; |
||||
lastSearchValue = searchValue; |
||||
} |
||||
|
||||
// Reset processing flag |
||||
isProcessingSearch = false; |
||||
currentProcessingSearchValue = null; |
||||
isWaitingForSearchResult = false; |
||||
|
||||
onEventFound(event); |
||||
} |
||||
|
||||
// If it's a 64-char hex, try as event id first, then as pubkey (profile) |
||||
if (/^[a-f0-9]{64}$/i.test(cleanedQuery)) { |
||||
// Try as event id |
||||
filterOrId = cleanedQuery; |
||||
const eventResult = await fetchEventWithFallback($ndkInstance, filterOrId, 10000); |
||||
// Always try as pubkey (profile event) as well |
||||
const profileFilter = { kinds: [0], authors: [cleanedQuery] }; |
||||
const profileEvent = await fetchEventWithFallback($ndkInstance, profileFilter, 10000); |
||||
// Prefer profile if found and pubkey matches query |
||||
if (profileEvent && profileEvent.pubkey.toLowerCase() === cleanedQuery.toLowerCase()) { |
||||
handleFoundEvent(profileEvent); |
||||
} else if (eventResult) { |
||||
handleFoundEvent(eventResult); |
||||
function navigateToSearch(query: string, paramName: string) { |
||||
const encoded = encodeURIComponent(query); |
||||
goto(`?${paramName}=${encoded}`, { |
||||
replaceState: false, |
||||
keepFocus: true, |
||||
noScroll: true, |
||||
}); |
||||
} |
||||
|
||||
// Search handlers |
||||
async function handleSearchBySubscription( |
||||
searchType: "d" | "t" | "n", |
||||
searchTerm: string, |
||||
) { |
||||
console.log("EventSearch: Starting subscription search:", { |
||||
searchType, |
||||
searchTerm, |
||||
}); |
||||
isResetting = false; // Allow effects to run for new searches |
||||
localError = null; |
||||
updateSearchState(true); |
||||
|
||||
// Wait for relays to be available (with timeout) |
||||
let retryCount = 0; |
||||
const maxRetries = 20; // Wait up to 10 seconds (20 * 500ms) for user login to complete |
||||
|
||||
while ($activeInboxRelays.length === 0 && $activeOutboxRelays.length === 0 && retryCount < maxRetries) { |
||||
console.debug(`EventSearch: Waiting for relays... (attempt ${retryCount + 1}/${maxRetries})`); |
||||
await new Promise(resolve => setTimeout(resolve, 500)); // Wait 500ms |
||||
retryCount++; |
||||
} |
||||
|
||||
// Additional wait for user-specific relays if user is logged in |
||||
const currentUser = get(userStore); |
||||
if (currentUser.signedIn && currentUser.pubkey) { |
||||
console.debug(`EventSearch: User is logged in (${currentUser.pubkey}), waiting for user-specific relays...`); |
||||
retryCount = 0; |
||||
while ($activeOutboxRelays.length <= 9 && retryCount < maxRetries) { |
||||
// If we still have the default relay count (9), wait for user-specific relays |
||||
console.debug(`EventSearch: Waiting for user-specific relays... (attempt ${retryCount + 1}/${maxRetries})`); |
||||
await new Promise(resolve => setTimeout(resolve, 500)); |
||||
retryCount++; |
||||
} |
||||
} |
||||
|
||||
// Check if we have any relays available |
||||
if ($activeInboxRelays.length === 0 && $activeOutboxRelays.length === 0) { |
||||
console.warn("EventSearch: No relays available after waiting, failing search"); |
||||
localError = "No relays available. Please check your connection and try again."; |
||||
updateSearchState(false, false, null, null); |
||||
isProcessingSearch = false; |
||||
currentProcessingSearchValue = null; |
||||
isWaitingForSearchResult = false; |
||||
searching = false; |
||||
return; |
||||
} else if (/^(nevent|note|naddr|npub|nprofile)[a-z0-9]+$/i.test(cleanedQuery)) { |
||||
} |
||||
|
||||
console.log("EventSearch: Relays available, proceeding with search:", { |
||||
inboxCount: $activeInboxRelays.length, |
||||
outboxCount: $activeOutboxRelays.length |
||||
}); |
||||
|
||||
try { |
||||
const decoded = nip19.decode(cleanedQuery); |
||||
if (!decoded) throw new Error('Invalid identifier'); |
||||
console.log('[Events] Decoded NIP-19:', decoded); |
||||
switch (decoded.type) { |
||||
case 'nevent': |
||||
filterOrId = decoded.data.id; |
||||
break; |
||||
case 'note': |
||||
filterOrId = decoded.data; |
||||
break; |
||||
case 'naddr': |
||||
filterOrId = { |
||||
kinds: [decoded.data.kind], |
||||
authors: [decoded.data.pubkey], |
||||
'#d': [decoded.data.identifier], |
||||
}; |
||||
break; |
||||
case 'nprofile': |
||||
filterOrId = { |
||||
kinds: [0], |
||||
authors: [decoded.data.pubkey], |
||||
}; |
||||
break; |
||||
case 'npub': |
||||
filterOrId = { |
||||
kinds: [0], |
||||
authors: [decoded.data], |
||||
}; |
||||
break; |
||||
default: |
||||
filterOrId = cleanedQuery; |
||||
// Cancel existing search |
||||
if (currentAbortController) { |
||||
currentAbortController.abort(); |
||||
} |
||||
currentAbortController = new AbortController(); |
||||
// Add a timeout to prevent hanging searches |
||||
const searchPromise = searchBySubscription( |
||||
searchType, |
||||
searchTerm, |
||||
{ |
||||
onSecondOrderUpdate: (updatedResult) => { |
||||
console.log("EventSearch: Second order update:", updatedResult); |
||||
onSearchResults( |
||||
updatedResult.events, |
||||
updatedResult.secondOrder, |
||||
updatedResult.tTagEvents, |
||||
updatedResult.eventIds, |
||||
updatedResult.addresses, |
||||
updatedResult.searchType, |
||||
updatedResult.searchTerm, |
||||
); |
||||
}, |
||||
onSubscriptionCreated: (sub) => { |
||||
console.log("EventSearch: Subscription created:", sub); |
||||
if (activeSub) { |
||||
activeSub.stop(); |
||||
} |
||||
console.log('[Events] Using filterOrId:', filterOrId); |
||||
activeSub = sub; |
||||
}, |
||||
}, |
||||
currentAbortController.signal, |
||||
); |
||||
|
||||
// Add a 30-second timeout |
||||
const timeoutPromise = new Promise((_, reject) => { |
||||
setTimeout(() => { |
||||
reject(new Error("Search timeout: No results received within 30 seconds")); |
||||
}, 30000); |
||||
}); |
||||
|
||||
const result = await Promise.race([searchPromise, timeoutPromise]) as any; |
||||
console.log("EventSearch: Search completed:", result); |
||||
onSearchResults( |
||||
result.events, |
||||
result.secondOrder, |
||||
result.tTagEvents, |
||||
result.eventIds, |
||||
result.addresses, |
||||
result.searchType, |
||||
result.searchTerm, |
||||
); |
||||
const totalCount = |
||||
result.events.length + |
||||
result.secondOrder.length + |
||||
result.tTagEvents.length; |
||||
localError = null; // Clear local error when search completes |
||||
// Stop any ongoing subscription |
||||
if (activeSub) { |
||||
try { |
||||
activeSub.stop(); |
||||
} catch (e) { |
||||
console.error('[Events] Invalid Nostr identifier:', cleanedQuery, e); |
||||
localError = 'Invalid Nostr identifier.'; |
||||
return; |
||||
console.warn("Error stopping subscription:", e); |
||||
} |
||||
activeSub = null; |
||||
} |
||||
// Abort any ongoing fetch |
||||
if (currentAbortController) { |
||||
currentAbortController.abort(); |
||||
currentAbortController = null; |
||||
} |
||||
updateSearchState(false, true, totalCount, searchType); |
||||
isProcessingSearch = false; |
||||
currentProcessingSearchValue = null; |
||||
isWaitingForSearchResult = false; |
||||
|
||||
// Update last processed search value to prevent re-processing |
||||
if (searchValue) { |
||||
lastProcessedSearchValue = searchValue; |
||||
} |
||||
} catch (error) { |
||||
if (error instanceof Error && error.message === "Search cancelled") { |
||||
isProcessingSearch = false; |
||||
currentProcessingSearchValue = null; |
||||
isWaitingForSearchResult = false; |
||||
return; |
||||
} |
||||
console.error("EventSearch: Search failed:", error); |
||||
localError = error instanceof Error ? error.message : "Search failed"; |
||||
// Provide more specific error messages for different failure types |
||||
if (error instanceof Error) { |
||||
if ( |
||||
error.message.includes("timeout") || |
||||
error.message.includes("connection") |
||||
) { |
||||
localError = |
||||
"Search timed out. The relays may be temporarily unavailable. Please try again."; |
||||
} else if (error.message.includes("NDK not initialized")) { |
||||
localError = |
||||
"Nostr client not initialized. Please refresh the page and try again."; |
||||
} else { |
||||
localError = `Search failed: ${error.message}`; |
||||
} |
||||
} |
||||
localError = null; // Clear local error when search fails |
||||
// Stop any ongoing subscription |
||||
if (activeSub) { |
||||
try { |
||||
console.log('Searching for event:', filterOrId); |
||||
const event = await fetchEventWithFallback($ndkInstance, filterOrId, 10000); |
||||
activeSub.stop(); |
||||
} catch (e) { |
||||
console.warn("Error stopping subscription:", e); |
||||
} |
||||
activeSub = null; |
||||
} |
||||
// Abort any ongoing fetch |
||||
if (currentAbortController) { |
||||
currentAbortController.abort(); |
||||
currentAbortController = null; |
||||
} |
||||
updateSearchState(false, false, null, null); |
||||
isProcessingSearch = false; |
||||
currentProcessingSearchValue = null; |
||||
isWaitingForSearchResult = false; |
||||
|
||||
if (!event) { |
||||
console.warn('[Events] Event not found for filterOrId:', filterOrId); |
||||
localError = 'Event not found'; |
||||
} else { |
||||
console.log('[Events] Event found:', event); |
||||
handleFoundEvent(event); |
||||
// Update last processed search value to prevent re-processing even on error |
||||
if (searchValue) { |
||||
lastProcessedSearchValue = searchValue; |
||||
} |
||||
} catch (err) { |
||||
console.error('[Events] Error fetching event:', err, 'Query:', query); |
||||
localError = 'Error fetching event. Please check the ID and try again.'; |
||||
} |
||||
} |
||||
|
||||
function handleFoundEvent(event: NDKEvent) { |
||||
foundEvent = event; |
||||
onEventFound(event); |
||||
function handleClear() { |
||||
isResetting = true; |
||||
searchQuery = ""; |
||||
isUserEditing = false; // Reset user editing flag |
||||
resetSearchState(); |
||||
|
||||
// Clear URL parameters to reset the page |
||||
goto("", { |
||||
replaceState: true, |
||||
keepFocus: true, |
||||
noScroll: true, |
||||
}); |
||||
|
||||
// Ensure all search state is cleared |
||||
searching = false; |
||||
searchCompleted = false; |
||||
searchResultCount = null; |
||||
searchResultType = null; |
||||
foundEvent = null; |
||||
localError = null; |
||||
isProcessingSearch = false; |
||||
currentProcessingSearchValue = null; |
||||
lastSearchValue = null; |
||||
isWaitingForSearchResult = false; |
||||
|
||||
// Clear any pending timeout |
||||
if (searchTimeout) { |
||||
clearTimeout(searchTimeout); |
||||
searchTimeout = null; |
||||
} |
||||
|
||||
if (onClear) { |
||||
onClear(); |
||||
} |
||||
|
||||
// Reset the flag after a short delay to allow effects to settle |
||||
setTimeout(() => { |
||||
isResetting = false; |
||||
}, 100); |
||||
} |
||||
|
||||
function getResultMessage(): string { |
||||
if (searchResultCount === 0) { |
||||
return "Search completed. No results found."; |
||||
} |
||||
|
||||
const typeLabel = |
||||
searchResultType === "n" |
||||
? "profile" |
||||
: searchResultType === "nip05" |
||||
? "NIP-05 address" |
||||
: "event"; |
||||
const countLabel = searchResultType === "n" ? "profiles" : "events"; |
||||
|
||||
return searchResultCount === 1 |
||||
? `Search completed. Found 1 ${typeLabel}.` |
||||
: `Search completed. Found ${searchResultCount} ${countLabel}.`; |
||||
} |
||||
|
||||
function getNeventUrl(event: NDKEvent): string { |
||||
return neventEncode(event, $activeInboxRelays); |
||||
} |
||||
|
||||
function getNaddrUrl(event: NDKEvent): string { |
||||
return naddrEncode(event, $activeInboxRelays); |
||||
} |
||||
|
||||
function getNprofileUrl(pubkey: string): string { |
||||
return nprofileEncode(pubkey, $activeInboxRelays); |
||||
} |
||||
</script> |
||||
|
||||
<div class="flex flex-col space-y-6"> |
||||
<div class="flex gap-2"> |
||||
<!-- Search Input Section --> |
||||
<div class="flex gap-2 items-center"> |
||||
<Input |
||||
bind:value={searchQuery} |
||||
placeholder="Enter event ID, nevent, or naddr..." |
||||
placeholder="Enter event ID, nevent, naddr, d:tag-name, t:topic, or n:username..." |
||||
class="flex-grow" |
||||
on:keydown={(e: KeyboardEvent) => e.key === 'Enter' && searchEvent(true)} |
||||
onkeydown={(e: KeyboardEvent) => |
||||
e.key === "Enter" && handleSearchEvent(true)} |
||||
oninput={() => (isUserEditing = true)} |
||||
onblur={() => (isUserEditing = false)} |
||||
/> |
||||
<Button on:click={() => searchEvent(true)} disabled={loading}> |
||||
{loading ? 'Searching...' : 'Search'} |
||||
<Button onclick={() => handleSearchEvent(true)} disabled={loading}> |
||||
{#if searching} |
||||
<Spinner class="mr-2 text-gray-600 dark:text-gray-300" size="5" /> |
||||
{/if} |
||||
{searching ? "Searching..." : "Search"} |
||||
</Button> |
||||
<Button |
||||
onclick={handleClear} |
||||
color="alternative" |
||||
type="button" |
||||
disabled={loading} |
||||
> |
||||
Clear |
||||
</Button> |
||||
</div> |
||||
|
||||
{#if localError || error} |
||||
<div class="p-4 mb-4 text-sm text-red-700 bg-red-100 rounded-lg" role="alert"> |
||||
<!-- Error Display --> |
||||
{#if showError} |
||||
<div |
||||
class="p-4 mb-4 text-sm text-red-700 bg-red-100 rounded-lg" |
||||
role="alert" |
||||
> |
||||
{localError || error} |
||||
{#if searchQuery.trim()} |
||||
<div class="mt-2"> |
||||
You can also try viewing this event on |
||||
<a |
||||
class="underline text-primary-700" |
||||
href={"https://njump.me/" + encodeURIComponent(searchQuery.trim())} |
||||
target="_blank" |
||||
rel="noopener" |
||||
>Njump</a>. |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
{/if} |
||||
|
||||
<div class="mt-4"> |
||||
<div class="flex flex-wrap gap-2"> |
||||
{#each Object.entries(relayStatuses) as [relay, status]} |
||||
<RelayDisplay {relay} showStatus={true} status={status} /> |
||||
{/each} |
||||
<!-- Success Display --> |
||||
{#if showSuccess} |
||||
<div |
||||
class="p-4 mb-4 text-sm text-green-700 bg-green-100 rounded-lg" |
||||
role="alert" |
||||
> |
||||
{getResultMessage()} |
||||
</div> |
||||
{#if !foundEvent && Object.values(relayStatuses).some(s => s === 'pending')} |
||||
<div class="text-gray-500 mt-2">Searching relays...</div> |
||||
{/if} |
||||
{#if !foundEvent && !searching && Object.values(relayStatuses).every(s => s !== 'pending')} |
||||
<div class="text-red-500 mt-2">Event not found on any relay.</div> |
||||
{/if} |
||||
</div> |
||||
</div> |
||||
@ -1,76 +0,0 @@
@@ -1,76 +0,0 @@
|
||||
<script lang='ts'> |
||||
import { type NDKUserProfile } from '@nostr-dev-kit/ndk'; |
||||
import { activePubkey, loginWithExtension, ndkInstance, ndkSignedIn, persistLogin } from '$lib/ndk'; |
||||
import { Avatar, Button, Popover } from 'flowbite-svelte'; |
||||
import Profile from "$components/util/Profile.svelte"; |
||||
|
||||
let profile = $state<NDKUserProfile | null>(null); |
||||
let npub = $state<string | undefined >(undefined); |
||||
|
||||
let signInFailed = $state<boolean>(false); |
||||
let errorMessage = $state<string>(''); |
||||
|
||||
$effect(() => { |
||||
if ($ndkSignedIn) { |
||||
$ndkInstance |
||||
.getUser({ pubkey: $activePubkey ?? undefined }) |
||||
?.fetchProfile() |
||||
.then(userProfile => { |
||||
profile = userProfile; |
||||
}); |
||||
npub = $ndkInstance.activeUser?.npub; |
||||
} |
||||
}); |
||||
|
||||
async function handleSignInClick() { |
||||
try { |
||||
signInFailed = false; |
||||
errorMessage = ''; |
||||
|
||||
const user = await loginWithExtension(); |
||||
if (!user) { |
||||
throw new Error('The NIP-07 extension did not return a user.'); |
||||
} |
||||
|
||||
profile = await user.fetchProfile(); |
||||
persistLogin(user); |
||||
} catch (e) { |
||||
console.error(e); |
||||
signInFailed = true; |
||||
errorMessage = e instanceof Error ? e.message : 'Failed to sign in. Please try again.'; |
||||
} |
||||
} |
||||
|
||||
</script> |
||||
|
||||
<div class="m-4"> |
||||
{#if $ndkSignedIn} |
||||
<Profile pubkey={$activePubkey} isNav={true} /> |
||||
{:else} |
||||
<Avatar rounded class='h-6 w-6 cursor-pointer bg-transparent' id='avatar' /> |
||||
<Popover |
||||
class='popover-leather w-fit' |
||||
placement='bottom' |
||||
triggeredBy='#avatar' |
||||
> |
||||
<div class='w-full flex flex-col space-y-2'> |
||||
<Button |
||||
onclick={handleSignInClick} |
||||
> |
||||
Extension Sign-In |
||||
</Button> |
||||
{#if signInFailed} |
||||
<div class="p-2 text-sm text-red-600 bg-red-100 rounded"> |
||||
{errorMessage} |
||||
</div> |
||||
{/if} |
||||
<!-- <Button |
||||
color='alternative' |
||||
on:click={signInWithBunker} |
||||
> |
||||
Bunker Sign-In |
||||
</Button> --> |
||||
</div> |
||||
</Popover> |
||||
{/if} |
||||
</div> |
||||
@ -1,77 +1,81 @@
@@ -1,77 +1,81 @@
|
||||
<script lang="ts"> |
||||
import { Button } from "flowbite-svelte"; |
||||
import { loginWithExtension, ndkSignedIn } from '$lib/ndk'; |
||||
import { Button, Modal } from "flowbite-svelte"; |
||||
import { loginWithExtension } from "$lib/stores/userStore"; |
||||
import { userStore } from "$lib/stores/userStore"; |
||||
|
||||
const { show = false, onClose = () => {}, onLoginSuccess = () => {} } = $props<{ |
||||
const { |
||||
show = false, |
||||
onClose = () => {}, |
||||
onLoginSuccess = () => {}, |
||||
} = $props<{ |
||||
show?: boolean; |
||||
onClose?: () => void; |
||||
onLoginSuccess?: () => void; |
||||
}>(); |
||||
|
||||
let signInFailed = $state<boolean>(false); |
||||
let errorMessage = $state<string>(''); |
||||
let errorMessage = $state<string>(""); |
||||
let user = $state($userStore); |
||||
let modalOpen = $state(show); |
||||
|
||||
userStore.subscribe((val) => (user = val)); |
||||
|
||||
$effect(() => { |
||||
if ($ndkSignedIn && show) { |
||||
modalOpen = show; |
||||
}); |
||||
|
||||
$effect(() => { |
||||
if (user.signedIn && show) { |
||||
onLoginSuccess(); |
||||
onClose(); |
||||
} |
||||
}); |
||||
|
||||
$effect(() => { |
||||
if (!modalOpen) { |
||||
onClose(); |
||||
} |
||||
}); |
||||
|
||||
async function handleSignInClick() { |
||||
try { |
||||
signInFailed = false; |
||||
errorMessage = ''; |
||||
errorMessage = ""; |
||||
|
||||
const user = await loginWithExtension(); |
||||
if (!user) { |
||||
throw new Error('The NIP-07 extension did not return a user.'); |
||||
} |
||||
await loginWithExtension(); |
||||
} catch (e: unknown) { |
||||
console.error(e); |
||||
signInFailed = true; |
||||
errorMessage = (e as Error)?.message ?? 'Failed to sign in. Please try again.'; |
||||
errorMessage = |
||||
(e as Error)?.message ?? "Failed to sign in. Please try again."; |
||||
} |
||||
} |
||||
</script> |
||||
|
||||
{#if show} |
||||
<div class="fixed inset-0 z-50 flex items-center justify-center overflow-x-hidden overflow-y-auto outline-none focus:outline-none bg-gray-900 bg-opacity-50"> |
||||
<div class="relative w-auto my-6 mx-auto max-w-3xl"> |
||||
<div class="border-0 rounded-lg shadow-lg relative flex flex-col w-full bg-white dark:bg-gray-800 outline-none focus:outline-none"> |
||||
<!-- Header --> |
||||
<div class="flex items-start justify-between p-5 border-b border-solid border-gray-300 dark:border-gray-600 rounded-t"> |
||||
<h3 class="text-xl font-medium text-gray-900 dark:text-gray-100">Login Required</h3> |
||||
<button |
||||
class="ml-auto bg-transparent border-0 text-gray-400 float-right text-3xl leading-none font-semibold outline-none focus:outline-none" |
||||
onclick={onClose} |
||||
> |
||||
<span class="bg-transparent text-gray-500 dark:text-gray-400 h-6 w-6 text-2xl block outline-none focus:outline-none">×</span> |
||||
</button> |
||||
</div> |
||||
|
||||
<!-- Body --> |
||||
<div class="relative p-6 flex-auto"> |
||||
<p class="text-base leading-relaxed text-gray-500 dark:text-gray-400 mb-6"> |
||||
You need to be logged in to submit an issue. Your form data will be preserved. |
||||
<Modal |
||||
class="modal-leather" |
||||
title="Login Required" |
||||
bind:open={modalOpen} |
||||
autoclose |
||||
outsideclose |
||||
size="sm" |
||||
> |
||||
<p class="text-base leading-relaxed text-gray-700 dark:text-gray-300 mb-6"> |
||||
You need to be logged in to submit an issue. Your form data will be |
||||
preserved. |
||||
</p> |
||||
<div class="flex flex-col space-y-4"> |
||||
<div class="flex justify-center"> |
||||
<Button |
||||
color="primary" |
||||
onclick={handleSignInClick} |
||||
> |
||||
<Button color="primary" onclick={handleSignInClick}> |
||||
Sign in with Extension |
||||
</Button> |
||||
</div> |
||||
{#if signInFailed} |
||||
<div class="p-3 text-sm text-red-600 dark:text-red-400 bg-red-100 dark:bg-red-900 rounded"> |
||||
<div |
||||
class="p-3 text-sm text-red-600 dark:text-red-400 bg-red-100 dark:bg-red-900 rounded" |
||||
> |
||||
{errorMessage} |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
</div> |
||||
</div> |
||||
</div> |
||||
</div> |
||||
{/if} |
||||
</Modal> |
||||
|
||||
@ -1,12 +0,0 @@
@@ -1,12 +0,0 @@
|
||||
<script lang="ts"> |
||||
import type { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
export let showModal; |
||||
export let event: NDKEvent; |
||||
// let str: string = JSON.stringify(event); |
||||
</script> |
||||
|
||||
{#if showModal} |
||||
<div class="backdrop"> |
||||
<div class="Modal">{event.id}</div> |
||||
</div> |
||||
{/if} |
||||
@ -0,0 +1,59 @@
@@ -0,0 +1,59 @@
|
||||
<script lang="ts"> |
||||
import { networkCondition, isNetworkChecking, startNetworkStatusMonitoring } from '$lib/stores/networkStore'; |
||||
import { NetworkCondition } from '$lib/utils/network_detection'; |
||||
import { onMount } from 'svelte'; |
||||
|
||||
function getStatusColor(): string { |
||||
switch ($networkCondition) { |
||||
case NetworkCondition.ONLINE: |
||||
return 'text-green-600 dark:text-green-400'; |
||||
case NetworkCondition.SLOW: |
||||
return 'text-yellow-600 dark:text-yellow-400'; |
||||
case NetworkCondition.OFFLINE: |
||||
return 'text-red-600 dark:text-red-400'; |
||||
default: |
||||
return 'text-gray-600 dark:text-gray-400'; |
||||
} |
||||
} |
||||
|
||||
function getStatusIcon(): string { |
||||
switch ($networkCondition) { |
||||
case NetworkCondition.ONLINE: |
||||
return '🟢'; |
||||
case NetworkCondition.SLOW: |
||||
return '🟡'; |
||||
case NetworkCondition.OFFLINE: |
||||
return '🔴'; |
||||
default: |
||||
return '⚪'; |
||||
} |
||||
} |
||||
|
||||
function getStatusText(): string { |
||||
switch ($networkCondition) { |
||||
case NetworkCondition.ONLINE: |
||||
return 'Online'; |
||||
case NetworkCondition.SLOW: |
||||
return 'Slow Connection'; |
||||
case NetworkCondition.OFFLINE: |
||||
return 'Offline'; |
||||
default: |
||||
return 'Unknown'; |
||||
} |
||||
} |
||||
|
||||
onMount(() => { |
||||
// Start centralized network monitoring |
||||
startNetworkStatusMonitoring(); |
||||
}); |
||||
</script> |
||||
|
||||
<div class="flex items-center space-x-2 text-xs {getStatusColor()} font-medium"> |
||||
{#if $isNetworkChecking} |
||||
<span class="animate-spin">⏳</span> |
||||
<span>Checking...</span> |
||||
{:else} |
||||
<span class="text-lg">{getStatusIcon()}</span> |
||||
<span>{getStatusText()}</span> |
||||
{/if} |
||||
</div> |
||||
@ -1,254 +0,0 @@
@@ -1,254 +0,0 @@
|
||||
<script lang='ts'> |
||||
import { indexKind } from '$lib/consts'; |
||||
import { ndkInstance } from '$lib/ndk'; |
||||
import { filterValidIndexEvents, debounce } from '$lib/utils'; |
||||
import { Button, P, Skeleton, Spinner } from 'flowbite-svelte'; |
||||
import ArticleHeader from './PublicationHeader.svelte'; |
||||
import { onMount } from 'svelte'; |
||||
import { getMatchingTags, NDKRelaySetFromNDK, type NDKEvent, type NDKRelaySet } from '$lib/utils/nostrUtils'; |
||||
|
||||
let { relays, fallbackRelays, searchQuery = '' } = $props<{ relays: string[], fallbackRelays: string[], searchQuery?: string }>(); |
||||
|
||||
let eventsInView: NDKEvent[] = $state([]); |
||||
let loadingMore: boolean = $state(false); |
||||
let endOfFeed: boolean = $state(false); |
||||
let relayStatuses = $state<Record<string, 'pending' | 'found' | 'notfound'>>({}); |
||||
let loading: boolean = $state(true); |
||||
|
||||
let cutoffTimestamp: number = $derived( |
||||
eventsInView?.at(eventsInView.length - 1)?.created_at ?? new Date().getTime() |
||||
); |
||||
|
||||
// Debounced search function |
||||
const debouncedSearch = debounce(async (query: string) => { |
||||
console.debug('[PublicationFeed] Search query changed:', query); |
||||
if (query.trim()) { |
||||
console.debug('[PublicationFeed] Clearing events and searching with query:', query); |
||||
eventsInView = []; |
||||
await getEvents(undefined, query, true); |
||||
} else { |
||||
console.debug('[PublicationFeed] Clearing events and resetting search'); |
||||
eventsInView = []; |
||||
await getEvents(undefined, '', true); |
||||
} |
||||
}, 300); |
||||
|
||||
$effect(() => { |
||||
console.debug('[PublicationFeed] Search query effect triggered:', searchQuery); |
||||
debouncedSearch(searchQuery); |
||||
}); |
||||
|
||||
async function getEvents(before: number | undefined = undefined, search: string = '', reset: boolean = false) { |
||||
loading = true; |
||||
const ndk = $ndkInstance; |
||||
const primaryRelays: string[] = relays; |
||||
const fallback: string[] = fallbackRelays.filter((r: string) => !primaryRelays.includes(r)); |
||||
relayStatuses = Object.fromEntries(primaryRelays.map((r: string) => [r, 'pending'])); |
||||
let allEvents: NDKEvent[] = []; |
||||
let fetchedCount = 0; // Track number of new events |
||||
|
||||
console.debug('[getEvents] Called with before:', before, 'search:', search); |
||||
|
||||
// Function to filter events based on search query |
||||
const filterEventsBySearch = (events: NDKEvent[]) => { |
||||
if (!search) return events; |
||||
const query = search.toLowerCase(); |
||||
console.debug('[PublicationFeed] Filtering events with query:', query, 'Total events before filter:', events.length); |
||||
|
||||
// Check if the query is a NIP-05 address |
||||
const isNip05Query = /^[a-z0-9._-]+@[a-z0-9.-]+$/i.test(query); |
||||
console.debug('[PublicationFeed] Is NIP-05 query:', isNip05Query); |
||||
|
||||
const filtered = events.filter(event => { |
||||
const title = getMatchingTags(event, 'title')[0]?.[1]?.toLowerCase() ?? ''; |
||||
const authorName = getMatchingTags(event, 'author')[0]?.[1]?.toLowerCase() ?? ''; |
||||
const authorPubkey = event.pubkey.toLowerCase(); |
||||
const nip05 = getMatchingTags(event, 'nip05')[0]?.[1]?.toLowerCase() ?? ''; |
||||
|
||||
// For NIP-05 queries, only match against NIP-05 tags |
||||
if (isNip05Query) { |
||||
const matches = nip05 === query; |
||||
if (matches) { |
||||
console.debug('[PublicationFeed] Event matches NIP-05 search:', { |
||||
id: event.id, |
||||
nip05, |
||||
authorPubkey |
||||
}); |
||||
} |
||||
return matches; |
||||
} |
||||
|
||||
// For regular queries, match against all fields |
||||
const matches = ( |
||||
title.includes(query) || |
||||
authorName.includes(query) || |
||||
authorPubkey.includes(query) || |
||||
nip05.includes(query) |
||||
); |
||||
if (matches) { |
||||
console.debug('[PublicationFeed] Event matches search:', { |
||||
id: event.id, |
||||
title, |
||||
authorName, |
||||
authorPubkey, |
||||
nip05 |
||||
}); |
||||
} |
||||
return matches; |
||||
}); |
||||
console.debug('[PublicationFeed] Events after filtering:', filtered.length); |
||||
return filtered; |
||||
}; |
||||
|
||||
// First, try primary relays |
||||
let foundEventsInPrimary = false; |
||||
await Promise.all( |
||||
primaryRelays.map(async (relay: string) => { |
||||
try { |
||||
const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], ndk); |
||||
let eventSet = await ndk.fetchEvents( |
||||
{ |
||||
kinds: [indexKind], |
||||
limit: 30, |
||||
until: before, |
||||
}, |
||||
{ |
||||
groupable: false, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
}, |
||||
relaySet |
||||
).withTimeout(2500); |
||||
eventSet = filterValidIndexEvents(eventSet); |
||||
const eventArray = filterEventsBySearch(Array.from(eventSet)); |
||||
fetchedCount += eventArray.length; // Count new events |
||||
if (eventArray.length > 0) { |
||||
allEvents = allEvents.concat(eventArray); |
||||
relayStatuses = { ...relayStatuses, [relay]: 'found' }; |
||||
foundEventsInPrimary = true; |
||||
} else { |
||||
relayStatuses = { ...relayStatuses, [relay]: 'notfound' }; |
||||
} |
||||
console.debug(`[getEvents] Fetched ${eventArray.length} events from relay: ${relay} (search: "${search}")`); |
||||
} catch (err) { |
||||
console.error(`Error fetching from primary relay ${relay}:`, err); |
||||
relayStatuses = { ...relayStatuses, [relay]: 'notfound' }; |
||||
} |
||||
}) |
||||
); |
||||
|
||||
// Only try fallback relays if no events were found in primary relays |
||||
if (!foundEventsInPrimary && fallback.length > 0) { |
||||
console.debug('[getEvents] No events found in primary relays, trying fallback relays'); |
||||
relayStatuses = { ...relayStatuses, ...Object.fromEntries(fallback.map((r: string) => [r, 'pending'])) }; |
||||
await Promise.all( |
||||
fallback.map(async (relay: string) => { |
||||
try { |
||||
const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], ndk); |
||||
let eventSet = await ndk.fetchEvents( |
||||
{ |
||||
kinds: [indexKind], |
||||
limit: 18, |
||||
until: before, |
||||
}, |
||||
{ |
||||
groupable: false, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
}, |
||||
relaySet |
||||
).withTimeout(2500); |
||||
eventSet = filterValidIndexEvents(eventSet); |
||||
const eventArray = filterEventsBySearch(Array.from(eventSet)); |
||||
fetchedCount += eventArray.length; // Count new events |
||||
if (eventArray.length > 0) { |
||||
allEvents = allEvents.concat(eventArray); |
||||
relayStatuses = { ...relayStatuses, [relay]: 'found' }; |
||||
} else { |
||||
relayStatuses = { ...relayStatuses, [relay]: 'notfound' }; |
||||
} |
||||
console.debug(`[getEvents] Fetched ${eventArray.length} events from relay: ${relay} (search: "${search}")`); |
||||
} catch (err) { |
||||
console.error(`Error fetching from fallback relay ${relay}:`, err); |
||||
relayStatuses = { ...relayStatuses, [relay]: 'notfound' }; |
||||
} |
||||
}) |
||||
); |
||||
} |
||||
// Deduplicate and sort |
||||
const eventMap = reset |
||||
? new Map(allEvents.map(event => [event.tagAddress(), event])) |
||||
: new Map([...eventsInView, ...allEvents].map(event => [event.tagAddress(), event])); |
||||
const uniqueEvents = Array.from(eventMap.values()); |
||||
uniqueEvents.sort((a, b) => b.created_at! - a.created_at!); |
||||
eventsInView = uniqueEvents; |
||||
const pageSize = fallback.length > 0 ? 18 : 30; |
||||
if (fetchedCount < pageSize) { |
||||
endOfFeed = true; |
||||
} else { |
||||
endOfFeed = false; |
||||
} |
||||
console.debug(`[getEvents] Total unique events after deduplication: ${uniqueEvents.length}`); |
||||
console.debug(`[getEvents] endOfFeed set to: ${endOfFeed} (fetchedCount: ${fetchedCount}, pageSize: ${pageSize})`); |
||||
loading = false; |
||||
console.debug('Relay statuses:', relayStatuses); |
||||
} |
||||
|
||||
const getSkeletonIds = (): string[] => { |
||||
const skeletonHeight = 124; // The height of the skeleton component in pixels. |
||||
const skeletonCount = Math.floor(window.innerHeight / skeletonHeight) - 2; |
||||
const skeletonIds = []; |
||||
for (let i = 0; i < skeletonCount; i++) { |
||||
skeletonIds.push(`skeleton-${i}`); |
||||
} |
||||
return skeletonIds; |
||||
} |
||||
|
||||
async function loadMorePublications() { |
||||
loadingMore = true; |
||||
await getEvents(cutoffTimestamp, searchQuery, false); |
||||
loadingMore = false; |
||||
} |
||||
|
||||
onMount(async () => { |
||||
await getEvents(); |
||||
}); |
||||
</script> |
||||
|
||||
<div class='leather'> |
||||
<div class='grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4'> |
||||
{#if loading && eventsInView.length === 0} |
||||
{#each getSkeletonIds() as id} |
||||
<Skeleton divClass='skeleton-leather w-full' size='lg' /> |
||||
{/each} |
||||
{:else if eventsInView.length > 0} |
||||
{#each eventsInView as event} |
||||
<ArticleHeader {event} /> |
||||
{/each} |
||||
{:else} |
||||
<div class='col-span-full'> |
||||
<p class='text-center'>No publications found.</p> |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
{#if !loadingMore && !endOfFeed} |
||||
<div class='flex justify-center mt-4 mb-8'> |
||||
<Button outline class="w-full max-w-md" onclick={async () => { |
||||
await loadMorePublications(); |
||||
}}> |
||||
Show more publications |
||||
</Button> |
||||
</div> |
||||
{:else if loadingMore} |
||||
<div class='flex justify-center mt-4 mb-8'> |
||||
<Button outline disabled class="w-full max-w-md"> |
||||
<Spinner class='mr-3 text-gray-300' size='4' /> |
||||
Loading... |
||||
</Button> |
||||
</div> |
||||
{:else} |
||||
<div class='flex justify-center mt-4 mb-8'> |
||||
<P class='text-sm text-gray-600'>You've reached the end of the feed.</P> |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
@ -1,64 +0,0 @@
@@ -1,64 +0,0 @@
|
||||
<script lang="ts"> |
||||
import { ndkInstance } from '$lib/ndk'; |
||||
import { naddrEncode } from '$lib/utils'; |
||||
import type { NDKEvent } from '@nostr-dev-kit/ndk'; |
||||
import { standardRelays } from '../consts'; |
||||
import { Card, Img } from "flowbite-svelte"; |
||||
import CardActions from "$components/util/CardActions.svelte"; |
||||
import { userBadge } from "$lib/snippets/UserSnippets.svelte"; |
||||
|
||||
const { event } = $props<{ event: NDKEvent }>(); |
||||
|
||||
const relays = $derived.by(() => { |
||||
return $ndkInstance.activeUser?.relayUrls ?? standardRelays; |
||||
}); |
||||
|
||||
const href = $derived.by(() => { |
||||
const d = event.getMatchingTags('d')[0]?.[1]; |
||||
if (d != null) { |
||||
return `publication?d=${d}`; |
||||
} else { |
||||
return `publication?id=${naddrEncode(event, relays)}`; |
||||
} |
||||
} |
||||
); |
||||
|
||||
let title: string = $derived(event.getMatchingTags('title')[0]?.[1]); |
||||
let author: string = $derived(event.getMatchingTags(event, 'author')[0]?.[1] ?? 'unknown'); |
||||
let version: string = $derived(event.getMatchingTags('version')[0]?.[1] ?? '1'); |
||||
let image: string = $derived(event.getMatchingTags('image')[0]?.[1] ?? null); |
||||
let authorPubkey: string = $derived(event.getMatchingTags('p')[0]?.[1] ?? null); |
||||
|
||||
console.log("PublicationHeader event:", event); |
||||
</script> |
||||
|
||||
{#if title != null && href != null} |
||||
<Card class='ArticleBox card-leather max-w-md flex flex-row space-x-2'> |
||||
{#if image} |
||||
<div class="flex col justify-center align-middle max-h-36 max-w-24 overflow-hidden"> |
||||
<Img src={image} class="rounded w-full h-full object-cover"/> |
||||
</div> |
||||
{/if} |
||||
<div class='col flex flex-row flex-grow space-x-4'> |
||||
<div class="flex flex-col flex-grow"> |
||||
<a href="/{href}" class='flex flex-col space-y-2'> |
||||
<h2 class='text-lg font-bold line-clamp-2' title="{title}">{title}</h2> |
||||
<h3 class='text-base font-normal'> |
||||
by |
||||
{#if authorPubkey != null} |
||||
{@render userBadge(authorPubkey, author)} |
||||
{:else} |
||||
{author} |
||||
{/if} |
||||
</h3> |
||||
{#if version != '1'} |
||||
<h3 class='text-base font-thin'>version: {version}</h3> |
||||
{/if} |
||||
</a> |
||||
</div> |
||||
<div class="flex flex-col justify-start items-center"> |
||||
<CardActions event={event} /> |
||||
</div> |
||||
</div> |
||||
</Card> |
||||
{/if} |
||||
@ -1,121 +0,0 @@
@@ -1,121 +0,0 @@
|
||||
<script lang='ts'> |
||||
import type { PublicationTree } from "$lib/data_structures/publication_tree"; |
||||
import { contentParagraph, sectionHeading } from "$lib/snippets/PublicationSnippets.svelte"; |
||||
import { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { TextPlaceholder } from "flowbite-svelte"; |
||||
import { getContext } from "svelte"; |
||||
import type { Asciidoctor, Document } from "asciidoctor"; |
||||
import { getMatchingTags } from '$lib/utils/nostrUtils'; |
||||
|
||||
let { |
||||
address, |
||||
rootAddress, |
||||
leaves, |
||||
ref, |
||||
}: { |
||||
address: string, |
||||
rootAddress: string, |
||||
leaves: Array<NDKEvent | null>, |
||||
ref: (ref: HTMLElement) => void, |
||||
} = $props(); |
||||
|
||||
const publicationTree: PublicationTree = getContext('publicationTree'); |
||||
const asciidoctor: Asciidoctor = getContext('asciidoctor'); |
||||
|
||||
let leafEvent: Promise<NDKEvent | null> = $derived.by(async () => |
||||
await publicationTree.getEvent(address)); |
||||
|
||||
let rootEvent: Promise<NDKEvent | null> = $derived.by(async () => |
||||
await publicationTree.getEvent(rootAddress)); |
||||
|
||||
let publicationType: Promise<string | undefined> = $derived.by(async () => |
||||
(await rootEvent)?.getMatchingTags('type')[0]?.[1]); |
||||
|
||||
let leafHierarchy: Promise<NDKEvent[]> = $derived.by(async () => |
||||
await publicationTree.getHierarchy(address)); |
||||
|
||||
let leafTitle: Promise<string | undefined> = $derived.by(async () => |
||||
(await leafEvent)?.getMatchingTags('title')[0]?.[1]); |
||||
|
||||
let leafContent: Promise<string | Document> = $derived.by(async () => |
||||
asciidoctor.convert((await leafEvent)?.content ?? '')); |
||||
|
||||
let previousLeafEvent: NDKEvent | null = $derived.by(() => { |
||||
let index: number; |
||||
let event: NDKEvent | null = null; |
||||
let decrement = 1; |
||||
|
||||
do { |
||||
index = leaves.findIndex(leaf => leaf?.tagAddress() === address); |
||||
if (index === 0) { |
||||
return null; |
||||
} |
||||
event = leaves[index - decrement++]; |
||||
} while (event == null && index - decrement >= 0); |
||||
|
||||
return event; |
||||
}); |
||||
|
||||
let previousLeafHierarchy: Promise<NDKEvent[] | null> = $derived.by(async () => { |
||||
if (!previousLeafEvent) { |
||||
return null; |
||||
} |
||||
return await publicationTree.getHierarchy(previousLeafEvent.tagAddress()); |
||||
}); |
||||
|
||||
let divergingBranches = $derived.by(async () => { |
||||
let [leafHierarchyValue, previousLeafHierarchyValue] = await Promise.all([leafHierarchy, previousLeafHierarchy]); |
||||
|
||||
const branches: [NDKEvent, number][] = []; |
||||
|
||||
if (!previousLeafHierarchyValue) { |
||||
for (let i = 0; i < leafHierarchyValue.length - 1; i++) { |
||||
branches.push([leafHierarchyValue[i], i]); |
||||
} |
||||
return branches; |
||||
} |
||||
|
||||
const minLength = Math.min(leafHierarchyValue.length, previousLeafHierarchyValue.length); |
||||
|
||||
// Find the first diverging node. |
||||
let divergingIndex = 0; |
||||
while ( |
||||
divergingIndex < minLength && |
||||
leafHierarchyValue[divergingIndex].tagAddress() === previousLeafHierarchyValue[divergingIndex].tagAddress() |
||||
) { |
||||
divergingIndex++; |
||||
} |
||||
|
||||
// Add all branches from the first diverging node to the current leaf. |
||||
for (let i = divergingIndex; i < leafHierarchyValue.length - 1; i++) { |
||||
branches.push([leafHierarchyValue[i], i]); |
||||
} |
||||
|
||||
return branches; |
||||
}); |
||||
|
||||
let sectionRef: HTMLElement; |
||||
|
||||
$effect(() => { |
||||
if (!sectionRef) { |
||||
return; |
||||
} |
||||
|
||||
ref(sectionRef); |
||||
}); |
||||
</script> |
||||
|
||||
<section id={address} bind:this={sectionRef} class='publication-leather content-visibility-auto'> |
||||
{#await Promise.all([leafTitle, leafContent, leafHierarchy, publicationType, divergingBranches])} |
||||
<TextPlaceholder size='xxl' /> |
||||
{:then [leafTitle, leafContent, leafHierarchy, publicationType, divergingBranches]} |
||||
{#each divergingBranches as [branch, depth]} |
||||
{@render sectionHeading(getMatchingTags(branch, 'title')[0]?.[1] ?? '', depth)} |
||||
{/each} |
||||
{#if leafTitle} |
||||
{@const leafDepth = leafHierarchy.length - 1} |
||||
{@render sectionHeading(leafTitle, leafDepth)} |
||||
{/if} |
||||
{@render contentParagraph(leafContent.toString(), publicationType ?? 'article', false)} |
||||
{/await} |
||||
</section> |
||||
@ -0,0 +1,164 @@
@@ -0,0 +1,164 @@
|
||||
<script lang="ts"> |
||||
import { Button, Alert } from "flowbite-svelte"; |
||||
import { |
||||
ndkInstance, |
||||
ndkSignedIn, |
||||
testRelayConnection, |
||||
checkWebSocketSupport, |
||||
checkEnvironmentForWebSocketDowngrade, |
||||
} from "$lib/ndk"; |
||||
import { onMount } from "svelte"; |
||||
import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk"; |
||||
|
||||
interface RelayStatus { |
||||
url: string; |
||||
connected: boolean; |
||||
requiresAuth: boolean; |
||||
error?: string; |
||||
testing: boolean; |
||||
} |
||||
|
||||
let relayStatuses = $state<RelayStatus[]>([]); |
||||
let testing = $state(false); |
||||
|
||||
// Use the new relay management system |
||||
let allRelays: string[] = $state([]); |
||||
|
||||
$effect(() => { |
||||
allRelays = [...$activeInboxRelays, ...$activeOutboxRelays]; |
||||
}); |
||||
|
||||
async function runRelayTests() { |
||||
testing = true; |
||||
const ndk = $ndkInstance; |
||||
if (!ndk) { |
||||
testing = false; |
||||
return; |
||||
} |
||||
|
||||
let relaysToTest: string[] = []; |
||||
|
||||
// Use active relays from the new relay management system |
||||
const userRelays = new Set([...$activeInboxRelays, ...$activeOutboxRelays]); |
||||
relaysToTest = Array.from(userRelays); |
||||
|
||||
console.log("[RelayStatus] Relays to test:", relaysToTest); |
||||
|
||||
relayStatuses = relaysToTest.map((url) => ({ |
||||
url, |
||||
connected: false, |
||||
requiresAuth: false, |
||||
testing: true, |
||||
})); |
||||
|
||||
const results = await Promise.allSettled( |
||||
relaysToTest.map(async (url) => { |
||||
console.log("[RelayStatus] Testing relay:", url); |
||||
try { |
||||
return await testRelayConnection(url, ndk); |
||||
} catch (error) { |
||||
return { |
||||
connected: false, |
||||
requiresAuth: false, |
||||
error: error instanceof Error ? error.message : "Unknown error", |
||||
}; |
||||
} |
||||
}), |
||||
); |
||||
|
||||
relayStatuses = relayStatuses.map((status, index) => { |
||||
const result = results[index]; |
||||
if (result.status === "fulfilled") { |
||||
return { |
||||
...status, |
||||
...result.value, |
||||
testing: false, |
||||
}; |
||||
} else { |
||||
return { |
||||
...status, |
||||
connected: false, |
||||
requiresAuth: false, |
||||
error: "Test failed", |
||||
testing: false, |
||||
}; |
||||
} |
||||
}); |
||||
|
||||
testing = false; |
||||
} |
||||
|
||||
$effect(() => { |
||||
// Re-run relay tests when feed type, login state, or relay lists change |
||||
void runRelayTests(); |
||||
}); |
||||
|
||||
onMount(() => { |
||||
checkWebSocketSupport(); |
||||
checkEnvironmentForWebSocketDowngrade(); |
||||
// Run initial relay tests |
||||
void runRelayTests(); |
||||
}); |
||||
|
||||
function getStatusColor(status: RelayStatus): string { |
||||
if (status.testing) return "text-yellow-600"; |
||||
if (status.connected) return "text-green-600"; |
||||
if (status.requiresAuth && !$ndkSignedIn) return "text-orange-600"; |
||||
return "text-red-600"; |
||||
} |
||||
|
||||
function getStatusText(status: RelayStatus): string { |
||||
if (status.testing) return "Testing..."; |
||||
if (status.connected) return "Connected"; |
||||
if (status.requiresAuth && !$ndkSignedIn) return "Requires Authentication"; |
||||
if (status.error) return `Error: ${status.error}`; |
||||
return "Failed to Connect"; |
||||
} |
||||
</script> |
||||
|
||||
<div class="space-y-4"> |
||||
<div class="flex items-center justify-between"> |
||||
<h3 class="text-lg font-medium">Relay Connection Status</h3> |
||||
<Button size="sm" onclick={runRelayTests} disabled={testing}> |
||||
{testing ? "Testing..." : "Refresh"} |
||||
</Button> |
||||
</div> |
||||
|
||||
{#if !$ndkSignedIn} |
||||
<Alert color="yellow"> |
||||
<span class="font-medium">Anonymous Mode</span> |
||||
<p class="mt-1 text-sm"> |
||||
You are not signed in. Some relays require authentication and may not be |
||||
accessible. Sign in to access all relays. |
||||
</p> |
||||
</Alert> |
||||
{/if} |
||||
|
||||
<div class="space-y-2"> |
||||
{#each relayStatuses as status} |
||||
<div class="flex items-center justify-between p-3 border rounded-lg"> |
||||
<div class="flex-1"> |
||||
<div class="font-medium">{status.url}</div> |
||||
<div class="text-sm {getStatusColor(status)}"> |
||||
{getStatusText(status)} |
||||
</div> |
||||
</div> |
||||
<div |
||||
class="w-3 h-3 rounded-full {getStatusColor(status).replace( |
||||
'text-', |
||||
'bg-', |
||||
)}" |
||||
></div> |
||||
</div> |
||||
{/each} |
||||
</div> |
||||
|
||||
{#if relayStatuses.some((s) => s.requiresAuth && !$ndkSignedIn)} |
||||
<Alert color="orange"> |
||||
<span class="font-medium">Authentication Required</span> |
||||
<p class="mt-1 text-sm"> |
||||
Some relays require authentication. Sign in to access these relays. |
||||
</p> |
||||
</Alert> |
||||
{/if} |
||||
</div> |
||||
@ -1,24 +0,0 @@
@@ -1,24 +0,0 @@
|
||||
<script lang="ts"> |
||||
import type { NDKEvent } from '@nostr-dev-kit/ndk'; |
||||
import {nip19} from 'nostr-tools'; |
||||
export let notes: NDKEvent[] = []; |
||||
// check if notes is empty |
||||
if (notes.length === 0) { |
||||
console.debug('notes is empty'); |
||||
} |
||||
</script> |
||||
|
||||
<div class="toc"> |
||||
<h2>Table of contents</h2> |
||||
<ul> |
||||
{#each notes as note} |
||||
<li><a href="#{nip19.noteEncode(note.id)}">{note.getMatchingTags('title')[0][1]}</a></li> |
||||
{/each} |
||||
</ul> |
||||
</div> |
||||
|
||||
<style> |
||||
.toc h2 { |
||||
text-align: center; |
||||
} |
||||
</style> |
||||
@ -0,0 +1,180 @@
@@ -0,0 +1,180 @@
|
||||
<script lang="ts"> |
||||
import { Textarea, Button } from "flowbite-svelte"; |
||||
import { EyeOutline } from "flowbite-svelte-icons"; |
||||
import { |
||||
parseAsciiDocSections, |
||||
type ZettelSection, |
||||
} from "$lib/utils/ZettelParser"; |
||||
import asciidoctor from "asciidoctor"; |
||||
|
||||
// Component props |
||||
let { |
||||
content = "", |
||||
placeholder = `== Note Title |
||||
:author: {author} // author is optional |
||||
:tags: tag1, tag2, tag3 // tags are optional |
||||
|
||||
note content here... |
||||
|
||||
== Note Title 2 |
||||
:tags: tag1, tag2, tag3 |
||||
Note content here... |
||||
`, |
||||
showPreview = false, |
||||
onContentChange = (content: string) => {}, |
||||
onPreviewToggle = (show: boolean) => {}, |
||||
} = $props<{ |
||||
content?: string; |
||||
placeholder?: string; |
||||
showPreview?: boolean; |
||||
onContentChange?: (content: string) => void; |
||||
onPreviewToggle?: (show: boolean) => void; |
||||
}>(); |
||||
|
||||
// Initialize AsciiDoctor processor |
||||
const asciidoctorProcessor = asciidoctor(); |
||||
|
||||
// Parse sections for preview |
||||
let parsedSections = $derived(parseAsciiDocSections(content, 2)); |
||||
|
||||
// Toggle preview panel |
||||
function togglePreview() { |
||||
const newShowPreview = !showPreview; |
||||
onPreviewToggle(newShowPreview); |
||||
} |
||||
|
||||
// Handle content changes |
||||
function handleContentChange(event: Event) { |
||||
const target = event.target as HTMLTextAreaElement; |
||||
onContentChange(target.value); |
||||
} |
||||
</script> |
||||
|
||||
<div class="flex flex-col space-y-4"> |
||||
<div class="flex items-center justify-between"> |
||||
<Button |
||||
color="light" |
||||
size="sm" |
||||
on:click={togglePreview} |
||||
class="flex items-center space-x-1" |
||||
> |
||||
{#if showPreview} |
||||
<EyeOutline class="w-4 h-4" /> |
||||
<span>Hide Preview</span> |
||||
{:else} |
||||
<EyeOutline class="w-4 h-4" /> |
||||
<span>Show Preview</span> |
||||
{/if} |
||||
</Button> |
||||
</div> |
||||
|
||||
<div class="flex space-x-4 {showPreview ? 'h-96' : ''}"> |
||||
<!-- Editor Panel --> |
||||
<div class="{showPreview ? 'w-1/2' : 'w-full'} flex flex-col space-y-4"> |
||||
<div class="flex-1"> |
||||
<Textarea |
||||
bind:value={content} |
||||
on:input={handleContentChange} |
||||
{placeholder} |
||||
class="h-full min-h-64 resize-none" |
||||
rows={12} |
||||
/> |
||||
</div> |
||||
</div> |
||||
|
||||
<!-- Preview Panel --> |
||||
{#if showPreview} |
||||
<div class="w-1/2 border-l border-gray-200 dark:border-gray-700 pl-4"> |
||||
<div class="sticky top-4"> |
||||
<h3 |
||||
class="text-lg font-semibold mb-4 text-gray-900 dark:text-gray-100" |
||||
> |
||||
AsciiDoc Preview |
||||
</h3> |
||||
|
||||
<div |
||||
class="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4 max-h-80 overflow-y-auto" |
||||
> |
||||
{#if !content.trim()} |
||||
<div class="text-gray-500 dark:text-gray-400 text-sm"> |
||||
Start typing to see the preview... |
||||
</div> |
||||
{:else} |
||||
<div class="prose prose-sm dark:prose-invert max-w-none"> |
||||
{#each parsedSections as section, index} |
||||
<div class="mb-6"> |
||||
<div |
||||
class="text-sm text-gray-800 dark:text-gray-200 asciidoc-content" |
||||
> |
||||
{@html asciidoctorProcessor.convert( |
||||
`== ${section.title}\n\n${section.content}`, |
||||
{ |
||||
standalone: false, |
||||
doctype: "article", |
||||
attributes: { |
||||
showtitle: true, |
||||
sectids: true, |
||||
}, |
||||
}, |
||||
)} |
||||
</div> |
||||
|
||||
{#if index < parsedSections.length - 1} |
||||
<!-- Gray area with tag bubbles above event boundary --> |
||||
<div class="my-4 relative"> |
||||
<!-- Gray background area --> |
||||
<div |
||||
class="bg-gray-200 dark:bg-gray-700 rounded-lg p-3 mb-2" |
||||
> |
||||
<div class="flex flex-wrap gap-2 items-center"> |
||||
{#if section.tags && section.tags.length > 0} |
||||
{#each section.tags as tag} |
||||
<div |
||||
class="bg-amber-900 text-amber-100 px-2 py-1 rounded-full text-xs font-medium flex items-baseline" |
||||
> |
||||
<span class="font-mono">{tag[0]}:</span> |
||||
<span>{tag[1]}</span> |
||||
</div> |
||||
{/each} |
||||
{:else} |
||||
<span |
||||
class="text-gray-500 dark:text-gray-400 text-xs italic" |
||||
>No tags</span |
||||
> |
||||
{/if} |
||||
</div> |
||||
</div> |
||||
|
||||
<!-- Event boundary line --> |
||||
<div |
||||
class="border-t-2 border-dashed border-blue-400 relative" |
||||
> |
||||
<div |
||||
class="absolute -top-2 left-1/2 transform -translate-x-1/2 bg-blue-100 dark:bg-blue-900 text-blue-800 dark:text-blue-200 px-2 py-1 rounded text-xs font-medium" |
||||
> |
||||
Event Boundary |
||||
</div> |
||||
</div> |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
{/each} |
||||
</div> |
||||
|
||||
<div |
||||
class="mt-4 text-xs text-gray-600 dark:text-gray-400 bg-gray-50 dark:bg-gray-900 p-2 rounded border" |
||||
> |
||||
<strong>Event Count:</strong> |
||||
{parsedSections.length} event{parsedSections.length !== 1 |
||||
? "s" |
||||
: ""} |
||||
<br /> |
||||
<strong>Note:</strong> Currently only the first event will be published. |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
</div> |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
</div> |
||||
@ -0,0 +1,414 @@
@@ -0,0 +1,414 @@
|
||||
<script lang="ts"> |
||||
import { indexKind } from "$lib/consts"; |
||||
import { ndkInstance, activeInboxRelays, activeOutboxRelays } from "$lib/ndk"; |
||||
import { filterValidIndexEvents, debounce } from "$lib/utils"; |
||||
import { Button, P, Skeleton, Spinner } from "flowbite-svelte"; |
||||
import ArticleHeader from "./PublicationHeader.svelte"; |
||||
import { onMount, onDestroy } from "svelte"; |
||||
import { |
||||
getMatchingTags, |
||||
NDKRelaySetFromNDK, |
||||
type NDKEvent, |
||||
type NDKRelaySet, |
||||
} from "$lib/utils/nostrUtils"; |
||||
import { searchCache } from "$lib/utils/searchCache"; |
||||
import { indexEventCache } from "$lib/utils/indexEventCache"; |
||||
import { isValidNip05Address } from "$lib/utils/search_utility"; |
||||
|
||||
const props = $props<{ |
||||
searchQuery?: string; |
||||
onEventCountUpdate?: (counts: { displayed: number; total: number }) => void; |
||||
}>(); |
||||
|
||||
// Component state |
||||
let eventsInView: NDKEvent[] = $state([]); |
||||
let loadingMore: boolean = $state(false); |
||||
let endOfFeed: boolean = $state(false); |
||||
let relayStatuses = $state<Record<string, "pending" | "found" | "notfound">>({}); |
||||
let loading: boolean = $state(true); |
||||
let hasInitialized = $state(false); |
||||
let fallbackTimeout: ReturnType<typeof setTimeout> | null = null; |
||||
|
||||
// Relay management |
||||
let allRelays: string[] = $state([]); |
||||
let ndk = $derived($ndkInstance); |
||||
|
||||
// Event management |
||||
let allIndexEvents: NDKEvent[] = $state([]); |
||||
|
||||
// Initialize relays and fetch events |
||||
async function initializeAndFetch() { |
||||
if (!ndk) { |
||||
console.debug('[PublicationFeed] No NDK instance available'); |
||||
return; |
||||
} |
||||
|
||||
// Get relays from active stores |
||||
const inboxRelays = $activeInboxRelays; |
||||
const outboxRelays = $activeOutboxRelays; |
||||
const newRelays = [...inboxRelays, ...outboxRelays]; |
||||
|
||||
console.debug('[PublicationFeed] Available relays:', { |
||||
inboxCount: inboxRelays.length, |
||||
outboxCount: outboxRelays.length, |
||||
totalCount: newRelays.length, |
||||
relays: newRelays |
||||
}); |
||||
|
||||
if (newRelays.length === 0) { |
||||
console.debug('[PublicationFeed] No relays available, waiting...'); |
||||
return; |
||||
} |
||||
|
||||
// Update allRelays if different |
||||
const currentRelaysString = allRelays.sort().join(','); |
||||
const newRelaysString = newRelays.sort().join(','); |
||||
|
||||
if (currentRelaysString !== newRelaysString) { |
||||
allRelays = newRelays; |
||||
console.debug('[PublicationFeed] Relays updated, fetching events'); |
||||
await fetchAllIndexEventsFromRelays(); |
||||
} |
||||
} |
||||
|
||||
// Watch for relay store changes |
||||
$effect(() => { |
||||
const inboxRelays = $activeInboxRelays; |
||||
const outboxRelays = $activeOutboxRelays; |
||||
const newRelays = [...inboxRelays, ...outboxRelays]; |
||||
|
||||
if (newRelays.length > 0 && !hasInitialized) { |
||||
console.debug('[PublicationFeed] Relays available, initializing'); |
||||
hasInitialized = true; |
||||
if (fallbackTimeout) { |
||||
clearTimeout(fallbackTimeout); |
||||
fallbackTimeout = null; |
||||
} |
||||
setTimeout(() => initializeAndFetch(), 0); |
||||
} else if (newRelays.length === 0 && !hasInitialized) { |
||||
console.debug('[PublicationFeed] No relays available, setting up fallback'); |
||||
if (!fallbackTimeout) { |
||||
fallbackTimeout = setTimeout(() => { |
||||
console.debug('[PublicationFeed] Fallback timeout reached, retrying'); |
||||
hasInitialized = true; |
||||
initializeAndFetch(); |
||||
}, 3000); |
||||
} |
||||
} |
||||
}); |
||||
|
||||
async function fetchAllIndexEventsFromRelays() { |
||||
console.debug('[PublicationFeed] fetchAllIndexEventsFromRelays called with relays:', { |
||||
allRelaysCount: allRelays.length, |
||||
allRelays: allRelays |
||||
}); |
||||
|
||||
if (!ndk) { |
||||
console.error('[PublicationFeed] No NDK instance available'); |
||||
loading = false; |
||||
return; |
||||
} |
||||
|
||||
if (allRelays.length === 0) { |
||||
console.debug('[PublicationFeed] No relays available for fetching'); |
||||
loading = false; |
||||
return; |
||||
} |
||||
|
||||
// Check cache first |
||||
const cachedEvents = indexEventCache.get(allRelays); |
||||
if (cachedEvents) { |
||||
console.log( |
||||
`[PublicationFeed] Using cached index events (${cachedEvents.length} events)`, |
||||
); |
||||
allIndexEvents = cachedEvents; |
||||
eventsInView = allIndexEvents.slice(0, 30); |
||||
endOfFeed = allIndexEvents.length <= 30; |
||||
loading = false; |
||||
return; |
||||
} |
||||
|
||||
loading = true; |
||||
relayStatuses = Object.fromEntries( |
||||
allRelays.map((r: string) => [r, "pending"]), |
||||
); |
||||
let allEvents: NDKEvent[] = []; |
||||
const eventMap = new Map<string, NDKEvent>(); |
||||
|
||||
// Helper to fetch from a single relay with timeout |
||||
async function fetchFromRelay(relay: string): Promise<void> { |
||||
try { |
||||
console.debug(`[PublicationFeed] Fetching from relay: ${relay}`); |
||||
const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], ndk); |
||||
let eventSet = await ndk |
||||
.fetchEvents( |
||||
{ |
||||
kinds: [indexKind], |
||||
limit: 1000, // Increased limit to get more events |
||||
}, |
||||
{ |
||||
groupable: false, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
}, |
||||
relaySet, |
||||
) |
||||
.withTimeout(5000); // Reduced timeout to 5 seconds for faster response |
||||
|
||||
console.debug(`[PublicationFeed] Raw events from ${relay}:`, eventSet.size); |
||||
eventSet = filterValidIndexEvents(eventSet); |
||||
console.debug(`[PublicationFeed] Valid events from ${relay}:`, eventSet.size); |
||||
|
||||
relayStatuses = { ...relayStatuses, [relay]: "found" }; |
||||
|
||||
// Add new events to the map and update the view immediately |
||||
const newEvents: NDKEvent[] = []; |
||||
for (const event of eventSet) { |
||||
const tagAddress = event.tagAddress(); |
||||
if (!eventMap.has(tagAddress)) { |
||||
eventMap.set(tagAddress, event); |
||||
newEvents.push(event); |
||||
} |
||||
} |
||||
|
||||
if (newEvents.length > 0) { |
||||
// Update allIndexEvents with new events |
||||
allIndexEvents = Array.from(eventMap.values()); |
||||
// Sort by created_at descending |
||||
allIndexEvents.sort((a, b) => b.created_at! - a.created_at!); |
||||
|
||||
// Update the view immediately with new events |
||||
eventsInView = allIndexEvents.slice(0, 30); |
||||
endOfFeed = allIndexEvents.length <= 30; |
||||
|
||||
console.debug(`[PublicationFeed] Updated view with ${newEvents.length} new events from ${relay}, total: ${allIndexEvents.length}`); |
||||
} |
||||
} catch (err) { |
||||
console.error(`[PublicationFeed] Error fetching from relay ${relay}:`, err); |
||||
relayStatuses = { ...relayStatuses, [relay]: "notfound" }; |
||||
} |
||||
} |
||||
|
||||
// Fetch from all relays in parallel, return events as they arrive |
||||
console.debug(`[PublicationFeed] Starting fetch from ${allRelays.length} relays`); |
||||
|
||||
// Start all relay fetches in parallel |
||||
const fetchPromises = allRelays.map(fetchFromRelay); |
||||
|
||||
// Wait for all to complete (but events are shown as they arrive) |
||||
await Promise.allSettled(fetchPromises); |
||||
|
||||
console.debug(`[PublicationFeed] All relays completed, final event count:`, allIndexEvents.length); |
||||
|
||||
// Cache the fetched events |
||||
indexEventCache.set(allRelays, allIndexEvents); |
||||
|
||||
// Final update to ensure we have the latest view |
||||
eventsInView = allIndexEvents.slice(0, 30); |
||||
endOfFeed = allIndexEvents.length <= 30; |
||||
loading = false; |
||||
} |
||||
|
||||
// Function to filter events based on search query |
||||
const filterEventsBySearch = (events: NDKEvent[]) => { |
||||
if (!props.searchQuery) return events; |
||||
const query = props.searchQuery.toLowerCase(); |
||||
console.debug( |
||||
"[PublicationFeed] Filtering events with query:", |
||||
query, |
||||
"Total events before filter:", |
||||
events.length, |
||||
); |
||||
|
||||
// Check cache first for publication search |
||||
const cachedResult = searchCache.get("publication", query); |
||||
if (cachedResult) { |
||||
console.log( |
||||
`[PublicationFeed] Using cached results for publication search: ${query}`, |
||||
); |
||||
return cachedResult.events; |
||||
} |
||||
|
||||
// Check if the query is a NIP-05 address |
||||
const isNip05Query = isValidNip05Address(query); |
||||
console.debug("[PublicationFeed] Is NIP-05 query:", isNip05Query); |
||||
|
||||
const filtered = events.filter((event) => { |
||||
const title = |
||||
getMatchingTags(event, "title")[0]?.[1]?.toLowerCase() ?? ""; |
||||
const authorName = |
||||
getMatchingTags(event, "author")[0]?.[1]?.toLowerCase() ?? ""; |
||||
const authorPubkey = event.pubkey.toLowerCase(); |
||||
const nip05 = |
||||
getMatchingTags(event, "nip05")[0]?.[1]?.toLowerCase() ?? ""; |
||||
|
||||
// For NIP-05 queries, only match against NIP-05 tags |
||||
if (isNip05Query) { |
||||
const matches = nip05 === query; |
||||
if (matches) { |
||||
console.debug("[PublicationFeed] Event matches NIP-05 search:", { |
||||
id: event.id, |
||||
nip05, |
||||
authorPubkey, |
||||
}); |
||||
} |
||||
return matches; |
||||
} |
||||
|
||||
// For regular queries, match against all fields |
||||
const matches = |
||||
title.includes(query) || |
||||
authorName.includes(query) || |
||||
authorPubkey.includes(query) || |
||||
nip05.includes(query); |
||||
if (matches) { |
||||
console.debug("[PublicationFeed] Event matches search:", { |
||||
id: event.id, |
||||
title, |
||||
authorName, |
||||
authorPubkey, |
||||
nip05, |
||||
}); |
||||
} |
||||
return matches; |
||||
}); |
||||
|
||||
// Cache the filtered results |
||||
const result = { |
||||
events: filtered, |
||||
secondOrder: [], |
||||
tTagEvents: [], |
||||
eventIds: new Set<string>(), |
||||
addresses: new Set<string>(), |
||||
searchType: "publication", |
||||
searchTerm: query, |
||||
}; |
||||
searchCache.set("publication", query, result); |
||||
|
||||
console.debug("[PublicationFeed] Events after filtering:", filtered.length); |
||||
return filtered; |
||||
}; |
||||
|
||||
// Debounced search function |
||||
const debouncedSearch = debounce(async (query: string) => { |
||||
console.debug("[PublicationFeed] Search query changed:", query); |
||||
if (query.trim()) { |
||||
const filtered = filterEventsBySearch(allIndexEvents); |
||||
eventsInView = filtered.slice(0, 30); |
||||
endOfFeed = filtered.length <= 30; |
||||
} else { |
||||
eventsInView = allIndexEvents.slice(0, 30); |
||||
endOfFeed = allIndexEvents.length <= 30; |
||||
} |
||||
}, 300); |
||||
|
||||
$effect(() => { |
||||
console.debug( |
||||
"[PublicationFeed] Search query effect triggered:", |
||||
props.searchQuery, |
||||
); |
||||
debouncedSearch(props.searchQuery); |
||||
}); |
||||
|
||||
// Emit event count updates |
||||
$effect(() => { |
||||
if (props.onEventCountUpdate) { |
||||
props.onEventCountUpdate({ |
||||
displayed: eventsInView.length, |
||||
total: allIndexEvents.length |
||||
}); |
||||
} |
||||
}); |
||||
|
||||
async function loadMorePublications() { |
||||
loadingMore = true; |
||||
const current = eventsInView.length; |
||||
let source = props.searchQuery.trim() |
||||
? filterEventsBySearch(allIndexEvents) |
||||
: allIndexEvents; |
||||
eventsInView = source.slice(0, current + 30); |
||||
endOfFeed = eventsInView.length >= source.length; |
||||
loadingMore = false; |
||||
} |
||||
|
||||
function getSkeletonIds(): string[] { |
||||
const skeletonHeight = 192; // The height of the card component in pixels (h-48 = 12rem = 192px). |
||||
const skeletonCount = Math.floor(window.innerHeight / skeletonHeight) - 2; |
||||
const skeletonIds = []; |
||||
for (let i = 0; i < skeletonCount; i++) { |
||||
skeletonIds.push(`skeleton-${i}`); |
||||
} |
||||
return skeletonIds; |
||||
} |
||||
|
||||
function getCacheStats(): string { |
||||
const indexStats = indexEventCache.getStats(); |
||||
const searchStats = searchCache.size(); |
||||
return `Index: ${indexStats.size} entries (${indexStats.totalEvents} events), Search: ${searchStats} entries`; |
||||
} |
||||
|
||||
// Cleanup function for fallback timeout |
||||
function cleanup() { |
||||
if (fallbackTimeout) { |
||||
clearTimeout(fallbackTimeout); |
||||
fallbackTimeout = null; |
||||
} |
||||
} |
||||
|
||||
// Cleanup on component destruction |
||||
onDestroy(() => { |
||||
cleanup(); |
||||
}); |
||||
|
||||
onMount(async () => { |
||||
console.debug('[PublicationFeed] onMount called'); |
||||
// The effect will handle fetching when relays become available |
||||
}); |
||||
</script> |
||||
|
||||
<div class="flex flex-col space-y-4">
  <!-- Publication grid: skeletons while the first fetch is in flight,
       article cards once events arrive, empty-state message otherwise. -->
  <div
    class="grid grid-cols-1 sm:grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4 w-full"
  >
    {#if loading && eventsInView.length === 0}
      {#each getSkeletonIds() as id}
        <Skeleton divClass="skeleton-leather w-full" size="lg" />
      {/each}
    {:else if eventsInView.length > 0}
      {#each eventsInView as event}
        <ArticleHeader {event} />
      {/each}
    {:else}
      <div class="col-span-full">
        <p class="text-center">No publications found.</p>
      </div>
    {/if}
  </div>

  <!-- Pagination footer: load-more button, loading spinner, or
       end-of-feed notice, depending on paging state. -->
  {#if !loadingMore && !endOfFeed}
    <div class="flex justify-center mt-4 mb-8">
      <Button
        outline
        class="w-full max-w-md"
        onclick={async () => {
          await loadMorePublications();
        }}
      >
        Show more publications
      </Button>
    </div>
  {:else if loadingMore}
    <div class="flex justify-center mt-4 mb-8">
      <Button outline disabled class="w-full max-w-md">
        <Spinner class="mr-3 text-gray-600 dark:text-gray-300" size="4" />
        Loading...
      </Button>
    </div>
  {:else}
    <div class="flex justify-center mt-4 mb-8">
      <P class="text-sm text-gray-700 dark:text-gray-300"
        >You've reached the end of the feed.</P
      >
    </div>
  {/if}
</div>
||||
@ -0,0 +1,90 @@
@@ -0,0 +1,90 @@
|
||||
<script lang="ts"> |
||||
import { naddrEncode } from "$lib/utils"; |
||||
import type { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { activeInboxRelays } from "$lib/ndk"; |
||||
import { Card } from "flowbite-svelte"; |
||||
import CardActions from "$components/util/CardActions.svelte"; |
||||
import { userBadge } from "$lib/snippets/UserSnippets.svelte"; |
||||
import LazyImage from "$components/util/LazyImage.svelte"; |
||||
import { generateDarkPastelColor } from "$lib/utils/image_utils"; |
||||
|
||||
const { event } = $props<{ event: NDKEvent }>(); |
||||
|
||||
function getRelayUrls(): string[] { |
||||
return $activeInboxRelays; |
||||
} |
||||
|
||||
const relays = $derived.by(() => { |
||||
return getRelayUrls(); |
||||
}); |
||||
|
||||
const href = $derived.by(() => { |
||||
const d = event.getMatchingTags("d")[0]?.[1]; |
||||
if (d != null) { |
||||
return `publication?d=${d}`; |
||||
} else { |
||||
return `publication?id=${naddrEncode(event, relays)}`; |
||||
} |
||||
}); |
||||
|
||||
let title: string = $derived(event.getMatchingTags("title")[0]?.[1]); |
||||
let author: string = $derived( |
||||
event.getMatchingTags(event, "author")[0]?.[1] ?? "unknown", |
||||
); |
||||
let version: string = $derived( |
||||
event.getMatchingTags("version")[0]?.[1] ?? "1", |
||||
); |
||||
let image: string = $derived(event.getMatchingTags("image")[0]?.[1] ?? null); |
||||
let authorPubkey: string = $derived( |
||||
event.getMatchingTags("p")[0]?.[1] ?? null, |
||||
); |
||||
</script> |
||||
|
||||
{#if title != null && href != null}
  <Card class="ArticleBox card-leather max-w-md h-48 flex flex-row space-x-2 relative">
    <!-- Cover image, or a deterministic pastel placeholder keyed on the
         event id when no image tag is present. -->
    <div
      class="flex-shrink-0 w-32 h-40 overflow-hidden rounded flex items-center justify-center p-2 -mt-2"
    >
      {#if image}
        <LazyImage
          src={image}
          alt={title || "Publication image"}
          eventId={event.id}
          className="w-full h-full object-cover"
        />
      {:else}
        <div
          class="w-full h-full rounded"
          style="background-color: {generateDarkPastelColor(event.id)};"
        >
        </div>
      {/if}
    </div>

    <!-- Title / author block; the whole text column links to the
         publication page. -->
    <div class="flex flex-col flex-grow space-x-2">
      <div class="flex flex-col flex-grow">
        <a href="/{href}" class="flex flex-col space-y-2 h-full">
          <div class="flex-grow pt-2">
            <h2 class="text-lg font-bold line-clamp-2" {title}>{title}</h2>
            <h3 class="text-base font-normal mt-2">
              by
              {#if authorPubkey != null}
                {@render userBadge(authorPubkey, author)}
              {:else}
                {author}
              {/if}
            </h3>
          </div>
          <!-- Only surface the version when it differs from the default "1". -->
          {#if version != "1"}
            <h3 class="text-sm font-semibold text-primary-600 dark:text-primary-400 mt-auto">version: {version}</h3>
          {/if}
        </a>
      </div>
    </div>

    <!-- Position CardActions at bottom-right -->
    <div class="absolute bottom-2 right-2">
      <CardActions {event} />
    </div>
  </Card>
{/if}
||||
@ -0,0 +1,155 @@
@@ -0,0 +1,155 @@
|
||||
<script lang="ts"> |
||||
import type { PublicationTree } from "$lib/data_structures/publication_tree"; |
||||
import { |
||||
contentParagraph, |
||||
sectionHeading, |
||||
} from "$lib/snippets/PublicationSnippets.svelte"; |
||||
import { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { TextPlaceholder } from "flowbite-svelte"; |
||||
import { getContext } from "svelte"; |
||||
import type { Asciidoctor, Document } from "asciidoctor"; |
||||
import { getMatchingTags } from "$lib/utils/nostrUtils"; |
||||
import type { SveltePublicationTree } from "./svelte_publication_tree.svelte"; |
||||
import { postProcessAdvancedAsciidoctorHtml } from "$lib/utils/markup/advancedAsciidoctorPostProcessor"; |
||||
|
||||
let {
  // Address ("kind:pubkey:d") of the leaf event this section renders.
  address,
  // Address of the publication's root index event.
  rootAddress,
  // Flattened leaf list; null entries are leaves not yet resolved.
  leaves,
  // Callback that receives the rendered <section> element.
  ref,
}: {
  address: string;
  rootAddress: string;
  leaves: Array<NDKEvent | null>;
  ref: (ref: HTMLElement) => void;
} = $props();

const publicationTree: SveltePublicationTree = getContext("publicationTree");
const asciidoctor: Asciidoctor = getContext("asciidoctor");

// The leaf event itself, fetched lazily from the publication tree.
let leafEvent: Promise<NDKEvent | null> = $derived.by(
  async () => await publicationTree.getEvent(address),
);

// The publication's root event (carries publication-level metadata).
let rootEvent: Promise<NDKEvent | null> = $derived.by(
  async () => await publicationTree.getEvent(rootAddress),
);

// Publication-level "type" tag value; undefined when absent.
let publicationType: Promise<string | undefined> = $derived.by(
  async () => (await rootEvent)?.getMatchingTags("type")[0]?.[1],
);

// Ancestor chain (root → this leaf), used to compute heading depth.
let leafHierarchy: Promise<NDKEvent[]> = $derived.by(
  async () => await publicationTree.getHierarchy(address),
);

let leafTitle: Promise<string | undefined> = $derived.by(
  async () => (await leafEvent)?.getMatchingTags("title")[0]?.[1],
);

// Leaf content converted from AsciiDoc to HTML, then post-processed.
let leafContent: Promise<string | Document> = $derived.by(async () => {
  const content = (await leafEvent)?.content ?? "";
  const converted = asciidoctor.convert(content);
  const processed = await postProcessAdvancedAsciidoctorHtml(converted.toString());
  return processed;
});
||||
|
||||
let previousLeafEvent: NDKEvent | null = $derived.by(() => { |
||||
let index: number; |
||||
let event: NDKEvent | null = null; |
||||
let decrement = 1; |
||||
|
||||
do { |
||||
index = leaves.findIndex((leaf) => leaf?.tagAddress() === address); |
||||
if (index === 0) { |
||||
return null; |
||||
} |
||||
event = leaves[index - decrement++]; |
||||
} while (event == null && index - decrement >= 0); |
||||
|
||||
return event; |
||||
}); |
||||
|
||||
let previousLeafHierarchy: Promise<NDKEvent[] | null> = $derived.by( |
||||
async () => { |
||||
if (!previousLeafEvent) { |
||||
return null; |
||||
} |
||||
return await publicationTree.getHierarchy(previousLeafEvent.tagAddress()); |
||||
}, |
||||
); |
||||
|
||||
// Branch headings that must be rendered above this leaf: the nodes on
// this leaf's ancestor path that were NOT already covered by the previous
// leaf — i.e. everything below the deepest common ancestor. Each entry
// pairs a branch event with its depth (used for the heading level).
let divergingBranches = $derived.by(async () => {
  let [leafHierarchyValue, previousLeafHierarchyValue] = await Promise.all([
    leafHierarchy,
    previousLeafHierarchy,
  ]);

  const branches: [NDKEvent, number][] = [];

  // No previous leaf (this is the first): render every ancestor heading
  // except the leaf itself (hence length - 1).
  if (!previousLeafHierarchyValue) {
    for (let i = 0; i < leafHierarchyValue.length - 1; i++) {
      branches.push([leafHierarchyValue[i], i]);
    }
    return branches;
  }

  const minLength = Math.min(
    leafHierarchyValue.length,
    previousLeafHierarchyValue.length,
  );

  // Find the first diverging node.
  let divergingIndex = 0;
  while (
    divergingIndex < minLength &&
    leafHierarchyValue[divergingIndex].tagAddress() ===
      previousLeafHierarchyValue[divergingIndex].tagAddress()
  ) {
    divergingIndex++;
  }

  // Add all branches from the first diverging node to the current leaf.
  for (let i = divergingIndex; i < leafHierarchyValue.length - 1; i++) {
    branches.push([leafHierarchyValue[i], i]);
  }

  return branches;
});
||||
|
||||
let sectionRef: HTMLElement; |
||||
|
||||
$effect(() => { |
||||
if (!sectionRef) { |
||||
return; |
||||
} |
||||
|
||||
ref(sectionRef); |
||||
}); |
||||
</script> |
||||
|
||||
<section
  id={address}
  bind:this={sectionRef}
  class="publication-leather content-visibility-auto"
>
  <!-- Placeholder until all async leaf data has resolved. -->
  {#await Promise.all( [leafTitle, leafContent, leafHierarchy, publicationType, divergingBranches], )}
    <TextPlaceholder size="xxl" />
  {:then [leafTitle, leafContent, leafHierarchy, publicationType, divergingBranches]}
    <!-- Headings for ancestor branches not already rendered by the
         previous leaf. -->
    {#each divergingBranches as [branch, depth]}
      {@render sectionHeading(
        getMatchingTags(branch, "title")[0]?.[1] ?? "",
        depth,
      )}
    {/each}
    {#if leafTitle}
      {@const leafDepth = leafHierarchy.length - 1}
      {@render sectionHeading(leafTitle, leafDepth)}
    {/if}
    {@render contentParagraph(
      leafContent.toString(),
      publicationType ?? "article",
      false,
    )}
  {/await}
</section>
||||
@ -0,0 +1,182 @@
@@ -0,0 +1,182 @@
|
||||
<script lang="ts"> |
||||
import { |
||||
TableOfContents, |
||||
type TocEntry, |
||||
} from "$lib/components/publications/table_of_contents.svelte"; |
||||
import { getContext } from "svelte"; |
||||
import { |
||||
SidebarDropdownWrapper, |
||||
SidebarGroup, |
||||
SidebarItem, |
||||
} from "flowbite-svelte"; |
||||
import Self from "./TableOfContents.svelte"; |
||||
import { onMount, onDestroy } from "svelte"; |
||||
|
||||
let { depth, onSectionFocused, onLoadMore } = $props<{ |
||||
rootAddress: string; |
||||
depth: number; |
||||
onSectionFocused?: (address: string) => void; |
||||
onLoadMore?: () => void; |
||||
}>(); |
||||
|
||||
let toc = getContext("toc") as TableOfContents; |
||||
|
||||
let entries = $derived.by<TocEntry[]>(() => { |
||||
const newEntries = []; |
||||
for (const [_, entry] of toc.addressMap) { |
||||
if (entry.depth !== depth) { |
||||
continue; |
||||
} |
||||
|
||||
newEntries.push(entry); |
||||
} |
||||
|
||||
return newEntries; |
||||
}); |
||||
|
||||
// Track the currently visible section |
||||
let currentVisibleSection = $state<string | null>(null); |
||||
let observer: IntersectionObserver; |
||||
|
||||
function setEntryExpanded(address: string, expanded: boolean = false) { |
||||
const entry = toc.getEntry(address); |
||||
if (!entry) { |
||||
return; |
||||
} |
||||
|
||||
toc.expandedMap.set(address, expanded); |
||||
entry.resolveChildren(); |
||||
} |
||||
|
||||
function handleSectionClick(address: string) { |
||||
// Smooth scroll to the section |
||||
const element = document.getElementById(address); |
||||
if (element) { |
||||
element.scrollIntoView({ |
||||
behavior: 'smooth', |
||||
block: 'start', |
||||
}); |
||||
} |
||||
|
||||
onSectionFocused?.(address); |
||||
|
||||
// Check if this is the last entry and trigger loading more events |
||||
const currentEntries = entries; |
||||
const lastEntry = currentEntries[currentEntries.length - 1]; |
||||
if (lastEntry && lastEntry.address === address) { |
||||
console.debug('[TableOfContents] Last entry clicked, triggering load more'); |
||||
onLoadMore?.(); |
||||
} |
||||
} |
||||
|
||||
// Check if an entry is currently visible |
||||
function isEntryVisible(address: string): boolean { |
||||
return currentVisibleSection === address; |
||||
} |
||||
|
||||
// Set up intersection observer to track visible sections
onMount(() => {
  observer = new IntersectionObserver(
    (entries) => {
      // Find the section that is most visible in the viewport
      let maxIntersectionRatio = 0;
      let mostVisibleSection: string | null = null;

      entries.forEach((entry) => {
        if (entry.isIntersecting && entry.intersectionRatio > maxIntersectionRatio) {
          maxIntersectionRatio = entry.intersectionRatio;
          mostVisibleSection = entry.target.id;
        }
      });

      // Only update state on an actual change, to avoid redundant re-renders.
      if (mostVisibleSection && mostVisibleSection !== currentVisibleSection) {
        currentVisibleSection = mostVisibleSection;
      }
    },
    {
      threshold: [0, 0.25, 0.5, 0.75, 1],
      rootMargin: "-20% 0px -20% 0px", // Consider section visible when it's in the middle 60% of the viewport
    }
  );

  // Function to observe all section elements
  function observeSections() {
    const sections = document.querySelectorAll('section[id]');
    sections.forEach((section) => {
      observer.observe(section);
    });
  }

  // Initial observation
  observeSections();

  // Set up a mutation observer to watch for new sections being added
  // (sections render lazily as publication events resolve).
  const mutationObserver = new MutationObserver((mutations) => {
    mutations.forEach((mutation) => {
      mutation.addedNodes.forEach((node) => {
        if (node.nodeType === Node.ELEMENT_NODE) {
          const element = node as Element;
          // Check if the added node is a section with an id
          if (element.tagName === 'SECTION' && element.id) {
            observer.observe(element);
          }
          // Check if the added node contains sections
          const sections = element.querySelectorAll?.('section[id]');
          if (sections) {
            sections.forEach((section) => {
              observer.observe(section);
            });
          }
        }
      });
    });
  });

  // Start observing the document body for changes
  mutationObserver.observe(document.body, {
    childList: true,
    subtree: true,
  });

  // Teardown: disconnect both observers when the component unmounts.
  return () => {
    observer.disconnect();
    mutationObserver.disconnect();
  };
});
||||
|
||||
onDestroy(() => { |
||||
if (observer) { |
||||
observer.disconnect(); |
||||
} |
||||
}); |
||||
</script> |
||||
|
||||
<!-- TODO: Figure out how to style indentations. -->
<!-- TODO: Make group title fonts the same as entry title fonts. -->
<SidebarGroup>
  {#each entries as entry, index}
    {@const address = entry.address}
    {@const expanded = toc.expandedMap.get(address) ?? false}
    {@const isLeaf = toc.leaves.has(address)}
    {@const isVisible = isEntryVisible(address)}
    {@const isLastEntry = index === entries.length - 1}
    {#if isLeaf}
      <!-- Leaf entries link straight to their section anchor. -->
      <SidebarItem
        label={entry.title}
        href={`#${address}`}
        spanClass="px-2 text-ellipsis"
        class={`${isVisible ? "toc-highlight" : ""} ${isLastEntry ? "pb-4" : ""}`}
        onclick={() => handleSectionClick(address)}
      />
    {:else}
      <!-- Branch entries expand into a nested ToC one level deeper. -->
      {@const childDepth = depth + 1}
      <SidebarDropdownWrapper
        label={entry.title}
        btnClass="flex items-center p-2 w-full font-normal text-gray-900 rounded-lg transition duration-75 group hover:bg-primary-50 dark:text-white dark:hover:bg-primary-800 {isVisible ? 'toc-highlight' : ''} {isLastEntry ? 'pb-4' : ''}"
        bind:isOpen={() => expanded, (open) => setEntryExpanded(address, open)}
      >
        <Self rootAddress={address} depth={childDepth} {onSectionFocused} {onLoadMore} />
      </SidebarDropdownWrapper>
    {/if}
  {/each}
</SidebarGroup>
||||
@ -0,0 +1,111 @@
@@ -0,0 +1,111 @@
|
||||
import { SvelteSet } from "svelte/reactivity"; |
||||
import { PublicationTree } from "../../data_structures/publication_tree.ts"; |
||||
import NDK, { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
|
||||
/**
 * Reactive wrapper around `PublicationTree`: proxies its public API while
 * mirroring resolved node addresses into a `SvelteSet` so Svelte components
 * can react to tree-resolution progress.
 */
export class SveltePublicationTree {
  // Addresses of nodes the underlying tree has resolved so far (reactive).
  resolvedAddresses: SvelteSet<string> = new SvelteSet();

  #publicationTree: PublicationTree;
  #nodeResolvedObservers: Array<(address: string) => void> = [];
  #bookmarkMovedObservers: Array<(address: string) => void> = [];

  constructor(rootEvent: NDKEvent, ndk: NDK) {
    this.#publicationTree = new PublicationTree(rootEvent, ndk);

    // Arrow-function members keep `this` bound correctly here (see below).
    this.#publicationTree.onNodeResolved(this.#handleNodeResolved);
    this.#publicationTree.onBookmarkMoved(this.#handleBookmarkMoved);
  }

  // #region Proxied Public Methods

  /** Child addresses of `address`; null entries are unresolved slots. */
  getChildAddresses(address: string): Promise<Array<string | null>> {
    return this.#publicationTree.getChildAddresses(address);
  }

  /** Fetches the event at `address`, or null if it cannot be resolved. */
  getEvent(address: string): Promise<NDKEvent | null> {
    return this.#publicationTree.getEvent(address);
  }

  /** Ancestor chain from the root down to (and including) `address`. */
  getHierarchy(address: string): Promise<NDKEvent[]> {
    return this.#publicationTree.getHierarchy(address);
  }

  /** Parent event of `address`, or null when it has no parent. */
  async getParent(address: string): Promise<NDKEvent | null> {
    const hierarchy = await this.getHierarchy(address);

    // The last element in the hierarchy is the event with the given address, so the parent is the
    // second to last element.
    return hierarchy.at(-2) ?? null;
  }

  /** Moves the reading bookmark to `address` on the underlying tree. */
  setBookmark(address: string) {
    this.#publicationTree.setBookmark(address);
  }

  /**
   * Registers an observer function that is invoked whenever a new node is resolved.
   * @param observer The observer function.
   */
  onNodeResolved(observer: (address: string) => void) {
    this.#nodeResolvedObservers.push(observer);
  }

  /**
   * Registers an observer function that is invoked whenever the bookmark is moved.
   * @param observer The observer function.
   */
  onBookmarkMoved(observer: (address: string) => void) {
    this.#bookmarkMovedObservers.push(observer);
  }

  // #endregion

  // #region Proxied Async Iterator Methods

  [Symbol.asyncIterator](): AsyncIterator<NDKEvent | null> {
    return this;
  }

  /** Advances the underlying tree's leaf iterator forward. */
  next(): Promise<IteratorResult<NDKEvent | null>> {
    return this.#publicationTree.next();
  }

  /** Steps the underlying tree's leaf iterator backward. */
  previous(): Promise<IteratorResult<NDKEvent | null>> {
    return this.#publicationTree.previous();
  }

  // #endregion

  // #region Private Methods

  /**
   * Observer function that is invoked whenever a new node is resolved on the publication tree.
   *
   * @param address The address of the resolved node.
   *
   * This member is declared as an arrow function to ensure that the correct `this` context is
   * used when the function is invoked in this class's constructor.
   */
  #handleNodeResolved = (address: string) => {
    this.resolvedAddresses.add(address);
    for (const observer of this.#nodeResolvedObservers) {
      observer(address);
    }
  };

  /**
   * Observer function that is invoked whenever the bookmark is moved on the publication tree.
   *
   * @param address The address of the new bookmark.
   *
   * This member is declared as an arrow function to ensure that the correct `this` context is
   * used when the function is invoked in this class's constructor.
   */
  #handleBookmarkMoved = (address: string) => {
    for (const observer of this.#bookmarkMovedObservers) {
      observer(address);
    }
  };

  // #endregion
}
||||
@ -0,0 +1,297 @@
@@ -0,0 +1,297 @@
|
||||
import { SvelteMap, SvelteSet } from "svelte/reactivity"; |
||||
import { SveltePublicationTree } from "./svelte_publication_tree.svelte.ts"; |
||||
import type { NDKEvent } from "../../utils/nostrUtils.ts"; |
||||
import { indexKind } from "../../consts.ts"; |
||||
|
||||
export interface TocEntry { |
||||
address: string; |
||||
title: string; |
||||
href?: string; |
||||
children: TocEntry[]; |
||||
parent?: TocEntry; |
||||
depth: number; |
||||
childrenResolved: boolean; |
||||
resolveChildren: () => Promise<void>; |
||||
} |
||||
|
||||
/** |
||||
* Maintains a table of contents (ToC) for a `SveltePublicationTree`. Since publication trees are |
||||
* conceptually infinite and lazy-loading, the ToC represents only the portion of the tree that has |
||||
* been "discovered". The ToC is updated as new nodes are resolved within the publication tree. |
||||
* |
||||
* @see SveltePublicationTree |
||||
*/ |
||||
export class TableOfContents { |
||||
  // Event address → ToC entry, for every entry discovered so far (reactive).
  public addressMap: SvelteMap<string, TocEntry> = new SvelteMap();
  // Event address → whether the entry is expanded in the sidebar UI.
  public expandedMap: SvelteMap<string, boolean> = new SvelteMap();
  // Addresses known to be leaf (content) nodes rather than branches.
  public leaves: SvelteSet<string> = new SvelteSet();

  #root: TocEntry | null = null;
  #publicationTree: SveltePublicationTree;
  #pagePathname: string;
||||
|
||||
  /**
   * Constructs a `TableOfContents` from a `SveltePublicationTree`.
   *
   * @param rootAddress The address of the root event.
   * @param publicationTree The SveltePublicationTree instance.
   * @param pagePathname The current page pathname for href generation.
   */
  constructor(
    rootAddress: string,
    publicationTree: SveltePublicationTree,
    pagePathname: string,
  ) {
    this.#publicationTree = publicationTree;
    this.#pagePathname = pagePathname;
    // NOTE(review): #init is async and intentionally not awaited here; a
    // rejected promise (e.g. missing root event) would go unhandled —
    // confirm callers observe initialization failures some other way.
    this.#init(rootAddress);
  }
||||
|
||||
// #region Public Methods
|
||||
|
||||
  /**
   * Returns the root entry of the ToC.
   *
   * @returns The root entry of the ToC, or `null` if the ToC has not been initialized.
   */
  getRootEntry(): TocEntry | null {
    return this.#root;
  }

  /**
   * Looks up the ToC entry for an event address.
   *
   * @param address The event address ("kind:pubkey:d") to look up.
   * @returns The entry, or `undefined` when no entry has been built for it yet.
   */
  getEntry(address: string): TocEntry | undefined {
    return this.addressMap.get(address);
  }
||||
|
||||
  /**
   * Builds a table of contents from the DOM subtree rooted at `parentElement`.
   *
   * @param parentElement The root of the DOM subtree containing the content to be added to the
   * ToC.
   * @param parentEntry The ToC entry corresponding to the DOM subtree root indicated by
   * `parentElement`.
   *
   * This function is intended for use on segments of HTML markup that are not directly derived
   * from a structure publication of the kind supported by `PublicationTree`. It may be used to
   * produce a table of contents from the contents of a kind `30041` event with AsciiDoc markup, or
   * from a kind `30023` event with Markdown content.
   *
   * NOTE(review): the recursive call passes a heading element as the next `parentElement`;
   * heading elements do not contain nested headings, so the recursion appears to be a
   * no-op — confirm intended behavior (see the TODOs below).
   */
  buildTocFromDocument(parentElement: HTMLElement, parentEntry: TocEntry) {
    parentElement
      .querySelectorAll<HTMLHeadingElement>(`h${parentEntry.depth}`)
      .forEach((header) => {
        // TODO: Correctly update ToC state from DOM.
        const title = header.textContent?.trim();
        const id = header.id;

        // Only create an entry if the header has an ID and a title.
        if (id && title) {
          const href = `${this.#pagePathname}#${id}`;

          // TODO: Check this logic.
          // DOM-derived entries reuse the parent's event address and are
          // considered fully resolved (no async children to fetch).
          const tocEntry: TocEntry = {
            address: parentEntry.address,
            title,
            href,
            depth: parentEntry.depth + 1,
            children: [],
            childrenResolved: true,
            resolveChildren: () => Promise.resolve(),
          };
          parentEntry.children.push(tocEntry);
          this.expandedMap.set(tocEntry.address, false);

          this.buildTocFromDocument(header, tocEntry);
        }
      });
  }
||||
|
||||
// #endregion
|
||||
|
||||
// #region Iterator Methods
|
||||
|
||||
/** |
||||
* Iterates over all ToC entries in depth-first order. |
||||
*/ |
||||
*[Symbol.iterator](): IterableIterator<TocEntry> { |
||||
function* traverse(entry: TocEntry | null): IterableIterator<TocEntry> { |
||||
if (!entry) { |
||||
return; |
||||
} |
||||
|
||||
yield entry; |
||||
|
||||
if (entry.children) { |
||||
for (const child of entry.children) { |
||||
yield* traverse(child); |
||||
} |
||||
} |
||||
} |
||||
|
||||
yield* traverse(this.#root); |
||||
} |
||||
|
||||
// #endregion
|
||||
|
||||
// #region Private Methods
|
||||
|
||||
  /**
   * Initializes the ToC from the associated publication tree: builds the
   * root entry, backfills entries for nodes already resolved, and subscribes
   * to future resolutions.
   *
   * @param rootAddress The address of the publication's root event.
   * @throws Error when the root event cannot be fetched.
   *
   * Michael J - 07 July 2025 - NOTE: Since the publication tree is conceptually infinite and
   * lazy-loading, the ToC is not guaranteed to contain all the nodes at any layer until the
   * publication has been fully resolved.
   *
   * Michael J - 07 July 2025 - TODO: If the relay provides event metadata, use the metadata to
   * initialize the ToC with all of its first-level children.
   */
  async #init(rootAddress: string) {
    const rootEvent = await this.#publicationTree.getEvent(rootAddress);
    if (!rootEvent) {
      throw new Error(`[ToC] Root event ${rootAddress} not found.`);
    }

    this.#root = await this.#buildTocEntry(rootAddress);

    this.addressMap.set(rootAddress, this.#root);

    // Handle any other nodes that have already been resolved in parallel.
    await Promise.all(
      Array.from(this.#publicationTree.resolvedAddresses).map((address) =>
        this.#buildTocEntryFromResolvedNode(address),
      ),
    );

    // Set up an observer to handle progressive resolution of the publication tree.
    this.#publicationTree.onNodeResolved((address: string) => {
      this.#buildTocEntryFromResolvedNode(address);
    });
  }
||||
|
||||
/**
 * Derives a display title for an event: the first "title" tag value,
 * falling back to the event's tag address, then to "[untitled]".
 */
#getTitle(event: NDKEvent | null): string {
  const fallback = "[untitled]";

  if (!event) {
    // TODO: What do we want to return in this case?
    return fallback;
  }

  const firstTitleTag = event.getMatchingTags?.("title")?.[0]?.[1];
  if (firstTitleTag) {
    return firstTitleTag;
  }

  return event.tagAddress() || fallback;
}
||||
|
||||
/**
 * Builds a single ToC entry for `address`, including a lazy `resolveChildren`
 * callback that populates the entry's children on demand.
 *
 * @param address Nostr address ("kind:pubkey:dtag") of the event to represent.
 * @returns The constructed entry (children unresolved).
 * @throws Error when the event for `address` cannot be fetched.
 */
async #buildTocEntry(address: string): Promise<TocEntry> {
  // Michael J - 07 July 2025 - NOTE: This arrow function is nested so as to use its containing
  // scope in its operation. Do not move it to the top level without ensuring it still has access
  // to the necessary variables.
  // NOTE(review): `resolver` closes over `entry`, which is declared further
  // down — safe only because the resolver cannot run before this function
  // returns the fully constructed entry.
  const resolver = async () => {
    if (entry.childrenResolved) {
      return;
    }

    const event = await this.#publicationTree.getEvent(entry.address);
    if (event?.kind !== indexKind) {
      // TODO: Build ToC entries from HTML markup in this case.
      return;
    }

    const childAddresses = await this.#publicationTree.getChildAddresses(
      entry.address,
    );
    for (const childAddress of childAddresses) {
      if (!childAddress) {
        continue;
      }

      // Michael J - 16 June 2025 - This duplicates logic in the outer function, but is necessary
      // here so that we can determine whether to render an entry as a leaf before it is fully
      // resolved.
      if (childAddress.split(":")[0] !== indexKind.toString()) {
        this.leaves.add(childAddress);
      }

      // Michael J - 05 June 2025 - The `getChildAddresses` method forces node resolution on the
      // publication tree. This is acceptable here, because the tree is always resolved
      // top-down. Therefore, by the time we handle a node's resolution, its parent and
      // siblings have already been resolved.
      const childEntry = await this.#buildTocEntry(childAddress);
      childEntry.parent = entry;
      // Child depth is re-derived from the parent, overriding the
      // hierarchy-based depth computed inside the recursive call.
      childEntry.depth = entry.depth + 1;
      entry.children.push(childEntry);
      this.addressMap.set(childAddress, childEntry);
    }

    // Children arrive in resolution order; re-sort them to match the
    // order of 'a' tags on the parent index event.
    await this.#matchChildrenToTagOrder(entry);

    entry.childrenResolved = true;
  };

  const event = await this.#publicationTree.getEvent(address);
  if (!event) {
    throw new Error(`[ToC] Event ${address} not found.`);
  }

  // Depth = number of ancestors in the publication hierarchy.
  const depth = (await this.#publicationTree.getHierarchy(address)).length;

  const entry: TocEntry = {
    address,
    title: this.#getTitle(event),
    href: `${this.#pagePathname}#${address}`,
    children: [],
    depth,
    childrenResolved: false,
    resolveChildren: resolver,
  };
  // New entries start collapsed in the UI.
  this.expandedMap.set(address, false);

  // Michael J - 16 June 2025 - We determine whether to add a leaf both here and in the inner
  // resolver function. The resolver function is called when entries are resolved by expanding
  // a ToC entry, and we'll reach the block below when entries are resolved by the publication
  // tree.
  if (event.kind !== indexKind) {
    this.leaves.add(address);
  }

  return entry;
}
||||
|
||||
/** |
||||
* Reorders the children of a ToC entry to match the order of 'a' tags in the corresponding |
||||
* Nostr index event. |
||||
* |
||||
* @param entry The ToC entry to reorder. |
||||
* |
||||
* This function has a time complexity of `O(n log n)`, where `n` is the number of children the |
||||
* parent event has. Average size of `n` is small enough to be negligible. |
||||
*/ |
||||
async #matchChildrenToTagOrder(entry: TocEntry) { |
||||
const parentEvent = await this.#publicationTree.getEvent(entry.address); |
||||
if (parentEvent?.kind === indexKind) { |
||||
const tagOrder = parentEvent.getMatchingTags("a").map((tag) => tag[1]); |
||||
const addressToOrdinal = new Map<string, number>(); |
||||
|
||||
// Build map of addresses to their ordinals from tag order
|
||||
tagOrder.forEach((address, index) => { |
||||
addressToOrdinal.set(address, index); |
||||
}); |
||||
|
||||
entry.children.sort((a, b) => { |
||||
const aOrdinal = |
||||
addressToOrdinal.get(a.address) ?? Number.MAX_SAFE_INTEGER; |
||||
const bOrdinal = |
||||
addressToOrdinal.get(b.address) ?? Number.MAX_SAFE_INTEGER; |
||||
return aOrdinal - bOrdinal; |
||||
}); |
||||
} |
||||
} |
||||
|
||||
/**
 * Builds and registers a ToC entry for a node the publication tree resolved
 * on its own (as opposed to user-driven expansion). No-op when an entry for
 * the address already exists.
 *
 * @param address Nostr address of the resolved node.
 */
#buildTocEntryFromResolvedNode(address: string) {
  if (this.addressMap.has(address)) {
    return;
  }

  // Fire-and-forget: the tree's resolution observer does not await this.
  // #buildTocEntry throws when the event cannot be fetched, so attach a
  // catch handler — otherwise a missing event becomes an unhandled
  // promise rejection instead of a log line.
  this.#buildTocEntry(address)
    .then((entry) => {
      this.addressMap.set(address, entry);
    })
    .catch((err) => {
      console.error(`[ToC] Failed to build entry for ${address}:`, err);
    });
}
||||
|
||||
// #endregion
|
||||
} |
||||
@ -0,0 +1,115 @@
@@ -0,0 +1,115 @@
|
||||
<script lang="ts">
  import { Button } from "flowbite-svelte";
  import { goto } from "$app/navigation";
  import type { NDKEvent } from "$lib/utils/nostrUtils";
  import { findContainingIndexEvents } from "$lib/utils/event_search";
  import { getMatchingTags } from "$lib/utils/nostrUtils";
  import { naddrEncode } from "$lib/utils";
  import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk";

  // The event whose containing publications (index events) are listed below.
  let { event } = $props<{
    event: NDKEvent;
  }>();

  // Index events that reference `event`, plus fetch state for the UI.
  let containingIndexes = $state<NDKEvent[]>([]);
  let loading = $state(false);
  let error = $state<string | null>(null);
  // Last event ID a fetch was started for; guards the $effect below against
  // re-fetching when unrelated reactive state changes.
  let lastEventId = $state<string | null>(null);

  // Fetches the index events containing the current event into
  // `containingIndexes`; failures are surfaced via `error`.
  async function loadContainingIndexes() {
    console.log(
      "[ContainingIndexes] Loading containing indexes for event:",
      event.id,
    );
    loading = true;
    error = null;

    try {
      containingIndexes = await findContainingIndexEvents(event);
      console.log(
        "[ContainingIndexes] Found containing indexes:",
        containingIndexes.length,
      );
    } catch (err) {
      error =
        err instanceof Error
          ? err.message
          : "Failed to load containing indexes";
      console.error(
        "[ContainingIndexes] Error loading containing indexes:",
        err,
      );
    } finally {
      loading = false;
    }
  }

  // Navigates to the publication page, preferring the index's 'd' tag and
  // falling back to an naddr-encoded address.
  function navigateToIndex(indexEvent: NDKEvent) {
    const dTag = getMatchingTags(indexEvent, "d")[0]?.[1];
    if (dTag) {
      goto(`/publication?d=${encodeURIComponent(dTag)}`);
    } else {
      // Fallback to naddr
      try {
        const naddr = naddrEncode(indexEvent, $activeInboxRelays);
        goto(`/publication?id=${encodeURIComponent(naddr)}`);
      } catch (err) {
        console.error("[ContainingIndexes] Error creating naddr:", err);
      }
    }
  }

  // NOTE(review): this helper (and the activeOutboxRelays import) appears
  // unused in this component — confirm before removing.
  function getNaddrUrl(event: NDKEvent): string {
    return naddrEncode(event, $activeInboxRelays);
  }

  $effect(() => {
    // Only reload if the event ID has actually changed
    if (event.id !== lastEventId) {
      lastEventId = event.id;
      loadContainingIndexes();
    }
  });
</script>

{#if containingIndexes.length > 0 || loading || error}
  <div class="mb-4 p-3 bg-gray-50 dark:bg-gray-800 rounded-lg border">
    <h4 class="text-sm font-medium text-gray-700 dark:text-gray-300 mb-2">
      Containing Publications
    </h4>

    {#if loading}
      <div class="text-sm text-gray-500 dark:text-gray-400">
        Loading containing publications...
      </div>
    {:else if error}
      <div class="text-sm text-red-600 dark:text-red-400">
        {error}
      </div>
    {:else if containingIndexes.length > 0}
      <!-- Show at most three entries; summarize the rest. -->
      <div class="max-h-32 overflow-y-auto">
        {#each containingIndexes.slice(0, 3) as indexEvent}
          {@const title =
            getMatchingTags(indexEvent, "title")[0]?.[1] || "Untitled"}
          <Button
            size="xs"
            color="alternative"
            class="mb-1 mr-1 text-xs"
            onclick={() => navigateToIndex(indexEvent)}
          >
            {title}
          </Button>
        {/each}
        {#if containingIndexes.length > 3}
          <span class="text-xs text-gray-500 dark:text-gray-400">
            +{containingIndexes.length - 3} more
          </span>
        {/if}
      </div>
    {:else}
      <div class="text-sm text-gray-500 dark:text-gray-400">
        No containing publications found
      </div>
    {/if}
  </div>
{/if}
||||
@ -0,0 +1,90 @@
@@ -0,0 +1,90 @@
|
||||
<script lang="ts">
  import { generateDarkPastelColor } from '$lib/utils/image_utils';
  import { fade } from 'svelte/transition';
  import { quintOut } from 'svelte/easing';

  // NOTE(review): `fade` and `quintOut` are imported but not referenced in
  // this component — confirm before removing.

  let {
    src,
    alt,
    eventId,
    className = 'w-full h-full object-cover',
    placeholderClassName = '',
  }: {
    src: string;
    alt: string;
    // Seeds the deterministic placeholder color below.
    eventId: string;
    className?: string;
    placeholderClassName?: string;
  } = $props();

  // Load/error state drives the placeholder <-> image cross-fade below.
  let imageLoaded = $state(false);
  let imageError = $state(false);
  let imgElement = $state<HTMLImageElement | null>(null);

  // Placeholder color is a pure function of the event ID, so the same event
  // always gets the same placeholder.
  const placeholderColor = $derived.by(() => generateDarkPastelColor(eventId));

  // NOTE(review): the template below already sets {src} plus inline
  // onload/onerror handlers; `loadImage` and `bindImg` are never referenced
  // by the markup (no use:bindImg) and look like an unused alternative
  // wiring — confirm before removing.
  function loadImage() {
    if (!imgElement) return;

    imgElement.onload = () => {
      // Small delay to ensure smooth transition
      setTimeout(() => {
        imageLoaded = true;
      }, 100);
    };

    imgElement.onerror = () => {
      imageError = true;
    };

    // Set src after setting up event handlers
    imgElement.src = src;
  }

  function bindImg(element: HTMLImageElement) {
    imgElement = element;
    // Load image immediately when element is bound
    loadImage();
  }
</script>

<div class="relative w-full h-full">
  <!-- Placeholder -->
  <div
    class="absolute inset-0 {placeholderClassName}"
    style="background-color: {placeholderColor};"
    class:hidden={imageLoaded}
  >
  </div>

  <!-- Image -->
  <img
    bind:this={imgElement}
    {src}
    {alt}
    class="{className} {imageLoaded ? 'opacity-100' : 'opacity-0'}"
    style="transition: opacity 0.2s ease-out;"
    loading="lazy"
    decoding="async"
    class:hidden={imageError}
    onload={() => {
      setTimeout(() => {
        imageLoaded = true;
      }, 100);
    }}
    onerror={() => {
      imageError = true;
    }}
  />

  <!-- Error state -->
  {#if imageError}
    <div
      class="absolute inset-0 flex items-center justify-center bg-gray-200 dark:bg-gray-700 {placeholderClassName}"
    >
      <div class="text-gray-500 dark:text-gray-400 text-xs">
        Failed to load
      </div>
    </div>
  {/if}
</div>
||||
@ -1,143 +0,0 @@
@@ -1,143 +0,0 @@
|
||||
<script lang="ts">
  import {
    Heading,
    Sidebar,
    SidebarGroup,
    SidebarItem,
    SidebarWrapper,
  } from "flowbite-svelte";
  import { onMount } from "svelte";
  import { pharosInstance, tocUpdate } from "$lib/parser";
  import { publicationColumnVisibility } from "$lib/stores";

  // Root index event ID; must match the parser's notion of the root.
  let { rootId } = $props<{ rootId: string }>();

  if (rootId !== $pharosInstance.getRootIndexId()) {
    console.error("Root ID does not match parser root index ID");
  }

  // Below this viewport width the ToC sidebar becomes collapsible.
  const tocBreakpoint = 1140;

  // Current URL hash; used to highlight the active ToC item.
  let activeHash = $state(window.location.hash);

  interface TocItem {
    label: string;
    hash: string;
  }

  // Get TOC items from parser
  let tocItems = $state<TocItem[]>([]);

  $effect(() => {
    // This will re-run whenever tocUpdate changes
    tocUpdate;
    const items: TocItem[] = [];
    const childIds = $pharosInstance.getChildIndexIds(rootId);
    console.log('TOC rootId:', rootId, 'childIds:', childIds);
    // Depth-first walk of the index tree, flattening titles into `items`.
    const processNode = (nodeId: string) => {
      const title = $pharosInstance.getIndexTitle(nodeId);
      if (title) {
        items.push({
          label: title,
          hash: `#${nodeId}`
        });
      }
      const children = $pharosInstance.getChildIndexIds(nodeId);
      children.forEach(processNode);
    };
    childIds.forEach(processNode);
    tocItems = items;
  });

  // NOTE(review): appears unused in this component — confirm before removing.
  function normalizeHashPath(str: string): string {
    return str
      .toLowerCase()
      .replace(/\s+/g, "-")
      .replace(/[^\w-]/g, "");
  }

  // Scrolls to the element named by the URL hash, offset so the fixed
  // header does not cover it.
  function scrollToElementWithOffset() {
    const hash = window.location.hash;
    if (hash) {
      const targetElement = document.querySelector(hash);
      if (targetElement) {
        const headerOffset = 80;
        const elementPosition = targetElement.getBoundingClientRect().top;
        const offsetPosition = elementPosition + window.scrollY - headerOffset;

        window.scrollTo({
          top: offsetPosition,
          behavior: "auto",
        });
      }
    }
  }

  function updateActiveHash() {
    activeHash = window.location.hash;
  }

  /**
   * Hides the table of contents sidebar when the window shrinks below a certain size. This
   * prevents the sidebar from occluding the article content.
   */
  function setTocVisibilityOnResize() {
    // Always show TOC on laptop and larger screens, collapsible only on small/medium
    publicationColumnVisibility.update(v => ({ ...v, toc: window.innerWidth >= tocBreakpoint }));
  }

  /**
   * Hides the table of contents sidebar when the user clicks outside of it.
   */
  function hideTocOnClick(ev: MouseEvent) {
    const target = ev.target as HTMLElement;

    // Clicks inside the sidebar or on its toggle button never close it.
    if (target.closest(".sidebar-leather") || target.closest(".btn-leather")) {
      return;
    }

    // Only allow hiding TOC on screens smaller than tocBreakpoint
    if (window.innerWidth < tocBreakpoint && $publicationColumnVisibility.toc) {
      publicationColumnVisibility.update(v => ({ ...v, toc: false}));
    }
  }

  onMount(() => {
    // Always check whether the TOC sidebar should be visible.
    setTocVisibilityOnResize();

    window.addEventListener("hashchange", updateActiveHash);
    window.addEventListener("hashchange", scrollToElementWithOffset);
    // Also handle the case where the user lands on the page with a hash in the URL
    scrollToElementWithOffset();

    window.addEventListener("resize", setTocVisibilityOnResize);
    window.addEventListener("click", hideTocOnClick);

    // Remove every listener on unmount to avoid leaks.
    return () => {
      window.removeEventListener("hashchange", updateActiveHash);
      window.removeEventListener("hashchange", scrollToElementWithOffset);
      window.removeEventListener("resize", setTocVisibilityOnResize);
      window.removeEventListener("click", hideTocOnClick);
    };
  });
</script>

<!-- TODO: Get TOC from parser. -->
{#if $publicationColumnVisibility.toc}
  <Sidebar class='sidebar-leather left-0'>
    <SidebarWrapper>
      <SidebarGroup class='sidebar-group-leather'>
        <Heading tag="h1" class="h-leather !text-lg">Table of contents</Heading>
        <p>(This ToC is only for demo purposes, and is not fully-functional.)</p>
        {#each tocItems as item}
          <SidebarItem
            class="sidebar-item-leather {activeHash === item.hash ? 'bg-primary-200 font-bold' : ''}"
            label={item.label}
            href={item.hash}
          />
        {/each}
      </SidebarGroup>
    </SidebarWrapper>
  </Sidebar>
{/if}
||||
@ -0,0 +1,84 @@
@@ -0,0 +1,84 @@
|
||||
<script lang="ts">
  import type { NDKEvent } from "$lib/utils/nostrUtils";
  import { getMatchingTags } from "$lib/utils/nostrUtils";
  import { naddrEncode } from "$lib/utils";
  import { getEventType } from "$lib/utils/mime";
  import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk";
  import { communityRelays } from "$lib/consts";
  import { goto } from "$app/navigation";

  // NOTE(review): activeOutboxRelays and communityRelays appear unused in
  // this component — confirm before removing.

  let { event, className = "" } = $props<{
    event: NDKEvent;
    className?: string;
  }>();

  function getDeferralNaddr(event: NDKEvent): string | undefined {
    // Look for a 'deferral' tag, e.g. ['deferral', 'naddr1...']
    return getMatchingTags(event, "deferral")[0]?.[1];
  }

  // True when the event's kind is classified as addressable (NIP-33 style).
  function isAddressableEvent(event: NDKEvent): boolean {
    return getEventType(event.kind || 0) === "addressable";
  }

  // naddr-encodes the event, or null when it is not addressable or
  // encoding fails.
  function getNaddrAddress(event: NDKEvent): string | null {
    if (!isAddressableEvent(event)) {
      return null;
    }
    try {
      return naddrEncode(event, $activeInboxRelays);
    } catch {
      return null;
    }
  }

  // Resolves the address to link to, in priority order:
  // 1. an 'a' tag containing "defer", 2. a 'deferral' tag, 3. the event's
  // own naddr.
  function getViewPublicationNaddr(event: NDKEvent): string | null {
    // First, check for a-tags with 'defer' - these indicate the event is deferring to someone else's version
    const aTags = getMatchingTags(event, "a");
    for (const tag of aTags) {
      if (tag.length >= 2 && tag.includes("defer")) {
        // This is a deferral to someone else's addressable event
        return tag[1]; // Return the addressable event address
      }
    }

    // For deferred events with deferral tag, use the deferral naddr instead of the event's own naddr
    const deferralNaddr = getDeferralNaddr(event);
    if (deferralNaddr) {
      return deferralNaddr;
    }

    // Otherwise, use the event's own naddr if it's addressable
    return getNaddrAddress(event);
  }

  function navigateToPublication() {
    const naddrAddress = getViewPublicationNaddr(event);
    console.log("ViewPublicationLink: navigateToPublication called", {
      eventKind: event.kind,
      naddrAddress,
      isAddressable: isAddressableEvent(event),
    });
    if (naddrAddress) {
      console.log(
        "ViewPublicationLink: Navigating to publication:",
        naddrAddress,
      );
      goto(`/publication?id=${encodeURIComponent(naddrAddress)}`);
    } else {
      console.log("ViewPublicationLink: No naddr address found for event");
    }
  }

  // The button below only renders when an address can be resolved.
  let naddrAddress = $derived(getViewPublicationNaddr(event));
</script>

{#if naddrAddress}
  <button
    class="inline-flex items-center px-3 py-2 text-sm font-medium text-white bg-primary-600 hover:bg-primary-700 focus:bg-primary-700 focus:outline-none focus:ring-2 focus:ring-primary-500 rounded-lg transition-colors {className}"
    onclick={navigateToPublication}
    tabindex="0"
  >
    View Publication
  </button>
{/if}
||||
@ -1,23 +1,52 @@
@@ -1,23 +1,52 @@
|
||||
// AI SHOULD NEVER CHANGE THIS FILE
|
||||
|
||||
export const wikiKind = 30818; |
||||
export const indexKind = 30040; |
||||
export const zettelKinds = [ 30041, 30818 ]; |
||||
export const communityRelay = [ 'wss://theforest.nostr1.com' ]; |
||||
export const standardRelays = [ 'wss://thecitadel.nostr1.com', 'wss://theforest.nostr1.com' ]; |
||||
export const fallbackRelays = [
|
||||
'wss://purplepag.es', |
||||
'wss://indexer.coracle.social', |
||||
'wss://relay.noswhere.com', |
||||
'wss://relay.damus.io', |
||||
'wss://relay.nostr.band', |
||||
'wss://relay.lumina.rocks', |
||||
'wss://nostr.wine', |
||||
'wss://nostr.land' |
||||
export const zettelKinds = [30041, 30818]; |
||||
|
||||
export const communityRelays = [ |
||||
"wss://theforest.nostr1.com", |
||||
//"wss://theforest.gitcitadel.eu"
|
||||
]; |
||||
|
||||
export const searchRelays = [ |
||||
"wss://profiles.nostr1.com", |
||||
"wss://aggr.nostr.land", |
||||
"wss://relay.noswhere.com", |
||||
"wss://nostr.wine", |
||||
]; |
||||
|
||||
export const secondaryRelays = [ |
||||
"wss://theforest.nostr1.com", |
||||
//"wss://theforest.gitcitadel.eu"
|
||||
"wss://thecitadel.nostr1.com", |
||||
//"wss://thecitadel.gitcitadel.eu",
|
||||
"wss://nostr.land", |
||||
"wss://nostr.wine", |
||||
"wss://nostr.sovbit.host", |
||||
"wss://nostr21.com", |
||||
]; |
||||
|
||||
export const anonymousRelays = [ |
||||
"wss://freelay.sovbit.host", |
||||
"wss://thecitadel.nostr1.com" |
||||
]; |
||||
|
||||
export const lowbandwidthRelays = [ |
||||
"wss://theforest.nostr1.com", |
||||
"wss://thecitadel.nostr1.com", |
||||
"wss://aggr.nostr.land" |
||||
]; |
||||
|
||||
export const localRelays: string[] = [ |
||||
"wss://localhost:8080", |
||||
"wss://localhost:4869" |
||||
]; |
||||
|
||||
export enum FeedType { |
||||
StandardRelays = 'standard', |
||||
UserRelays = 'user', |
||||
CommunityRelays = "standard", |
||||
UserRelays = "user", |
||||
} |
||||
|
||||
export const loginStorageKey = 'alexandria/login/pubkey'; |
||||
export const feedTypeStorageKey = 'alexandria/feed/type'; |
||||
export const loginStorageKey = "alexandria/login/pubkey"; |
||||
export const feedTypeStorageKey = "alexandria/feed/type"; |
||||
|
||||
@ -0,0 +1,111 @@
@@ -0,0 +1,111 @@
|
||||
import { get } from "svelte/store"; |
||||
import { ndkInstance } from "../ndk.ts"; |
||||
import { getMimeTags } from "../utils/mime.ts"; |
||||
import { parseAsciiDocSections } from "../utils/ZettelParser.ts"; |
||||
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
|
||||
/** Outcome of a publish attempt. */
export interface PublishResult {
  success: boolean;
  // Set on success: ID of the published event.
  eventId?: string;
  // Set on failure: human-readable reason.
  error?: string;
}

/** Inputs for publishing AsciiDoc content as a Nostr event. */
export interface PublishOptions {
  // Raw AsciiDoc source to publish.
  content: string;
  // Nostr event kind; defaults to 30041 (zettel) in publishZettel.
  kind?: number;
  // Invoked with the event ID when publishing succeeds.
  onSuccess?: (eventId: string) => void;
  // Invoked with an error message when publishing fails.
  onError?: (error: string) => void;
}
||||
|
||||
/**
 * Publishes AsciiDoc content as Nostr events.
 *
 * Validates input and login state, parses the content into sections,
 * builds and signs a kind-`kind` event from the first section, and
 * publishes it to every relay in the NDK pool. Never throws: all failures
 * are reported via the returned result and the onError callback.
 *
 * @param options - Publishing options (content, kind, callbacks).
 * @returns Promise resolving to publish result.
 */
export async function publishZettel(
  options: PublishOptions,
): Promise<PublishResult> {
  const { content, kind = 30041, onSuccess, onError } = options;

  if (!content.trim()) {
    const error = "Please enter some content";
    onError?.(error);
    return { success: false, error };
  }

  // Get the current NDK instance from the store
  const ndk = get(ndkInstance);

  if (!ndk?.activeUser) {
    const error = "Please log in first";
    onError?.(error);
    return { success: false, error };
  }

  try {
    // Parse content into sections
    const sections = parseAsciiDocSections(content, 2);

    if (sections.length === 0) {
      throw new Error("No valid sections found in content");
    }

    // For now, publish only the first section
    const firstSection = sections[0];
    const title = firstSection.title;
    const cleanContent = firstSection.content;
    const sectionTags = firstSection.tags || [];

    // Generate d-tag and create event
    const dTag = generateDTag(title);
    const [mTag, MTag] = getMimeTags(kind);

    const tags: string[][] = [["d", dTag], mTag, MTag, ["title", title]];
    // NOTE(review): `sectionTags` is defaulted to [] above, so this check is
    // always truthy — spreading an empty array is harmless, but confirm the
    // guard's intent.
    if (sectionTags) {
      tags.push(...sectionTags);
    }

    // Create and sign NDK event
    const ndkEvent = new NDKEvent(ndk);
    ndkEvent.kind = kind;
    ndkEvent.created_at = Math.floor(Date.now() / 1000);
    ndkEvent.tags = tags;
    ndkEvent.content = cleanContent;
    ndkEvent.pubkey = ndk.activeUser.pubkey;

    await ndkEvent.sign();

    // Publish to relays
    const allRelayUrls = Array.from(ndk.pool?.relays.values() || []).map(
      (r) => r.url,
    );

    if (allRelayUrls.length === 0) {
      throw new Error("No relays available in NDK pool");
    }

    const relaySet = NDKRelaySet.fromRelayUrls(allRelayUrls, ndk);
    const publishedToRelays = await ndkEvent.publish(relaySet);

    // Success requires at least one relay to have accepted the event.
    if (publishedToRelays.size > 0) {
      const result = { success: true, eventId: ndkEvent.id };
      onSuccess?.(ndkEvent.id);
      return result;
    } else {
      // Try fallback publishing logic here...
      throw new Error("Failed to publish to any relays");
    }
  } catch (error) {
    const errorMessage =
      error instanceof Error ? error.message : "Unknown error";
    onError?.(errorMessage);
    return { success: false, error: errorMessage };
  }
}
||||
|
||||
function generateDTag(title: string): string { |
||||
return title |
||||
.toLowerCase() |
||||
.replace(/[^\w\s-]/g, "") |
||||
.replace(/\s+/g, "-"); |
||||
} |
||||
@ -1,19 +1,81 @@
@@ -1,19 +1,81 @@
|
||||
<script module lang="ts">
  import { goto } from "$app/navigation";
  import {
    createProfileLinkWithVerification,
    toNpub,
    getUserMetadata,
  } from "$lib/utils/nostrUtils";

  // Extend NostrProfile locally to allow display_name for legacy support
  type NostrProfileWithLegacy = {
    displayName?: string;
    display_name?: string;
    name?: string;
    [key: string]: any;
  };

  export { userBadge };
</script>

<!--
  Renders a clickable @-badge for a Nostr identifier. When no usable display
  text is given, the profile name is resolved from metadata; otherwise the
  given text is shown while verification resolves in the background.
  NOTE(review): stale diff lines referencing the no-longer-imported
  `createProfileLink` were removed; only the imported helpers are used.
-->
{#snippet userBadge(identifier: string, displayText: string | undefined)}
  {#if toNpub(identifier)}
    {@const npub = toNpub(identifier)}
    {#if npub}
      {#if !displayText || displayText.trim().toLowerCase() === "unknown"}
        <!-- No display text: resolve a name from the user's metadata. -->
        {#await getUserMetadata(npub) then profile}
          {@const p = profile as NostrProfileWithLegacy}
          <span class="inline-flex items-center gap-0.5">
            <button
              class="npub-badge bg-transparent border-none p-0 underline cursor-pointer"
              onclick={() => goto(`/events?id=${npub}`)}
            >
              @{p.displayName ||
                p.display_name ||
                p.name ||
                npub.slice(0, 8) + "..." + npub.slice(-4)}
            </button>
          </span>
        {:catch}
          <!-- Metadata fetch failed: fall back to a truncated npub. -->
          <span class="inline-flex items-center gap-0.5">
            <button
              class="npub-badge bg-transparent border-none p-0 underline cursor-pointer"
              onclick={() => goto(`/events?id=${npub}`)}
            >
              @{npub.slice(0, 8) + "..." + npub.slice(-4)}
            </button>
          </span>
        {/await}
      {:else}
        {#await createProfileLinkWithVerification(npub as string, displayText)}
          <!-- Pending verification: show the plain badge. -->
          <span class="inline-flex items-center gap-0.5">
            <button
              class="npub-badge bg-transparent border-none p-0 underline cursor-pointer"
              onclick={() => goto(`/events?id=${npub}`)}
            >
              @{displayText}
            </button>
          </span>
        {:then html}
          <!-- Verified: badge plus any verification markup after the link. -->
          <span class="inline-flex items-center gap-0.5">
            <button
              class="npub-badge bg-transparent border-none p-0 underline cursor-pointer"
              onclick={() => goto(`/events?id=${npub}`)}
            >
              @{displayText}
            </button>
            {@html html.replace(/([\s\S]*<\/a>)/, "").trim()}
          </span>
        {:catch}
          <span class="inline-flex items-center gap-0.5">
            <button
              class="npub-badge bg-transparent border-none p-0 underline cursor-pointer"
              onclick={() => goto(`/events?id=${npub}`)}
            >
              @{displayText}
            </button>
          </span>
        {/await}
      {/if}
    {/if}
  {:else}
    {displayText ?? ""}
  {/if}
{/snippet}
||||
@ -0,0 +1,11 @@
@@ -0,0 +1,11 @@
|
||||
import { writable, derived } from "svelte/store"; |
||||
|
||||
/** |
||||
* Stores the user's public key if logged in, or null otherwise. |
||||
*/ |
||||
export const userPubkey = writable<string | null>(null); |
||||
|
||||
/** |
||||
* Derived store indicating if the user is logged in. |
||||
*/ |
||||
export const isLoggedIn = derived(userPubkey, ($userPubkey) => !!$userPubkey); |
||||
@ -0,0 +1,55 @@
@@ -0,0 +1,55 @@
|
||||
import { writable } from "svelte/store"; |
||||
import { detectNetworkCondition, NetworkCondition, startNetworkMonitoring } from '../utils/network_detection.ts'; |
||||
|
||||
// Network status store
|
||||
export const networkCondition = writable<NetworkCondition>(NetworkCondition.ONLINE); |
||||
export const isNetworkChecking = writable<boolean>(false); |
||||
|
||||
// Network monitoring state
|
||||
let stopNetworkMonitoring: (() => void) | null = null; |
||||
|
||||
/** |
||||
* Starts network monitoring if not already running |
||||
*/ |
||||
export function startNetworkStatusMonitoring(): void { |
||||
if (stopNetworkMonitoring) { |
||||
return; // Already monitoring
|
||||
} |
||||
|
||||
console.debug('[networkStore.ts] Starting network status monitoring'); |
||||
|
||||
stopNetworkMonitoring = startNetworkMonitoring( |
||||
(condition: NetworkCondition) => { |
||||
console.debug(`[networkStore.ts] Network condition changed to: ${condition}`); |
||||
networkCondition.set(condition); |
||||
}, |
||||
60000 // Check every 60 seconds to reduce spam
|
||||
); |
||||
} |
||||
|
||||
/** |
||||
* Stops network monitoring |
||||
*/ |
||||
export function stopNetworkStatusMonitoring(): void { |
||||
if (stopNetworkMonitoring) { |
||||
console.debug('[networkStore.ts] Stopping network status monitoring'); |
||||
stopNetworkMonitoring(); |
||||
stopNetworkMonitoring = null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Manually check network status (for immediate updates) |
||||
*/ |
||||
export async function checkNetworkStatus(): Promise<void> { |
||||
try { |
||||
isNetworkChecking.set(true); |
||||
const condition = await detectNetworkCondition(); |
||||
networkCondition.set(condition); |
||||
} catch (error) { |
||||
console.warn('[networkStore.ts] Failed to check network status:', error); |
||||
networkCondition.set(NetworkCondition.OFFLINE); |
||||
} finally { |
||||
isNetworkChecking.set(false); |
||||
} |
||||
}
|
||||
@ -1,4 +0,0 @@
@@ -1,4 +0,0 @@
|
||||
import { writable } from 'svelte/store';

// Initialize with empty array, will be populated from user preferences
// (relay URLs). Components subscribe to this store to react to relay
// configuration changes.
export const userRelays = writable<string[]>([]);
|
||||
@ -0,0 +1,427 @@
@@ -0,0 +1,427 @@
|
||||
import { writable, get } from "svelte/store"; |
||||
import type { NostrProfile } from "../utils/nostrUtils.ts"; |
||||
import type { NDKUser, NDKSigner } from "@nostr-dev-kit/ndk"; |
||||
import NDK, { |
||||
NDKNip07Signer, |
||||
NDKRelayAuthPolicies, |
||||
NDKRelaySet, |
||||
NDKRelay, |
||||
} from "@nostr-dev-kit/ndk"; |
||||
import { getUserMetadata } from "../utils/nostrUtils.ts"; |
||||
import { ndkInstance, activeInboxRelays, activeOutboxRelays, updateActiveRelayStores } from "../ndk.ts"; |
||||
import { loginStorageKey } from "../consts.ts"; |
||||
import { nip19 } from "nostr-tools"; |
||||
import { userPubkey } from "../stores/authStore.Svelte.ts"; |
||||
|
||||
/**
 * Snapshot of the current user session held in `userStore`.
 * All fields are null/empty when signed out.
 */
export interface UserState {
  pubkey: string | null; // hex pubkey
  npub: string | null; // bech32-encoded pubkey
  profile: NostrProfile | null; // kind-0 metadata, or a truncated-npub fallback
  relays: { inbox: string[]; outbox: string[] }; // resolved relay URLs
  loginMethod: "extension" | "amber" | "npub" | null;
  ndkUser: NDKUser | null;
  signer: NDKSigner | null; // null for read-only (npub) logins
  signedIn: boolean;
}
||||
|
||||
/**
 * Global store describing the current user session.
 * Starts signed out; populated by the login* functions in this module and
 * reset by logoutUser().
 */
export const userStore = writable<UserState>({
  pubkey: null,
  npub: null,
  profile: null,
  relays: { inbox: [], outbox: [] },
  loginMethod: null,
  ndkUser: null,
  signer: null,
  signedIn: false,
});
||||
|
||||
// Helper functions for relay management
|
||||
function getRelayStorageKey(user: NDKUser, type: "inbox" | "outbox"): string { |
||||
return `${loginStorageKey}/${user.pubkey}/${type}`; |
||||
} |
||||
|
||||
function persistRelays( |
||||
user: NDKUser, |
||||
inboxes: Set<NDKRelay>, |
||||
outboxes: Set<NDKRelay>, |
||||
): void { |
||||
localStorage.setItem( |
||||
getRelayStorageKey(user, "inbox"), |
||||
JSON.stringify(Array.from(inboxes).map((relay) => relay.url)), |
||||
); |
||||
localStorage.setItem( |
||||
getRelayStorageKey(user, "outbox"), |
||||
JSON.stringify(Array.from(outboxes).map((relay) => relay.url)), |
||||
); |
||||
} |
||||
|
||||
function getPersistedRelays(user: NDKUser): [Set<string>, Set<string>] { |
||||
const inboxes = new Set<string>( |
||||
JSON.parse(localStorage.getItem(getRelayStorageKey(user, "inbox")) ?? "[]"), |
||||
); |
||||
const outboxes = new Set<string>( |
||||
JSON.parse( |
||||
localStorage.getItem(getRelayStorageKey(user, "outbox")) ?? "[]", |
||||
), |
||||
); |
||||
|
||||
return [inboxes, outboxes]; |
||||
} |
||||
|
||||
/**
 * Resolves the user's preferred inbox/outbox relays.
 *
 * Fetches the user's kind-10002 relay-list event from the fallback relays
 * (defaulting to the currently active inbox+outbox relays). When no relay
 * list is found, falls back to the NIP-07 extension's getRelays() map.
 *
 * NOTE(review): the kind-10002 branch treats tag[1] as the relay URL and
 * tag[0] ("r"/"w") as the direction marker; NIP-65 puts the read/write
 * marker in tag[2] of an "r" tag — confirm this mapping is intended.
 *
 * @param ndk       NDK instance used for the fetch and relay auth policy.
 * @param user      User whose relay list is being resolved.
 * @param fallbacks Relay URLs queried for the relay-list event.
 * @returns Tuple of [inbox relays, outbox relays].
 */
async function getUserPreferredRelays(
  ndk: NDK,
  user: NDKUser,
  fallbacks: readonly string[] = [...get(activeInboxRelays), ...get(activeOutboxRelays)],
): Promise<[Set<NDKRelay>, Set<NDKRelay>]> {
  // Look for the user's relay list (kind 10002) on the fallback relays.
  const relayList = await ndk.fetchEvent(
    {
      kinds: [10002],
      authors: [user.pubkey],
    },
    {
      groupable: false,
      skipVerification: false,
      skipValidation: false,
    },
    NDKRelaySet.fromRelayUrls(fallbacks, ndk),
  );

  const inboxRelays = new Set<NDKRelay>();
  const outboxRelays = new Set<NDKRelay>();

  if (relayList == null) {
    // No relay-list event found: ask the NIP-07 extension (if present).
    const relayMap = await globalThis.nostr?.getRelays?.();
    Object.entries(relayMap ?? {}).forEach(
      ([url, relayType]: [string, Record<string, boolean | undefined>]) => {
        const relay = new NDKRelay(
          url,
          NDKRelayAuthPolicies.signIn({ ndk }),
          ndk,
        );
        if (relayType.read) inboxRelays.add(relay);
        if (relayType.write) outboxRelays.add(relay);
      },
    );
  } else {
    // Map relay-list tags onto the inbox/outbox sets.
    relayList.tags.forEach((tag: string[]) => {
      switch (tag[0]) {
        case "r":
          inboxRelays.add(
            new NDKRelay(tag[1], NDKRelayAuthPolicies.signIn({ ndk }), ndk),
          );
          break;
        case "w":
          outboxRelays.add(
            new NDKRelay(tag[1], NDKRelayAuthPolicies.signIn({ ndk }), ndk),
          );
          break;
        default:
          // Any other tag kind is added to both sets (tag[1] taken as URL).
          inboxRelays.add(
            new NDKRelay(tag[1], NDKRelayAuthPolicies.signIn({ ndk }), ndk),
          );
          outboxRelays.add(
            new NDKRelay(tag[1], NDKRelayAuthPolicies.signIn({ ndk }), ndk),
          );
          break;
      }
    });
  }

  return [inboxRelays, outboxRelays];
}
||||
|
||||
// --- Unified login/logout helpers ---
|
||||
|
||||
// localStorage key recording which login method ("extension" | "amber" | "npub") was used.
export const loginMethodStorageKey = "alexandria/login/method";
||||
|
||||
function persistLogin(user: NDKUser, method: "extension" | "amber" | "npub") { |
||||
localStorage.setItem(loginStorageKey, user.pubkey); |
||||
localStorage.setItem(loginMethodStorageKey, method); |
||||
} |
||||
|
||||
function clearLogin() { |
||||
localStorage.removeItem(loginStorageKey); |
||||
localStorage.removeItem(loginMethodStorageKey); |
||||
} |
||||
|
||||
/**
 * Login with NIP-07 browser extension.
 *
 * Obtains a signer/user from the extension, fetches profile metadata
 * (falling back to a truncated-npub placeholder on failure), resolves and
 * persists the user's preferred relays, wires the signer into NDK, updates
 * the user store, and finally records the login in localStorage.
 *
 * @throws If no NDK instance is initialized or the extension rejects.
 */
export async function loginWithExtension() {
  const ndk = get(ndkInstance);
  if (!ndk) throw new Error("NDK not initialized");
  // Only clear previous login state after successful login
  const signer = new NDKNip07Signer();
  const user = await signer.user();
  const npub = user.npub;

  console.log("Login with extension - fetching profile for npub:", npub);

  // Try to fetch user metadata, but don't fail if it times out
  let profile: NostrProfile | null = null;
  try {
    console.log("Login with extension - attempting to fetch profile...");
    profile = await getUserMetadata(npub, true); // Force fresh fetch
    console.log("Login with extension - fetched profile:", profile);
  } catch (error) {
    console.warn("Failed to fetch user metadata during login:", error);
    // Continue with login even if metadata fetch fails
    profile = {
      name: npub.slice(0, 8) + "..." + npub.slice(-4),
      displayName: npub.slice(0, 8) + "..." + npub.slice(-4),
    };
    console.log("Login with extension - using fallback profile:", profile);
  }

  // Fetch user's preferred relays: reconnect previously-persisted inboxes
  // first so the relay-list fetch below has somewhere to go.
  const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user);
  for (const relay of persistedInboxes) {
    ndk.addExplicitRelay(relay);
  }
  const [inboxes, outboxes] = await getUserPreferredRelays(ndk, user);
  persistRelays(user, inboxes, outboxes);
  ndk.signer = signer;
  ndk.activeUser = user;

  const userState = {
    pubkey: user.pubkey,
    npub,
    profile,
    relays: {
      // NOTE(review): `inboxes`/`outboxes` are always Sets (never nullish),
      // so the `??` fallbacks to the persisted sets can never trigger —
      // confirm whether an emptiness check was intended instead.
      inbox: Array.from(inboxes ?? persistedInboxes).map((relay) => relay.url),
      outbox: Array.from(outboxes ?? persistedOutboxes).map(
        (relay) => relay.url,
      ),
    },
    loginMethod: "extension" as const,
    ndkUser: user,
    signer,
    signedIn: true,
  };

  console.log("Login with extension - setting userStore with:", userState);
  userStore.set(userState);
  userPubkey.set(user.pubkey);

  // Update relay stores with the new user's relays
  try {
    console.debug('[userStore.ts] loginWithExtension: Updating relay stores for authenticated user');
    await updateActiveRelayStores(ndk);
  } catch (error) {
    console.warn('[userStore.ts] loginWithExtension: Failed to update relay stores:', error);
  }

  // Reset stale login/logout markers, then record this login.
  clearLogin();
  localStorage.removeItem("alexandria/logout/flag");
  persistLogin(user, "extension");
}
||||
|
||||
/**
 * Login with Amber (NIP-46).
 *
 * Uses an already-negotiated remote signer and user: fetches profile
 * metadata (with a truncated-npub fallback), resolves and persists
 * preferred relays, wires the signer into NDK, updates the user store,
 * and records the login in localStorage.
 *
 * @param amberSigner Remote signer obtained from the Amber handshake.
 * @param user        The NDK user the signer acts for.
 * @throws If no NDK instance is initialized.
 */
export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
  const ndk = get(ndkInstance);
  if (!ndk) throw new Error("NDK not initialized");
  // Only clear previous login state after successful login
  const npub = user.npub;

  console.log("Login with Amber - fetching profile for npub:", npub);

  let profile: NostrProfile | null = null;
  try {
    profile = await getUserMetadata(npub, true); // Force fresh fetch
    console.log("Login with Amber - fetched profile:", profile);
  } catch (error) {
    console.warn("Failed to fetch user metadata during Amber login:", error);
    // Continue with login even if metadata fetch fails
    profile = {
      name: npub.slice(0, 8) + "..." + npub.slice(-4),
      displayName: npub.slice(0, 8) + "..." + npub.slice(-4),
    };
    console.log("Login with Amber - using fallback profile:", profile);
  }

  // Reconnect previously-persisted inbox relays, then refresh the preferred set.
  const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user);
  for (const relay of persistedInboxes) {
    ndk.addExplicitRelay(relay);
  }
  const [inboxes, outboxes] = await getUserPreferredRelays(ndk, user);
  persistRelays(user, inboxes, outboxes);
  ndk.signer = amberSigner;
  ndk.activeUser = user;

  const userState = {
    pubkey: user.pubkey,
    npub,
    profile,
    relays: {
      // NOTE(review): `inboxes`/`outboxes` are always Sets, so the `??`
      // fallbacks can never trigger — confirm whether an emptiness check
      // was intended instead.
      inbox: Array.from(inboxes ?? persistedInboxes).map((relay) => relay.url),
      outbox: Array.from(outboxes ?? persistedOutboxes).map(
        (relay) => relay.url,
      ),
    },
    loginMethod: "amber" as const,
    ndkUser: user,
    signer: amberSigner,
    signedIn: true,
  };

  console.log("Login with Amber - setting userStore with:", userState);
  userStore.set(userState);
  userPubkey.set(user.pubkey);

  // Update relay stores with the new user's relays
  try {
    console.debug('[userStore.ts] loginWithAmber: Updating relay stores for authenticated user');
    await updateActiveRelayStores(ndk);
  } catch (error) {
    console.warn('[userStore.ts] loginWithAmber: Failed to update relay stores:', error);
  }

  // Reset stale login/logout markers, then record this login.
  clearLogin();
  localStorage.removeItem("alexandria/logout/flag");
  persistLogin(user, "amber");
}
||||
|
||||
/**
 * Login with npub (read-only).
 *
 * Accepts either a bech32 npub or a hex pubkey. No signer is installed, so
 * the session can read but not sign. Profile metadata is fetched with a
 * truncated-npub fallback; the store is updated and the login persisted.
 *
 * @param pubkeyOrNpub bech32 npub ("npub1...") or hex pubkey.
 * @throws If NDK is uninitialized or the key cannot be decoded/encoded.
 */
export async function loginWithNpub(pubkeyOrNpub: string) {
  const ndk = get(ndkInstance);
  if (!ndk) throw new Error("NDK not initialized");
  // Only clear previous login state after successful login
  // Normalize the input to a hex pubkey.
  let hexPubkey: string;
  if (pubkeyOrNpub.startsWith("npub")) {
    try {
      hexPubkey = nip19.decode(pubkeyOrNpub).data as string;
    } catch (e) {
      console.error("Failed to decode hex pubkey from npub:", pubkeyOrNpub, e);
      throw e;
    }
  } else {
    hexPubkey = pubkeyOrNpub;
  }
  // Re-encode so we always hold a canonical npub.
  let npub: string;
  try {
    npub = nip19.npubEncode(hexPubkey);
  } catch (e) {
    console.error("Failed to encode npub from hex pubkey:", hexPubkey, e);
    throw e;
  }

  console.log("Login with npub - fetching profile for npub:", npub);

  const user = ndk.getUser({ npub });
  let profile: NostrProfile | null = null;
  try {
    profile = await getUserMetadata(npub, true); // Force fresh fetch
    console.log("Login with npub - fetched profile:", profile);
  } catch (error) {
    console.warn("Failed to fetch user metadata during npub login:", error);
    // Continue with login even if metadata fetch fails
    profile = {
      name: npub.slice(0, 8) + "..." + npub.slice(-4),
      displayName: npub.slice(0, 8) + "..." + npub.slice(-4),
    };
    console.log("Login with npub - using fallback profile:", profile);
  }

  // Read-only session: no signer is installed.
  ndk.signer = undefined;
  ndk.activeUser = user;

  const userState = {
    pubkey: user.pubkey,
    npub,
    profile,
    relays: { inbox: [], outbox: [] },
    loginMethod: "npub" as const,
    ndkUser: user,
    signer: null,
    signedIn: true,
  };

  console.log("Login with npub - setting userStore with:", userState);
  userStore.set(userState);
  userPubkey.set(user.pubkey);

  // Update relay stores with the new user's relays
  try {
    console.debug('[userStore.ts] loginWithNpub: Updating relay stores for authenticated user');
    await updateActiveRelayStores(ndk);
  } catch (error) {
    console.warn('[userStore.ts] loginWithNpub: Failed to update relay stores:', error);
  }

  // Reset stale login/logout markers, then record this login.
  clearLogin();
  localStorage.removeItem("alexandria/logout/flag");
  persistLogin(user, "npub");
}
||||
|
||||
/**
 * Logout and clear all user state.
 *
 * Removes the user's persisted relay lists, wipes every localStorage key
 * whose name matches login/nostr/user/alexandria/pubkey patterns, sets a
 * logout flag that suppresses auto-login on the next page load, resets the
 * user store, and detaches the signer/active user from NDK.
 *
 * NOTE(review): the substring match ("user", "nostr", ...) is broad and
 * will also delete unrelated keys containing those words — confirm nothing
 * else in the app stores such keys.
 */
export function logoutUser() {
  console.log("Logging out user...");
  const currentUser = get(userStore);
  if (currentUser.ndkUser) {
    // Clear persisted relays for the user
    localStorage.removeItem(getRelayStorageKey(currentUser.ndkUser, "inbox"));
    localStorage.removeItem(getRelayStorageKey(currentUser.ndkUser, "outbox"));
  }

  // Clear all possible login states from localStorage
  clearLogin();

  // Also clear any other potential login keys that might exist.
  // Collect first, then remove, so the index-based iteration stays valid.
  const keysToRemove = [];
  for (let i = 0; i < localStorage.length; i++) {
    const key = localStorage.key(i);
    if (
      key &&
      (key.includes("login") ||
        key.includes("nostr") ||
        key.includes("user") ||
        key.includes("alexandria") ||
        key === "pubkey")
    ) {
      keysToRemove.push(key);
    }
  }

  // Specifically target the login storage key
  keysToRemove.push("alexandria/login/pubkey");
  keysToRemove.push("alexandria/login/method");

  keysToRemove.forEach((key) => {
    console.log("Removing localStorage key:", key);
    localStorage.removeItem(key);
  });

  // Clear Amber-specific flags
  localStorage.removeItem("alexandria/amber/fallback");

  // Set a flag to prevent auto-login on next page load
  localStorage.setItem("alexandria/logout/flag", "true");

  console.log("Cleared all login data from localStorage");

  // Reset the session snapshot to the signed-out initial state.
  userStore.set({
    pubkey: null,
    npub: null,
    profile: null,
    relays: { inbox: [], outbox: [] },
    loginMethod: null,
    ndkUser: null,
    signer: null,
    signedIn: false,
  });
  userPubkey.set(null);

  // Detach the user/signer from the shared NDK instance as well.
  const ndk = get(ndkInstance);
  if (ndk) {
    ndk.activeUser = undefined;
    ndk.signer = undefined;
  }

  console.log("Logout complete");
}
||||
@ -0,0 +1,110 @@
@@ -0,0 +1,110 @@
|
||||
/**
 * One parsed section of an AsciiDoc zettel document.
 */
export interface ZettelSection {
  title: string; // section heading text ("Untitled" when absent)
  content: string; // body with leading header/attribute lines removed
  tags?: string[][]; // [name, value] pairs from :attr: lines, if any
}
||||
|
||||
/** |
||||
* Splits AsciiDoc content into sections at the specified heading level. |
||||
* Each section starts with the heading and includes all lines up to the next heading of the same level. |
||||
* @param content The AsciiDoc string. |
||||
* @param level The heading level (2 for '==', 3 for '===', etc.). |
||||
* @returns Array of section strings, each starting with the heading. |
||||
*/ |
||||
export function splitAsciiDocByHeadingLevel( |
||||
content: string, |
||||
level: number, |
||||
): string[] { |
||||
if (level < 1 || level > 6) throw new Error("Heading level must be 1-6"); |
||||
const heading = "^" + "=".repeat(level) + " "; |
||||
const regex = new RegExp(`(?=${heading})`, "gm"); |
||||
return content |
||||
.split(regex) |
||||
.map((section) => section.trim()) |
||||
.filter((section) => section.length > 0); |
||||
} |
||||
|
||||
/** |
||||
* Parses a single AsciiDoc section string into a ZettelSection object. |
||||
* @param section The section string (must start with heading). |
||||
*/ |
||||
export function parseZettelSection(section: string): ZettelSection { |
||||
const lines = section.split("\n"); |
||||
let title = "Untitled"; |
||||
const contentLines: string[] = []; |
||||
let inHeader = true; |
||||
let tags: string[][] = []; |
||||
tags = extractTags(section); |
||||
|
||||
for (const line of lines) { |
||||
const trimmed = line.trim(); |
||||
if (inHeader && trimmed.startsWith("==")) { |
||||
title = trimmed.replace(/^==+/, "").trim(); |
||||
continue; |
||||
} else if (inHeader && trimmed.startsWith(":")) { |
||||
continue; |
||||
} |
||||
|
||||
inHeader = false; |
||||
contentLines.push(line); |
||||
} |
||||
|
||||
return { |
||||
title, |
||||
content: contentLines.join("\n").trim(), |
||||
tags, |
||||
}; |
||||
} |
||||
|
||||
/** |
||||
* Parses AsciiDoc into an array of ZettelSection objects at the given heading level. |
||||
*/ |
||||
export function parseAsciiDocSections( |
||||
content: string, |
||||
level: number, |
||||
): ZettelSection[] { |
||||
return splitAsciiDocByHeadingLevel(content, level).map(parseZettelSection); |
||||
} |
||||
|
||||
/** |
||||
* Extracts tag names and values from the content. |
||||
* :tagname: tagvalue // tags are optional
|
||||
* @param content The AsciiDoc string. |
||||
* @returns Array of tags. |
||||
*/ |
||||
export function extractTags(content: string): string[][] { |
||||
const tags: string[][] = []; |
||||
const lines = content.split("\n"); |
||||
|
||||
for (const line of lines) { |
||||
const trimmed = line.trim(); |
||||
if (trimmed.startsWith(":")) { |
||||
// Parse AsciiDoc attribute format: :tagname: value
|
||||
const match = trimmed.match(/^:([^:]+):\s*(.*)$/); |
||||
if (match) { |
||||
const tagName = match[1].trim(); |
||||
const tagValue = match[2].trim(); |
||||
|
||||
// Special handling for tags attribute
|
||||
if (tagName === "tags") { |
||||
// Split comma-separated values and create individual "t" tags
|
||||
const tagValues = tagValue |
||||
.split(",") |
||||
.map((v) => v.trim()) |
||||
.filter((v) => v.length > 0); |
||||
for (const value of tagValues) { |
||||
tags.push(["t", value]); |
||||
} |
||||
} else { |
||||
// Regular attribute becomes a tag
|
||||
tags.push([tagName, tagValue]); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
console.log("Extracted tags:", tags); |
||||
return tags; |
||||
} |
||||
// You can add publishing logic here as needed, e.g.,
|
||||
// export async function publishZettelSection(...) { ... }
|
||||
@ -0,0 +1,106 @@
@@ -0,0 +1,106 @@
|
||||
import { communityRelays } from "$lib/consts"; |
||||
import { RELAY_CONSTANTS, SEARCH_LIMITS } from "./search_constants"; |
||||
|
||||
// Cache for pubkeys with kind 1 events on communityRelay
// Maps pubkey -> whether a matching event was found; entries are never evicted.
const communityCache = new Map<string, boolean>();
||||
|
||||
/** |
||||
* Check if a pubkey has posted to the community relay |
||||
*/ |
||||
export async function checkCommunity(pubkey: string): Promise<boolean> { |
||||
if (communityCache.has(pubkey)) { |
||||
return communityCache.get(pubkey)!; |
||||
} |
||||
|
||||
try { |
||||
// Try each community relay until we find one that works
|
||||
for (const relayUrl of communityRelays) { |
||||
try { |
||||
const ws = new WebSocket(relayUrl); |
||||
const result = await new Promise<boolean>((resolve) => { |
||||
ws.onopen = () => { |
||||
ws.send( |
||||
JSON.stringify([ |
||||
"REQ", |
||||
RELAY_CONSTANTS.COMMUNITY_REQUEST_ID, |
||||
{ |
||||
kinds: RELAY_CONSTANTS.COMMUNITY_REQUEST_KINDS, |
||||
authors: [pubkey], |
||||
limit: SEARCH_LIMITS.COMMUNITY_CHECK, |
||||
}, |
||||
]), |
||||
); |
||||
}; |
||||
ws.onmessage = (event) => { |
||||
const data = JSON.parse(event.data); |
||||
if (data[0] === "EVENT" && data[2]?.kind === 1) { |
||||
communityCache.set(pubkey, true); |
||||
ws.close(); |
||||
resolve(true); |
||||
} else if (data[0] === "EOSE") { |
||||
communityCache.set(pubkey, false); |
||||
ws.close(); |
||||
resolve(false); |
||||
} |
||||
}; |
||||
ws.onerror = () => { |
||||
ws.close(); |
||||
resolve(false); |
||||
}; |
||||
}); |
||||
|
||||
if (result) { |
||||
return true; |
||||
} |
||||
} catch { |
||||
// Continue to next relay if this one fails
|
||||
continue; |
||||
} |
||||
} |
||||
|
||||
// If we get here, no relay found the user
|
||||
communityCache.set(pubkey, false); |
||||
return false; |
||||
} catch { |
||||
communityCache.set(pubkey, false); |
||||
return false; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Check community status for multiple profiles |
||||
*/ |
||||
export async function checkCommunityStatus( |
||||
profiles: Array<{ pubkey?: string }>, |
||||
): Promise<Record<string, boolean>> { |
||||
const communityStatus: Record<string, boolean> = {}; |
||||
|
||||
// Run all community checks in parallel with timeout
|
||||
const checkPromises = profiles.map(async (profile) => { |
||||
if (!profile.pubkey) return { pubkey: "", status: false }; |
||||
|
||||
try { |
||||
const status = await Promise.race([ |
||||
checkCommunity(profile.pubkey), |
||||
new Promise<boolean>((resolve) => { |
||||
setTimeout(() => resolve(false), 2000); // 2 second timeout per check
|
||||
}), |
||||
]); |
||||
return { pubkey: profile.pubkey, status }; |
||||
} catch (error) { |
||||
console.warn("Community status check failed for", profile.pubkey, error); |
||||
return { pubkey: profile.pubkey, status: false }; |
||||
} |
||||
}); |
||||
|
||||
// Wait for all checks to complete
|
||||
const results = await Promise.allSettled(checkPromises); |
||||
|
||||
for (const result of results) { |
||||
if (result.status === "fulfilled" && result.value.pubkey) { |
||||
communityStatus[result.value.pubkey] = result.value.status; |
||||
} |
||||
} |
||||
|
||||
return communityStatus; |
||||
} |
||||
@ -0,0 +1,437 @@
@@ -0,0 +1,437 @@
|
||||
import type { NDKEvent } from "./nostrUtils.ts"; |
||||
import { get } from "svelte/store"; |
||||
import { ndkInstance } from "../ndk.ts"; |
||||
import { NDKEvent as NDKEventClass } from "@nostr-dev-kit/ndk"; |
||||
import { EVENT_KINDS } from "./search_constants"; |
||||
|
||||
// =========================
|
||||
// Validation
|
||||
// =========================
|
||||
|
||||
/** |
||||
* Returns true if the event kind requires a d-tag (kinds 30000-39999). |
||||
*/ |
||||
export function requiresDTag(kind: number): boolean { |
||||
return ( |
||||
kind >= EVENT_KINDS.ADDRESSABLE.MIN && kind <= EVENT_KINDS.ADDRESSABLE.MAX |
||||
); |
||||
} |
||||
|
||||
/** |
||||
* Returns true if the tags array contains at least one d-tag with a non-empty value. |
||||
*/ |
||||
export function hasDTag(tags: [string, string][]): boolean { |
||||
return tags.some(([k, v]) => k === "d" && v && v.trim() !== ""); |
||||
} |
||||
|
||||
/** |
||||
* Returns true if the content contains AsciiDoc headers (lines starting with '=' or '=='). |
||||
*/ |
||||
function containsAsciiDocHeaders(content: string): boolean { |
||||
return /^={1,}\s+/m.test(content); |
||||
} |
||||
|
||||
/** |
||||
* Validates that content does NOT contain AsciiDoc headers (for kind 30023). |
||||
* Returns { valid, reason }. |
||||
*/ |
||||
export function validateNotAsciidoc(content: string): { |
||||
valid: boolean; |
||||
reason?: string; |
||||
} { |
||||
if (containsAsciiDocHeaders(content)) { |
||||
return { |
||||
valid: false, |
||||
reason: |
||||
"Kind 30023 must not contain AsciiDoc headers (lines starting with = or ==).", |
||||
}; |
||||
} |
||||
return { valid: true }; |
||||
} |
||||
|
||||
/** |
||||
* Validates AsciiDoc content. Must start with '=' and contain at least one '==' section header. |
||||
* Returns { valid, reason }. |
||||
*/ |
||||
export function validateAsciiDoc(content: string): { |
||||
valid: boolean; |
||||
reason?: string; |
||||
} { |
||||
if (!content.trim().startsWith("=")) { |
||||
return { |
||||
valid: false, |
||||
reason: 'AsciiDoc must start with a document title ("=").', |
||||
}; |
||||
} |
||||
if (!/^==\s+/m.test(content)) { |
||||
return { |
||||
valid: false, |
||||
reason: 'AsciiDoc must contain at least one section header ("==").', |
||||
}; |
||||
} |
||||
return { valid: true }; |
||||
} |
||||
|
||||
/** |
||||
* Validates that a 30040 event set will be created correctly. |
||||
* Returns { valid, reason }. |
||||
*/ |
||||
export function validate30040EventSet(content: string): { |
||||
valid: boolean; |
||||
reason?: string; |
||||
} { |
||||
// First validate as AsciiDoc
|
||||
const asciiDocValidation = validateAsciiDoc(content); |
||||
if (!asciiDocValidation.valid) { |
||||
return asciiDocValidation; |
||||
} |
||||
|
||||
// Check that we have at least one section
|
||||
const sectionsResult = splitAsciiDocSections(content); |
||||
if (sectionsResult.sections.length === 0) { |
||||
return { |
||||
valid: false, |
||||
reason: "30040 events must contain at least one section.", |
||||
}; |
||||
} |
||||
|
||||
// Check that we have a document title
|
||||
const documentTitle = extractAsciiDocDocumentHeader(content); |
||||
if (!documentTitle) { |
||||
return { |
||||
valid: false, |
||||
reason: |
||||
'30040 events must have a document title (line starting with "=").', |
||||
}; |
||||
} |
||||
|
||||
// Check that the content will result in an empty 30040 event
|
||||
// The 30040 event should have empty content, with all content split into 30041 events
|
||||
if (!content.trim().startsWith("=")) { |
||||
return { |
||||
valid: false, |
||||
reason: '30040 events must start with a document title ("=").', |
||||
}; |
||||
} |
||||
|
||||
return { valid: true }; |
||||
} |
||||
|
||||
// =========================
|
||||
// Extraction & Normalization
|
||||
// =========================
|
||||
|
||||
/** |
||||
* Normalize a string for use as a d-tag: lowercase, hyphens, alphanumeric only. |
||||
*/ |
||||
function normalizeDTagValue(header: string): string { |
||||
return header |
||||
.toLowerCase() |
||||
.replace(/[^\p{L}\p{N}]+/gu, "-") |
||||
.replace(/^-+|-+$/g, ""); |
||||
} |
||||
|
||||
/** |
||||
* Converts a title string to a valid d-tag (lowercase, hyphens, no punctuation). |
||||
*/ |
||||
export function titleToDTag(title: string): string { |
||||
return title |
||||
.toLowerCase() |
||||
.replace(/[^a-z0-9]+/g, "-") // Replace non-alphanumeric with hyphens
|
||||
.replace(/^-+|-+$/g, ""); // Trim leading/trailing hyphens
|
||||
} |
||||
|
||||
/** |
||||
* Extracts the first AsciiDoc document header (line starting with '= '). |
||||
*/ |
||||
function extractAsciiDocDocumentHeader(content: string): string | null { |
||||
const match = content.match(/^=\s+(.+)$/m); |
||||
return match ? match[1].trim() : null; |
||||
} |
||||
|
||||
/** |
||||
* Extracts the topmost Markdown # header (line starting with '# '). |
||||
*/ |
||||
function extractMarkdownTopHeader(content: string): string | null { |
||||
const match = content.match(/^#\s+(.+)$/m); |
||||
return match ? match[1].trim() : null; |
||||
} |
||||
|
||||
/** |
||||
* Splits AsciiDoc content into sections at each '==' header. Returns array of section strings. |
||||
* Document title (= header) is excluded from sections and only used for the index event title. |
||||
* Section headers (==) are discarded from content. |
||||
* Text between document header and first section becomes a "Preamble" section. |
||||
*/ |
||||
function splitAsciiDocSections(content: string): { |
||||
sections: string[]; |
||||
sectionHeaders: string[]; |
||||
hasPreamble: boolean; |
||||
} { |
||||
const lines = content.split(/\r?\n/); |
||||
const sections: string[] = []; |
||||
const sectionHeaders: string[] = []; |
||||
let current: string[] = []; |
||||
let foundFirstSection = false; |
||||
let hasPreamble = false; |
||||
const preambleContent: string[] = []; |
||||
|
||||
for (const line of lines) { |
||||
// Skip document title lines (= header)
|
||||
if (/^=\s+/.test(line)) { |
||||
continue; |
||||
} |
||||
|
||||
// If we encounter a section header (==) and we have content, start a new section
|
||||
if (/^==\s+/.test(line)) { |
||||
if (current.length > 0) { |
||||
sections.push(current.join("\n").trim()); |
||||
current = []; |
||||
} |
||||
|
||||
// Extract section header for title tag
|
||||
const headerMatch = line.match(/^==\s+(.+)$/); |
||||
if (headerMatch) { |
||||
sectionHeaders.push(headerMatch[1].trim()); |
||||
} |
||||
|
||||
foundFirstSection = true; |
||||
} else if (foundFirstSection) { |
||||
// Only add lines to current section if we've found the first section
|
||||
current.push(line); |
||||
} else { |
||||
// Text before first section becomes preamble
|
||||
if (line.trim() !== "") { |
||||
preambleContent.push(line); |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Add the last section
|
||||
if (current.length > 0) { |
||||
sections.push(current.join("\n").trim()); |
||||
} |
||||
|
||||
// Add preamble as first section if it exists
|
||||
if (preambleContent.length > 0) { |
||||
sections.unshift(preambleContent.join("\n").trim()); |
||||
sectionHeaders.unshift("Preamble"); |
||||
hasPreamble = true; |
||||
} |
||||
|
||||
return { sections, sectionHeaders, hasPreamble }; |
||||
} |
||||
|
||||
// =========================
|
||||
// Event Construction
|
||||
// =========================
|
||||
|
||||
/**
 * Returns the current NDK instance from the store.
 */
function getNdk() {
  // Snapshot read; callers get whatever instance is registered right now.
  return get(ndkInstance);
}
||||
|
||||
/** |
||||
* Builds a set of events for a 30040 publication: one 30040 index event and one 30041 event per section. |
||||
* Each 30041 gets a d-tag (normalized section header) and a title tag (raw section header). |
||||
* The 30040 index event references all 30041s by their d-tag. |
||||
*/ |
||||
export function build30040EventSet( |
||||
content: string, |
||||
tags: [string, string][], |
||||
baseEvent: Partial<NDKEvent> & { pubkey: string; created_at: number }, |
||||
): { indexEvent: NDKEvent; sectionEvents: NDKEvent[] } { |
||||
console.log("=== build30040EventSet called ==="); |
||||
console.log("Input content:", content); |
||||
console.log("Input tags:", tags); |
||||
console.log("Input baseEvent:", baseEvent); |
||||
|
||||
const ndk = getNdk(); |
||||
console.log("NDK instance:", ndk); |
||||
|
||||
const sectionsResult = splitAsciiDocSections(content); |
||||
const sections = sectionsResult.sections; |
||||
const sectionHeaders = sectionsResult.sectionHeaders; |
||||
console.log("Sections:", sections); |
||||
console.log("Section headers:", sectionHeaders); |
||||
|
||||
const dTags = |
||||
sectionHeaders.length === sections.length |
||||
? sectionHeaders.map(normalizeDTagValue) |
||||
: sections.map((_, i) => `section${i}`); |
||||
console.log("D tags:", dTags); |
||||
|
||||
const sectionEvents: NDKEvent[] = sections.map((section, i) => { |
||||
const header = sectionHeaders[i] || `Section ${i + 1}`; |
||||
const dTag = dTags[i]; |
||||
console.log(`Creating section ${i}:`, { header, dTag, content: section }); |
||||
return new NDKEventClass(ndk, { |
||||
kind: 30041, |
||||
content: section, |
||||
tags: [...tags, ["d", dTag], ["title", header]], |
||||
pubkey: baseEvent.pubkey, |
||||
created_at: baseEvent.created_at, |
||||
}); |
||||
}); |
||||
|
||||
// Create proper a tags with format: kind:pubkey:d-tag
|
||||
const aTags = dTags.map( |
||||
(dTag) => ["a", `30041:${baseEvent.pubkey}:${dTag}`] as [string, string], |
||||
); |
||||
console.log("A tags:", aTags); |
||||
|
||||
// Extract document title for the index event
|
||||
const documentTitle = extractAsciiDocDocumentHeader(content); |
||||
const indexDTag = documentTitle ? normalizeDTagValue(documentTitle) : "index"; |
||||
console.log("Index event:", { documentTitle, indexDTag }); |
||||
|
||||
const indexTags = [ |
||||
...tags, |
||||
["d", indexDTag], |
||||
["title", documentTitle || "Untitled"], |
||||
...aTags, |
||||
]; |
||||
|
||||
const indexEvent: NDKEvent = new NDKEventClass(ndk, { |
||||
kind: 30040, |
||||
content: "", |
||||
tags: indexTags, |
||||
pubkey: baseEvent.pubkey, |
||||
created_at: baseEvent.created_at, |
||||
}); |
||||
console.log("Final index event:", indexEvent); |
||||
console.log("=== build30040EventSet completed ==="); |
||||
return { indexEvent, sectionEvents }; |
||||
} |
||||
|
||||
/** |
||||
* Returns the appropriate title tag for a given event kind and content. |
||||
* - 30041, 30818: AsciiDoc document header (first '= ' line) |
||||
* - 30023: Markdown topmost '# ' header |
||||
*/ |
||||
export function getTitleTagForEvent( |
||||
kind: number, |
||||
content: string, |
||||
): string | null { |
||||
if (kind === 30041 || kind === 30818) { |
||||
return extractAsciiDocDocumentHeader(content); |
||||
} |
||||
if (kind === 30023) { |
||||
return extractMarkdownTopHeader(content); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
/** |
||||
* Returns the appropriate d-tag value for a given event kind and content. |
||||
* - 30023: Normalized markdown header |
||||
* - 30041, 30818: Normalized AsciiDoc document header |
||||
* - 30040: Uses existing d-tag or generates from content |
||||
*/ |
||||
export function getDTagForEvent( |
||||
kind: number, |
||||
content: string, |
||||
existingDTag?: string, |
||||
): string | null { |
||||
if (existingDTag && existingDTag.trim() !== "") { |
||||
return existingDTag.trim(); |
||||
} |
||||
|
||||
if (kind === 30023) { |
||||
const title = extractMarkdownTopHeader(content); |
||||
return title ? normalizeDTagValue(title) : null; |
||||
} |
||||
|
||||
if (kind === 30041 || kind === 30818) { |
||||
const title = extractAsciiDocDocumentHeader(content); |
||||
return title ? normalizeDTagValue(title) : null; |
||||
} |
||||
|
||||
return null; |
||||
} |
||||
|
||||
/** |
||||
* Returns a description of what a 30040 event structure should be. |
||||
*/ |
||||
export function get30040EventDescription(): string { |
||||
return `30040 events are publication indexes that contain:
|
||||
- Empty content (metadata only) |
||||
- A d-tag for the publication identifier |
||||
- A title tag for the publication title |
||||
- A tags referencing 30041 content events (one per section) |
||||
|
||||
The content is split into sections, each published as a separate 30041 event.`;
|
||||
} |
||||
|
||||
/** |
||||
* Analyzes a 30040 event to determine if it was created correctly. |
||||
* Returns { valid, issues } where issues is an array of problems found. |
||||
*/ |
||||
export function analyze30040Event(event: { |
||||
content: string; |
||||
tags: [string, string][]; |
||||
kind: number; |
||||
}): { valid: boolean; issues: string[] } { |
||||
const issues: string[] = []; |
||||
|
||||
// Check if it's actually a 30040 event
|
||||
if (event.kind !== 30040) { |
||||
issues.push("Event is not kind 30040"); |
||||
return { valid: false, issues }; |
||||
} |
||||
|
||||
// Check if content is empty (30040 should be metadata only)
|
||||
if (event.content && event.content.trim() !== "") { |
||||
issues.push("30040 events should have empty content (metadata only)"); |
||||
issues.push("Content should be split into separate 30041 events"); |
||||
} |
||||
|
||||
// Check for required tags
|
||||
const hasTitle = event.tags.some(([k, v]) => k === "title" && v); |
||||
const hasDTag = event.tags.some(([k, v]) => k === "d" && v); |
||||
const hasATags = event.tags.some(([k, v]) => k === "a" && v); |
||||
|
||||
if (!hasTitle) { |
||||
issues.push("Missing title tag"); |
||||
} |
||||
if (!hasDTag) { |
||||
issues.push("Missing d tag"); |
||||
} |
||||
if (!hasATags) { |
||||
issues.push("Missing a tags (should reference 30041 content events)"); |
||||
} |
||||
|
||||
// Check if a tags have the correct format (kind:pubkey:d-tag)
|
||||
const aTags = event.tags.filter(([k, v]) => k === "a" && v); |
||||
for (const [, value] of aTags) { |
||||
if (!value.includes(":")) { |
||||
issues.push( |
||||
`Invalid a tag format: ${value} (should be "kind:pubkey:d-tag")`, |
||||
); |
||||
} |
||||
} |
||||
|
||||
return { valid: issues.length === 0, issues }; |
||||
} |
||||
|
||||
/**
 * Returns guidance on how to fix incorrect 30040 events.
 * The returned string is markdown-formatted help text intended for
 * direct display to the user.
 */
export function get30040FixGuidance(): string {
  return `To fix a 30040 event:

1. **Content Issue**: 30040 events should have empty content. All content should be split into separate 30041 events.

2. **Structure**: A proper 30040 event should contain:
   - Empty content
   - d tag: publication identifier
   - title tag: publication title
   - a tags: references to 30041 content events (format: "30041:pubkey:d-tag")

3. **Process**: When creating a 30040 event:
   - Write your content with document title (= Title) and sections (== Section)
   - The system will automatically split it into one 30040 index event and multiple 30041 content events
   - The 30040 will have empty content and reference the 30041s via a tags`;
}
||||
@ -0,0 +1,224 @@
@@ -0,0 +1,224 @@
|
||||
import { ndkInstance } from "../ndk.ts"; |
||||
import { fetchEventWithFallback } from "./nostrUtils.ts"; |
||||
import { nip19 } from "nostr-tools"; |
||||
import { NDKEvent, NDKFilter } from "@nostr-dev-kit/ndk"; |
||||
import { get } from "svelte/store"; |
||||
import { wellKnownUrl, isValidNip05Address } from "./search_utils.ts"; |
||||
import { TIMEOUTS, VALIDATION } from "./search_constants.ts"; |
||||
|
||||
/** |
||||
* Search for a single event by ID or filter |
||||
*/ |
||||
export async function searchEvent(query: string): Promise<NDKEvent | null> { |
||||
// Clean the query and normalize to lowercase
|
||||
const cleanedQuery = query.replace(/^nostr:/, "").toLowerCase(); |
||||
let filterOrId: NDKFilter | string = cleanedQuery; |
||||
|
||||
// If it's a valid hex string, try as event id first, then as pubkey (profile)
|
||||
if ( |
||||
new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, "i").test(cleanedQuery) |
||||
) { |
||||
// Try as event id
|
||||
filterOrId = cleanedQuery; |
||||
const eventResult = await fetchEventWithFallback( |
||||
get(ndkInstance), |
||||
filterOrId, |
||||
TIMEOUTS.EVENT_FETCH, |
||||
); |
||||
// Always try as pubkey (profile event) as well
|
||||
const profileFilter = { kinds: [0], authors: [cleanedQuery] }; |
||||
const profileEvent = await fetchEventWithFallback( |
||||
get(ndkInstance), |
||||
profileFilter, |
||||
TIMEOUTS.EVENT_FETCH, |
||||
); |
||||
// Prefer profile if found and pubkey matches query
|
||||
if ( |
||||
profileEvent && |
||||
profileEvent.pubkey.toLowerCase() === cleanedQuery.toLowerCase() |
||||
) { |
||||
return profileEvent; |
||||
} else if (eventResult) { |
||||
return eventResult; |
||||
} |
||||
} else if ( |
||||
new RegExp( |
||||
`^(nevent|note|naddr|npub|nprofile)[a-z0-9]{${VALIDATION.MIN_NOSTR_IDENTIFIER_LENGTH},}$`, |
||||
"i", |
||||
).test(cleanedQuery) |
||||
) { |
||||
try { |
||||
const decoded = nip19.decode(cleanedQuery); |
||||
if (!decoded) throw new Error("Invalid identifier"); |
||||
switch (decoded.type) { |
||||
case "nevent": |
||||
filterOrId = decoded.data.id; |
||||
break; |
||||
case "note": |
||||
filterOrId = decoded.data; |
||||
break; |
||||
case "naddr": |
||||
filterOrId = { |
||||
kinds: [decoded.data.kind], |
||||
authors: [decoded.data.pubkey], |
||||
"#d": [decoded.data.identifier], |
||||
}; |
||||
break; |
||||
case "nprofile": |
||||
filterOrId = { |
||||
kinds: [0], |
||||
authors: [decoded.data.pubkey], |
||||
}; |
||||
break; |
||||
case "npub": |
||||
filterOrId = { |
||||
kinds: [0], |
||||
authors: [decoded.data], |
||||
}; |
||||
break; |
||||
default: |
||||
filterOrId = cleanedQuery; |
||||
} |
||||
} catch (e) { |
||||
console.error("[Search] Invalid Nostr identifier:", cleanedQuery, e); |
||||
throw new Error("Invalid Nostr identifier."); |
||||
} |
||||
} |
||||
|
||||
try { |
||||
const event = await fetchEventWithFallback( |
||||
get(ndkInstance), |
||||
filterOrId, |
||||
TIMEOUTS.EVENT_FETCH, |
||||
); |
||||
|
||||
if (!event) { |
||||
console.warn("[Search] Event not found for filterOrId:", filterOrId); |
||||
return null; |
||||
} else { |
||||
return event; |
||||
} |
||||
} catch (err) { |
||||
console.error("[Search] Error fetching event:", err, "Query:", query); |
||||
throw new Error("Error fetching event. Please check the ID and try again."); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Search for NIP-05 address |
||||
*/ |
||||
export async function searchNip05( |
||||
nip05Address: string, |
||||
): Promise<NDKEvent | null> { |
||||
// NIP-05 address pattern: user@domain
|
||||
if (!isValidNip05Address(nip05Address)) { |
||||
throw new Error("Invalid NIP-05 address format. Expected: user@domain"); |
||||
} |
||||
|
||||
try { |
||||
const [name, domain] = nip05Address.split("@"); |
||||
|
||||
const res = await fetch(wellKnownUrl(domain, name)); |
||||
|
||||
if (!res.ok) { |
||||
throw new Error(`HTTP ${res.status}: ${res.statusText}`); |
||||
} |
||||
|
||||
const data = await res.json(); |
||||
|
||||
const pubkey = data.names?.[name]; |
||||
if (pubkey) { |
||||
const profileFilter = { kinds: [0], authors: [pubkey] }; |
||||
const profileEvent = await fetchEventWithFallback( |
||||
get(ndkInstance), |
||||
profileFilter, |
||||
TIMEOUTS.EVENT_FETCH, |
||||
); |
||||
if (profileEvent) { |
||||
return profileEvent; |
||||
} else { |
||||
throw new Error( |
||||
`No profile found for ${name}@${domain} (pubkey: ${pubkey})`, |
||||
); |
||||
} |
||||
} else { |
||||
throw new Error(`NIP-05 address not found: ${name}@${domain}`); |
||||
} |
||||
} catch (e) { |
||||
console.error( |
||||
`[Search] Error resolving NIP-05 address ${nip05Address}:`, |
||||
e, |
||||
); |
||||
const errorMessage = e instanceof Error ? e.message : String(e); |
||||
throw new Error(`Error resolving NIP-05 address: ${errorMessage}`); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Find containing 30040 index events for a given content event |
||||
* @param contentEvent The content event to find containers for (30041, 30818, etc.) |
||||
* @returns Array of containing 30040 index events |
||||
*/ |
||||
export async function findContainingIndexEvents( |
||||
contentEvent: NDKEvent, |
||||
): Promise<NDKEvent[]> { |
||||
// Support all content event kinds that can be contained in indexes
|
||||
const contentEventKinds = [30041, 30818, 30040, 30023]; |
||||
if (!contentEventKinds.includes(contentEvent.kind!)) { |
||||
return []; |
||||
} |
||||
|
||||
try { |
||||
const ndk = get(ndkInstance); |
||||
|
||||
// Search for 30040 events that reference this content event
|
||||
// We need to search for events that have an 'a' tag or 'e' tag referencing this event
|
||||
const contentEventId = contentEvent.id; |
||||
const contentEventAddress = contentEvent.tagAddress(); |
||||
|
||||
// Search for index events that reference this content event
|
||||
const indexEvents = await ndk.fetchEvents( |
||||
{ |
||||
kinds: [30040], |
||||
"#a": [contentEventAddress], |
||||
}, |
||||
{ |
||||
groupable: true, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
}, |
||||
); |
||||
|
||||
// Also search for events with 'e' tags (legacy format)
|
||||
const indexEventsWithETags = await ndk.fetchEvents( |
||||
{ |
||||
kinds: [30040], |
||||
"#e": [contentEventId], |
||||
}, |
||||
{ |
||||
groupable: true, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
}, |
||||
); |
||||
|
||||
// Combine and deduplicate results
|
||||
const allIndexEvents = new Set([...indexEvents, ...indexEventsWithETags]); |
||||
|
||||
// Filter to only include valid index events
|
||||
const validIndexEvents = Array.from(allIndexEvents).filter((event) => { |
||||
// Check if it's a valid index event (has title, d tag, and either a or e tags)
|
||||
const hasTitle = event.getMatchingTags("title").length > 0; |
||||
const hasDTag = event.getMatchingTags("d").length > 0; |
||||
const hasATags = event.getMatchingTags("a").length > 0; |
||||
const hasETags = event.getMatchingTags("e").length > 0; |
||||
|
||||
return hasTitle && hasDTag && (hasATags || hasETags); |
||||
}); |
||||
|
||||
return validIndexEvents; |
||||
} catch (error) { |
||||
console.error("[Search] Error finding containing index events:", error); |
||||
return []; |
||||
} |
||||
} |
||||
@ -0,0 +1,31 @@
@@ -0,0 +1,31 @@
|
||||
/** |
||||
* Generate a dark-pastel color based on a string (like an event ID) |
||||
* @param seed - The string to generate a color from |
||||
* @returns A dark-pastel hex color |
||||
*/ |
||||
export function generateDarkPastelColor(seed: string): string { |
||||
// Create a simple hash from the seed string
|
||||
let hash = 0; |
||||
for (let i = 0; i < seed.length; i++) { |
||||
const char = seed.charCodeAt(i); |
||||
hash = ((hash << 5) - hash) + char; |
||||
hash = hash & hash; // Convert to 32-bit integer
|
||||
} |
||||
|
||||
// Use the hash to generate lighter pastel colors
|
||||
// Keep values in the 120-200 range for better pastel effect
|
||||
const r = Math.abs(hash) % 80 + 120; // 120-200 range
|
||||
const g = Math.abs(hash >> 8) % 80 + 120; // 120-200 range
|
||||
const b = Math.abs(hash >> 16) % 80 + 120; // 120-200 range
|
||||
|
||||
return `#${r.toString(16).padStart(2, '0')}${g.toString(16).padStart(2, '0')}${b.toString(16).padStart(2, '0')}`; |
||||
} |
||||
|
||||
/** |
||||
* Test function to verify color generation |
||||
* @param eventId - The event ID to test |
||||
* @returns The generated color |
||||
*/ |
||||
export function testColorGeneration(eventId: string): string { |
||||
return generateDarkPastelColor(eventId); |
||||
}
|
||||
@ -0,0 +1,139 @@
@@ -0,0 +1,139 @@
|
||||
import type { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { CACHE_DURATIONS, TIMEOUTS } from "./search_constants.ts"; |
||||
|
||||
/** A single cache record: fetched index events plus expiry bookkeeping. */
export interface IndexEventCacheEntry {
  // The cached index events for this relay combination.
  events: NDKEvent[];
  // Epoch milliseconds when the entry was stored; used for expiry checks.
  timestamp: number;
  // Copy of the relay URLs this entry was keyed on.
  relayUrls: string[];
}
||||
|
||||
class IndexEventCache { |
||||
private cache: Map<string, IndexEventCacheEntry> = new Map(); |
||||
private readonly CACHE_DURATION = CACHE_DURATIONS.INDEX_EVENT_CACHE; |
||||
private readonly MAX_CACHE_SIZE = 50; // Maximum number of cached relay combinations
|
||||
|
||||
/** |
||||
* Generate a cache key based on relay URLs |
||||
*/ |
||||
private generateKey(relayUrls: string[]): string { |
||||
return relayUrls.sort().join("|"); |
||||
} |
||||
|
||||
/** |
||||
* Check if a cached entry is still valid |
||||
*/ |
||||
private isExpired(entry: IndexEventCacheEntry): boolean { |
||||
return Date.now() - entry.timestamp > this.CACHE_DURATION; |
||||
} |
||||
|
||||
/** |
||||
* Get cached index events for a set of relays |
||||
*/ |
||||
get(relayUrls: string[]): NDKEvent[] | null { |
||||
const key = this.generateKey(relayUrls); |
||||
const entry = this.cache.get(key); |
||||
|
||||
if (!entry || this.isExpired(entry)) { |
||||
if (entry) { |
||||
this.cache.delete(key); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
console.log( |
||||
`[IndexEventCache] Using cached index events for ${relayUrls.length} relays`, |
||||
); |
||||
return entry.events; |
||||
} |
||||
|
||||
/** |
||||
* Store index events in cache |
||||
*/ |
||||
set(relayUrls: string[], events: NDKEvent[]): void { |
||||
const key = this.generateKey(relayUrls); |
||||
|
||||
// Implement LRU eviction if cache is full
|
||||
if (this.cache.size >= this.MAX_CACHE_SIZE) { |
||||
const oldestKey = this.cache.keys().next().value; |
||||
if (oldestKey) { |
||||
this.cache.delete(oldestKey); |
||||
} |
||||
} |
||||
|
||||
this.cache.set(key, { |
||||
events, |
||||
timestamp: Date.now(), |
||||
relayUrls: [...relayUrls], |
||||
}); |
||||
|
||||
console.log( |
||||
`[IndexEventCache] Cached ${events.length} index events for ${relayUrls.length} relays`, |
||||
); |
||||
} |
||||
|
||||
/** |
||||
* Check if index events are cached for a set of relays |
||||
*/ |
||||
has(relayUrls: string[]): boolean { |
||||
const key = this.generateKey(relayUrls); |
||||
const entry = this.cache.get(key); |
||||
return entry !== undefined && !this.isExpired(entry); |
||||
} |
||||
|
||||
/** |
||||
* Clear expired entries from cache |
||||
*/ |
||||
cleanup(): void { |
||||
for (const [key, entry] of this.cache.entries()) { |
||||
if (this.isExpired(entry)) { |
||||
this.cache.delete(key); |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Clear all cache entries |
||||
*/ |
||||
clear(): void { |
||||
this.cache.clear(); |
||||
} |
||||
|
||||
/** |
||||
* Get cache size |
||||
*/ |
||||
size(): number { |
||||
return this.cache.size; |
||||
} |
||||
|
||||
/** |
||||
* Get cache statistics |
||||
*/ |
||||
getStats(): { |
||||
size: number; |
||||
totalEvents: number; |
||||
oldestEntry: number | null; |
||||
} { |
||||
let totalEvents = 0; |
||||
let oldestTimestamp: number | null = null; |
||||
|
||||
for (const entry of this.cache.values()) { |
||||
totalEvents += entry.events.length; |
||||
if (oldestTimestamp === null || entry.timestamp < oldestTimestamp) { |
||||
oldestTimestamp = entry.timestamp; |
||||
} |
||||
} |
||||
|
||||
return { |
||||
size: this.cache.size, |
||||
totalEvents, |
||||
oldestEntry: oldestTimestamp, |
||||
}; |
||||
} |
||||
} |
||||
|
||||
/** Shared singleton cache instance used across the app. */
export const indexEventCache = new IndexEventCache();

// Periodically evict expired entries; the interval length comes from
// TIMEOUTS.CACHE_CLEANUP (defined in search_constants).
setInterval(() => {
  indexEventCache.cleanup();
}, TIMEOUTS.CACHE_CLEANUP);
|
||||
@ -0,0 +1,371 @@
@@ -0,0 +1,371 @@
|
||||
import { postProcessAsciidoctorHtml } from "./asciidoctorPostProcessor.ts"; |
||||
import plantumlEncoder from "plantuml-encoder"; |
||||
|
||||
/** |
||||
* Unified post-processor for Asciidoctor HTML that handles: |
||||
* - Math rendering (Asciimath/Latex, stem blocks) |
||||
* - PlantUML diagrams |
||||
* - BPMN diagrams |
||||
* - TikZ diagrams |
||||
*/ |
||||
export async function postProcessAdvancedAsciidoctorHtml( |
||||
html: string, |
||||
): Promise<string> { |
||||
if (!html) return html; |
||||
try { |
||||
// First apply the basic post-processing (wikilinks, nostr addresses)
|
||||
let processedHtml = await postProcessAsciidoctorHtml(html); |
||||
// Unified math block processing
|
||||
processedHtml = fixAllMathBlocks(processedHtml); |
||||
// Process PlantUML blocks
|
||||
processedHtml = processPlantUMLBlocks(processedHtml); |
||||
// Process BPMN blocks
|
||||
processedHtml = processBPMNBlocks(processedHtml); |
||||
// Process TikZ blocks
|
||||
processedHtml = processTikZBlocks(processedHtml); |
||||
// After all processing, apply highlight.js if available
|
||||
if ( |
||||
typeof globalThis !== "undefined" && |
||||
typeof globalThis.hljs?.highlightAll === "function" |
||||
) { |
||||
setTimeout(() => globalThis.hljs!.highlightAll(), 0); |
||||
} |
||||
if ( |
||||
typeof globalThis !== "undefined" && |
||||
typeof globalThis.MathJax?.typesetPromise === "function" |
||||
) { |
||||
setTimeout(() => globalThis.MathJax.typesetPromise(), 0); |
||||
} |
||||
return processedHtml; |
||||
} catch (error) { |
||||
console.error("Error in postProcessAdvancedAsciidoctorHtml:", error); |
||||
return html; // Return original HTML if processing fails
|
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fixes all math blocks for MathJax rendering. |
||||
* Now only processes LaTeX within inline code blocks. |
||||
*/ |
||||
function fixAllMathBlocks(html: string): string { |
||||
// Unescape \$ to $ for math delimiters
|
||||
html = html.replace(/\\\$/g, "$"); |
||||
|
||||
// Process inline code blocks that contain LaTeX
|
||||
html = html.replace( |
||||
/<code[^>]*class="[^"]*language-[^"]*"[^>]*>([\s\S]*?)<\/code>/g, |
||||
(match, codeContent) => { |
||||
const trimmedCode = codeContent.trim(); |
||||
if (isLaTeXContent(trimmedCode)) { |
||||
return `<span class="math-inline">$${trimmedCode}$</span>`; |
||||
} |
||||
return match; // Return original if not LaTeX
|
||||
}, |
||||
); |
||||
|
||||
// Also process code blocks without language class
|
||||
html = html.replace( |
||||
/<code[^>]*>([\s\S]*?)<\/code>/g, |
||||
(match, codeContent) => { |
||||
const trimmedCode = codeContent.trim(); |
||||
if (isLaTeXContent(trimmedCode)) { |
||||
return `<span class="math-inline">$${trimmedCode}$</span>`; |
||||
} |
||||
return match; // Return original if not LaTeX
|
||||
}, |
||||
); |
||||
|
||||
return html; |
||||
} |
||||
|
||||
/** |
||||
* Checks if content contains LaTeX syntax |
||||
*/ |
||||
function isLaTeXContent(content: string): boolean { |
||||
const trimmed = content.trim(); |
||||
|
||||
// Check for common LaTeX patterns
|
||||
const latexPatterns = [ |
||||
/\\[a-zA-Z]+/, // LaTeX commands like \frac, \sum, etc.
|
||||
/\\[\(\)\[\]]/, // LaTeX delimiters like \(, \), \[, \]
|
||||
/\\begin\{/, // LaTeX environments
|
||||
/\\end\{/, // LaTeX environments
|
||||
/\$\$/, // Display math delimiters
|
||||
/\$[^$]+\$/, // Inline math delimiters
|
||||
/\\text\{/, // LaTeX text command
|
||||
/\\mathrm\{/, // LaTeX mathrm command
|
||||
/\\mathbf\{/, // LaTeX bold command
|
||||
/\\mathit\{/, // LaTeX italic command
|
||||
/\\sqrt/, // Square root
|
||||
/\\frac/, // Fraction
|
||||
/\\sum/, // Sum
|
||||
/\\int/, // Integral
|
||||
/\\lim/, // Limit
|
||||
/\\infty/, // Infinity
|
||||
/\\alpha/, // Greek letters
|
||||
/\\beta/, |
||||
/\\gamma/, |
||||
/\\delta/, |
||||
/\\theta/, |
||||
/\\lambda/, |
||||
/\\mu/, |
||||
/\\pi/, |
||||
/\\sigma/, |
||||
/\\phi/, |
||||
/\\omega/, |
||||
/\\partial/, // Partial derivative
|
||||
/\\nabla/, // Nabla
|
||||
/\\cdot/, // Dot product
|
||||
/\\times/, // Times
|
||||
/\\div/, // Division
|
||||
/\\pm/, // Plus-minus
|
||||
/\\mp/, // Minus-plus
|
||||
/\\leq/, // Less than or equal
|
||||
/\\geq/, // Greater than or equal
|
||||
/\\neq/, // Not equal
|
||||
/\\approx/, // Approximately equal
|
||||
/\\equiv/, // Equivalent
|
||||
/\\propto/, // Proportional
|
||||
/\\in/, // Element of
|
||||
/\\notin/, // Not element of
|
||||
/\\subset/, // Subset
|
||||
/\\supset/, // Superset
|
||||
/\\cup/, // Union
|
||||
/\\cap/, // Intersection
|
||||
/\\emptyset/, // Empty set
|
||||
/\\mathbb\{/, // Blackboard bold
|
||||
/\\mathcal\{/, // Calligraphic
|
||||
/\\mathfrak\{/, // Fraktur
|
||||
/\\mathscr\{/, // Script
|
||||
]; |
||||
|
||||
return latexPatterns.some((pattern) => pattern.test(trimmed)); |
||||
} |
||||
|
||||
/**
 * Processes PlantUML blocks in HTML content.
 * Replaces Asciidoctor listing blocks containing PlantUML source with an
 * <img> pointing at the public plantuml.com render server, plus a
 * collapsible <details> showing the original source.
 */
function processPlantUMLBlocks(html: string): string {
  // Only match code blocks with class 'language-plantuml' or 'plantuml'
  html = html.replace(
    /<div class="listingblock">\s*<div class="content">\s*<pre class="highlight">\s*<code[^>]*class="[^"]*(?:language-plantuml|plantuml)[^"]*"[^>]*>([\s\S]*?)<\/code>\s*<\/pre>\s*<\/div>\s*<\/div>/g,
    (match, content) => {
      try {
        // Unescape HTML for PlantUML server, but escape for <code>
        const rawContent = decodeHTMLEntities(content);
        const encoded = plantumlEncoder.encode(rawContent);
        const plantUMLUrl = `https://www.plantuml.com/plantuml/svg/${encoded}`;
        return `<div class="plantuml-block my-4">
  <img src="${plantUMLUrl}" alt="PlantUML diagram"
    class="plantuml-diagram max-w-full h-auto rounded-lg shadow-lg"
    loading="lazy" decoding="async">
  <details class="mt-2">
    <summary class="cursor-pointer text-sm text-gray-600 dark:text-gray-400">
      Show PlantUML source
    </summary>
    <pre class="mt-2 p-2 bg-gray-100 dark:bg-gray-900 rounded text-xs overflow-x-auto">
      <code>${escapeHtml(rawContent)}</code>
    </pre>
  </details>
</div>`;
      } catch (error) {
        // Leave the original block in place if encoding fails.
        console.warn("Failed to process PlantUML block:", error);
        return match;
      }
    },
  );
  // Fallback: match <pre> blocks whose content starts with @startuml or @start
  // (NOTE(review): the @startuml check is subsumed by the @start check).
  html = html.replace(
    /<div class="listingblock">\s*<div class="content">\s*<pre>([\s\S]*?)<\/pre>\s*<\/div>\s*<\/div>/g,
    (match, content) => {
      const lines = content.trim().split("\n");
      if (
        lines[0].trim().startsWith("@startuml") ||
        lines[0].trim().startsWith("@start")
      ) {
        try {
          const rawContent = decodeHTMLEntities(content);
          const encoded = plantumlEncoder.encode(rawContent);
          const plantUMLUrl = `https://www.plantuml.com/plantuml/svg/${encoded}`;
          return `<div class="plantuml-block my-4">
  <img src="${plantUMLUrl}" alt="PlantUML diagram"
    class="plantuml-diagram max-w-full h-auto rounded-lg shadow-lg"
    loading="lazy" decoding="async">
  <details class="mt-2">
    <summary class="cursor-pointer text-sm text-gray-600 dark:text-gray-400">
      Show PlantUML source
    </summary>
    <pre class="mt-2 p-2 bg-gray-100 dark:bg-gray-900 rounded text-xs overflow-x-auto">
      <code>${escapeHtml(rawContent)}</code>
    </pre>
  </details>
</div>`;
        } catch (error) {
          console.warn("Failed to process PlantUML fallback block:", error);
          return match;
        }
      }
      return match;
    },
  );
  return html;
}
||||
|
||||
function decodeHTMLEntities(text: string): string { |
||||
const textarea = document.createElement("textarea"); |
||||
textarea.innerHTML = text; |
||||
return textarea.value; |
||||
} |
||||
|
||||
/**
 * Processes BPMN blocks in HTML content.
 * No actual diagram rendering is performed: each BPMN listing block is
 * replaced by a styled placeholder panel with a collapsible source view.
 */
function processBPMNBlocks(html: string): string {
  // Only match code blocks with class 'language-bpmn' or 'bpmn'
  html = html.replace(
    /<div class="listingblock">\s*<div class="content">\s*<pre class="highlight">\s*<code[^>]*class="[^"]*(?:language-bpmn|bpmn)[^\"]*"[^>]*>([\s\S]*?)<\/code>\s*<\/pre>\s*<\/div>\s*<\/div>/g,
    (match, content) => {
      try {
        return `<div class="bpmn-block my-4">
  <div class="bpmn-diagram p-4 bg-blue-50 dark:bg-blue-900 rounded-lg border border-blue-200 dark:border-blue-700">
    <div class="text-center text-blue-600 dark:text-blue-400 mb-2">
      <svg class="inline w-6 h-6 mr-2" fill="currentColor" viewBox="0 0 20 20">
        <path d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"/>
      </svg>
      BPMN Diagram
    </div>
    <details class="mt-2">
      <summary class="cursor-pointer text-sm text-gray-600 dark:text-gray-400">
        Show BPMN source
      </summary>
      <pre class="mt-2 p-2 bg-gray-100 dark:bg-gray-900 rounded text-xs overflow-x-auto">
        <code>${escapeHtml(content)}</code>
      </pre>
    </details>
  </div>
</div>`;
      } catch (error) {
        // Leave the original block untouched on failure.
        console.warn("Failed to process BPMN block:", error);
        return match;
      }
    },
  );
  // Fallback: match <pre> blocks whose content contains 'bpmn:' or '<?xml' and 'bpmn'
  html = html.replace(
    /<div class="listingblock">\s*<div class="content">\s*<pre>([\s\S]*?)<\/pre>\s*<\/div>\s*<\/div>/g,
    (match, content) => {
      const text = content.trim();
      if (
        text.includes("bpmn:") ||
        (text.startsWith("<?xml") && text.includes("bpmn"))
      ) {
        try {
          return `<div class="bpmn-block my-4">
  <div class="bpmn-diagram p-4 bg-blue-50 dark:bg-blue-900 rounded-lg border border-blue-200 dark:border-blue-700">
    <div class="text-center text-blue-600 dark:text-blue-400 mb-2">
      <svg class="inline w-6 h-6 mr-2" fill="currentColor" viewBox="0 0 20 20">
        <path d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z"/>
      </svg>
      BPMN Diagram
    </div>
    <details class="mt-2">
      <summary class="cursor-pointer text-sm text-gray-600 dark:text-gray-400">
        Show BPMN source
      </summary>
      <pre class="mt-2 p-2 bg-gray-100 dark:bg-gray-900 rounded text-xs overflow-x-auto">
        <code>${escapeHtml(content)}</code>
      </pre>
    </details>
  </div>
</div>`;
        } catch (error) {
          console.warn("Failed to process BPMN fallback block:", error);
          return match;
        }
      }
      return match;
    },
  );
  return html;
}
||||
|
||||
/**
 * Processes TikZ blocks in HTML content.
 * No actual diagram rendering is performed: each TikZ listing block is
 * replaced by a styled placeholder panel with a collapsible source view.
 */
function processTikZBlocks(html: string): string {
  // Only match code blocks with class 'language-tikz' or 'tikz'
  html = html.replace(
    /<div class="listingblock">\s*<div class="content">\s*<pre class="highlight">\s*<code[^>]*class="[^"]*(?:language-tikz|tikz)[^"]*"[^>]*>([\s\S]*?)<\/code>\s*<\/pre>\s*<\/div>\s*<\/div>/g,
    (match, content) => {
      try {
        return `<div class="tikz-block my-4">
  <div class="tikz-diagram p-4 bg-green-50 dark:bg-green-900 rounded-lg border border-green-200 dark:border-green-700">
    <div class="text-center text-green-600 dark:text-green-400 mb-2">
      <svg class="inline w-6 h-6 mr-2" fill="currentColor" viewBox="0 0 20 20">
        <path d="M3 4a1 1 0 011-1h12a1 1 0 011 1v2a1 1 0 01-1 1H4a1 1 0 01-1-1V4zM3 10a1 1 0 011-1h6a1 1 0 011 1v6a1 1 0 01-1 1H4a1 1 0 01-1-1v-6zM14 9a1 1 0 00-1 1v6a1 1 0 001 1h2a1 1 0 001-1v-6a1 1 0 00-1-1h-2z"/>
      </svg>
      TikZ Diagram
    </div>
    <details class="mt-2">
      <summary class="cursor-pointer text-sm text-gray-600 dark:text-gray-400">
        Show TikZ source
      </summary>
      <pre class="mt-2 p-2 bg-gray-100 dark:bg-gray-900 rounded text-xs overflow-x-auto">
        <code>${escapeHtml(content)}</code>
      </pre>
    </details>
  </div>
</div>`;
      } catch (error) {
        // Leave the original block untouched on failure.
        console.warn("Failed to process TikZ block:", error);
        return match;
      }
    },
  );
  // Fallback: match <pre> blocks whose content starts with \begin{tikzpicture}
  // or merely contains 'tikz' (NOTE(review): the substring check is broad and
  // may misclassify prose mentioning "tikz").
  html = html.replace(
    /<div class="listingblock">\s*<div class="content">\s*<pre>([\s\S]*?)<\/pre>\s*<\/div>\s*<\/div>/g,
    (match, content) => {
      const lines = content.trim().split("\n");
      if (
        lines[0].trim().startsWith("\\begin{tikzpicture}") ||
        content.includes("tikz")
      ) {
        try {
          return `<div class="tikz-block my-4">
  <div class="tikz-diagram p-4 bg-green-50 dark:bg-green-900 rounded-lg border border-green-200 dark:border-green-700">
    <div class="text-center text-green-600 dark:text-green-400 mb-2">
      <svg class="inline w-6 h-6 mr-2" fill="currentColor" viewBox="0 0 20 20">
        <path d="M3 4a1 1 0 011-1h12a1 1 0 011 1v2a1 1 0 01-1 1H4a1 1 0 01-1-1V4zM3 10a1 1 0 011-1h6a1 1 0 011 1v6a1 1 0 01-1 1H4a1 1 0 01-1-1v-6zM14 9a1 1 0 00-1 1v6a1 1 0 001 1h2a1 1 0 001-1v-6a1 1 0 00-1-1h-2z"/>
      </svg>
      TikZ Diagram
    </div>
    <details class="mt-2">
      <summary class="cursor-pointer text-sm text-gray-600 dark:text-gray-400">
        Show TikZ source
      </summary>
      <pre class="mt-2 p-2 bg-gray-100 dark:bg-gray-900 rounded text-xs overflow-x-auto">
        <code>${escapeHtml(content)}</code>
      </pre>
    </details>
  </div>
</div>`;
        } catch (error) {
          console.warn("Failed to process TikZ fallback block:", error);
          return match;
        }
      }
      return match;
    },
  );
  return html;
}
||||
|
||||
/** |
||||
* Escapes HTML characters for safe display |
||||
*/ |
||||
function escapeHtml(text: string): string { |
||||
const div = document.createElement("div"); |
||||
div.textContent = text; |
||||
return div.innerHTML; |
||||
} |
||||
@ -0,0 +1,202 @@
@@ -0,0 +1,202 @@
|
||||
// deno-lint-ignore-file no-this-alias no-explicit-any
|
||||
import Processor from "asciidoctor"; |
||||
import { renderTikZ } from "./tikzRenderer.ts"; |
||||
|
||||
// Simple PlantUML rendering using PlantUML server
|
||||
function renderPlantUML(content: string): string { |
||||
// Encode content for PlantUML server
|
||||
const encoded = btoa(unescape(encodeURIComponent(content))); |
||||
const plantUMLUrl = `https://www.plantuml.com/plantuml/svg/${encoded}`; |
||||
|
||||
return `<img src="${plantUMLUrl}" alt="PlantUML diagram" class="plantuml-diagram max-w-full h-auto rounded-lg shadow-lg my-4" loading="lazy" decoding="async">`; |
||||
} |
||||
|
||||
/**
 * Creates Asciidoctor extensions for advanced content rendering
 * including Asciimath/Latex, PlantUML, BPMN, and TikZ.
 * @returns An Asciidoctor extension registry (typed `any`: the asciidoctor
 *          DSL rebinds `this` and has no usable typings here).
 */
export function createAdvancedExtensions(): any {
  const Asciidoctor = Processor();
  const extensions = Asciidoctor.Extensions.create();

  // Math rendering extension (Asciimath/Latex). The asciidoctor DSL invokes
  // these callbacks with `this` bound to its own processor objects, hence
  // the function expressions and `this: any` annotations.
  extensions.treeProcessor(function (this: any) {
    const dsl = this;
    dsl.process(function (this: any, document: any) {
      const treeProcessor = this;
      processMathBlocks(treeProcessor, document);
    });
  });

  // PlantUML rendering extension
  extensions.treeProcessor(function (this: any) {
    const dsl = this;
    dsl.process(function (this: any, document: any) {
      const treeProcessor = this;
      processPlantUMLBlocks(treeProcessor, document);
    });
  });

  // TikZ rendering extension
  extensions.treeProcessor(function (this: any) {
    const dsl = this;
    dsl.process(function (this: any, document: any) {
      const treeProcessor = this;
      processTikZBlocks(treeProcessor, document);
    });
  });

  // --- Support [plantuml], [tikz], [bpmn] as source blocks ---
  // Helper to register a block for a given name and treat it as a source
  // block; the attributes/options are set redundantly (both via createBlock
  // attrs and explicit setters) to cover asciidoctor API variations.
  function registerDiagramBlock(name: string) {
    extensions.block(name, function (this: any) {
      const self = this;
      self.process(function (parent: any, reader: any, attrs: any) {
        // Read the block content
        const lines = reader.getLines();
        // Create a source block with the correct language and lang attributes
        const block = self.createBlock(parent, "source", lines, {
          ...attrs,
          language: name,
          lang: name,
          style: "source",
          role: name,
        });
        block.setAttribute("language", name);
        block.setAttribute("lang", name);
        block.setAttribute("style", "source");
        block.setAttribute("role", name);
        block.setOption("source", true);
        block.setOption("listing", true);
        block.setStyle("source");
        return block;
      });
    });
  }
  registerDiagramBlock("plantuml");
  registerDiagramBlock("tikz");
  registerDiagramBlock("bpmn");
  // --- END ---

  return extensions;
}
||||
|
||||
/** |
||||
* Processes math blocks (stem blocks) and converts them to rendered HTML |
||||
*/ |
||||
function processMathBlocks(_: any, document: any): void { |
||||
const blocks = document.getBlocks(); |
||||
for (const block of blocks) { |
||||
if (block.getContext() === "stem") { |
||||
const content = block.getContent(); |
||||
if (content) { |
||||
try { |
||||
// Output as a single div with delimiters for MathJax
|
||||
const rendered = `<div class="math-block">$$${content}$$</div>`; |
||||
block.setContent(rendered); |
||||
} catch (error) { |
||||
console.warn("Failed to render math:", error); |
||||
} |
||||
} |
||||
} |
||||
// Inline math: context 'inline' and style 'stem' or 'latexmath'
|
||||
if ( |
||||
block.getContext() === "inline" && |
||||
(block.getStyle() === "stem" || block.getStyle() === "latexmath") |
||||
) { |
||||
const content = block.getContent(); |
||||
if (content) { |
||||
try { |
||||
const rendered = `<span class="math-inline">$${content}$</span>`; |
||||
block.setContent(rendered); |
||||
} catch (error) { |
||||
console.warn("Failed to render inline math:", error); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Processes PlantUML blocks and converts them to rendered SVG |
||||
*/ |
||||
function processPlantUMLBlocks(_: any, document: any): void { |
||||
const blocks = document.getBlocks(); |
||||
|
||||
for (const block of blocks) { |
||||
if (block.getContext() === "listing" && isPlantUMLBlock(block)) { |
||||
const content = block.getContent(); |
||||
if (content) { |
||||
try { |
||||
// Use simple PlantUML rendering
|
||||
const rendered = renderPlantUML(content); |
||||
|
||||
// Replace the block content with the image
|
||||
block.setContent(rendered); |
||||
} catch (error) { |
||||
console.warn("Failed to render PlantUML:", error); |
||||
// Keep original content if rendering fails
|
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Processes TikZ blocks and converts them to rendered SVG |
||||
*/ |
||||
function processTikZBlocks(_: any, document: any): void { |
||||
const blocks = document.getBlocks(); |
||||
|
||||
for (const block of blocks) { |
||||
if (block.getContext() === "listing" && isTikZBlock(block)) { |
||||
const content = block.getContent(); |
||||
if (content) { |
||||
try { |
||||
// Render TikZ to SVG
|
||||
const svg = renderTikZ(content); |
||||
|
||||
// Replace the block content with the SVG
|
||||
block.setContent(svg); |
||||
} catch (error) { |
||||
console.warn("Failed to render TikZ:", error); |
||||
// Keep original content if rendering fails
|
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Checks if a block contains PlantUML content |
||||
*/ |
||||
function isPlantUMLBlock(block: any): boolean { |
||||
const content = block.getContent() || ""; |
||||
const lines = content.split("\n"); |
||||
|
||||
// Check for PlantUML indicators
|
||||
return lines.some( |
||||
(line: string) => |
||||
line.trim().startsWith("@startuml") || |
||||
line.trim().startsWith("@start") || |
||||
line.includes("plantuml") || |
||||
line.includes("uml"), |
||||
); |
||||
} |
||||
|
||||
/** |
||||
* Checks if a block contains TikZ content |
||||
*/ |
||||
function isTikZBlock(block: any): boolean { |
||||
const content = block.getContent() || ""; |
||||
const lines = content.split("\n"); |
||||
|
||||
// Check for TikZ indicators
|
||||
return lines.some( |
||||
(line: string) => |
||||
line.trim().startsWith("\\begin{tikzpicture}") || |
||||
line.trim().startsWith("\\tikz") || |
||||
line.includes("tikzpicture") || |
||||
line.includes("tikz"), |
||||
); |
||||
} |
||||
@ -0,0 +1,133 @@
@@ -0,0 +1,133 @@
|
||||
import { processNostrIdentifiers } from "../nostrUtils"; |
||||
|
||||
/** |
||||
* Normalizes a string for use as a d-tag by converting to lowercase, |
||||
* replacing non-alphanumeric characters with dashes, and removing |
||||
* leading/trailing dashes. |
||||
*/ |
||||
function normalizeDTag(input: string): string { |
||||
return input |
||||
.toLowerCase() |
||||
.replace(/[^\p{L}\p{N}]/gu, "-") |
||||
.replace(/-+/g, "-") |
||||
.replace(/^-|-$/g, ""); |
||||
} |
||||
|
||||
/** |
||||
* Replaces wikilinks in the format [[target]] or [[target|display]] with |
||||
* clickable links to the events page. |
||||
*/ |
||||
function replaceWikilinks(html: string): string { |
||||
// [[target page]] or [[target page|display text]]
|
||||
return html.replace( |
||||
/\[\[([^\]|]+)(?:\|([^\]]+))?\]\]/g, |
||||
(_match, target, label) => { |
||||
const normalized = normalizeDTag(target.trim()); |
||||
const display = (label || target).trim(); |
||||
const url = `./events?d=${normalized}`; |
||||
// Output as a clickable <a> with the [[display]] format and matching link colors
|
||||
return `<a class="wikilink text-primary-600 dark:text-primary-500 hover:underline" data-dtag="${normalized}" data-url="${url}" href="${url}">${display}</a>`; |
||||
}, |
||||
); |
||||
} |
||||
|
||||
/** |
||||
* Replaces AsciiDoctor-generated empty anchor tags <a id="..."></a> with clickable wikilink-style <a> tags. |
||||
*/ |
||||
function replaceAsciiDocAnchors(html: string): string { |
||||
return html.replace(/<a id="([^"]+)"><\/a>/g, (_match, id) => { |
||||
const normalized = normalizeDTag(id.trim()); |
||||
const url = `./events?d=${normalized}`; |
||||
return `<a class="wikilink text-primary-600 dark:text-primary-500 hover:underline" data-dtag="${normalized}" data-url="${url}" href="${url}">${id}</a>`; |
||||
}); |
||||
} |
||||
|
||||
/** |
||||
* Processes nostr addresses in HTML content, but skips addresses that are |
||||
* already within hyperlink tags. |
||||
*/ |
||||
async function processNostrAddresses(html: string): Promise<string> { |
||||
// Helper to check if a match is within an existing <a> tag
|
||||
function isWithinLink(text: string, index: number): boolean { |
||||
// Look backwards from the match position to find the nearest <a> tag
|
||||
const before = text.slice(0, index); |
||||
const lastOpenTag = before.lastIndexOf("<a"); |
||||
const lastCloseTag = before.lastIndexOf("</a>"); |
||||
|
||||
// If we find an opening <a> tag after the last closing </a> tag, we're inside a link
|
||||
return lastOpenTag > lastCloseTag; |
||||
} |
||||
|
||||
// Process nostr addresses that are not within existing links
|
||||
const nostrPattern = |
||||
/nostr:(npub|nprofile|note|nevent|naddr)[a-zA-Z0-9]{20,}/g; |
||||
let processedHtml = html; |
||||
|
||||
// Find all nostr addresses
|
||||
const matches = Array.from(processedHtml.matchAll(nostrPattern)); |
||||
|
||||
// Process them in reverse order to avoid index shifting issues
|
||||
for (let i = matches.length - 1; i >= 0; i--) { |
||||
const match = matches[i]; |
||||
const [fullMatch] = match; |
||||
const matchIndex = match.index ?? 0; |
||||
|
||||
// Skip if already within a link
|
||||
if (isWithinLink(processedHtml, matchIndex)) { |
||||
continue; |
||||
} |
||||
|
||||
// Process the nostr identifier
|
||||
const processedMatch = await processNostrIdentifiers(fullMatch); |
||||
|
||||
// Replace the match in the HTML
|
||||
processedHtml = |
||||
processedHtml.slice(0, matchIndex) + |
||||
processedMatch + |
||||
processedHtml.slice(matchIndex + fullMatch.length); |
||||
} |
||||
|
||||
return processedHtml; |
||||
} |
||||
|
||||
/** |
||||
* Fixes AsciiDoctor stem blocks for MathJax rendering. |
||||
* Joins split spans and wraps content in $$...$$ for block math. |
||||
*/ |
||||
function fixStemBlocks(html: string): string { |
||||
// Replace <div class="stemblock"><div class="content"><span>$</span>...<span>$</span></div></div>
|
||||
// with <div class="stemblock"><div class="content">$$...$$</div></div>
|
||||
return html.replace( |
||||
/<div class="stemblock">\s*<div class="content">\s*<span>\$<\/span>([\s\S]*?)<span>\$<\/span>\s*<\/div>\s*<\/div>/g, |
||||
(_match, mathContent) => { |
||||
// Remove any extra tags inside mathContent
|
||||
const cleanMath = mathContent.replace(/<\/?span[^>]*>/g, "").trim(); |
||||
return `<div class="stemblock"><div class="content">$$${cleanMath}$$</div></div>`; |
||||
}, |
||||
); |
||||
} |
||||
|
||||
/** |
||||
* Post-processes asciidoctor HTML output to add wikilink and nostr address rendering. |
||||
* This function should be called after asciidoctor.convert() to enhance the HTML output. |
||||
*/ |
||||
export async function postProcessAsciidoctorHtml( |
||||
html: string, |
||||
): Promise<string> { |
||||
if (!html) return html; |
||||
|
||||
try { |
||||
// First process AsciiDoctor-generated anchors
|
||||
let processedHtml = replaceAsciiDocAnchors(html); |
||||
// Then process wikilinks in [[...]] format (if any remain)
|
||||
processedHtml = replaceWikilinks(processedHtml); |
||||
// Then process nostr addresses (but not those already in links)
|
||||
processedHtml = await processNostrAddresses(processedHtml); |
||||
processedHtml = fixStemBlocks(processedHtml); // Fix math blocks for MathJax
|
||||
|
||||
return processedHtml; |
||||
} catch (error) { |
||||
console.error("Error in postProcessAsciidoctorHtml:", error); |
||||
return html; // Return original HTML if processing fails
|
||||
} |
||||
} |
||||
@ -0,0 +1,60 @@
@@ -0,0 +1,60 @@
|
||||
/** |
||||
* TikZ renderer using node-tikzjax |
||||
* Converts TikZ LaTeX code to SVG for browser rendering |
||||
*/ |
||||
|
||||
// We'll use a simple approach for now since node-tikzjax might not be available
|
||||
// This is a placeholder implementation that can be enhanced later
|
||||
|
||||
export function renderTikZ(tikzCode: string): string { |
||||
try { |
||||
// For now, we'll create a simple SVG placeholder
|
||||
// In a full implementation, this would use node-tikzjax or similar library
|
||||
|
||||
// Extract TikZ content and create a basic SVG
|
||||
const svgContent = createBasicSVG(tikzCode); |
||||
|
||||
return svgContent; |
||||
} catch (error) { |
||||
console.error("Failed to render TikZ:", error); |
||||
return `<div class="tikz-error text-red-500 p-4 border border-red-300 rounded">
|
||||
<p class="font-bold">TikZ Rendering Error</p> |
||||
<p class="text-sm">Failed to render TikZ diagram. Original code:</p> |
||||
<pre class="mt-2 p-2 bg-gray-100 rounded text-xs overflow-x-auto">${tikzCode}</pre> |
||||
</div>`;
|
||||
} |
||||
} |
||||
|
||||
/**
 * Creates a fixed-size (400x300) placeholder SVG for TikZ content.
 *
 * The SVG shows a "TikZ Diagram / (Rendering not yet implemented)"
 * banner and the escaped TikZ source inside a foreignObject, until a
 * real TikZ renderer is integrated.
 */
function createBasicSVG(tikzCode: string): string {
  // Fixed placeholder dimensions; also used for the viewBox and the
  // inner foreignObject with a small margin.
  const width = 400;
  const height = 300;

  return `<svg width="${width}" height="${height}" class="tikz-diagram max-w-full h-auto rounded-lg shadow-lg my-4" viewBox="0 0 ${width} ${height}">
<rect width="${width}" height="${height}" fill="white" stroke="#ccc" stroke-width="1"/>
<text x="10" y="20" font-family="monospace" font-size="12" fill="#666">
TikZ Diagram
</text>
<text x="10" y="40" font-family="monospace" font-size="10" fill="#999">
(Rendering not yet implemented)
</text>
<foreignObject x="10" y="60" width="${width - 20}" height="${height - 70}">
<div xmlns="http://www.w3.org/1999/xhtml" style="font-family: monospace; font-size: 10px; color: #666; overflow: hidden;">
<pre style="margin: 0; white-space: pre-wrap; word-break: break-all;">${escapeHtml(tikzCode)}</pre>
</div>
</foreignObject>
</svg>`;
}
||||
|
||||
/** |
||||
* Escapes HTML characters for safe display |
||||
*/ |
||||
function escapeHtml(text: string): string { |
||||
const div = document.createElement("div"); |
||||
div.textContent = text; |
||||
return div.innerHTML; |
||||
} |
||||
@ -0,0 +1,188 @@
@@ -0,0 +1,188 @@
|
||||
import { deduplicateRelayUrls } from "./relay_management.ts"; |
||||
|
||||
/**
 * Coarse network states used to choose a relay set.
 */
export enum NetworkCondition {
  ONLINE = 'online', // reachable and responsive
  SLOW = 'slow', // reachable, but round trips exceed the slow threshold
  OFFLINE = 'offline' // no probe endpoint reachable
}

/**
 * Endpoints probed, in order, for connectivity and latency checks.
 * NOTE(review): third-party endpoints — long-term availability and
 * behavior outside the browser are not guaranteed; confirm.
 */
const NETWORK_ENDPOINTS = [
  'https://www.google.com/favicon.ico',
  'https://httpbin.org/status/200',
  'https://api.github.com/zen'
];
||||
|
||||
/** |
||||
* Detects if the network is online using more reliable endpoints |
||||
* @returns Promise that resolves to true if online, false otherwise |
||||
*/ |
||||
export async function isNetworkOnline(): Promise<boolean> { |
||||
for (const endpoint of NETWORK_ENDPOINTS) { |
||||
try { |
||||
// Use a simple fetch without HEAD method to avoid CORS issues
|
||||
await fetch(endpoint, { |
||||
method: 'GET', |
||||
cache: 'no-cache', |
||||
signal: AbortSignal.timeout(3000), |
||||
mode: 'no-cors' // Use no-cors mode to avoid CORS issues
|
||||
}); |
||||
// With no-cors mode, we can't check response.ok, so we assume success if no error
|
||||
return true; |
||||
} catch (error) { |
||||
console.debug(`[network_detection.ts] Failed to reach ${endpoint}:`, error); |
||||
continue; |
||||
} |
||||
} |
||||
|
||||
console.debug('[network_detection.ts] All network endpoints failed'); |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* Tests network speed by measuring response time |
||||
* @returns Promise that resolves to network speed in milliseconds |
||||
*/ |
||||
export async function testNetworkSpeed(): Promise<number> { |
||||
const startTime = performance.now(); |
||||
|
||||
for (const endpoint of NETWORK_ENDPOINTS) { |
||||
try { |
||||
await fetch(endpoint, { |
||||
method: 'GET', |
||||
cache: 'no-cache', |
||||
signal: AbortSignal.timeout(5000), |
||||
mode: 'no-cors' // Use no-cors mode to avoid CORS issues
|
||||
}); |
||||
|
||||
const endTime = performance.now(); |
||||
return endTime - startTime; |
||||
} catch (error) { |
||||
console.debug(`[network_detection.ts] Speed test failed for ${endpoint}:`, error); |
||||
continue; |
||||
} |
||||
} |
||||
|
||||
console.debug('[network_detection.ts] Network speed test failed for all endpoints'); |
||||
return Infinity; // Very slow if it fails
|
||||
} |
||||
|
||||
/** |
||||
* Determines network condition based on connectivity and speed |
||||
* @returns Promise that resolves to NetworkCondition |
||||
*/ |
||||
export async function detectNetworkCondition(): Promise<NetworkCondition> { |
||||
const isOnline = await isNetworkOnline(); |
||||
|
||||
if (!isOnline) { |
||||
console.debug('[network_detection.ts] Network condition: OFFLINE'); |
||||
return NetworkCondition.OFFLINE; |
||||
} |
||||
|
||||
const speed = await testNetworkSpeed(); |
||||
|
||||
// Consider network slow if response time > 2000ms
|
||||
if (speed > 2000) { |
||||
console.debug(`[network_detection.ts] Network condition: SLOW (${speed.toFixed(0)}ms)`); |
||||
return NetworkCondition.SLOW; |
||||
} |
||||
|
||||
console.debug(`[network_detection.ts] Network condition: ONLINE (${speed.toFixed(0)}ms)`); |
||||
return NetworkCondition.ONLINE; |
||||
} |
||||
|
||||
/** |
||||
* Gets the appropriate relay sets based on network condition |
||||
* @param networkCondition The detected network condition |
||||
* @param discoveredLocalRelays Array of discovered local relay URLs |
||||
* @param lowbandwidthRelays Array of low bandwidth relay URLs |
||||
* @param fullRelaySet The complete relay set for normal conditions |
||||
* @returns Object with inbox and outbox relay arrays |
||||
*/ |
||||
export function getRelaySetForNetworkCondition( |
||||
networkCondition: NetworkCondition, |
||||
discoveredLocalRelays: string[], |
||||
lowbandwidthRelays: string[], |
||||
fullRelaySet: { inboxRelays: string[]; outboxRelays: string[] } |
||||
): { inboxRelays: string[]; outboxRelays: string[] } { |
||||
switch (networkCondition) { |
||||
case NetworkCondition.OFFLINE: |
||||
// When offline, use local relays if available, otherwise rely on cache
|
||||
// This will be improved when IndexedDB local relay is implemented
|
||||
if (discoveredLocalRelays.length > 0) { |
||||
console.debug('[network_detection.ts] Using local relays (offline)'); |
||||
return { |
||||
inboxRelays: discoveredLocalRelays, |
||||
outboxRelays: discoveredLocalRelays |
||||
}; |
||||
} else { |
||||
console.debug('[network_detection.ts] No local relays available, will rely on cache (offline)'); |
||||
return { |
||||
inboxRelays: [], |
||||
outboxRelays: [] |
||||
}; |
||||
} |
||||
case NetworkCondition.SLOW: { |
||||
// Local relays + low bandwidth relays when slow (deduplicated)
|
||||
console.debug('[network_detection.ts] Using local + low bandwidth relays (slow network)'); |
||||
const slowInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]); |
||||
const slowOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]); |
||||
return { |
||||
inboxRelays: slowInboxRelays, |
||||
outboxRelays: slowOutboxRelays |
||||
}; |
||||
} |
||||
case NetworkCondition.ONLINE: |
||||
default: |
||||
// Full relay set when online
|
||||
console.debug('[network_detection.ts] Using full relay set (online)'); |
||||
return fullRelaySet; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Starts periodic network monitoring with reduced frequency to avoid spam |
||||
* @param onNetworkChange Callback function called when network condition changes |
||||
* @param checkInterval Interval in milliseconds between network checks (default: 60 seconds) |
||||
* @returns Function to stop the monitoring |
||||
*/ |
||||
export function startNetworkMonitoring( |
||||
onNetworkChange: (condition: NetworkCondition) => void, |
||||
checkInterval: number = 60000 // Increased to 60 seconds to reduce spam
|
||||
): () => void { |
||||
let lastCondition: NetworkCondition | null = null; |
||||
let intervalId: number | null = null; |
||||
|
||||
const checkNetwork = async () => { |
||||
try { |
||||
const currentCondition = await detectNetworkCondition(); |
||||
|
||||
if (currentCondition !== lastCondition) { |
||||
console.debug(`[network_detection.ts] Network condition changed: ${lastCondition} -> ${currentCondition}`); |
||||
lastCondition = currentCondition; |
||||
onNetworkChange(currentCondition); |
||||
} |
||||
} catch (error) { |
||||
console.warn('[network_detection.ts] Network monitoring error:', error); |
||||
} |
||||
}; |
||||
|
||||
// Initial check
|
||||
checkNetwork(); |
||||
|
||||
// Set up periodic monitoring
|
||||
intervalId = globalThis.setInterval(checkNetwork, checkInterval); |
||||
|
||||
// Return function to stop monitoring
|
||||
return () => { |
||||
if (intervalId !== null) { |
||||
clearInterval(intervalId); |
||||
intervalId = null; |
||||
} |
||||
}; |
||||
}
|
||||
@ -0,0 +1,444 @@
@@ -0,0 +1,444 @@
|
||||
import { nip19 } from "nostr-tools"; |
||||
import { getEventHash, signEvent, prefixNostrAddresses } from "./nostrUtils.ts"; |
||||
import { get } from "svelte/store"; |
||||
import { goto } from "$app/navigation"; |
||||
import { EVENT_KINDS, TIME_CONSTANTS } from "./search_constants.ts"; |
||||
import { ndkInstance } from "../ndk.ts"; |
||||
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
|
||||
/**
 * Thread-root reference extracted from a parent event's uppercase
 * tags (see extractRootEventInfo).
 */
export interface RootEventInfo {
  rootId: string; // event id of the thread root
  rootPubkey: string; // root author pubkey
  rootRelay: string; // relay hint for the root event
  rootKind: number; // kind of the root event
  rootAddress: string; // addressable (A-tag) root reference, "" if none
  rootIValue: string; // external-id (I-tag) root value, "" if none
  rootIRelay: string; // relay hint for the I-tag root
  isRootA: boolean; // root was referenced via an A (address) tag
  isRootI: boolean; // root was referenced via an I (external id) tag
}

/**
 * Immediate-parent reference for a reply (see extractParentEventInfo).
 */
export interface ParentEventInfo {
  parentId: string; // event id being replied to
  parentPubkey: string; // parent author pubkey
  parentRelay: string; // relay hint for the parent event
  parentKind: number; // kind of the parent event
  parentAddress: string; // kind:pubkey:d address, "" when parent has no d tag
}

/**
 * Outcome of publishing an event.
 */
export interface EventPublishResult {
  success: boolean; // whether publishing succeeded
  relay?: string; // relay involved, when known
  eventId?: string; // id of the published event, when known
  error?: string; // failure description, when failed
}
||||
|
||||
/** |
||||
* Helper function to find a tag by its first element |
||||
*/ |
||||
function findTag(tags: string[][], tagName: string): string[] | undefined { |
||||
return tags?.find((t: string[]) => t[0] === tagName); |
||||
} |
||||
|
||||
/** |
||||
* Helper function to get tag value safely |
||||
*/ |
||||
function getTagValue( |
||||
tags: string[][], |
||||
tagName: string, |
||||
index: number = 1, |
||||
): string { |
||||
const tag = findTag(tags, tagName); |
||||
return tag?.[index] || ""; |
||||
} |
||||
|
||||
/** |
||||
* Helper function to create a tag array |
||||
*/ |
||||
function createTag(name: string, ...values: (string | number)[]): string[] { |
||||
return [name, ...values.map((v) => String(v))]; |
||||
} |
||||
|
||||
/** |
||||
* Helper function to add tags to an array |
||||
*/ |
||||
function addTags(tags: string[][], ...newTags: string[][]): void { |
||||
tags.push(...newTags); |
||||
} |
||||
|
||||
/**
 * Extracts thread-root information from a parent event's uppercase
 * root-scope tags (E = event id, A = address, I = external id; K and P
 * carry the root kind and pubkey).
 *
 * Falls back to the parent's own id/pubkey/relay/kind when no root
 * tags are present. Precedence when several are present: E, then A,
 * then I.
 */
export function extractRootEventInfo(parent: NDKEvent): RootEventInfo {
  // Default: treat the parent itself as the root of the thread.
  const rootInfo: RootEventInfo = {
    rootId: parent.id,
    rootPubkey: getPubkeyString(parent.pubkey),
    rootRelay: getRelayString(parent.relay),
    rootKind: parent.kind || 1,
    rootAddress: "",
    rootIValue: "",
    rootIRelay: "",
    isRootA: false,
    isRootI: false,
  };

  if (!parent.tags) return rootInfo;

  const rootE = findTag(parent.tags, "E");
  const rootA = findTag(parent.tags, "A");
  const rootI = findTag(parent.tags, "I");

  // Record the reference style even when the E branch below ends up
  // supplying the actual root values.
  rootInfo.isRootA = !!rootA;
  rootInfo.isRootI = !!rootI;

  if (rootE) {
    // Root referenced by event id: [E, <id>, <relay>, <pubkey>].
    rootInfo.rootId = rootE[1];
    rootInfo.rootRelay = getRelayString(rootE[2]);
    rootInfo.rootPubkey = getPubkeyString(rootE[3] || rootInfo.rootPubkey);
    rootInfo.rootKind =
      Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind;
  } else if (rootA) {
    // Addressable root: [A, <kind:pubkey:d>, <relay>]; pubkey from P.
    rootInfo.rootAddress = rootA[1];
    rootInfo.rootRelay = getRelayString(rootA[2]);
    rootInfo.rootPubkey = getPubkeyString(
      getTagValue(parent.tags, "P") || rootInfo.rootPubkey,
    );
    rootInfo.rootKind =
      Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind;
  } else if (rootI) {
    // External-id root: [I, <value>, <relay>].
    rootInfo.rootIValue = rootI[1];
    rootInfo.rootIRelay = getRelayString(rootI[2]);
    rootInfo.rootKind =
      Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind;
  }

  return rootInfo;
}
||||
|
||||
/** |
||||
* Extract parent event information |
||||
*/ |
||||
export function extractParentEventInfo(parent: NDKEvent): ParentEventInfo { |
||||
const dTag = getTagValue(parent.tags || [], "d"); |
||||
const parentAddress = dTag |
||||
? `${parent.kind}:${getPubkeyString(parent.pubkey)}:${dTag}` |
||||
: ""; |
||||
|
||||
return { |
||||
parentId: parent.id, |
||||
parentPubkey: getPubkeyString(parent.pubkey), |
||||
parentRelay: getRelayString(parent.relay), |
||||
parentKind: parent.kind || 1, |
||||
parentAddress, |
||||
}; |
||||
} |
||||
|
||||
/**
 * Builds the uppercase (root-scope) threading tags for a reply:
 * the root reference (A/I/E), its kind (K) and — unless the root is an
 * external id — its author (P).
 */
function buildRootScopeTags(
  rootInfo: RootEventInfo,
): string[][] {
  const tags: string[][] = [];

  if (rootInfo.rootAddress) {
    // Addressable root. NOTE(review): inside this branch rootAddress is
    // truthy, so the `|| rootInfo.rootId` fallback below can never fire.
    const tagType = rootInfo.isRootA ? "A" : rootInfo.isRootI ? "I" : "E";
    addTags(
      tags,
      createTag(
        tagType,
        rootInfo.rootAddress || rootInfo.rootId,
        rootInfo.rootRelay,
      ),
    );
  } else if (rootInfo.rootIValue) {
    // External-id root.
    addTags(tags, createTag("I", rootInfo.rootIValue, rootInfo.rootIRelay));
  } else {
    // Plain event-id root.
    addTags(tags, createTag("E", rootInfo.rootId, rootInfo.rootRelay));
  }

  addTags(tags, createTag("K", rootInfo.rootKind));

  // External-id roots carry no author pubkey to reference.
  if (rootInfo.rootPubkey && !rootInfo.rootIValue) {
    addTags(tags, createTag("P", rootInfo.rootPubkey, rootInfo.rootRelay));
  }

  return tags;
}
||||
|
||||
/** |
||||
* Build parent scope tags for NIP-22 threading |
||||
*/ |
||||
function buildParentScopeTags( |
||||
parent: NDKEvent, |
||||
parentInfo: ParentEventInfo, |
||||
rootInfo: RootEventInfo, |
||||
): string[][] { |
||||
const tags: string[][] = []; |
||||
|
||||
if (parentInfo.parentAddress) { |
||||
const tagType = rootInfo.isRootA ? "a" : rootInfo.isRootI ? "i" : "e"; |
||||
addTags( |
||||
tags, |
||||
createTag(tagType, parentInfo.parentAddress, parentInfo.parentRelay), |
||||
); |
||||
} |
||||
|
||||
addTags( |
||||
tags, |
||||
createTag("e", parent.id, parentInfo.parentRelay), |
||||
createTag("k", parentInfo.parentKind), |
||||
createTag("p", parentInfo.parentPubkey, parentInfo.parentRelay), |
||||
); |
||||
|
||||
return tags; |
||||
} |
||||
|
||||
/**
 * Builds the tag list for a reply event.
 *
 * kind 1 replies use classic "e"/"p" marker tags (plus an "a" root
 * marker for addressable parents). Other kinds (e.g. kind 1111
 * comments) use the NIP-22 style threading referenced in the comments
 * below: uppercase tags for the thread root scope, lowercase tags for
 * the immediate parent scope.
 */
export function buildReplyTags(
  parent: NDKEvent,
  rootInfo: RootEventInfo,
  parentInfo: ParentEventInfo,
  kind: number,
): string[][] {
  const tags: string[][] = [];

  // Addressable (parameterized-replaceable) kinds get an a-tag address.
  const isParentReplaceable =
    parentInfo.parentKind >= EVENT_KINDS.ADDRESSABLE.MIN &&
    parentInfo.parentKind < EVENT_KINDS.ADDRESSABLE.MAX;
  const isParentComment = parentInfo.parentKind === EVENT_KINDS.COMMENT;
  // Replying to a comment that is itself not the thread root.
  const isReplyToComment = isParentComment && rootInfo.rootId !== parent.id;

  if (kind === 1) {
    // Kind 1 replies use simple e/p tags
    addTags(
      tags,
      createTag("e", parent.id, parentInfo.parentRelay, "root"),
      createTag("p", parentInfo.parentPubkey),
    );

    // Add address for replaceable events
    if (isParentReplaceable) {
      const dTag = getTagValue(parent.tags || [], "d");
      if (dTag) {
        const parentAddress = `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`;
        addTags(tags, createTag("a", parentAddress, "", "root"));
      }
    }
  } else {
    // Kind 1111 (comment) uses NIP-22 threading format
    if (isParentReplaceable) {
      const dTag = getTagValue(parent.tags || [], "d");
      if (dTag) {
        const parentAddress = `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`;

        if (isReplyToComment) {
          // Root scope (uppercase) - use the original article
          addTags(
            tags,
            createTag("A", parentAddress, parentInfo.parentRelay),
            createTag("K", rootInfo.rootKind),
            createTag("P", rootInfo.rootPubkey, rootInfo.rootRelay),
          );
          // Parent scope (lowercase) - the comment we're replying to
          addTags(
            tags,
            createTag("e", parent.id, parentInfo.parentRelay),
            createTag("k", parentInfo.parentKind),
            createTag("p", parentInfo.parentPubkey, parentInfo.parentRelay),
          );
        } else {
          // Top-level comment - root and parent are the same
          addTags(
            tags,
            createTag("A", parentAddress, parentInfo.parentRelay),
            createTag("K", rootInfo.rootKind),
            createTag("P", rootInfo.rootPubkey, rootInfo.rootRelay),
            createTag("a", parentAddress, parentInfo.parentRelay),
            createTag("e", parent.id, parentInfo.parentRelay),
            createTag("k", parentInfo.parentKind),
            createTag("p", parentInfo.parentPubkey, parentInfo.parentRelay),
          );
        }
      } else {
        // Fallback to E/e tags if no d-tag found
        if (isReplyToComment) {
          addTags(
            tags,
            createTag("E", rootInfo.rootId, rootInfo.rootRelay),
            createTag("K", rootInfo.rootKind),
            createTag("P", rootInfo.rootPubkey, rootInfo.rootRelay),
            createTag("e", parent.id, parentInfo.parentRelay),
            createTag("k", parentInfo.parentKind),
            createTag("p", parentInfo.parentPubkey, parentInfo.parentRelay),
          );
        } else {
          // NOTE(review): this branch tags the root scope with parent.id
          // (not rootInfo.rootId) — consistent with "parent is the root"
          // here, but worth confirming against NIP-22.
          addTags(
            tags,
            createTag("E", parent.id, rootInfo.rootRelay),
            createTag("K", rootInfo.rootKind),
            createTag("P", rootInfo.rootPubkey, rootInfo.rootRelay),
            createTag("e", parent.id, parentInfo.parentRelay),
            createTag("k", parentInfo.parentKind),
            createTag("p", parentInfo.parentPubkey, parentInfo.parentRelay),
          );
        }
      }
    } else {
      // For regular events, use E/e tags
      if (isReplyToComment) {
        // Reply to a comment - distinguish root from parent
        addTags(tags, ...buildRootScopeTags(rootInfo));
        addTags(
          tags,
          createTag("e", parent.id, parentInfo.parentRelay),
          createTag("k", parentInfo.parentKind),
          createTag("p", parentInfo.parentPubkey, parentInfo.parentRelay),
        );
      } else {
        // Top-level comment or regular event
        addTags(tags, ...buildRootScopeTags(rootInfo));
        addTags(tags, ...buildParentScopeTags(parent, parentInfo, rootInfo));
      }
    }
  }

  return tags;
}
||||
|
||||
/**
 * Builds, hashes and signs a Nostr event.
 *
 * Content is first run through prefixNostrAddresses. Signing prefers a
 * browser extension exposing globalThis.nostr.signEvent and falls back
 * to the app's own getEventHash/signEvent helpers.
 *
 * NOTE(review): every tag is padded/truncated to exactly 4 string
 * elements below, so e.g. ["d","x"] is emitted as ["d","x","",""] —
 * confirm downstream consumers and relays accept the trailing empty
 * strings.
 */
export async function createSignedEvent(
  content: string,
  pubkey: string,
  kind: number,
  tags: string[][],
  // deno-lint-ignore no-explicit-any
): Promise<{ id: string; sig: string; event: any }> {
  const prefixedContent = prefixNostrAddresses(content);

  const eventToSign = {
    kind: Number(kind),
    // Seconds-resolution unix timestamp.
    created_at: Number(
      Math.floor(Date.now() / TIME_CONSTANTS.UNIX_TIMESTAMP_FACTOR),
    ),
    // Normalize each tag to 4 stringified elements (see NOTE above).
    tags: tags.map((tag) => [
      String(tag[0]),
      String(tag[1]),
      String(tag[2] || ""),
      String(tag[3] || ""),
    ]),
    content: String(prefixedContent),
    pubkey: pubkey,
  };

  let sig, id;
  // Prefer the browser signing extension when running in a browser.
  if (typeof window !== "undefined" && globalThis.nostr && globalThis.nostr.signEvent) {
    const signed = await globalThis.nostr.signEvent(eventToSign);
    sig = signed.sig as string;
    // Some signers return the event id; otherwise compute the hash here.
    id = "id" in signed ? (signed.id as string) : getEventHash(eventToSign);
  } else {
    id = getEventHash(eventToSign);
    sig = await signEvent(eventToSign);
  }

  return {
    id,
    sig,
    event: {
      ...eventToSign,
      id,
      sig,
    },
  };
}
||||
|
||||
/** |
||||
* Publishes an event to relays using the new relay management system |
||||
* @param event The event to publish (can be NDKEvent or plain event object) |
||||
* @param relayUrls Array of relay URLs to publish to |
||||
* @returns Promise that resolves to array of successful relay URLs |
||||
*/ |
||||
export async function publishEvent( |
||||
event: NDKEvent, |
||||
relayUrls: string[], |
||||
): Promise<string[]> { |
||||
const successfulRelays: string[] = []; |
||||
const ndk = get(ndkInstance); |
||||
|
||||
if (!ndk) { |
||||
throw new Error("NDK instance not available"); |
||||
} |
||||
|
||||
// Create relay set from URLs
|
||||
const relaySet = NDKRelaySet.fromRelayUrls(relayUrls, ndk); |
||||
|
||||
try { |
||||
// If event is a plain object, create an NDKEvent from it
|
||||
let ndkEvent: NDKEvent; |
||||
if (event.publish && typeof event.publish === 'function') { |
||||
// It's already an NDKEvent
|
||||
ndkEvent = event; |
||||
} else { |
||||
// It's a plain event object, create NDKEvent
|
||||
ndkEvent = new NDKEvent(ndk, event); |
||||
} |
||||
|
||||
// Publish with timeout
|
||||
await ndkEvent.publish(relaySet).withTimeout(5000); |
||||
|
||||
// For now, assume all relays were successful
|
||||
// In a more sophisticated implementation, you'd track individual relay responses
|
||||
successfulRelays.push(...relayUrls); |
||||
|
||||
console.debug("[nostrEventService] Published event successfully:", { |
||||
eventId: ndkEvent.id, |
||||
relayCount: relayUrls.length, |
||||
successfulRelays |
||||
}); |
||||
} catch (error) { |
||||
console.error("[nostrEventService] Failed to publish event:", error); |
||||
throw new Error(`Failed to publish event: ${error}`); |
||||
} |
||||
|
||||
return successfulRelays; |
||||
} |
||||
|
||||
/** |
||||
* Navigate to the published event |
||||
*/ |
||||
export function navigateToEvent(eventId: string): void { |
||||
try { |
||||
// Validate that eventId is a valid hex string
|
||||
if (!/^[0-9a-fA-F]{64}$/.test(eventId)) { |
||||
console.warn("Invalid event ID format:", eventId); |
||||
return; |
||||
} |
||||
|
||||
const nevent = nip19.neventEncode({ id: eventId }); |
||||
goto(`/events?id=${nevent}`); |
||||
} catch (error) { |
||||
console.error("Failed to encode event ID for navigation:", eventId, error); |
||||
} |
||||
} |
||||
|
||||
// Helper functions to ensure relay and pubkey are always strings
|
||||
// deno-lint-ignore no-explicit-any
|
||||
function getRelayString(relay: any): string { |
||||
if (!relay) return ""; |
||||
if (typeof relay === "string") return relay; |
||||
if (typeof relay.url === "string") return relay.url; |
||||
return ""; |
||||
} |
||||
|
||||
// deno-lint-ignore no-explicit-any
|
||||
function getPubkeyString(pubkey: any): string { |
||||
if (!pubkey) return ""; |
||||
if (typeof pubkey === "string") return pubkey; |
||||
if (typeof pubkey.hex === "function") return pubkey.hex(); |
||||
if (typeof pubkey.pubkey === "string") return pubkey.pubkey; |
||||
return ""; |
||||
} |
||||
@ -0,0 +1,393 @@
@@ -0,0 +1,393 @@
|
||||
import { ndkInstance } from "../ndk.ts"; |
||||
import { getUserMetadata, getNpubFromNip05 } from "./nostrUtils.ts"; |
||||
import NDK, { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { searchCache } from "./searchCache.ts"; |
||||
import { communityRelays, secondaryRelays } from "../consts.ts"; |
||||
import { get } from "svelte/store"; |
||||
import type { NostrProfile, ProfileSearchResult } from "./search_types.ts"; |
||||
import { |
||||
fieldMatches, |
||||
nip05Matches, |
||||
normalizeSearchTerm, |
||||
createProfileFromEvent, |
||||
} from "./search_utils.ts"; |
||||
|
||||
/**
 * Search for profiles by various criteria (display name, name, NIP-05, npub)
 *
 * Resolution order: cached results first, then direct npub/nprofile lookup,
 * then exact NIP-05 (`name@domain`) resolution, then NIP-05 probing across
 * common domains, and finally a short relay scan. Non-empty results are
 * written back to the search cache. Search errors yield an empty result
 * rather than throwing; only a missing NDK instance throws.
 *
 * @param searchTerm Raw user input; normalized before matching.
 * @returns Matching profiles plus a `Status` map (currently always empty).
 * @throws Error when the NDK instance is not initialized.
 */
export async function searchProfiles(
  searchTerm: string,
): Promise<ProfileSearchResult> {
  const normalizedSearchTerm = normalizeSearchTerm(searchTerm);

  console.log(
    "searchProfiles called with:",
    searchTerm,
    "normalized:",
    normalizedSearchTerm,
  );

  // Check cache first
  const cachedResult = searchCache.get("profile", normalizedSearchTerm);
  if (cachedResult) {
    console.log("Found cached result for:", normalizedSearchTerm);
    // Cached entries store profiles as serialized events; rehydrate them and
    // drop any whose content no longer parses as JSON.
    const profiles = cachedResult.events
      .map((event) => {
        try {
          const profileData = JSON.parse(event.content);
          return createProfileFromEvent(event, profileData);
        } catch {
          return null;
        }
      })
      .filter(Boolean) as NostrProfile[];

    console.log("Cached profiles found:", profiles.length);
    return { profiles, Status: {} };
  }

  const ndk = get(ndkInstance);
  if (!ndk) {
    console.error("NDK not initialized");
    throw new Error("NDK not initialized");
  }

  console.log("NDK initialized, starting search logic");

  let foundProfiles: NostrProfile[] = [];

  try {
    // Check if it's a valid npub/nprofile first
    if (
      normalizedSearchTerm.startsWith("npub") ||
      normalizedSearchTerm.startsWith("nprofile")
    ) {
      try {
        const metadata = await getUserMetadata(normalizedSearchTerm);
        if (metadata) {
          foundProfiles = [metadata];
        }
      } catch (error) {
        // Lookup failure for a bech32 id just yields zero results.
        console.error("Error fetching metadata for npub:", error);
      }
    } else if (normalizedSearchTerm.includes("@")) {
      // Check if it's a NIP-05 address - normalize it properly
      const normalizedNip05 = normalizedSearchTerm.toLowerCase();
      try {
        const npub = await getNpubFromNip05(normalizedNip05);
        if (npub) {
          const metadata = await getUserMetadata(npub);
          // NOTE(review): `pubkey` is set to the bech32 npub here, not hex —
          // confirm downstream consumers expect that encoding.
          const profile: NostrProfile = {
            ...metadata,
            pubkey: npub,
          };
          foundProfiles = [profile];
        }
      } catch (e) {
        console.error("[Search] NIP-05 lookup failed:", e);
      }
    } else {
      // Try NIP-05 search first (faster than relay search)
      console.log("Starting NIP-05 search for:", normalizedSearchTerm);
      foundProfiles = await searchNip05Domains(normalizedSearchTerm);
      console.log(
        "NIP-05 search completed, found:",
        foundProfiles.length,
        "profiles",
      );

      // If no NIP-05 results, try quick relay search
      if (foundProfiles.length === 0) {
        console.log("No NIP-05 results, trying quick relay search");
        foundProfiles = await quickRelaySearch(normalizedSearchTerm, ndk);
        console.log(
          "Quick relay search completed, found:",
          foundProfiles.length,
          "profiles",
        );
      }
    }

    // Cache the results
    if (foundProfiles.length > 0) {
      // The cache stores events, so wrap each profile in a synthetic NDKEvent
      // whose content is the serialized profile.
      const events = foundProfiles.map((profile) => {
        const event = new NDKEvent(ndk);
        event.content = JSON.stringify(profile);
        event.pubkey = profile.pubkey || "";
        return event;
      });

      const result = {
        events,
        secondOrder: [],
        tTagEvents: [],
        eventIds: new Set<string>(),
        addresses: new Set<string>(),
        searchType: "profile",
        searchTerm: normalizedSearchTerm,
      };
      searchCache.set("profile", normalizedSearchTerm, result);
    }

    console.log("Search completed, found profiles:", foundProfiles.length);
    return { profiles: foundProfiles, Status: {} };
  } catch (error) {
    // Any unexpected failure degrades to an empty result set.
    console.error("Error searching profiles:", error);
    return { profiles: [], Status: {} };
  }
}
||||
|
||||
/**
 * Search for NIP-05 addresses across common domains
 *
 * Tries `<term>@gitcitadel.com` first and returns immediately on a hit;
 * otherwise probes the remaining domains in parallel and collects every
 * successful resolution. Individual lookup failures are logged and skipped.
 *
 * @param searchTerm Local-part candidate; lowercased and trimmed before use.
 * @returns Profiles resolved via NIP-05, possibly empty.
 */
async function searchNip05Domains(
  searchTerm: string,
): Promise<NostrProfile[]> {
  const foundProfiles: NostrProfile[] = [];

  // Enhanced list of common domains for NIP-05 lookups
  // Prioritize gitcitadel.com since we know it has profiles
  const commonDomains = [
    "gitcitadel.com", // Prioritize this domain
    "theforest.nostr1.com",
    "nostr1.com",
    "nostr.land",
    "sovbit.host",
    "damus.io",
    "snort.social",
    "iris.to",
    "coracle.social",
    "nostr.band",
    "nostr.wine",
    "purplepag.es",
    "relay.noswhere.com",
    "aggr.nostr.land",
    "nostr.sovbit.host",
    "freelay.sovbit.host",
    "nostr21.com",
    "greensoul.space",
    "relay.damus.io",
    "relay.nostr.band",
  ];

  // Normalize the search term for NIP-05 lookup
  const normalizedSearchTerm = searchTerm.toLowerCase().trim();
  console.log("NIP-05 search: normalized search term:", normalizedSearchTerm);

  // Try gitcitadel.com first with extra debugging
  const gitcitadelAddress = `${normalizedSearchTerm}@gitcitadel.com`;
  console.log("NIP-05 search: trying gitcitadel.com first:", gitcitadelAddress);
  try {
    const npub = await getNpubFromNip05(gitcitadelAddress);
    if (npub) {
      console.log(
        "NIP-05 search: SUCCESS! found npub for gitcitadel.com:",
        npub,
      );
      const metadata = await getUserMetadata(npub);
      // NOTE(review): `pubkey` is set to the bech32 npub, not hex — confirm
      // downstream consumers expect that encoding.
      const profile: NostrProfile = {
        ...metadata,
        pubkey: npub,
      };
      console.log(
        "NIP-05 search: created profile for gitcitadel.com:",
        profile,
      );
      foundProfiles.push(profile);
      return foundProfiles; // Return immediately if we found it on gitcitadel.com
    } else {
      console.log("NIP-05 search: no npub found for gitcitadel.com");
    }
  } catch (e) {
    console.log("NIP-05 search: error for gitcitadel.com:", e);
  }

  // If gitcitadel.com didn't work, try other domains
  console.log("NIP-05 search: gitcitadel.com failed, trying other domains...");
  const otherDomains = commonDomains.filter(
    (domain) => domain !== "gitcitadel.com",
  );

  // Search all other domains in parallel with timeout
  const searchPromises = otherDomains.map(async (domain) => {
    const nip05Address = `${normalizedSearchTerm}@${domain}`;
    console.log("NIP-05 search: trying address:", nip05Address);
    try {
      const npub = await getNpubFromNip05(nip05Address);
      if (npub) {
        console.log("NIP-05 search: found npub for", nip05Address, ":", npub);
        const metadata = await getUserMetadata(npub);
        const profile: NostrProfile = {
          ...metadata,
          pubkey: npub,
        };
        console.log(
          "NIP-05 search: created profile for",
          nip05Address,
          ":",
          profile,
        );
        return profile;
      } else {
        console.log("NIP-05 search: no npub found for", nip05Address);
      }
    } catch (e) {
      console.log("NIP-05 search: error for", nip05Address, ":", e);
      // Continue to next domain
    }
    return null;
  });

  // Wait for all searches with timeout
  // NOTE(review): no explicit timeout is applied here — any timeout comes
  // from getNpubFromNip05 itself; confirm it cannot hang indefinitely.
  const results = await Promise.allSettled(searchPromises);

  // Keep only the lookups that resolved to a profile.
  for (const result of results) {
    if (result.status === "fulfilled" && result.value) {
      foundProfiles.push(result.value);
    }
  }

  console.log("NIP-05 search: total profiles found:", foundProfiles.length);
  return foundProfiles;
}
||||
|
||||
/**
 * Quick relay search with short timeout
 *
 * Subscribes for kind-0 (profile metadata) events on every configured relay
 * in parallel, matches profile fields against the normalized search term,
 * and resolves each relay's partial results on EOSE or after a 1.5s
 * timeout. Results are deduplicated by pubkey across relays.
 *
 * @param searchTerm Raw term; normalized before field comparison.
 * @param ndk Initialized NDK instance used for subscriptions.
 * @returns Unique matching profiles (last relay's copy wins per pubkey).
 */
async function quickRelaySearch(
  searchTerm: string,
  ndk: NDK,
): Promise<NostrProfile[]> {
  console.log("quickRelaySearch called with:", searchTerm);

  // Normalize the search term for relay search
  const normalizedSearchTerm = normalizeSearchTerm(searchTerm);
  console.log("Normalized search term for relay search:", normalizedSearchTerm);

  // Use all profile relays for better coverage
  const quickRelayUrls = [...communityRelays, ...secondaryRelays]; // Use all available relays
  console.log("Using all relays for search:", quickRelayUrls);

  // Create relay sets for parallel search
  // NOTE(review): filter(Boolean) drops failed relay sets, so `index` below
  // can drift from quickRelayUrls when a set fails to build — this only
  // mislabels log messages, not results.
  const relaySets = quickRelayUrls
    .map((url) => {
      try {
        return NDKRelaySet.fromRelayUrls([url], ndk);
      } catch (e) {
        console.warn(`Failed to create relay set for ${url}:`, e);
        return null;
      }
    })
    .filter(Boolean);

  // Search all relays in parallel with short timeout
  // (a null entry maps to a plain [] — Promise.allSettled accepts non-promises)
  const searchPromises = relaySets.map((relaySet, index) => {
    if (!relaySet) return [];

    return new Promise<NostrProfile[]>((resolve) => {
      const foundInRelay: NostrProfile[] = [];
      let eventCount = 0;

      console.log(
        `Starting search on relay ${index + 1}: ${quickRelayUrls[index]}`,
      );

      const sub = ndk.subscribe(
        { kinds: [0] },
        { closeOnEose: true },
        relaySet,
      );

      sub.on("event", (event: NDKEvent) => {
        eventCount++;
        try {
          if (!event.content) return;
          const profileData = JSON.parse(event.content);
          // Profiles in the wild use both displayName and display_name.
          const displayName =
            profileData.displayName || profileData.display_name || "";
          const display_name = profileData.display_name || "";
          const name = profileData.name || "";
          const nip05 = profileData.nip05 || "";
          const about = profileData.about || "";

          // Check if any field matches the search term using normalized comparison
          const matchesDisplayName = fieldMatches(
            displayName,
            normalizedSearchTerm,
          );
          const matchesDisplay_name = fieldMatches(
            display_name,
            normalizedSearchTerm,
          );
          const matchesName = fieldMatches(name, normalizedSearchTerm);
          const matchesNip05 = nip05Matches(nip05, normalizedSearchTerm);
          const matchesAbout = fieldMatches(about, normalizedSearchTerm);

          if (
            matchesDisplayName ||
            matchesDisplay_name ||
            matchesName ||
            matchesNip05 ||
            matchesAbout
          ) {
            console.log(`Found matching profile on relay ${index + 1}:`, {
              name: profileData.name,
              display_name: profileData.display_name,
              nip05: profileData.nip05,
              pubkey: event.pubkey,
              searchTerm: normalizedSearchTerm,
            });
            const profile = createProfileFromEvent(event, profileData);

            // Check if we already have this profile in this relay
            const existingIndex = foundInRelay.findIndex(
              (p) => p.pubkey === event.pubkey,
            );
            if (existingIndex === -1) {
              foundInRelay.push(profile);
            }
          }
        } catch {
          // Invalid JSON or other error, skip
        }
      });

      sub.on("eose", () => {
        console.log(
          `Relay ${index + 1} (${quickRelayUrls[index]}) search completed, processed ${eventCount} events, found ${foundInRelay.length} matches`,
        );
        resolve(foundInRelay);
      });

      // Short timeout for quick search
      // (resolving twice is harmless — a promise settles only once)
      setTimeout(() => {
        console.log(
          `Relay ${index + 1} (${quickRelayUrls[index]}) search timed out after 1.5s, processed ${eventCount} events, found ${foundInRelay.length} matches`,
        );
        sub.stop();
        resolve(foundInRelay);
      }, 1500); // 1.5 second timeout per relay
    });
  });

  // Wait for all searches to complete
  const results = await Promise.allSettled(searchPromises);

  // Combine and deduplicate results
  const allProfiles: Record<string, NostrProfile> = {};

  for (const result of results) {
    if (result.status === "fulfilled") {
      for (const profile of result.value) {
        if (profile.pubkey) {
          allProfiles[profile.pubkey] = profile;
        }
      }
    }
  }

  console.log(
    `Total unique profiles found: ${Object.keys(allProfiles).length}`,
  );
  return Object.values(allProfiles);
}
||||
@ -0,0 +1,142 @@
@@ -0,0 +1,142 @@
|
||||
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts"; |
||||
import { TIMEOUTS } from "./search_constants.ts"; |
||||
import { get } from "svelte/store"; |
||||
|
||||
/** Result of probing a single relay's WebSocket endpoint. */
export interface RelayDiagnostic {
  // Relay URL that was tested.
  url: string;
  // True when the WebSocket opened (also true for auth-required relays).
  connected: boolean;
  // True when the relay sent an "auth-required" NOTICE.
  requiresAuth: boolean;
  // Failure reason when not connected.
  error?: string;
  // Milliseconds from connection attempt to resolution.
  responseTime?: number;
}
||||
|
||||
/** |
||||
* Tests connection to a single relay |
||||
*/ |
||||
export function testRelay(url: string): Promise<RelayDiagnostic> { |
||||
const startTime = Date.now(); |
||||
|
||||
return new Promise((resolve) => { |
||||
const ws = new WebSocket(url); |
||||
let resolved = false; |
||||
|
||||
const timeout = setTimeout(() => { |
||||
if (!resolved) { |
||||
resolved = true; |
||||
ws.close(); |
||||
resolve({ |
||||
url, |
||||
connected: false, |
||||
requiresAuth: false, |
||||
error: "Connection timeout", |
||||
responseTime: Date.now() - startTime, |
||||
}); |
||||
} |
||||
}, TIMEOUTS.RELAY_DIAGNOSTICS); |
||||
|
||||
ws.onopen = () => { |
||||
if (!resolved) { |
||||
resolved = true; |
||||
clearTimeout(timeout); |
||||
ws.close(); |
||||
resolve({ |
||||
url, |
||||
connected: true, |
||||
requiresAuth: false, |
||||
responseTime: Date.now() - startTime, |
||||
}); |
||||
} |
||||
}; |
||||
|
||||
ws.onerror = () => { |
||||
if (!resolved) { |
||||
resolved = true; |
||||
clearTimeout(timeout); |
||||
resolve({ |
||||
url, |
||||
connected: false, |
||||
requiresAuth: false, |
||||
error: "WebSocket error", |
||||
responseTime: Date.now() - startTime, |
||||
}); |
||||
} |
||||
}; |
||||
|
||||
ws.onmessage = (event) => { |
||||
const data = JSON.parse(event.data); |
||||
if (data[0] === "NOTICE" && data[1]?.includes("auth-required")) { |
||||
if (!resolved) { |
||||
resolved = true; |
||||
clearTimeout(timeout); |
||||
ws.close(); |
||||
resolve({ |
||||
url, |
||||
connected: true, |
||||
requiresAuth: true, |
||||
responseTime: Date.now() - startTime, |
||||
}); |
||||
} |
||||
} |
||||
}; |
||||
}); |
||||
} |
||||
|
||||
/** |
||||
* Tests all relays and returns diagnostic information |
||||
*/ |
||||
export async function testAllRelays(): Promise<RelayDiagnostic[]> { |
||||
// Use the new relay management system
|
||||
const allRelays = [...get(activeInboxRelays), ...get(activeOutboxRelays)]; |
||||
|
||||
console.log("[RelayDiagnostics] Testing", allRelays.length, "relays..."); |
||||
|
||||
const results = await Promise.allSettled( |
||||
allRelays.map((url) => testRelay(url)), |
||||
); |
||||
|
||||
return results.map((result, index) => { |
||||
if (result.status === "fulfilled") { |
||||
return result.value; |
||||
} else { |
||||
return { |
||||
url: allRelays[index], |
||||
connected: false, |
||||
requiresAuth: false, |
||||
error: "Test failed", |
||||
}; |
||||
} |
||||
}); |
||||
} |
||||
|
||||
/** |
||||
* Gets working relays from diagnostic results |
||||
*/ |
||||
export function getWorkingRelays(diagnostics: RelayDiagnostic[]): string[] { |
||||
return diagnostics.filter((d) => d.connected).map((d) => d.url); |
||||
} |
||||
|
||||
/** |
||||
* Logs relay diagnostic results to console |
||||
*/ |
||||
export function logRelayDiagnostics(diagnostics: RelayDiagnostic[]): void { |
||||
console.group("[RelayDiagnostics] Results"); |
||||
|
||||
const working = diagnostics.filter((d) => d.connected); |
||||
const failed = diagnostics.filter((d) => !d.connected); |
||||
|
||||
console.log(`✅ Working relays (${working.length}):`); |
||||
working.forEach((d) => { |
||||
console.log( |
||||
` - ${d.url}${d.requiresAuth ? " (requires auth)" : ""}${d.responseTime ? ` (${d.responseTime}ms)` : ""}`, |
||||
); |
||||
}); |
||||
|
||||
if (failed.length > 0) { |
||||
console.log(`❌ Failed relays (${failed.length}):`); |
||||
failed.forEach((d) => { |
||||
console.log(` - ${d.url}: ${d.error || "Unknown error"}`); |
||||
}); |
||||
} |
||||
|
||||
console.groupEnd(); |
||||
} |
||||
@ -0,0 +1,531 @@
@@ -0,0 +1,531 @@
|
||||
import NDK, { NDKKind, NDKRelay, NDKUser } from "@nostr-dev-kit/ndk"; |
||||
import { searchRelays, secondaryRelays, anonymousRelays, lowbandwidthRelays, localRelays } from "../consts.ts"; |
||||
import { getRelaySetForNetworkCondition } from "./network_detection.ts"; |
||||
import { networkCondition } from "../stores/networkStore.ts"; |
||||
import { get } from "svelte/store"; |
||||
|
||||
/** |
||||
* Normalizes a relay URL to a standard format |
||||
* @param url The relay URL to normalize |
||||
* @returns The normalized relay URL |
||||
*/ |
||||
export function normalizeRelayUrl(url: string): string { |
||||
let normalized = url.toLowerCase().trim(); |
||||
|
||||
// Ensure protocol is present
|
||||
if (!normalized.startsWith('ws://') && !normalized.startsWith('wss://')) { |
||||
normalized = 'wss://' + normalized; |
||||
} |
||||
|
||||
// Remove trailing slash
|
||||
normalized = normalized.replace(/\/$/, ''); |
||||
|
||||
return normalized; |
||||
} |
||||
|
||||
/** |
||||
* Normalizes an array of relay URLs |
||||
* @param urls Array of relay URLs to normalize |
||||
* @returns Array of normalized relay URLs |
||||
*/ |
||||
export function normalizeRelayUrls(urls: string[]): string[] { |
||||
return urls.map(normalizeRelayUrl); |
||||
} |
||||
|
||||
/** |
||||
* Removes duplicates from an array of relay URLs |
||||
* @param urls Array of relay URLs |
||||
* @returns Array of unique relay URLs |
||||
*/ |
||||
export function deduplicateRelayUrls(urls: string[]): string[] { |
||||
const normalized = normalizeRelayUrls(urls); |
||||
return [...new Set(normalized)]; |
||||
} |
||||
|
||||
/**
 * Tests connection to a relay and returns connection status
 *
 * Opens an NDKRelay connection (upgrading the URL to wss:// for remote
 * hosts) and resolves on the first of: connect, disconnect-before-connect,
 * or a 3s timeout. Never rejects.
 *
 * @param relayUrl The relay URL to test
 * @param ndk The NDK instance
 * @returns Promise that resolves to connection status
 */
export function testRelayConnection(
  relayUrl: string,
  ndk: NDK,
): Promise<{
  connected: boolean;
  requiresAuth: boolean;
  error?: string;
  actualUrl?: string;
}> {
  return new Promise((resolve) => {
    // Ensure the URL is using wss:// protocol
    const secureUrl = ensureSecureWebSocket(relayUrl);

    // Use the existing NDK instance instead of creating a new one
    const relay = new NDKRelay(secureUrl, undefined, ndk);
    let authRequired = false;
    let connected = false;
    let error: string | undefined;
    let actualUrl: string | undefined;

    // Timeout path: give up and report failure. Handlers below may also
    // call resolve; a promise settles only once, so late calls are no-ops.
    const timeout = setTimeout(() => {
      relay.disconnect();
      resolve({
        connected: false,
        requiresAuth: authRequired,
        error: "Connection timeout",
        actualUrl,
      });
    }, 3000); // Increased timeout to 3 seconds to give relays more time

    relay.on("connect", () => {
      // Success: record the URL that actually worked, then drop the
      // connection — this is only a probe.
      connected = true;
      actualUrl = secureUrl;
      clearTimeout(timeout);
      relay.disconnect();
      resolve({
        connected: true,
        requiresAuth: authRequired,
        error,
        actualUrl,
      });
    });

    relay.on("notice", (message: string) => {
      // NIP-42-style relays announce auth via a NOTICE.
      if (message.includes("auth-required")) {
        authRequired = true;
      }
    });

    relay.on("disconnect", () => {
      // Only a disconnect *before* a successful connect counts as failure;
      // the deliberate disconnect above sets `connected` first.
      if (!connected) {
        error = "Connection failed";
        clearTimeout(timeout);
        resolve({
          connected: false,
          requiresAuth: authRequired,
          error,
          actualUrl,
        });
      }
    });

    relay.connect();
  });
}
||||
|
||||
/** |
||||
* Ensures a relay URL uses secure WebSocket protocol for remote relays |
||||
* @param url The relay URL to secure |
||||
* @returns The URL with wss:// protocol (except for localhost)
|
||||
*/ |
||||
function ensureSecureWebSocket(url: string): string { |
||||
// For localhost, always use ws:// (never wss://)
|
||||
if (url.includes('localhost') || url.includes('127.0.0.1')) { |
||||
// Convert any wss://localhost to ws://localhost
|
||||
return url.replace(/^wss:\/\//, "ws://"); |
||||
} |
||||
|
||||
// Replace ws:// with wss:// for remote relays
|
||||
const secureUrl = url.replace(/^ws:\/\//, "wss://"); |
||||
|
||||
if (secureUrl !== url) { |
||||
console.warn( |
||||
`[relay_management.ts] Protocol upgrade for rem ote relay: ${url} -> ${secureUrl}`, |
||||
); |
||||
} |
||||
|
||||
return secureUrl; |
||||
} |
||||
|
||||
/** |
||||
* Tests connection to local relays |
||||
* @param localRelayUrls Array of local relay URLs to test |
||||
* @param ndk NDK instance |
||||
* @returns Promise that resolves to array of working local relay URLs |
||||
*/ |
||||
async function testLocalRelays(localRelayUrls: string[], ndk: NDK): Promise<string[]> { |
||||
const workingRelays: string[] = []; |
||||
|
||||
if (localRelayUrls.length === 0) { |
||||
return workingRelays; |
||||
} |
||||
|
||||
console.debug(`[relay_management.ts] Testing ${localRelayUrls.length} local relays...`); |
||||
|
||||
await Promise.all( |
||||
localRelayUrls.map(async (url) => { |
||||
try { |
||||
const result = await testRelayConnection(url, ndk); |
||||
if (result.connected) { |
||||
workingRelays.push(url); |
||||
console.debug(`[relay_management.ts] Local relay connected: ${url}`); |
||||
} else { |
||||
console.debug(`[relay_management.ts] Local relay failed: ${url} - ${result.error}`); |
||||
} |
||||
} catch { |
||||
// Silently ignore local relay failures - they're optional
|
||||
console.debug(`[relay_management.ts] Local relay error (ignored): ${url}`); |
||||
} |
||||
}) |
||||
); |
||||
|
||||
console.debug(`[relay_management.ts] Found ${workingRelays.length} working local relays`); |
||||
return workingRelays; |
||||
} |
||||
|
||||
/** |
||||
* Discovers local relays by testing common localhost URLs |
||||
* @param ndk NDK instance |
||||
* @returns Promise that resolves to array of working local relay URLs |
||||
*/ |
||||
export async function discoverLocalRelays(ndk: NDK): Promise<string[]> { |
||||
try { |
||||
// If no local relays are configured, return empty array
|
||||
if (localRelays.length === 0) { |
||||
console.debug('[relay_management.ts] No local relays configured'); |
||||
return []; |
||||
} |
||||
|
||||
// Convert wss:// URLs from consts to ws:// for local testing
|
||||
const localRelayUrls = localRelays.map((url: string) =>
|
||||
url.replace(/^wss:\/\//, 'ws://') |
||||
); |
||||
|
||||
const workingRelays = await testLocalRelays(localRelayUrls, ndk); |
||||
|
||||
// If no local relays are working, return empty array
|
||||
// The network detection logic will provide fallback relays
|
||||
return workingRelays; |
||||
} catch { |
||||
// Silently fail and return empty array
|
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fetches user's local relays from kind 10432 event |
||||
* @param ndk NDK instance |
||||
* @param user User to fetch local relays for |
||||
* @returns Promise that resolves to array of local relay URLs |
||||
*/ |
||||
export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<string[]> { |
||||
try { |
||||
const localRelayEvent = await ndk.fetchEvent( |
||||
{ |
||||
kinds: [10432 as NDKKind], |
||||
authors: [user.pubkey], |
||||
}, |
||||
{ |
||||
groupable: false, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
} |
||||
); |
||||
|
||||
if (!localRelayEvent) { |
||||
return []; |
||||
} |
||||
|
||||
const localRelays: string[] = []; |
||||
localRelayEvent.tags.forEach((tag) => { |
||||
if (tag[0] === 'r' && tag[1]) { |
||||
localRelays.push(tag[1]); |
||||
} |
||||
}); |
||||
|
||||
return localRelays; |
||||
} catch (error) { |
||||
console.info('[relay_management.ts] Error fetching user local relays:', error); |
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fetches user's blocked relays from kind 10006 event |
||||
* @param ndk NDK instance |
||||
* @param user User to fetch blocked relays for |
||||
* @returns Promise that resolves to array of blocked relay URLs |
||||
*/ |
||||
export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<string[]> { |
||||
try { |
||||
const blockedRelayEvent = await ndk.fetchEvent( |
||||
{ |
||||
kinds: [10006], |
||||
authors: [user.pubkey], |
||||
}, |
||||
{ |
||||
groupable: false, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
} |
||||
); |
||||
|
||||
if (!blockedRelayEvent) { |
||||
return []; |
||||
} |
||||
|
||||
const blockedRelays: string[] = []; |
||||
blockedRelayEvent.tags.forEach((tag) => { |
||||
if (tag[0] === 'r' && tag[1]) { |
||||
blockedRelays.push(tag[1]); |
||||
} |
||||
}); |
||||
|
||||
return blockedRelays; |
||||
} catch (error) { |
||||
console.info('[relay_management.ts] Error fetching user blocked relays:', error); |
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fetches user's outbox relays from NIP-65 relay list |
||||
* @param ndk NDK instance |
||||
* @param user User to fetch outbox relays for |
||||
* @returns Promise that resolves to array of outbox relay URLs |
||||
*/ |
||||
export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<string[]> { |
||||
try { |
||||
console.debug('[relay_management.ts] Fetching outbox relays for user:', user.pubkey); |
||||
const relayList = await ndk.fetchEvent( |
||||
{ |
||||
kinds: [10002], |
||||
authors: [user.pubkey], |
||||
}, |
||||
{ |
||||
groupable: false, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
} |
||||
); |
||||
|
||||
if (!relayList) { |
||||
console.debug('[relay_management.ts] No relay list found for user'); |
||||
return []; |
||||
} |
||||
|
||||
console.debug('[relay_management.ts] Found relay list event:', relayList.id); |
||||
console.debug('[relay_management.ts] Relay list tags:', relayList.tags); |
||||
|
||||
const outboxRelays: string[] = []; |
||||
relayList.tags.forEach((tag) => { |
||||
console.debug('[relay_management.ts] Processing tag:', tag); |
||||
if (tag[0] === 'w' && tag[1]) { |
||||
outboxRelays.push(tag[1]); |
||||
console.debug('[relay_management.ts] Added outbox relay:', tag[1]); |
||||
} else if (tag[0] === 'r' && tag[1]) { |
||||
// Some relay lists use 'r' for both inbox and outbox
|
||||
outboxRelays.push(tag[1]); |
||||
console.debug('[relay_management.ts] Added relay (r tag):', tag[1]); |
||||
} else { |
||||
console.debug('[relay_management.ts] Skipping tag:', tag[0], 'value:', tag[1]); |
||||
} |
||||
}); |
||||
|
||||
console.debug('[relay_management.ts] Final outbox relays:', outboxRelays); |
||||
return outboxRelays; |
||||
} catch (error) { |
||||
console.info('[relay_management.ts] Error fetching user outbox relays:', error); |
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Gets browser extension's relay configuration by querying the extension directly |
||||
* @returns Promise that resolves to array of extension relay URLs |
||||
*/ |
||||
export async function getExtensionRelays(): Promise<string[]> { |
||||
try { |
||||
// Check if we're in a browser environment with extension support
|
||||
if (typeof window === 'undefined' || !globalThis.nostr) { |
||||
console.debug('[relay_management.ts] No globalThis.nostr available'); |
||||
return []; |
||||
} |
||||
|
||||
console.debug('[relay_management.ts] Extension available, checking for getRelays()'); |
||||
const extensionRelays: string[] = []; |
||||
|
||||
// Try to get relays from the extension's API
|
||||
// Different extensions may expose their relay config differently
|
||||
if (globalThis.nostr.getRelays) { |
||||
console.debug('[relay_management.ts] getRelays() method found, calling it...'); |
||||
try { |
||||
const relays = await globalThis.nostr.getRelays(); |
||||
console.debug('[relay_management.ts] getRelays() returned:', relays); |
||||
if (relays && typeof relays === 'object') { |
||||
// Convert relay object to array of URLs
|
||||
const relayUrls = Object.keys(relays); |
||||
extensionRelays.push(...relayUrls); |
||||
console.debug('[relay_management.ts] Got relays from extension:', relayUrls); |
||||
} |
||||
} catch (error) { |
||||
console.debug('[relay_management.ts] Extension getRelays() failed:', error); |
||||
} |
||||
} else { |
||||
console.debug('[relay_management.ts] getRelays() method not found on globalThis.nostr'); |
||||
} |
||||
|
||||
// If getRelays() didn't work, try alternative methods
|
||||
if (extensionRelays.length === 0) { |
||||
// Some extensions might expose relays through other methods
|
||||
// This is a fallback for extensions that don't expose getRelays()
|
||||
console.debug('[relay_management.ts] Extension does not expose relay configuration'); |
||||
} |
||||
|
||||
console.debug('[relay_management.ts] Final extension relays:', extensionRelays); |
||||
return extensionRelays; |
||||
} catch (error) { |
||||
console.debug('[relay_management.ts] Error getting extension relays:', error); |
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Tests a set of relays in batches to avoid overwhelming them |
||||
* @param relayUrls Array of relay URLs to test |
||||
* @param ndk NDK instance |
||||
* @returns Promise that resolves to array of working relay URLs |
||||
*/ |
||||
async function testRelaySet(relayUrls: string[], ndk: NDK): Promise<string[]> { |
||||
const workingRelays: string[] = []; |
||||
const maxConcurrent = 2; // Reduce to 2 relays at a time to avoid overwhelming them
|
||||
|
||||
for (let i = 0; i < relayUrls.length; i += maxConcurrent) { |
||||
const batch = relayUrls.slice(i, i + maxConcurrent); |
||||
|
||||
const batchPromises = batch.map(async (url) => { |
||||
try { |
||||
const result = await testRelayConnection(url, ndk); |
||||
return result.connected ? url : null; |
||||
} catch (error) { |
||||
console.debug(`[relay_management.ts] Failed to test relay ${url}:`, error); |
||||
return null; |
||||
} |
||||
}); |
||||
|
||||
const batchResults = await Promise.allSettled(batchPromises); |
||||
const batchWorkingRelays = batchResults |
||||
.filter((result): result is PromiseFulfilledResult<string | null> => result.status === 'fulfilled') |
||||
.map(result => result.value) |
||||
.filter((url): url is string => url !== null); |
||||
workingRelays.push(...batchWorkingRelays); |
||||
} |
||||
|
||||
return workingRelays; |
||||
} |
||||
|
||||
/**
 * Builds a complete relay set for a user, including local, user-specific, and fallback relays.
 *
 * Pipeline: discover local relays → gather per-user relay lists (outbox, local,
 * blocked, extension) → merge and dedupe → connectivity-test the candidates →
 * apply network-condition optimization → strip blocked relays. Whenever a stage
 * yields nothing, fall back to remote (`secondaryRelays`/`searchRelays`) or
 * anonymous relay lists so callers always receive usable sets.
 *
 * NOTE(review): the fetches below are awaited sequentially rather than in
 * parallel — this may be deliberate rate limiting of relay traffic; confirm
 * before parallelizing.
 *
 * @param ndk NDK instance
 * @param user NDKUser or null for anonymous access
 * @returns Promise that resolves to inbox and outbox relay arrays
 */
export async function buildCompleteRelaySet(
  ndk: NDK,
  user: NDKUser | null
): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> {
  console.debug('[relay_management.ts] buildCompleteRelaySet: Starting with user:', user?.pubkey || 'null');

  // Discover local relays first
  const discoveredLocalRelays = await discoverLocalRelays(ndk);
  console.debug('[relay_management.ts] buildCompleteRelaySet: Discovered local relays:', discoveredLocalRelays);

  // Get user-specific relays if available. Each fetch is independently
  // guarded so one failing lookup does not abort the whole build.
  let userOutboxRelays: string[] = [];
  let userLocalRelays: string[] = [];
  let blockedRelays: string[] = [];
  let extensionRelays: string[] = [];

  if (user) {
    console.debug('[relay_management.ts] buildCompleteRelaySet: Fetching user-specific relays for:', user.pubkey);

    try {
      userOutboxRelays = await getUserOutboxRelays(ndk, user);
      console.debug('[relay_management.ts] buildCompleteRelaySet: User outbox relays:', userOutboxRelays);
    } catch (error) {
      console.debug('[relay_management.ts] Error fetching user outbox relays:', error);
    }

    try {
      userLocalRelays = await getUserLocalRelays(ndk, user);
      console.debug('[relay_management.ts] buildCompleteRelaySet: User local relays:', userLocalRelays);
    } catch (error) {
      console.debug('[relay_management.ts] Error fetching user local relays:', error);
    }

    try {
      blockedRelays = await getUserBlockedRelays(ndk, user);
      console.debug('[relay_management.ts] buildCompleteRelaySet: User blocked relays:', blockedRelays);
    } catch {
      // Silently ignore blocked relay fetch errors
    }

    try {
      // NOTE(review): extension relays are only gathered when a user is
      // present, although getExtensionRelays() itself takes no user — confirm
      // whether anonymous sessions should also include extension relays.
      extensionRelays = await getExtensionRelays();
      console.debug('[relay_management.ts] Extension relays gathered:', extensionRelays);
    } catch (error) {
      console.debug('[relay_management.ts] Error fetching extension relays:', error);
    }
  } else {
    console.debug('[relay_management.ts] buildCompleteRelaySet: No user provided, skipping user-specific relays');
  }

  // Build initial relay sets and deduplicate. Local relays go into both sets;
  // outbox additionally gets the user's write relays and the extension relays.
  const finalInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userLocalRelays]);
  const finalOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userOutboxRelays, ...extensionRelays]);

  // Test relays and filter out non-working ones
  let testedInboxRelays: string[] = [];
  let testedOutboxRelays: string[] = [];

  if (finalInboxRelays.length > 0) {
    testedInboxRelays = await testRelaySet(finalInboxRelays, ndk);
  }

  if (finalOutboxRelays.length > 0) {
    testedOutboxRelays = await testRelaySet(finalOutboxRelays, ndk);
  }

  // If no relays passed testing, use remote relays without testing.
  // NOTE(review): this early return does not filter blockedRelays, unlike the
  // final relay set below — confirm whether that is intentional.
  if (testedInboxRelays.length === 0 && testedOutboxRelays.length === 0) {
    const remoteRelays = deduplicateRelayUrls([...secondaryRelays, ...searchRelays]);
    return {
      inboxRelays: remoteRelays,
      outboxRelays: remoteRelays
    };
  }

  // Use tested relays and deduplicate; an empty side falls back to secondaryRelays.
  const inboxRelays = testedInboxRelays.length > 0 ? deduplicateRelayUrls(testedInboxRelays) : deduplicateRelayUrls(secondaryRelays);
  const outboxRelays = testedOutboxRelays.length > 0 ? deduplicateRelayUrls(testedOutboxRelays) : deduplicateRelayUrls(secondaryRelays);

  // Apply network condition optimization (may substitute low-bandwidth relays)
  const currentNetworkCondition = get(networkCondition);
  const networkOptimizedRelaySet = getRelaySetForNetworkCondition(
    currentNetworkCondition,
    discoveredLocalRelays,
    lowbandwidthRelays,
    { inboxRelays, outboxRelays }
  );

  // Filter out blocked relays and deduplicate final sets
  const finalRelaySet = {
    inboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.inboxRelays.filter((r: string) => !blockedRelays.includes(r))),
    outboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.outboxRelays.filter((r: string) => !blockedRelays.includes(r)))
  };

  // If no relays are working, use anonymous relays as fallback
  if (finalRelaySet.inboxRelays.length === 0 && finalRelaySet.outboxRelays.length === 0) {
    return {
      inboxRelays: deduplicateRelayUrls(anonymousRelays),
      outboxRelays: deduplicateRelayUrls(anonymousRelays)
    };
  }

  console.debug('[relay_management.ts] buildCompleteRelaySet: Final relay sets - inbox:', finalRelaySet.inboxRelays.length, 'outbox:', finalRelaySet.outboxRelays.length);
  console.debug('[relay_management.ts] buildCompleteRelaySet: Final inbox relays:', finalRelaySet.inboxRelays);
  console.debug('[relay_management.ts] buildCompleteRelaySet: Final outbox relays:', finalRelaySet.outboxRelays);

  return finalRelaySet;
}
|
||||
@ -0,0 +1,108 @@
@@ -0,0 +1,108 @@
|
||||
import type { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { CACHE_DURATIONS, TIMEOUTS } from "./search_constants.ts"; |
||||
|
||||
/**
 * A single cached search outcome, tagged with the query that produced it
 * and the time it was stored (used for expiry by SearchCache).
 */
export interface SearchResult {
  /** Primary events matched by the search. */
  events: NDKEvent[];
  /** Second-order events — presumably discovered via the primary results; confirm with producers. */
  secondOrder: NDKEvent[];
  /** Events matched via 't' tags — TODO confirm tag semantics with callers. */
  tTagEvents: NDKEvent[];
  /** Event IDs collected during the search — presumably for dedup; verify against the writer. */
  eventIds: Set<string>;
  /** Event addresses collected during the search — presumably addressable-event coordinates; verify. */
  addresses: Set<string>;
  /** Search type component of the cache key. */
  searchType: string;
  /** Raw search term (the cache key normalizes it to lowercase/trimmed). */
  searchTerm: string;
  /** Epoch milliseconds when the entry was stored (Date.now() in SearchCache.set). */
  timestamp: number;
}
||||
|
||||
class SearchCache { |
||||
private cache: Map<string, SearchResult> = new Map(); |
||||
private readonly CACHE_DURATION = CACHE_DURATIONS.SEARCH_CACHE; |
||||
|
||||
/** |
||||
* Generate a cache key for a search |
||||
*/ |
||||
private generateKey(searchType: string, searchTerm: string): string { |
||||
if (!searchTerm) { |
||||
return `${searchType}:`; |
||||
} |
||||
return `${searchType}:${searchTerm.toLowerCase().trim()}`; |
||||
} |
||||
|
||||
/** |
||||
* Check if a cached result is still valid |
||||
*/ |
||||
private isExpired(result: SearchResult): boolean { |
||||
return Date.now() - result.timestamp > this.CACHE_DURATION; |
||||
} |
||||
|
||||
/** |
||||
* Get cached search results |
||||
*/ |
||||
get(searchType: string, searchTerm: string): SearchResult | null { |
||||
const key = this.generateKey(searchType, searchTerm); |
||||
const result = this.cache.get(key); |
||||
|
||||
if (!result || this.isExpired(result)) { |
||||
if (result) { |
||||
this.cache.delete(key); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* Store search results in cache |
||||
*/ |
||||
set( |
||||
searchType: string, |
||||
searchTerm: string, |
||||
result: Omit<SearchResult, "timestamp">, |
||||
): void { |
||||
const key = this.generateKey(searchType, searchTerm); |
||||
this.cache.set(key, { |
||||
...result, |
||||
timestamp: Date.now(), |
||||
}); |
||||
} |
||||
|
||||
/** |
||||
* Check if a search result is cached and valid |
||||
*/ |
||||
has(searchType: string, searchTerm: string): boolean { |
||||
const key = this.generateKey(searchType, searchTerm); |
||||
const result = this.cache.get(key); |
||||
return result !== undefined && !this.isExpired(result); |
||||
} |
||||
|
||||
/** |
||||
* Clear expired entries from cache |
||||
*/ |
||||
cleanup(): void { |
||||
for (const [key, result] of this.cache.entries()) { |
||||
if (this.isExpired(result)) { |
||||
this.cache.delete(key); |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Clear all cache entries |
||||
*/ |
||||
clear(): void { |
||||
this.cache.clear(); |
||||
} |
||||
|
||||
/** |
||||
* Get cache size |
||||
*/ |
||||
size(): number { |
||||
return this.cache.size; |
||||
} |
||||
} |
||||
|
||||
// Shared singleton cache instance.
export const searchCache = new SearchCache();

// Clean up expired entries periodically
// NOTE(review): this interval is never cleared and, under Node, keeps the
// event loop alive (no unref()); harmless in a browser — confirm target runtime.
setInterval(() => {
  searchCache.cleanup();
}, TIMEOUTS.CACHE_CLEANUP); // Check every minute
|
||||
@ -0,0 +1,124 @@
@@ -0,0 +1,124 @@
|
||||
/** |
||||
* Search and Event Utility Constants |
||||
* |
||||
* This file centralizes all magic numbers used throughout the search and event utilities |
||||
* to improve maintainability and reduce code duplication. |
||||
*/ |
||||
|
||||
// Timeout constants (in milliseconds)
export const TIMEOUTS = {
  /** Default timeout for event fetching operations (10 s) */
  EVENT_FETCH: 10000,

  /** Timeout for profile search operations (15 s) */
  PROFILE_SEARCH: 15000,

  /** Timeout for subscription search operations (10 s) */
  SUBSCRIPTION_SEARCH: 10000,

  /** Timeout for second-order search operations (5 s) */
  SECOND_ORDER_SEARCH: 5000,

  /** Timeout for relay diagnostics (5 s) */
  RELAY_DIAGNOSTICS: 5000,

  /** Timeout for general operations (5 s) */
  GENERAL: 5000,

  /** Cache cleanup interval (1 min) — drives the searchCache sweeper */
  CACHE_CLEANUP: 60000,
} as const;
||||
|
||||
// Cache duration constants (in milliseconds)
export const CACHE_DURATIONS = {
  /** Default cache duration for search results (used by SearchCache) */
  SEARCH_CACHE: 5 * 60 * 1000, // 5 minutes

  /** Cache duration for index events */
  INDEX_EVENT_CACHE: 10 * 60 * 1000, // 10 minutes
} as const;
||||
|
||||
// Search limits (maximum result counts per query type)
export const SEARCH_LIMITS = {
  /** Limit for specific profile searches (npub, NIP-05) — lookups targeting one identity */
  SPECIFIC_PROFILE: 10,

  /** Limit for general profile searches — broad queries; caps result volume */
  GENERAL_PROFILE: 500,

  /** Limit for community relay checks — a single event suffices to confirm presence */
  COMMUNITY_CHECK: 1,

  /** Limit for second-order search results */
  SECOND_ORDER_RESULTS: 100,
} as const;
||||
|
||||
// Nostr event kind ranges
export const EVENT_KINDS = {
  /**
   * Kind span this code groups as "replaceable"; SPECIFIC lists the classic
   * replaceable kinds 0 (metadata) and 3 (contacts).
   * NOTE(review): NIP-01 defines replaceable kinds as 10000-19999 plus 0 and 3;
   * 1-9999 are regular events — confirm consumers rely on the wider 0-19999
   * span intentionally.
   */
  REPLACEABLE: {
    MIN: 0,
    MAX: 19999,
    SPECIFIC: [0, 3],
  },

  /**
   * Kinds 20000-29999.
   * NOTE(review): NIP-01 defines this range as *ephemeral* events; the
   * parameterized-replaceable (addressable) range is 30000-39999. The key
   * name here looks misleading — confirm against callers before renaming.
   */
  PARAMETERIZED_REPLACEABLE: {
    MIN: 20000,
    MAX: 29999,
  },

  /** Addressable (parameterized replaceable) event kinds (30000-39999) */
  ADDRESSABLE: {
    MIN: 30000,
    MAX: 39999,
  },

  /** Comment event kind */
  COMMENT: 1111,

  /** Text note event kind */
  TEXT_NOTE: 1,

  /** Profile metadata event kind */
  PROFILE_METADATA: 0,
} as const;
||||
|
||||
// Relay-specific constants
export const RELAY_CONSTANTS = {
  /** Request ID for community relay checks */
  COMMUNITY_REQUEST_ID: "alexandria-forest",

  /** Default relay request kinds for community checks (kind 1 = text note, per EVENT_KINDS.TEXT_NOTE) */
  COMMUNITY_REQUEST_KINDS: [1],
} as const;
||||
|
||||
// Time constants
|
||||
export const TIME_CONSTANTS = { |
||||
/** Unix timestamp conversion factor (seconds to milliseconds) */ |
||||
UNIX_TIMESTAMP_FACTOR: 1000, |
||||
|
||||
/** Current timestamp in seconds */ |
||||
CURRENT_TIMESTAMP: Math.floor(Date.now() / 1000), |
||||
} as const; |
||||
|
||||
// Validation constants
export const VALIDATION = {
  /** Hex string length for event IDs and pubkeys (32 bytes = 64 hex characters) */
  HEX_LENGTH: 64,

  /** Minimum length for Nostr identifiers */
  MIN_NOSTR_IDENTIFIER_LENGTH: 4,
} as const;
||||
|
||||
// HTTP status codes (standard values, named for readability at call sites)
export const HTTP_STATUS = {
  /** OK status code */
  OK: 200,

  /** Not found status code */
  NOT_FOUND: 404,

  /** Internal server error status code */
  INTERNAL_SERVER_ERROR: 500,
} as const;
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue