88 changed files with 8996 additions and 4519 deletions
File diff suppressed because it is too large
Load Diff
@ -1,78 +0,0 @@
@@ -1,78 +0,0 @@
|
||||
<script lang="ts"> |
||||
import { type NDKUserProfile } from "@nostr-dev-kit/ndk"; |
||||
import { |
||||
activePubkey, |
||||
loginWithExtension, |
||||
ndkInstance, |
||||
ndkSignedIn, |
||||
persistLogin, |
||||
} from "$lib/ndk"; |
||||
import { Avatar, Button, Popover } from "flowbite-svelte"; |
||||
import Profile from "$components/util/Profile.svelte"; |
||||
|
||||
let profile = $state<NDKUserProfile | null>(null); |
||||
let npub = $state<string | undefined>(undefined); |
||||
|
||||
let signInFailed = $state<boolean>(false); |
||||
let errorMessage = $state<string>(""); |
||||
|
||||
$effect(() => { |
||||
if ($ndkSignedIn) { |
||||
$ndkInstance |
||||
.getUser({ pubkey: $activePubkey ?? undefined }) |
||||
?.fetchProfile() |
||||
.then((userProfile) => { |
||||
profile = userProfile; |
||||
}); |
||||
npub = $ndkInstance.activeUser?.npub; |
||||
} |
||||
}); |
||||
|
||||
async function handleSignInClick() { |
||||
try { |
||||
signInFailed = false; |
||||
errorMessage = ""; |
||||
|
||||
const user = await loginWithExtension(); |
||||
if (!user) { |
||||
throw new Error("The NIP-07 extension did not return a user."); |
||||
} |
||||
|
||||
profile = await user.fetchProfile(); |
||||
persistLogin(user); |
||||
} catch (e) { |
||||
console.error(e); |
||||
signInFailed = true; |
||||
errorMessage = |
||||
e instanceof Error ? e.message : "Failed to sign in. Please try again."; |
||||
} |
||||
} |
||||
</script> |
||||
|
||||
<div class="m-4"> |
||||
{#if $ndkSignedIn} |
||||
<Profile pubkey={$activePubkey} isNav={true} /> |
||||
{:else} |
||||
<Avatar rounded class="h-6 w-6 cursor-pointer bg-transparent" id="avatar" /> |
||||
<Popover |
||||
class="popover-leather w-fit" |
||||
placement="bottom" |
||||
triggeredBy="#avatar" |
||||
> |
||||
<div class="w-full flex flex-col space-y-2"> |
||||
<Button onclick={handleSignInClick}>Extension Sign-In</Button> |
||||
{#if signInFailed} |
||||
<div class="p-2 text-sm text-red-600 bg-red-100 rounded"> |
||||
{errorMessage} |
||||
</div> |
||||
{/if} |
||||
<!-- <Button |
||||
color='alternative' |
||||
on:click={signInWithBunker} |
||||
> |
||||
Bunker Sign-In |
||||
</Button> --> |
||||
</div> |
||||
</Popover> |
||||
{/if} |
||||
</div> |
||||
@ -0,0 +1,59 @@
@@ -0,0 +1,59 @@
|
||||
<script lang="ts"> |
||||
import { networkCondition, isNetworkChecking, startNetworkStatusMonitoring } from '$lib/stores/networkStore'; |
||||
import { NetworkCondition } from '$lib/utils/network_detection'; |
||||
import { onMount } from 'svelte'; |
||||
|
||||
function getStatusColor(): string { |
||||
switch ($networkCondition) { |
||||
case NetworkCondition.ONLINE: |
||||
return 'text-green-600 dark:text-green-400'; |
||||
case NetworkCondition.SLOW: |
||||
return 'text-yellow-600 dark:text-yellow-400'; |
||||
case NetworkCondition.OFFLINE: |
||||
return 'text-red-600 dark:text-red-400'; |
||||
default: |
||||
return 'text-gray-600 dark:text-gray-400'; |
||||
} |
||||
} |
||||
|
||||
function getStatusIcon(): string { |
||||
switch ($networkCondition) { |
||||
case NetworkCondition.ONLINE: |
||||
return '🟢'; |
||||
case NetworkCondition.SLOW: |
||||
return '🟡'; |
||||
case NetworkCondition.OFFLINE: |
||||
return '🔴'; |
||||
default: |
||||
return '⚪'; |
||||
} |
||||
} |
||||
|
||||
function getStatusText(): string { |
||||
switch ($networkCondition) { |
||||
case NetworkCondition.ONLINE: |
||||
return 'Online'; |
||||
case NetworkCondition.SLOW: |
||||
return 'Slow Connection'; |
||||
case NetworkCondition.OFFLINE: |
||||
return 'Offline'; |
||||
default: |
||||
return 'Unknown'; |
||||
} |
||||
} |
||||
|
||||
onMount(() => { |
||||
// Start centralized network monitoring |
||||
startNetworkStatusMonitoring(); |
||||
}); |
||||
</script> |
||||
|
||||
<div class="flex items-center space-x-2 text-xs {getStatusColor()} font-medium"> |
||||
{#if $isNetworkChecking} |
||||
<span class="animate-spin">⏳</span> |
||||
<span>Checking...</span> |
||||
{:else} |
||||
<span class="text-lg">{getStatusIcon()}</span> |
||||
<span>{getStatusText()}</span> |
||||
{/if} |
||||
</div> |
||||
@ -1,93 +0,0 @@
@@ -1,93 +0,0 @@
|
||||
<script lang="ts"> |
||||
import { ndkInstance } from "$lib/ndk"; |
||||
import { naddrEncode } from "$lib/utils"; |
||||
import type { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { standardRelays } from "../consts"; |
||||
import { Card, Img } from "flowbite-svelte"; |
||||
import CardActions from "$components/util/CardActions.svelte"; |
||||
import { userBadge } from "$lib/snippets/UserSnippets.svelte"; |
||||
import { getUserMetadata, toNpub } from "$lib/utils/nostrUtils"; |
||||
|
||||
const { event } = $props<{ event: NDKEvent }>(); |
||||
|
||||
const relays = $derived.by(() => { |
||||
return $ndkInstance.activeUser?.relayUrls ?? standardRelays; |
||||
}); |
||||
|
||||
const href = $derived.by(() => { |
||||
const d = event.getMatchingTags("d")[0]?.[1]; |
||||
if (d != null) { |
||||
return `publication?d=${d}`; |
||||
} else { |
||||
return `publication?id=${naddrEncode(event, relays)}`; |
||||
} |
||||
}); |
||||
|
||||
let title: string = $derived(event.getMatchingTags("title")[0]?.[1]); |
||||
let author: string = $derived( |
||||
event.getMatchingTags(event, "author")[0]?.[1] ?? "unknown", |
||||
); |
||||
let version: string = $derived( |
||||
event.getMatchingTags("version")[0]?.[1] ?? "1", |
||||
); |
||||
let image: string = $derived(event.getMatchingTags("image")[0]?.[1] ?? null); |
||||
let authorPubkey: string = $derived( |
||||
event.getMatchingTags("p")[0]?.[1] ?? null, |
||||
); |
||||
|
||||
// New: fetch profile display name for authorPubkey |
||||
let authorDisplayName = $state<string | undefined>(undefined); |
||||
|
||||
$effect(() => { |
||||
if (authorPubkey) { |
||||
getUserMetadata(toNpub(authorPubkey) as string).then((profile) => { |
||||
authorDisplayName = |
||||
profile.displayName || |
||||
(profile as any).display_name || |
||||
author || |
||||
authorPubkey; |
||||
}); |
||||
} else { |
||||
authorDisplayName = undefined; |
||||
} |
||||
}); |
||||
</script> |
||||
|
||||
{#if title != null && href != null} |
||||
<Card |
||||
class="ArticleBox card-leather max-w-md h-48 flex flex-row items-center space-x-2 relative overflow-hidden" |
||||
> |
||||
{#if image} |
||||
<div |
||||
class="flex col justify-center align-middle h-32 w-24 min-w-20 max-w-24 overflow-hidden" |
||||
> |
||||
<Img src={image} class="rounded w-full h-full object-cover" /> |
||||
</div> |
||||
{/if} |
||||
<div class="col flex flex-row flex-grow space-x-4"> |
||||
<div class="flex flex-col flex-grow"> |
||||
<a href="/{href}" class="flex flex-col space-y-2"> |
||||
<h2 class="text-lg font-bold line-clamp-2" {title}>{title}</h2> |
||||
<h3 class="text-base font-normal"> |
||||
by |
||||
{#if authorPubkey != null} |
||||
{@render userBadge(authorPubkey, authorDisplayName)} |
||||
{:else} |
||||
{author} |
||||
{/if} |
||||
</h3> |
||||
{#if version != "1"} |
||||
<h3 |
||||
class="text-base font-medium text-primary-700 dark:text-primary-300" |
||||
> |
||||
version: {version} |
||||
</h3> |
||||
{/if} |
||||
</a> |
||||
</div> |
||||
<div class="flex flex-col justify-start items-center"> |
||||
<CardActions {event} /> |
||||
</div> |
||||
</div> |
||||
</Card> |
||||
{/if} |
||||
@ -1,28 +0,0 @@
@@ -1,28 +0,0 @@
|
||||
<script lang="ts"> |
||||
import type { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { nip19 } from "nostr-tools"; |
||||
export let notes: NDKEvent[] = []; |
||||
// check if notes is empty |
||||
if (notes.length === 0) { |
||||
console.debug("notes is empty"); |
||||
} |
||||
</script> |
||||
|
||||
<div class="toc"> |
||||
<h2>Table of contents</h2> |
||||
<ul> |
||||
{#each notes as note} |
||||
<li> |
||||
<a href="#{nip19.noteEncode(note.id)}" |
||||
>{note.getMatchingTags("title")[0][1]}</a |
||||
> |
||||
</li> |
||||
{/each} |
||||
</ul> |
||||
</div> |
||||
|
||||
<style> |
||||
.toc h2 { |
||||
text-align: center; |
||||
} |
||||
</style> |
||||
@ -0,0 +1,180 @@
@@ -0,0 +1,180 @@
|
||||
<script lang="ts"> |
||||
import { Textarea, Button } from "flowbite-svelte"; |
||||
import { EyeOutline } from "flowbite-svelte-icons"; |
||||
import { |
||||
parseAsciiDocSections, |
||||
type ZettelSection, |
||||
} from "$lib/utils/ZettelParser"; |
||||
import asciidoctor from "asciidoctor"; |
||||
|
||||
// Component props |
||||
let { |
||||
content = "", |
||||
placeholder = `== Note Title |
||||
:author: {author} // author is optional |
||||
:tags: tag1, tag2, tag3 // tags are optional |
||||
|
||||
note content here... |
||||
|
||||
== Note Title 2 |
||||
:tags: tag1, tag2, tag3 |
||||
Note content here... |
||||
`, |
||||
showPreview = false, |
||||
onContentChange = (content: string) => {}, |
||||
onPreviewToggle = (show: boolean) => {}, |
||||
} = $props<{ |
||||
content?: string; |
||||
placeholder?: string; |
||||
showPreview?: boolean; |
||||
onContentChange?: (content: string) => void; |
||||
onPreviewToggle?: (show: boolean) => void; |
||||
}>(); |
||||
|
||||
// Initialize AsciiDoctor processor |
||||
const asciidoctorProcessor = asciidoctor(); |
||||
|
||||
// Parse sections for preview |
||||
let parsedSections = $derived(parseAsciiDocSections(content, 2)); |
||||
|
||||
// Toggle preview panel |
||||
function togglePreview() { |
||||
const newShowPreview = !showPreview; |
||||
onPreviewToggle(newShowPreview); |
||||
} |
||||
|
||||
// Handle content changes |
||||
function handleContentChange(event: Event) { |
||||
const target = event.target as HTMLTextAreaElement; |
||||
onContentChange(target.value); |
||||
} |
||||
</script> |
||||
|
||||
<div class="flex flex-col space-y-4"> |
||||
<div class="flex items-center justify-between"> |
||||
<Button |
||||
color="light" |
||||
size="sm" |
||||
on:click={togglePreview} |
||||
class="flex items-center space-x-1" |
||||
> |
||||
{#if showPreview} |
||||
<EyeOutline class="w-4 h-4" /> |
||||
<span>Hide Preview</span> |
||||
{:else} |
||||
<EyeOutline class="w-4 h-4" /> |
||||
<span>Show Preview</span> |
||||
{/if} |
||||
</Button> |
||||
</div> |
||||
|
||||
<div class="flex space-x-4 {showPreview ? 'h-96' : ''}"> |
||||
<!-- Editor Panel --> |
||||
<div class="{showPreview ? 'w-1/2' : 'w-full'} flex flex-col space-y-4"> |
||||
<div class="flex-1"> |
||||
<Textarea |
||||
bind:value={content} |
||||
on:input={handleContentChange} |
||||
{placeholder} |
||||
class="h-full min-h-64 resize-none" |
||||
rows={12} |
||||
/> |
||||
</div> |
||||
</div> |
||||
|
||||
<!-- Preview Panel --> |
||||
{#if showPreview} |
||||
<div class="w-1/2 border-l border-gray-200 dark:border-gray-700 pl-4"> |
||||
<div class="sticky top-4"> |
||||
<h3 |
||||
class="text-lg font-semibold mb-4 text-gray-900 dark:text-gray-100" |
||||
> |
||||
AsciiDoc Preview |
||||
</h3> |
||||
|
||||
<div |
||||
class="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4 max-h-80 overflow-y-auto" |
||||
> |
||||
{#if !content.trim()} |
||||
<div class="text-gray-500 dark:text-gray-400 text-sm"> |
||||
Start typing to see the preview... |
||||
</div> |
||||
{:else} |
||||
<div class="prose prose-sm dark:prose-invert max-w-none"> |
||||
{#each parsedSections as section, index} |
||||
<div class="mb-6"> |
||||
<div |
||||
class="text-sm text-gray-800 dark:text-gray-200 asciidoc-content" |
||||
> |
||||
{@html asciidoctorProcessor.convert( |
||||
`== ${section.title}\n\n${section.content}`, |
||||
{ |
||||
standalone: false, |
||||
doctype: "article", |
||||
attributes: { |
||||
showtitle: true, |
||||
sectids: true, |
||||
}, |
||||
}, |
||||
)} |
||||
</div> |
||||
|
||||
{#if index < parsedSections.length - 1} |
||||
<!-- Gray area with tag bubbles above event boundary --> |
||||
<div class="my-4 relative"> |
||||
<!-- Gray background area --> |
||||
<div |
||||
class="bg-gray-200 dark:bg-gray-700 rounded-lg p-3 mb-2" |
||||
> |
||||
<div class="flex flex-wrap gap-2 items-center"> |
||||
{#if section.tags && section.tags.length > 0} |
||||
{#each section.tags as tag} |
||||
<div |
||||
class="bg-amber-900 text-amber-100 px-2 py-1 rounded-full text-xs font-medium flex items-baseline" |
||||
> |
||||
<span class="font-mono">{tag[0]}:</span> |
||||
<span>{tag[1]}</span> |
||||
</div> |
||||
{/each} |
||||
{:else} |
||||
<span |
||||
class="text-gray-500 dark:text-gray-400 text-xs italic" |
||||
>No tags</span |
||||
> |
||||
{/if} |
||||
</div> |
||||
</div> |
||||
|
||||
<!-- Event boundary line --> |
||||
<div |
||||
class="border-t-2 border-dashed border-blue-400 relative" |
||||
> |
||||
<div |
||||
class="absolute -top-2 left-1/2 transform -translate-x-1/2 bg-blue-100 dark:bg-blue-900 text-blue-800 dark:text-blue-200 px-2 py-1 rounded text-xs font-medium" |
||||
> |
||||
Event Boundary |
||||
</div> |
||||
</div> |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
{/each} |
||||
</div> |
||||
|
||||
<div |
||||
class="mt-4 text-xs text-gray-600 dark:text-gray-400 bg-gray-50 dark:bg-gray-900 p-2 rounded border" |
||||
> |
||||
<strong>Event Count:</strong> |
||||
{parsedSections.length} event{parsedSections.length !== 1 |
||||
? "s" |
||||
: ""} |
||||
<br /> |
||||
<strong>Note:</strong> Currently only the first event will be published. |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
</div> |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
</div> |
||||
@ -0,0 +1,90 @@
@@ -0,0 +1,90 @@
|
||||
<script lang="ts"> |
||||
import { naddrEncode } from "$lib/utils"; |
||||
import type { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { activeInboxRelays } from "$lib/ndk"; |
||||
import { Card } from "flowbite-svelte"; |
||||
import CardActions from "$components/util/CardActions.svelte"; |
||||
import { userBadge } from "$lib/snippets/UserSnippets.svelte"; |
||||
import LazyImage from "$components/util/LazyImage.svelte"; |
||||
import { generateDarkPastelColor } from "$lib/utils/image_utils"; |
||||
|
||||
const { event } = $props<{ event: NDKEvent }>(); |
||||
|
||||
function getRelayUrls(): string[] { |
||||
return $activeInboxRelays; |
||||
} |
||||
|
||||
const relays = $derived.by(() => { |
||||
return getRelayUrls(); |
||||
}); |
||||
|
||||
const href = $derived.by(() => { |
||||
const d = event.getMatchingTags("d")[0]?.[1]; |
||||
if (d != null) { |
||||
return `publication?d=${d}`; |
||||
} else { |
||||
return `publication?id=${naddrEncode(event, relays)}`; |
||||
} |
||||
}); |
||||
|
||||
let title: string = $derived(event.getMatchingTags("title")[0]?.[1]); |
||||
let author: string = $derived( |
||||
event.getMatchingTags(event, "author")[0]?.[1] ?? "unknown", |
||||
); |
||||
let version: string = $derived( |
||||
event.getMatchingTags("version")[0]?.[1] ?? "1", |
||||
); |
||||
let image: string = $derived(event.getMatchingTags("image")[0]?.[1] ?? null); |
||||
let authorPubkey: string = $derived( |
||||
event.getMatchingTags("p")[0]?.[1] ?? null, |
||||
); |
||||
</script> |
||||
|
||||
{#if title != null && href != null} |
||||
<Card class="ArticleBox card-leather max-w-md h-48 flex flex-row space-x-2 relative"> |
||||
<div |
||||
class="flex-shrink-0 w-32 h-40 overflow-hidden rounded flex items-center justify-center p-2 -mt-2" |
||||
> |
||||
{#if image} |
||||
<LazyImage |
||||
src={image} |
||||
alt={title || "Publication image"} |
||||
eventId={event.id} |
||||
className="w-full h-full object-cover" |
||||
/> |
||||
{:else} |
||||
<div |
||||
class="w-full h-full rounded" |
||||
style="background-color: {generateDarkPastelColor(event.id)};" |
||||
> |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
|
||||
<div class="flex flex-col flex-grow space-x-2"> |
||||
<div class="flex flex-col flex-grow"> |
||||
<a href="/{href}" class="flex flex-col space-y-2 h-full"> |
||||
<div class="flex-grow pt-2"> |
||||
<h2 class="text-lg font-bold line-clamp-2" {title}>{title}</h2> |
||||
<h3 class="text-base font-normal mt-2"> |
||||
by |
||||
{#if authorPubkey != null} |
||||
{@render userBadge(authorPubkey, author)} |
||||
{:else} |
||||
{author} |
||||
{/if} |
||||
</h3> |
||||
</div> |
||||
{#if version != "1"} |
||||
<h3 class="text-sm font-semibold text-primary-600 dark:text-primary-400 mt-auto">version: {version}</h3> |
||||
{/if} |
||||
</a> |
||||
</div> |
||||
</div> |
||||
|
||||
<!-- Position CardActions at bottom-right --> |
||||
<div class="absolute bottom-2 right-2"> |
||||
<CardActions {event} /> |
||||
</div> |
||||
</Card> |
||||
{/if} |
||||
@ -0,0 +1,182 @@
@@ -0,0 +1,182 @@
|
||||
<script lang="ts"> |
||||
import { |
||||
TableOfContents, |
||||
type TocEntry, |
||||
} from "$lib/components/publications/table_of_contents.svelte"; |
||||
import { getContext } from "svelte"; |
||||
import { |
||||
SidebarDropdownWrapper, |
||||
SidebarGroup, |
||||
SidebarItem, |
||||
} from "flowbite-svelte"; |
||||
import Self from "./TableOfContents.svelte"; |
||||
import { onMount, onDestroy } from "svelte"; |
||||
|
||||
let { depth, onSectionFocused, onLoadMore } = $props<{ |
||||
rootAddress: string; |
||||
depth: number; |
||||
onSectionFocused?: (address: string) => void; |
||||
onLoadMore?: () => void; |
||||
}>(); |
||||
|
||||
let toc = getContext("toc") as TableOfContents; |
||||
|
||||
let entries = $derived.by<TocEntry[]>(() => { |
||||
const newEntries = []; |
||||
for (const [_, entry] of toc.addressMap) { |
||||
if (entry.depth !== depth) { |
||||
continue; |
||||
} |
||||
|
||||
newEntries.push(entry); |
||||
} |
||||
|
||||
return newEntries; |
||||
}); |
||||
|
||||
// Track the currently visible section |
||||
let currentVisibleSection = $state<string | null>(null); |
||||
let observer: IntersectionObserver; |
||||
|
||||
function setEntryExpanded(address: string, expanded: boolean = false) { |
||||
const entry = toc.getEntry(address); |
||||
if (!entry) { |
||||
return; |
||||
} |
||||
|
||||
toc.expandedMap.set(address, expanded); |
||||
entry.resolveChildren(); |
||||
} |
||||
|
||||
function handleSectionClick(address: string) { |
||||
// Smooth scroll to the section |
||||
const element = document.getElementById(address); |
||||
if (element) { |
||||
element.scrollIntoView({ |
||||
behavior: 'smooth', |
||||
block: 'start', |
||||
}); |
||||
} |
||||
|
||||
onSectionFocused?.(address); |
||||
|
||||
// Check if this is the last entry and trigger loading more events |
||||
const currentEntries = entries; |
||||
const lastEntry = currentEntries[currentEntries.length - 1]; |
||||
if (lastEntry && lastEntry.address === address) { |
||||
console.debug('[TableOfContents] Last entry clicked, triggering load more'); |
||||
onLoadMore?.(); |
||||
} |
||||
} |
||||
|
||||
// Check if an entry is currently visible |
||||
function isEntryVisible(address: string): boolean { |
||||
return currentVisibleSection === address; |
||||
} |
||||
|
||||
// Set up intersection observer to track visible sections |
||||
onMount(() => { |
||||
observer = new IntersectionObserver( |
||||
(entries) => { |
||||
// Find the section that is most visible in the viewport |
||||
let maxIntersectionRatio = 0; |
||||
let mostVisibleSection: string | null = null; |
||||
|
||||
entries.forEach((entry) => { |
||||
if (entry.isIntersecting && entry.intersectionRatio > maxIntersectionRatio) { |
||||
maxIntersectionRatio = entry.intersectionRatio; |
||||
mostVisibleSection = entry.target.id; |
||||
} |
||||
}); |
||||
|
||||
if (mostVisibleSection && mostVisibleSection !== currentVisibleSection) { |
||||
currentVisibleSection = mostVisibleSection; |
||||
} |
||||
}, |
||||
{ |
||||
threshold: [0, 0.25, 0.5, 0.75, 1], |
||||
rootMargin: "-20% 0px -20% 0px", // Consider section visible when it's in the middle 60% of the viewport |
||||
} |
||||
); |
||||
|
||||
// Function to observe all section elements |
||||
function observeSections() { |
||||
const sections = document.querySelectorAll('section[id]'); |
||||
sections.forEach((section) => { |
||||
observer.observe(section); |
||||
}); |
||||
} |
||||
|
||||
// Initial observation |
||||
observeSections(); |
||||
|
||||
// Set up a mutation observer to watch for new sections being added |
||||
const mutationObserver = new MutationObserver((mutations) => { |
||||
mutations.forEach((mutation) => { |
||||
mutation.addedNodes.forEach((node) => { |
||||
if (node.nodeType === Node.ELEMENT_NODE) { |
||||
const element = node as Element; |
||||
// Check if the added node is a section with an id |
||||
if (element.tagName === 'SECTION' && element.id) { |
||||
observer.observe(element); |
||||
} |
||||
// Check if the added node contains sections |
||||
const sections = element.querySelectorAll?.('section[id]'); |
||||
if (sections) { |
||||
sections.forEach((section) => { |
||||
observer.observe(section); |
||||
}); |
||||
} |
||||
} |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
// Start observing the document body for changes |
||||
mutationObserver.observe(document.body, { |
||||
childList: true, |
||||
subtree: true, |
||||
}); |
||||
|
||||
return () => { |
||||
observer.disconnect(); |
||||
mutationObserver.disconnect(); |
||||
}; |
||||
}); |
||||
|
||||
onDestroy(() => { |
||||
if (observer) { |
||||
observer.disconnect(); |
||||
} |
||||
}); |
||||
</script> |
||||
|
||||
<!-- TODO: Figure out how to style indentations. --> |
||||
<!-- TODO: Make group title fonts the same as entry title fonts. --> |
||||
<SidebarGroup> |
||||
{#each entries as entry, index} |
||||
{@const address = entry.address} |
||||
{@const expanded = toc.expandedMap.get(address) ?? false} |
||||
{@const isLeaf = toc.leaves.has(address)} |
||||
{@const isVisible = isEntryVisible(address)} |
||||
{@const isLastEntry = index === entries.length - 1} |
||||
{#if isLeaf} |
||||
<SidebarItem |
||||
label={entry.title} |
||||
href={`#${address}`} |
||||
spanClass="px-2 text-ellipsis" |
||||
class={`${isVisible ? "toc-highlight" : ""} ${isLastEntry ? "pb-4" : ""}`} |
||||
onclick={() => handleSectionClick(address)} |
||||
/> |
||||
{:else} |
||||
{@const childDepth = depth + 1} |
||||
<SidebarDropdownWrapper |
||||
label={entry.title} |
||||
btnClass="flex items-center p-2 w-full font-normal text-gray-900 rounded-lg transition duration-75 group hover:bg-primary-50 dark:text-white dark:hover:bg-primary-800 {isVisible ? 'toc-highlight' : ''} {isLastEntry ? 'pb-4' : ''}" |
||||
bind:isOpen={() => expanded, (open) => setEntryExpanded(address, open)} |
||||
> |
||||
<Self rootAddress={address} depth={childDepth} {onSectionFocused} {onLoadMore} /> |
||||
</SidebarDropdownWrapper> |
||||
{/if} |
||||
{/each} |
||||
</SidebarGroup> |
||||
@ -0,0 +1,111 @@
@@ -0,0 +1,111 @@
|
||||
import { SvelteSet } from "svelte/reactivity"; |
||||
import { PublicationTree } from "../../data_structures/publication_tree.ts"; |
||||
import NDK, { NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
|
||||
export class SveltePublicationTree { |
||||
resolvedAddresses: SvelteSet<string> = new SvelteSet(); |
||||
|
||||
#publicationTree: PublicationTree; |
||||
#nodeResolvedObservers: Array<(address: string) => void> = []; |
||||
#bookmarkMovedObservers: Array<(address: string) => void> = []; |
||||
|
||||
constructor(rootEvent: NDKEvent, ndk: NDK) { |
||||
this.#publicationTree = new PublicationTree(rootEvent, ndk); |
||||
|
||||
this.#publicationTree.onNodeResolved(this.#handleNodeResolved); |
||||
this.#publicationTree.onBookmarkMoved(this.#handleBookmarkMoved); |
||||
} |
||||
|
||||
// #region Proxied Public Methods
|
||||
|
||||
getChildAddresses(address: string): Promise<Array<string | null>> { |
||||
return this.#publicationTree.getChildAddresses(address); |
||||
} |
||||
|
||||
getEvent(address: string): Promise<NDKEvent | null> { |
||||
return this.#publicationTree.getEvent(address); |
||||
} |
||||
|
||||
getHierarchy(address: string): Promise<NDKEvent[]> { |
||||
return this.#publicationTree.getHierarchy(address); |
||||
} |
||||
|
||||
async getParent(address: string): Promise<NDKEvent | null> { |
||||
const hierarchy = await this.getHierarchy(address); |
||||
|
||||
// The last element in the hierarchy is the event with the given address, so the parent is the
|
||||
// second to last element.
|
||||
return hierarchy.at(-2) ?? null; |
||||
} |
||||
|
||||
setBookmark(address: string) { |
||||
this.#publicationTree.setBookmark(address); |
||||
} |
||||
|
||||
/** |
||||
* Registers an observer function that is invoked whenever a new node is resolved. |
||||
* @param observer The observer function. |
||||
*/ |
||||
onNodeResolved(observer: (address: string) => void) { |
||||
this.#nodeResolvedObservers.push(observer); |
||||
} |
||||
|
||||
/** |
||||
* Registers an observer function that is invoked whenever the bookmark is moved. |
||||
* @param observer The observer function. |
||||
*/ |
||||
onBookmarkMoved(observer: (address: string) => void) { |
||||
this.#bookmarkMovedObservers.push(observer); |
||||
} |
||||
|
||||
// #endregion
|
||||
|
||||
// #region Proxied Async Iterator Methods
|
||||
|
||||
[Symbol.asyncIterator](): AsyncIterator<NDKEvent | null> { |
||||
return this; |
||||
} |
||||
|
||||
next(): Promise<IteratorResult<NDKEvent | null>> { |
||||
return this.#publicationTree.next(); |
||||
} |
||||
|
||||
previous(): Promise<IteratorResult<NDKEvent | null>> { |
||||
return this.#publicationTree.previous(); |
||||
} |
||||
|
||||
// #endregion
|
||||
|
||||
// #region Private Methods
|
||||
|
||||
/** |
||||
* Observer function that is invoked whenever a new node is resolved on the publication tree. |
||||
* |
||||
* @param address The address of the resolved node. |
||||
* |
||||
* This member is declared as an arrow function to ensure that the correct `this` context is |
||||
* used when the function is invoked in this class's constructor. |
||||
*/ |
||||
#handleNodeResolved = (address: string) => { |
||||
this.resolvedAddresses.add(address); |
||||
for (const observer of this.#nodeResolvedObservers) { |
||||
observer(address); |
||||
} |
||||
}; |
||||
|
||||
/** |
||||
* Observer function that is invoked whenever the bookmark is moved on the publication tree. |
||||
* |
||||
* @param address The address of the new bookmark. |
||||
* |
||||
* This member is declared as an arrow function to ensure that the correct `this` context is |
||||
* used when the function is invoked in this class's constructor. |
||||
*/ |
||||
#handleBookmarkMoved = (address: string) => { |
||||
for (const observer of this.#bookmarkMovedObservers) { |
||||
observer(address); |
||||
} |
||||
}; |
||||
|
||||
// #endregion
|
||||
} |
||||
@ -0,0 +1,297 @@
@@ -0,0 +1,297 @@
|
||||
import { SvelteMap, SvelteSet } from "svelte/reactivity"; |
||||
import { SveltePublicationTree } from "./svelte_publication_tree.svelte.ts"; |
||||
import type { NDKEvent } from "../../utils/nostrUtils.ts"; |
||||
import { indexKind } from "../../consts.ts"; |
||||
|
||||
export interface TocEntry { |
||||
address: string; |
||||
title: string; |
||||
href?: string; |
||||
children: TocEntry[]; |
||||
parent?: TocEntry; |
||||
depth: number; |
||||
childrenResolved: boolean; |
||||
resolveChildren: () => Promise<void>; |
||||
} |
||||
|
||||
/** |
||||
* Maintains a table of contents (ToC) for a `SveltePublicationTree`. Since publication trees are |
||||
* conceptually infinite and lazy-loading, the ToC represents only the portion of the tree that has |
||||
* been "discovered". The ToC is updated as new nodes are resolved within the publication tree. |
||||
* |
||||
* @see SveltePublicationTree |
||||
*/ |
||||
export class TableOfContents { |
||||
public addressMap: SvelteMap<string, TocEntry> = new SvelteMap(); |
||||
public expandedMap: SvelteMap<string, boolean> = new SvelteMap(); |
||||
public leaves: SvelteSet<string> = new SvelteSet(); |
||||
|
||||
#root: TocEntry | null = null; |
||||
#publicationTree: SveltePublicationTree; |
||||
#pagePathname: string; |
||||
|
||||
/** |
||||
* Constructs a `TableOfContents` from a `SveltePublicationTree`. |
||||
* |
||||
* @param rootAddress The address of the root event. |
||||
* @param publicationTree The SveltePublicationTree instance. |
||||
* @param pagePathname The current page pathname for href generation. |
||||
*/ |
||||
constructor( |
||||
rootAddress: string, |
||||
publicationTree: SveltePublicationTree, |
||||
pagePathname: string, |
||||
) { |
||||
this.#publicationTree = publicationTree; |
||||
this.#pagePathname = pagePathname; |
||||
this.#init(rootAddress); |
||||
} |
||||
|
||||
// #region Public Methods
|
||||
|
||||
/** |
||||
* Returns the root entry of the ToC. |
||||
* |
||||
* @returns The root entry of the ToC, or `null` if the ToC has not been initialized. |
||||
*/ |
||||
getRootEntry(): TocEntry | null { |
||||
return this.#root; |
||||
} |
||||
|
||||
getEntry(address: string): TocEntry | undefined { |
||||
return this.addressMap.get(address); |
||||
} |
||||
|
||||
/** |
||||
* Builds a table of contents from the DOM subtree rooted at `parentElement`. |
||||
* |
||||
* @param parentElement The root of the DOM subtree containing the content to be added to the |
||||
* ToC. |
||||
* @param parentAddress The address of the event corresponding to the DOM subtree root indicated |
||||
* by `parentElement`. |
||||
* |
||||
* This function is intended for use on segments of HTML markup that are not directly derived |
||||
* from a structure publication of the kind supported by `PublicationTree`. It may be used to |
||||
* produce a table of contents from the contents of a kind `30041` event with AsciiDoc markup, or |
||||
* from a kind `30023` event with Markdown content. |
||||
*/ |
||||
buildTocFromDocument(parentElement: HTMLElement, parentEntry: TocEntry) { |
||||
parentElement |
||||
.querySelectorAll<HTMLHeadingElement>(`h${parentEntry.depth}`) |
||||
.forEach((header) => { |
||||
// TODO: Correctly update ToC state from DOM.
|
||||
const title = header.textContent?.trim(); |
||||
const id = header.id; |
||||
|
||||
// Only create an entry if the header has an ID and a title.
|
||||
if (id && title) { |
||||
const href = `${this.#pagePathname}#${id}`; |
||||
|
||||
// TODO: Check this logic.
|
||||
const tocEntry: TocEntry = { |
||||
address: parentEntry.address, |
||||
title, |
||||
href, |
||||
depth: parentEntry.depth + 1, |
||||
children: [], |
||||
childrenResolved: true, |
||||
resolveChildren: () => Promise.resolve(), |
||||
}; |
||||
parentEntry.children.push(tocEntry); |
||||
this.expandedMap.set(tocEntry.address, false); |
||||
|
||||
this.buildTocFromDocument(header, tocEntry); |
||||
} |
||||
}); |
||||
} |
||||
|
||||
// #endregion
|
||||
|
||||
// #region Iterator Methods
|
||||
|
||||
/** |
||||
* Iterates over all ToC entries in depth-first order. |
||||
*/ |
||||
*[Symbol.iterator](): IterableIterator<TocEntry> { |
||||
function* traverse(entry: TocEntry | null): IterableIterator<TocEntry> { |
||||
if (!entry) { |
||||
return; |
||||
} |
||||
|
||||
yield entry; |
||||
|
||||
if (entry.children) { |
||||
for (const child of entry.children) { |
||||
yield* traverse(child); |
||||
} |
||||
} |
||||
} |
||||
|
||||
yield* traverse(this.#root); |
||||
} |
||||
|
||||
// #endregion
|
||||
|
||||
// #region Private Methods
|
||||
|
||||
  /**
   * Initializes the ToC from the associated publication tree.
   *
   * Fetches the root event, builds the root ToC entry, eagerly builds entries
   * for any nodes the tree has already resolved, and then subscribes to
   * future node resolutions.
   *
   * @param rootAddress The address of the publication's root event.
   * @throws Error when the root event cannot be fetched.
   *
   * Michael J - 07 July 2025 - NOTE: Since the publication tree is conceptually infinite and
   * lazy-loading, the ToC is not guaranteed to contain all the nodes at any layer until the
   * publication has been fully resolved.
   *
   * Michael J - 07 July 2025 - TODO: If the relay provides event metadata, use the metadata to
   * initialize the ToC with all of its first-level children.
   */
  async #init(rootAddress: string) {
    const rootEvent = await this.#publicationTree.getEvent(rootAddress);
    if (!rootEvent) {
      throw new Error(`[ToC] Root event ${rootAddress} not found.`);
    }

    // The root entry must exist before any resolved-node handling so that
    // addressMap lookups during child construction can find it.
    this.#root = await this.#buildTocEntry(rootAddress);

    this.addressMap.set(rootAddress, this.#root);

    // Handle any other nodes that have already been resolved in parallel.
    // NOTE(review): this only truly awaits the work if
    // `#buildTocEntryFromResolvedNode` returns its promise — verify.
    await Promise.all(
      Array.from(this.#publicationTree.resolvedAddresses).map((address) =>
        this.#buildTocEntryFromResolvedNode(address),
      ),
    );

    // Set up an observer to handle progressive resolution of the publication tree.
    this.#publicationTree.onNodeResolved((address: string) => {
      this.#buildTocEntryFromResolvedNode(address);
    });
  }
||||
|
||||
#getTitle(event: NDKEvent | null): string { |
||||
if (!event) { |
||||
// TODO: What do we want to return in this case?
|
||||
return "[untitled]"; |
||||
} |
||||
const titleTag = event.getMatchingTags?.("title")?.[0]?.[1]; |
||||
return titleTag || event.tagAddress() || "[untitled]"; |
||||
} |
||||
|
||||
  /**
   * Builds a single ToC entry for the event at `address`, wiring up a lazy
   * `resolveChildren` callback that populates the entry's children on demand.
   *
   * @param address The address of the event to build an entry for.
   * @returns The new entry (children unresolved; `childrenResolved` is false).
   * @throws Error when the event cannot be fetched.
   */
  async #buildTocEntry(address: string): Promise<TocEntry> {
    // Michael J - 07 July 2025 - NOTE: This arrow function is nested so as to use its containing
    // scope in its operation. Do not move it to the top level without ensuring it still has access
    // to the necessary variables.
    // NOTE(review): `resolver` closes over `entry`, which is declared further
    // down. This is safe only because the resolver runs strictly after this
    // function returns — keep that invariant.
    const resolver = async () => {
      // Idempotent: a second expansion of the same entry is a no-op.
      if (entry.childrenResolved) {
        return;
      }

      const event = await this.#publicationTree.getEvent(entry.address);
      if (event?.kind !== indexKind) {
        // TODO: Build ToC entries from HTML markup in this case.
        return;
      }

      const childAddresses = await this.#publicationTree.getChildAddresses(
        entry.address,
      );
      for (const childAddress of childAddresses) {
        if (!childAddress) {
          continue;
        }

        // Michael J - 16 June 2025 - This duplicates logic in the outer function, but is necessary
        // here so that we can determine whether to render an entry as a leaf before it is fully
        // resolved.
        if (childAddress.split(":")[0] !== indexKind.toString()) {
          this.leaves.add(childAddress);
        }

        // Michael J - 05 June 2025 - The `getChildAddresses` method forces node resolution on the
        // publication tree. This is acceptable here, because the tree is always resolved
        // top-down. Therefore, by the time we handle a node's resolution, its parent and
        // siblings have already been resolved.
        const childEntry = await this.#buildTocEntry(childAddress);
        childEntry.parent = entry;
        childEntry.depth = entry.depth + 1;
        entry.children.push(childEntry);
        this.addressMap.set(childAddress, childEntry);
      }

      // Children may resolve out of order; restore the event's 'a'-tag order.
      await this.#matchChildrenToTagOrder(entry);

      entry.childrenResolved = true;
    };

    const event = await this.#publicationTree.getEvent(address);
    if (!event) {
      throw new Error(`[ToC] Event ${address} not found.`);
    }

    // Depth equals the length of the node's ancestor chain in the tree.
    const depth = (await this.#publicationTree.getHierarchy(address)).length;

    const entry: TocEntry = {
      address,
      title: this.#getTitle(event),
      href: `${this.#pagePathname}#${address}`,
      children: [],
      depth,
      childrenResolved: false,
      resolveChildren: resolver,
    };
    // New entries start collapsed in the UI.
    this.expandedMap.set(address, false);

    // Michael J - 16 June 2025 - We determine whether to add a leaf both here and in the inner
    // resolver function. The resolver function is called when entries are resolved by expanding
    // a ToC entry, and we'll reach the block below when entries are resolved by the publication
    // tree.
    if (event.kind !== indexKind) {
      this.leaves.add(address);
    }

    return entry;
  }
||||
|
||||
/** |
||||
* Reorders the children of a ToC entry to match the order of 'a' tags in the corresponding |
||||
* Nostr index event. |
||||
* |
||||
* @param entry The ToC entry to reorder. |
||||
* |
||||
* This function has a time complexity of `O(n log n)`, where `n` is the number of children the |
||||
* parent event has. Average size of `n` is small enough to be negligible. |
||||
*/ |
||||
async #matchChildrenToTagOrder(entry: TocEntry) { |
||||
const parentEvent = await this.#publicationTree.getEvent(entry.address); |
||||
if (parentEvent?.kind === indexKind) { |
||||
const tagOrder = parentEvent.getMatchingTags("a").map((tag) => tag[1]); |
||||
const addressToOrdinal = new Map<string, number>(); |
||||
|
||||
// Build map of addresses to their ordinals from tag order
|
||||
tagOrder.forEach((address, index) => { |
||||
addressToOrdinal.set(address, index); |
||||
}); |
||||
|
||||
entry.children.sort((a, b) => { |
||||
const aOrdinal = |
||||
addressToOrdinal.get(a.address) ?? Number.MAX_SAFE_INTEGER; |
||||
const bOrdinal = |
||||
addressToOrdinal.get(b.address) ?? Number.MAX_SAFE_INTEGER; |
||||
return aOrdinal - bOrdinal; |
||||
}); |
||||
} |
||||
} |
||||
|
||||
#buildTocEntryFromResolvedNode(address: string) { |
||||
if (this.addressMap.has(address)) { |
||||
return; |
||||
} |
||||
|
||||
this.#buildTocEntry(address).then((entry) => { |
||||
this.addressMap.set(address, entry); |
||||
}); |
||||
} |
||||
|
||||
// #endregion
|
||||
} |
||||
@ -0,0 +1,115 @@
@@ -0,0 +1,115 @@
|
||||
<script lang="ts"> |
||||
import { Button } from "flowbite-svelte"; |
||||
import { goto } from "$app/navigation"; |
||||
import type { NDKEvent } from "$lib/utils/nostrUtils"; |
||||
import { findContainingIndexEvents } from "$lib/utils/event_search"; |
||||
import { getMatchingTags } from "$lib/utils/nostrUtils"; |
||||
import { naddrEncode } from "$lib/utils"; |
||||
import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk"; |
||||
|
||||
let { event } = $props<{ |
||||
event: NDKEvent; |
||||
}>(); |
||||
|
||||
let containingIndexes = $state<NDKEvent[]>([]); |
||||
let loading = $state(false); |
||||
let error = $state<string | null>(null); |
||||
let lastEventId = $state<string | null>(null); |
||||
|
||||
async function loadContainingIndexes() { |
||||
console.log( |
||||
"[ContainingIndexes] Loading containing indexes for event:", |
||||
event.id, |
||||
); |
||||
loading = true; |
||||
error = null; |
||||
|
||||
try { |
||||
containingIndexes = await findContainingIndexEvents(event); |
||||
console.log( |
||||
"[ContainingIndexes] Found containing indexes:", |
||||
containingIndexes.length, |
||||
); |
||||
} catch (err) { |
||||
error = |
||||
err instanceof Error |
||||
? err.message |
||||
: "Failed to load containing indexes"; |
||||
console.error( |
||||
"[ContainingIndexes] Error loading containing indexes:", |
||||
err, |
||||
); |
||||
} finally { |
||||
loading = false; |
||||
} |
||||
} |
||||
|
||||
function navigateToIndex(indexEvent: NDKEvent) { |
||||
const dTag = getMatchingTags(indexEvent, "d")[0]?.[1]; |
||||
if (dTag) { |
||||
goto(`/publication?d=${encodeURIComponent(dTag)}`); |
||||
} else { |
||||
// Fallback to naddr |
||||
try { |
||||
const naddr = naddrEncode(indexEvent, $activeInboxRelays); |
||||
goto(`/publication?id=${encodeURIComponent(naddr)}`); |
||||
} catch (err) { |
||||
console.error("[ContainingIndexes] Error creating naddr:", err); |
||||
} |
||||
} |
||||
} |
||||
|
||||
function getNaddrUrl(event: NDKEvent): string { |
||||
return naddrEncode(event, $activeInboxRelays); |
||||
} |
||||
|
||||
$effect(() => { |
||||
// Only reload if the event ID has actually changed |
||||
if (event.id !== lastEventId) { |
||||
lastEventId = event.id; |
||||
loadContainingIndexes(); |
||||
} |
||||
}); |
||||
</script> |
||||
|
||||
{#if containingIndexes.length > 0 || loading || error} |
||||
<div class="mb-4 p-3 bg-gray-50 dark:bg-gray-800 rounded-lg border"> |
||||
<h4 class="text-sm font-medium text-gray-700 dark:text-gray-300 mb-2"> |
||||
Containing Publications |
||||
</h4> |
||||
|
||||
{#if loading} |
||||
<div class="text-sm text-gray-500 dark:text-gray-400"> |
||||
Loading containing publications... |
||||
</div> |
||||
{:else if error} |
||||
<div class="text-sm text-red-600 dark:text-red-400"> |
||||
{error} |
||||
</div> |
||||
{:else if containingIndexes.length > 0} |
||||
<div class="max-h-32 overflow-y-auto"> |
||||
{#each containingIndexes.slice(0, 3) as indexEvent} |
||||
{@const title = |
||||
getMatchingTags(indexEvent, "title")[0]?.[1] || "Untitled"} |
||||
<Button |
||||
size="xs" |
||||
color="alternative" |
||||
class="mb-1 mr-1 text-xs" |
||||
onclick={() => navigateToIndex(indexEvent)} |
||||
> |
||||
{title} |
||||
</Button> |
||||
{/each} |
||||
{#if containingIndexes.length > 3} |
||||
<span class="text-xs text-gray-500 dark:text-gray-400"> |
||||
+{containingIndexes.length - 3} more |
||||
</span> |
||||
{/if} |
||||
</div> |
||||
{:else} |
||||
<div class="text-sm text-gray-500 dark:text-gray-400"> |
||||
No containing publications found |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
{/if} |
||||
@ -0,0 +1,90 @@
@@ -0,0 +1,90 @@
|
||||
<script lang="ts"> |
||||
import { generateDarkPastelColor } from '$lib/utils/image_utils'; |
||||
import { fade } from 'svelte/transition'; |
||||
import { quintOut } from 'svelte/easing'; |
||||
|
||||
let { |
||||
src, |
||||
alt, |
||||
eventId, |
||||
className = 'w-full h-full object-cover', |
||||
placeholderClassName = '', |
||||
}: { |
||||
src: string; |
||||
alt: string; |
||||
eventId: string; |
||||
className?: string; |
||||
placeholderClassName?: string; |
||||
} = $props(); |
||||
|
||||
let imageLoaded = $state(false); |
||||
let imageError = $state(false); |
||||
let imgElement = $state<HTMLImageElement | null>(null); |
||||
|
||||
const placeholderColor = $derived.by(() => generateDarkPastelColor(eventId)); |
||||
|
||||
  // Wires load/error handlers onto the bound <img> element, then assigns
  // `src` so the handlers are registered before loading can complete.
  // NOTE(review): the template also sets `{src}` plus inline onload/onerror
  // handlers, which duplicates this logic — confirm which path is intended.
  function loadImage() {
    if (!imgElement) return;

    imgElement.onload = () => {
      // Small delay to ensure smooth transition
      setTimeout(() => {
        imageLoaded = true;
      }, 100);
    };

    imgElement.onerror = () => {
      imageError = true;
    };

    // Set src after setting up event handlers
    imgElement.src = src;
  }

  // Action-style binding callback: captures the <img> element and starts
  // loading right away.
  // NOTE(review): appears unused — the template binds via `bind:this`
  // instead of calling this; verify before removing.
  function bindImg(element: HTMLImageElement) {
    imgElement = element;
    // Load image immediately when element is bound
    loadImage();
  }
||||
</script> |
||||
|
||||
<div class="relative w-full h-full"> |
||||
<!-- Placeholder --> |
||||
<div |
||||
class="absolute inset-0 {placeholderClassName}" |
||||
style="background-color: {placeholderColor};" |
||||
class:hidden={imageLoaded} |
||||
> |
||||
</div> |
||||
|
||||
<!-- Image --> |
||||
<img |
||||
bind:this={imgElement} |
||||
{src} |
||||
{alt} |
||||
class="{className} {imageLoaded ? 'opacity-100' : 'opacity-0'}" |
||||
style="transition: opacity 0.2s ease-out;" |
||||
loading="lazy" |
||||
decoding="async" |
||||
class:hidden={imageError} |
||||
onload={() => { |
||||
setTimeout(() => { |
||||
imageLoaded = true; |
||||
}, 100); |
||||
}} |
||||
onerror={() => { |
||||
imageError = true; |
||||
}} |
||||
/> |
||||
|
||||
<!-- Error state --> |
||||
{#if imageError} |
||||
<div |
||||
class="absolute inset-0 flex items-center justify-center bg-gray-200 dark:bg-gray-700 {placeholderClassName}" |
||||
> |
||||
<div class="text-gray-500 dark:text-gray-400 text-xs"> |
||||
Failed to load |
||||
</div> |
||||
</div> |
||||
{/if} |
||||
</div> |
||||
@ -1,150 +0,0 @@
@@ -1,150 +0,0 @@
|
||||
<script lang="ts"> |
||||
import { |
||||
Heading, |
||||
Sidebar, |
||||
SidebarGroup, |
||||
SidebarItem, |
||||
SidebarWrapper, |
||||
} from "flowbite-svelte"; |
||||
import { onMount } from "svelte"; |
||||
import { pharosInstance, tocUpdate } from "$lib/parser"; |
||||
import { publicationColumnVisibility } from "$lib/stores"; |
||||
|
||||
let { rootId } = $props<{ rootId: string }>(); |
||||
|
||||
if (rootId !== $pharosInstance.getRootIndexId()) { |
||||
console.error("Root ID does not match parser root index ID"); |
||||
} |
||||
|
||||
const tocBreakpoint = 1140; |
||||
|
||||
let activeHash = $state(window.location.hash); |
||||
|
||||
interface TocItem { |
||||
label: string; |
||||
hash: string; |
||||
} |
||||
|
||||
// Get TOC items from parser |
||||
let tocItems = $state<TocItem[]>([]); |
||||
|
||||
$effect(() => { |
||||
// This will re-run whenever tocUpdate changes |
||||
tocUpdate; |
||||
const items: TocItem[] = []; |
||||
const childIds = $pharosInstance.getChildIndexIds(rootId); |
||||
console.log("TOC rootId:", rootId, "childIds:", childIds); |
||||
const processNode = (nodeId: string) => { |
||||
const title = $pharosInstance.getIndexTitle(nodeId); |
||||
if (title) { |
||||
items.push({ |
||||
label: title, |
||||
hash: `#${nodeId}`, |
||||
}); |
||||
} |
||||
const children = $pharosInstance.getChildIndexIds(nodeId); |
||||
children.forEach(processNode); |
||||
}; |
||||
childIds.forEach(processNode); |
||||
tocItems = items; |
||||
}); |
||||
|
||||
function normalizeHashPath(str: string): string { |
||||
return str |
||||
.toLowerCase() |
||||
.replace(/\s+/g, "-") |
||||
.replace(/[^\w-]/g, ""); |
||||
} |
||||
|
||||
function scrollToElementWithOffset() { |
||||
const hash = window.location.hash; |
||||
if (hash) { |
||||
const targetElement = document.querySelector(hash); |
||||
if (targetElement) { |
||||
const headerOffset = 80; |
||||
const elementPosition = targetElement.getBoundingClientRect().top; |
||||
const offsetPosition = elementPosition + window.scrollY - headerOffset; |
||||
|
||||
window.scrollTo({ |
||||
top: offsetPosition, |
||||
behavior: "auto", |
||||
}); |
||||
} |
||||
} |
||||
} |
||||
|
||||
function updateActiveHash() { |
||||
activeHash = window.location.hash; |
||||
} |
||||
|
||||
/** |
||||
* Hides the table of contents sidebar when the window shrinks below a certain size. This |
||||
* prevents the sidebar from occluding the article content. |
||||
*/ |
||||
function setTocVisibilityOnResize() { |
||||
// Always show TOC on laptop and larger screens, collapsible only on small/medium |
||||
publicationColumnVisibility.update((v) => ({ |
||||
...v, |
||||
toc: window.innerWidth >= tocBreakpoint, |
||||
})); |
||||
} |
||||
|
||||
/** |
||||
* Hides the table of contents sidebar when the user clicks outside of it. |
||||
*/ |
||||
function hideTocOnClick(ev: MouseEvent) { |
||||
const target = ev.target as HTMLElement; |
||||
|
||||
if (target.closest(".sidebar-leather") || target.closest(".btn-leather")) { |
||||
return; |
||||
} |
||||
|
||||
// Only allow hiding TOC on screens smaller than tocBreakpoint |
||||
if (window.innerWidth < tocBreakpoint && $publicationColumnVisibility.toc) { |
||||
publicationColumnVisibility.update((v) => ({ ...v, toc: false })); |
||||
} |
||||
} |
||||
|
||||
onMount(() => { |
||||
// Always check whether the TOC sidebar should be visible. |
||||
setTocVisibilityOnResize(); |
||||
|
||||
window.addEventListener("hashchange", updateActiveHash); |
||||
window.addEventListener("hashchange", scrollToElementWithOffset); |
||||
// Also handle the case where the user lands on the page with a hash in the URL |
||||
scrollToElementWithOffset(); |
||||
|
||||
window.addEventListener("resize", setTocVisibilityOnResize); |
||||
window.addEventListener("click", hideTocOnClick); |
||||
|
||||
return () => { |
||||
window.removeEventListener("hashchange", updateActiveHash); |
||||
window.removeEventListener("hashchange", scrollToElementWithOffset); |
||||
window.removeEventListener("resize", setTocVisibilityOnResize); |
||||
window.removeEventListener("click", hideTocOnClick); |
||||
}; |
||||
}); |
||||
</script> |
||||
|
||||
<!-- TODO: Get TOC from parser. --> |
||||
{#if $publicationColumnVisibility.toc} |
||||
<Sidebar class="sidebar-leather left-0"> |
||||
<SidebarWrapper> |
||||
<SidebarGroup class="sidebar-group-leather"> |
||||
<Heading tag="h1" class="h-leather !text-lg">Table of contents</Heading> |
||||
<p> |
||||
(This ToC is only for demo purposes, and is not fully-functional.) |
||||
</p> |
||||
{#each tocItems as item} |
||||
<SidebarItem |
||||
class="sidebar-item-leather {activeHash === item.hash |
||||
? 'bg-primary-200 font-bold' |
||||
: ''}" |
||||
label={item.label} |
||||
href={item.hash} |
||||
/> |
||||
{/each} |
||||
</SidebarGroup> |
||||
</SidebarWrapper> |
||||
</Sidebar> |
||||
{/if} |
||||
@ -0,0 +1,115 @@
@@ -0,0 +1,115 @@
|
||||
import { get } from "svelte/store"; |
||||
import { ndkInstance } from "$lib/ndk"; |
||||
import { getMimeTags } from "$lib/utils/mime"; |
||||
import { |
||||
parseAsciiDocSections, |
||||
type ZettelSection, |
||||
} from "$lib/utils/ZettelParser"; |
||||
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { nip19 } from "nostr-tools"; |
||||
|
||||
export interface PublishResult { |
||||
success: boolean; |
||||
eventId?: string; |
||||
error?: string; |
||||
} |
||||
|
||||
export interface PublishOptions { |
||||
content: string; |
||||
kind?: number; |
||||
onSuccess?: (eventId: string) => void; |
||||
onError?: (error: string) => void; |
||||
} |
||||
|
||||
/**
 * Publishes AsciiDoc content as Nostr events.
 *
 * Parses the content into sections, wraps the first section in an event of
 * the given kind, signs it with the active user's signer, and publishes it to
 * every relay in the NDK pool.
 *
 * @param options - Publishing options (content, optional kind and callbacks).
 * @returns Promise resolving to publish result; failures are reported both in
 *   the result and through `onError` — this function does not throw.
 */
export async function publishZettel(
  options: PublishOptions,
): Promise<PublishResult> {
  const { content, kind = 30041, onSuccess, onError } = options;

  // Reject blank input before doing any store or network work.
  if (!content.trim()) {
    const error = "Please enter some content";
    onError?.(error);
    return { success: false, error };
  }

  // Get the current NDK instance from the store
  const ndk = get(ndkInstance);

  // Publishing requires a signed-in user (for pubkey and signing).
  if (!ndk?.activeUser) {
    const error = "Please log in first";
    onError?.(error);
    return { success: false, error };
  }

  try {
    // Parse content into sections (split at heading level 2).
    const sections = parseAsciiDocSections(content, 2);

    if (sections.length === 0) {
      throw new Error("No valid sections found in content");
    }

    // For now, publish only the first section
    const firstSection = sections[0];
    const title = firstSection.title;
    const cleanContent = firstSection.content;
    const sectionTags = firstSection.tags || [];

    // Generate d-tag and create event
    const dTag = generateDTag(title);
    const [mTag, MTag] = getMimeTags(kind);

    const tags: string[][] = [["d", dTag], mTag, MTag, ["title", title]];
    // NOTE(review): `sectionTags` defaults to `[]` above, so this guard is
    // always truthy; the spread of an empty array is a no-op either way.
    if (sectionTags) {
      tags.push(...sectionTags);
    }

    // Create and sign NDK event
    const ndkEvent = new NDKEvent(ndk);
    ndkEvent.kind = kind;
    ndkEvent.created_at = Math.floor(Date.now() / 1000);
    ndkEvent.tags = tags;
    ndkEvent.content = cleanContent;
    ndkEvent.pubkey = ndk.activeUser.pubkey;

    await ndkEvent.sign();

    // Publish to relays: target every relay currently in the pool.
    const allRelayUrls = Array.from(ndk.pool?.relays.values() || []).map(
      (r) => r.url,
    );

    if (allRelayUrls.length === 0) {
      throw new Error("No relays available in NDK pool");
    }

    const relaySet = NDKRelaySet.fromRelayUrls(allRelayUrls, ndk);
    const publishedToRelays = await ndkEvent.publish(relaySet);

    // Success requires at least one relay to have accepted the event.
    if (publishedToRelays.size > 0) {
      const result = { success: true, eventId: ndkEvent.id };
      onSuccess?.(ndkEvent.id);
      return result;
    } else {
      // Try fallback publishing logic here...
      throw new Error("Failed to publish to any relays");
    }
  } catch (error) {
    // Normalize all failures into the PublishResult shape.
    const errorMessage =
      error instanceof Error ? error.message : "Unknown error";
    onError?.(errorMessage);
    return { success: false, error: errorMessage };
  }
}
||||
|
||||
function generateDTag(title: string): string { |
||||
return title |
||||
.toLowerCase() |
||||
.replace(/[^\w\s-]/g, "") |
||||
.replace(/\s+/g, "-"); |
||||
} |
||||
@ -0,0 +1,55 @@
@@ -0,0 +1,55 @@
|
||||
import { writable, type Writable } from 'svelte/store'; |
||||
import { detectNetworkCondition, NetworkCondition, startNetworkMonitoring } from '$lib/utils/network_detection'; |
||||
|
||||
// Network status store
|
||||
export const networkCondition = writable<NetworkCondition>(NetworkCondition.ONLINE); |
||||
export const isNetworkChecking = writable<boolean>(false); |
||||
|
||||
// Network monitoring state
|
||||
let stopNetworkMonitoring: (() => void) | null = null; |
||||
|
||||
/** |
||||
* Starts network monitoring if not already running |
||||
*/ |
||||
export function startNetworkStatusMonitoring(): void { |
||||
if (stopNetworkMonitoring) { |
||||
return; // Already monitoring
|
||||
} |
||||
|
||||
console.debug('[networkStore.ts] Starting network status monitoring'); |
||||
|
||||
stopNetworkMonitoring = startNetworkMonitoring( |
||||
(condition: NetworkCondition) => { |
||||
console.debug(`[networkStore.ts] Network condition changed to: ${condition}`); |
||||
networkCondition.set(condition); |
||||
}, |
||||
60000 // Check every 60 seconds to reduce spam
|
||||
); |
||||
} |
||||
|
||||
/** |
||||
* Stops network monitoring |
||||
*/ |
||||
export function stopNetworkStatusMonitoring(): void { |
||||
if (stopNetworkMonitoring) { |
||||
console.debug('[networkStore.ts] Stopping network status monitoring'); |
||||
stopNetworkMonitoring(); |
||||
stopNetworkMonitoring = null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Manually check network status (for immediate updates) |
||||
*/ |
||||
export async function checkNetworkStatus(): Promise<void> { |
||||
try { |
||||
isNetworkChecking.set(true); |
||||
const condition = await detectNetworkCondition(); |
||||
networkCondition.set(condition); |
||||
} catch (error) { |
||||
console.warn('[networkStore.ts] Failed to check network status:', error); |
||||
networkCondition.set(NetworkCondition.OFFLINE); |
||||
} finally { |
||||
isNetworkChecking.set(false); |
||||
} |
||||
}
|
||||
@ -1,4 +0,0 @@
@@ -1,4 +0,0 @@
|
||||
import { writable } from "svelte/store"; |
||||
|
||||
// Initialize with empty array, will be populated from user preferences
|
||||
export const userRelays = writable<string[]>([]); |
||||
@ -0,0 +1,436 @@
@@ -0,0 +1,436 @@
|
||||
import { writable, get } from "svelte/store"; |
||||
import type { NostrProfile } from "$lib/utils/nostrUtils"; |
||||
import type { NDKUser, NDKSigner } from "@nostr-dev-kit/ndk"; |
||||
import { |
||||
NDKNip07Signer, |
||||
NDKRelayAuthPolicies, |
||||
NDKRelaySet, |
||||
NDKRelay, |
||||
} from "@nostr-dev-kit/ndk"; |
||||
import { getUserMetadata } from "$lib/utils/nostrUtils"; |
||||
import { ndkInstance, activeInboxRelays, activeOutboxRelays, updateActiveRelayStores } from "$lib/ndk"; |
||||
import { loginStorageKey } from "$lib/consts"; |
||||
import { nip19 } from "nostr-tools"; |
||||
import { userPubkey } from "$lib/stores/authStore.Svelte"; |
||||
|
||||
export interface UserState { |
||||
pubkey: string | null; |
||||
npub: string | null; |
||||
profile: NostrProfile | null; |
||||
relays: { inbox: string[]; outbox: string[] }; |
||||
loginMethod: "extension" | "amber" | "npub" | null; |
||||
ndkUser: NDKUser | null; |
||||
signer: NDKSigner | null; |
||||
signedIn: boolean; |
||||
} |
||||
|
||||
export const userStore = writable<UserState>({ |
||||
pubkey: null, |
||||
npub: null, |
||||
profile: null, |
||||
relays: { inbox: [], outbox: [] }, |
||||
loginMethod: null, |
||||
ndkUser: null, |
||||
signer: null, |
||||
signedIn: false, |
||||
}); |
||||
|
||||
// Helper functions for relay management
|
||||
function getRelayStorageKey(user: NDKUser, type: "inbox" | "outbox"): string { |
||||
return `${loginStorageKey}/${user.pubkey}/${type}`; |
||||
} |
||||
|
||||
function persistRelays( |
||||
user: NDKUser, |
||||
inboxes: Set<NDKRelay>, |
||||
outboxes: Set<NDKRelay>, |
||||
): void { |
||||
localStorage.setItem( |
||||
getRelayStorageKey(user, "inbox"), |
||||
JSON.stringify(Array.from(inboxes).map((relay) => relay.url)), |
||||
); |
||||
localStorage.setItem( |
||||
getRelayStorageKey(user, "outbox"), |
||||
JSON.stringify(Array.from(outboxes).map((relay) => relay.url)), |
||||
); |
||||
} |
||||
|
||||
function getPersistedRelays(user: NDKUser): [Set<string>, Set<string>] { |
||||
const inboxes = new Set<string>( |
||||
JSON.parse(localStorage.getItem(getRelayStorageKey(user, "inbox")) ?? "[]"), |
||||
); |
||||
const outboxes = new Set<string>( |
||||
JSON.parse( |
||||
localStorage.getItem(getRelayStorageKey(user, "outbox")) ?? "[]", |
||||
), |
||||
); |
||||
|
||||
return [inboxes, outboxes]; |
||||
} |
||||
|
||||
/**
 * Determines the user's preferred inbox/outbox relays.
 *
 * Looks for a kind-10002 relay-list event on the fallback relays; if none is
 * found, falls back to the NIP-07 extension's `getRelays()`. Tags with an
 * unknown marker are treated as both read and write.
 *
 * NOTE(review): `ndk` is typed `any` — consider the concrete NDK type.
 *
 * @param ndk The NDK instance used for fetching and relay construction.
 * @param user The user whose relay list is being resolved.
 * @param fallbacks Relay URLs to query for the relay-list event; defaults to
 *   the currently active inbox+outbox relays.
 * @returns A `[inboxRelays, outboxRelays]` pair of NDKRelay sets.
 */
async function getUserPreferredRelays(
  ndk: any,
  user: NDKUser,
  fallbacks: readonly string[] = [...get(activeInboxRelays), ...get(activeOutboxRelays)],
): Promise<[Set<NDKRelay>, Set<NDKRelay>]> {
  // Kind 10002 is the NIP-65 relay list metadata event.
  const relayList = await ndk.fetchEvent(
    {
      kinds: [10002],
      authors: [user.pubkey],
    },
    {
      groupable: false,
      skipVerification: false,
      skipValidation: false,
    },
    NDKRelaySet.fromRelayUrls(fallbacks, ndk),
  );

  const inboxRelays = new Set<NDKRelay>();
  const outboxRelays = new Set<NDKRelay>();

  if (relayList == null) {
    // No relay-list event: fall back to the NIP-07 extension's relay map,
    // if the extension provides one.
    const relayMap = await window.nostr?.getRelays?.();
    Object.entries(relayMap ?? {}).forEach(
      ([url, relayType]: [string, any]) => {
        const relay = new NDKRelay(
          url,
          NDKRelayAuthPolicies.signIn({ ndk }),
          ndk,
        );
        if (relayType.read) inboxRelays.add(relay);
        if (relayType.write) outboxRelays.add(relay);
      },
    );
  } else {
    // NIP-65: "r" marks read (inbox), "w" marks write (outbox); an
    // unmarked tag counts as both.
    relayList.tags.forEach((tag: string[]) => {
      switch (tag[0]) {
        case "r":
          inboxRelays.add(
            new NDKRelay(tag[1], NDKRelayAuthPolicies.signIn({ ndk }), ndk),
          );
          break;
        case "w":
          outboxRelays.add(
            new NDKRelay(tag[1], NDKRelayAuthPolicies.signIn({ ndk }), ndk),
          );
          break;
        default:
          inboxRelays.add(
            new NDKRelay(tag[1], NDKRelayAuthPolicies.signIn({ ndk }), ndk),
          );
          outboxRelays.add(
            new NDKRelay(tag[1], NDKRelayAuthPolicies.signIn({ ndk }), ndk),
          );
          break;
      }
    });
  }

  return [inboxRelays, outboxRelays];
}
||||
|
||||
// --- Unified login/logout helpers ---
|
||||
|
||||
export const loginMethodStorageKey = "alexandria/login/method"; |
||||
|
||||
function persistLogin(user: NDKUser, method: "extension" | "amber" | "npub") { |
||||
localStorage.setItem(loginStorageKey, user.pubkey); |
||||
localStorage.setItem(loginMethodStorageKey, method); |
||||
} |
||||
|
||||
function getPersistedLoginMethod(): "extension" | "amber" | "npub" | null { |
||||
return ( |
||||
(localStorage.getItem(loginMethodStorageKey) as |
||||
| "extension" |
||||
| "amber" |
||||
| "npub") ?? null |
||||
); |
||||
} |
||||
|
||||
function clearLogin() { |
||||
localStorage.removeItem(loginStorageKey); |
||||
localStorage.removeItem(loginMethodStorageKey); |
||||
} |
||||
|
||||
/**
 * Login with NIP-07 browser extension.
 *
 * Resolves the user from the extension signer, fetches (or synthesizes) a
 * profile, connects persisted and preferred relays, installs the signer on
 * the NDK instance, updates the user stores, and finally persists the login.
 *
 * @throws Error when the NDK instance is not initialized, or when the
 *   extension fails to produce a user.
 */
export async function loginWithExtension() {
  const ndk = get(ndkInstance);
  if (!ndk) throw new Error("NDK not initialized");
  // Only clear previous login state after successful login
  const signer = new NDKNip07Signer();
  const user = await signer.user();
  const npub = user.npub;

  console.log("Login with extension - fetching profile for npub:", npub);

  // Try to fetch user metadata, but don't fail if it times out
  let profile: NostrProfile | null = null;
  try {
    console.log("Login with extension - attempting to fetch profile...");
    profile = await getUserMetadata(npub, true); // Force fresh fetch
    console.log("Login with extension - fetched profile:", profile);
  } catch (error) {
    console.warn("Failed to fetch user metadata during login:", error);
    // Continue with login even if metadata fetch fails
    // Fallback display name: abbreviated npub ("npub1abc...wxyz").
    profile = {
      name: npub.slice(0, 8) + "..." + npub.slice(-4),
      displayName: npub.slice(0, 8) + "..." + npub.slice(-4),
    };
    console.log("Login with extension - using fallback profile:", profile);
  }

  // Fetch user's preferred relays
  const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user);
  for (const relay of persistedInboxes) {
    ndk.addExplicitRelay(relay);
  }
  const [inboxes, outboxes] = await getUserPreferredRelays(ndk, user);
  persistRelays(user, inboxes, outboxes);
  ndk.signer = signer;
  ndk.activeUser = user;

  const userState = {
    pubkey: user.pubkey,
    npub,
    profile,
    relays: {
      // NOTE(review): `inboxes`/`outboxes` are always Sets (never nullish),
      // so the `?? persisted*` fallbacks look unreachable — and the
      // persisted sets contain URL strings, not objects with `.url`. Verify
      // the intended fallback behavior here.
      inbox: Array.from(inboxes ?? persistedInboxes).map((relay) => relay.url),
      outbox: Array.from(outboxes ?? persistedOutboxes).map(
        (relay) => relay.url,
      ),
    },
    loginMethod: "extension" as const,
    ndkUser: user,
    signer,
    signedIn: true,
  };

  console.log("Login with extension - setting userStore with:", userState);
  userStore.set(userState);
  userPubkey.set(user.pubkey);

  // Update relay stores with the new user's relays
  try {
    console.debug('[userStore.ts] loginWithExtension: Updating relay stores for authenticated user');
    await updateActiveRelayStores(ndk);
  } catch (error) {
    console.warn('[userStore.ts] loginWithExtension: Failed to update relay stores:', error);
  }

  // Reset any stale persisted state, clear the logout flag, then persist the
  // fresh login.
  clearLogin();
  localStorage.removeItem("alexandria/logout/flag");
  persistLogin(user, "extension");
}
||||
|
||||
/**
 * Login with Amber (NIP-46).
 *
 * Mirrors `loginWithExtension`, but the signer and user are supplied by the
 * caller (obtained through the Amber remote-signing flow) instead of being
 * derived from a browser extension.
 *
 * @param amberSigner The NIP-46 signer negotiated with Amber.
 * @param user The user resolved by that signer.
 * @throws Error when the NDK instance is not initialized.
 */
export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
  const ndk = get(ndkInstance);
  if (!ndk) throw new Error("NDK not initialized");
  // Only clear previous login state after successful login
  const npub = user.npub;

  console.log("Login with Amber - fetching profile for npub:", npub);

  let profile: NostrProfile | null = null;
  try {
    profile = await getUserMetadata(npub, true); // Force fresh fetch
    console.log("Login with Amber - fetched profile:", profile);
  } catch (error) {
    console.warn("Failed to fetch user metadata during Amber login:", error);
    // Continue with login even if metadata fetch fails
    // Fallback display name: abbreviated npub.
    profile = {
      name: npub.slice(0, 8) + "..." + npub.slice(-4),
      displayName: npub.slice(0, 8) + "..." + npub.slice(-4),
    };
    console.log("Login with Amber - using fallback profile:", profile);
  }

  // Connect persisted relays, then resolve and persist preferred relays.
  const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user);
  for (const relay of persistedInboxes) {
    ndk.addExplicitRelay(relay);
  }
  const [inboxes, outboxes] = await getUserPreferredRelays(ndk, user);
  persistRelays(user, inboxes, outboxes);
  ndk.signer = amberSigner;
  ndk.activeUser = user;

  const userState = {
    pubkey: user.pubkey,
    npub,
    profile,
    relays: {
      // NOTE(review): same as loginWithExtension — the `?? persisted*`
      // fallbacks appear unreachable, and the persisted sets hold URL
      // strings without a `.url` property. Verify intent.
      inbox: Array.from(inboxes ?? persistedInboxes).map((relay) => relay.url),
      outbox: Array.from(outboxes ?? persistedOutboxes).map(
        (relay) => relay.url,
      ),
    },
    loginMethod: "amber" as const,
    ndkUser: user,
    signer: amberSigner,
    signedIn: true,
  };

  console.log("Login with Amber - setting userStore with:", userState);
  userStore.set(userState);
  userPubkey.set(user.pubkey);

  // Update relay stores with the new user's relays
  try {
    console.debug('[userStore.ts] loginWithAmber: Updating relay stores for authenticated user');
    await updateActiveRelayStores(ndk);
  } catch (error) {
    console.warn('[userStore.ts] loginWithAmber: Failed to update relay stores:', error);
  }

  // Reset any stale persisted state, clear the logout flag, then persist the
  // fresh login.
  clearLogin();
  localStorage.removeItem("alexandria/logout/flag");
  persistLogin(user, "amber");
}
||||
|
||||
/**
 * Login with npub (read-only).
 *
 * Accepts either a bech32 `npub...` string or a hex pubkey, resolves the
 * user's profile (with a truncated-npub fallback), and publishes a
 * signer-less (read-only) session to `userStore`. No relay lists are
 * resolved for this login method.
 *
 * @param pubkeyOrNpub - bech32 npub or hex public key
 * @throws Error if NDK is not initialized, or if the npub cannot be
 *         decoded / the hex pubkey cannot be encoded
 */
export async function loginWithNpub(pubkeyOrNpub: string) {
  const ndk = get(ndkInstance);
  if (!ndk) throw new Error("NDK not initialized");
  // Only clear previous login state after successful login

  // Normalize the input to a hex pubkey...
  let hexPubkey: string;
  if (pubkeyOrNpub.startsWith("npub")) {
    try {
      hexPubkey = nip19.decode(pubkeyOrNpub).data as string;
    } catch (e) {
      console.error("Failed to decode hex pubkey from npub:", pubkeyOrNpub, e);
      throw e;
    }
  } else {
    hexPubkey = pubkeyOrNpub;
  }
  // ...and re-encode to a canonical npub (also validates hex input).
  let npub: string;
  try {
    npub = nip19.npubEncode(hexPubkey);
  } catch (e) {
    console.error("Failed to encode npub from hex pubkey:", hexPubkey, e);
    throw e;
  }

  console.log("Login with npub - fetching profile for npub:", npub);

  const user = ndk.getUser({ npub });
  let profile: NostrProfile | null = null;
  try {
    profile = await getUserMetadata(npub, true); // Force fresh fetch
    console.log("Login with npub - fetched profile:", profile);
  } catch (error) {
    console.warn("Failed to fetch user metadata during npub login:", error);
    // Continue with login even if metadata fetch fails:
    // show a truncated npub instead of a real name.
    profile = {
      name: npub.slice(0, 8) + "..." + npub.slice(-4),
      displayName: npub.slice(0, 8) + "..." + npub.slice(-4),
    };
    console.log("Login with npub - using fallback profile:", profile);
  }

  // Read-only session: explicitly no signer.
  ndk.signer = undefined;
  ndk.activeUser = user;

  const userState = {
    pubkey: user.pubkey,
    npub,
    profile,
    relays: { inbox: [], outbox: [] },
    loginMethod: "npub" as const,
    ndkUser: user,
    signer: null,
    signedIn: true,
  };

  console.log("Login with npub - setting userStore with:", userState);
  userStore.set(userState);
  userPubkey.set(user.pubkey);

  // Update relay stores with the new user's relays; non-fatal on failure.
  try {
    console.debug('[userStore.ts] loginWithNpub: Updating relay stores for authenticated user');
    await updateActiveRelayStores(ndk);
  } catch (error) {
    console.warn('[userStore.ts] loginWithNpub: Failed to update relay stores:', error);
  }

  // Success: replace the previously persisted login with this one.
  clearLogin();
  localStorage.removeItem("alexandria/logout/flag");
  persistLogin(user, "npub");
}
||||
|
||||
/**
 * Logout and clear all user state.
 *
 * Removes the user's persisted relay lists, aggressively sweeps
 * localStorage for any login-related keys, sets an explicit-logout flag
 * (read at startup to suppress auto-login), resets `userStore` /
 * `userPubkey`, and detaches the user and signer from the NDK instance.
 */
export function logoutUser() {
  console.log("Logging out user...");
  const currentUser = get(userStore);
  if (currentUser.ndkUser) {
    // Clear persisted relays for the user
    localStorage.removeItem(getRelayStorageKey(currentUser.ndkUser, "inbox"));
    localStorage.removeItem(getRelayStorageKey(currentUser.ndkUser, "outbox"));
  }

  // Clear all possible login states from localStorage
  clearLogin();

  // Also clear any other potential login keys that might exist.
  // Collect first, then remove — removing while iterating localStorage
  // by index would skip entries.
  const keysToRemove = [];
  for (let i = 0; i < localStorage.length; i++) {
    const key = localStorage.key(i);
    if (
      key &&
      (key.includes("login") ||
        key.includes("nostr") ||
        key.includes("user") ||
        key.includes("alexandria") ||
        key === "pubkey")
    ) {
      keysToRemove.push(key);
    }
  }

  // Specifically target the login storage key
  // (duplicates are harmless — removeItem is idempotent).
  keysToRemove.push("alexandria/login/pubkey");
  keysToRemove.push("alexandria/login/method");

  keysToRemove.forEach((key) => {
    console.log("Removing localStorage key:", key);
    localStorage.removeItem(key);
  });

  // Clear Amber-specific flags
  localStorage.removeItem("alexandria/amber/fallback");

  // Set a flag to prevent auto-login on next page load
  localStorage.setItem("alexandria/logout/flag", "true");

  console.log("Cleared all login data from localStorage");

  // Reset the reactive stores to the signed-out shape.
  userStore.set({
    pubkey: null,
    npub: null,
    profile: null,
    relays: { inbox: [], outbox: [] },
    loginMethod: null,
    ndkUser: null,
    signer: null,
    signedIn: false,
  });
  userPubkey.set(null);

  // Detach the user/signer from NDK so no further signed actions occur.
  const ndk = get(ndkInstance);
  if (ndk) {
    ndk.activeUser = undefined;
    ndk.signer = undefined;
  }

  console.log("Logout complete");
}
||||
@ -0,0 +1,116 @@
@@ -0,0 +1,116 @@
|
||||
import { ndkInstance } from "$lib/ndk"; |
||||
import { signEvent, getEventHash } from "$lib/utils/nostrUtils"; |
||||
import { getMimeTags } from "$lib/utils/mime"; |
||||
import { communityRelays } from "$lib/consts"; |
||||
import { nip19 } from "nostr-tools"; |
||||
|
||||
/**
 * One section of a zettel document, split out of an AsciiDoc source.
 */
export interface ZettelSection {
  title: string;      // heading text with the leading '=' markers removed
  content: string;    // section body (heading and :attr: lines stripped)
  tags?: string[][];  // Nostr-style tag pairs extracted from :attr: lines
}
||||
|
||||
/** |
||||
* Splits AsciiDoc content into sections at the specified heading level. |
||||
* Each section starts with the heading and includes all lines up to the next heading of the same level. |
||||
* @param content The AsciiDoc string. |
||||
* @param level The heading level (2 for '==', 3 for '===', etc.). |
||||
* @returns Array of section strings, each starting with the heading. |
||||
*/ |
||||
export function splitAsciiDocByHeadingLevel( |
||||
content: string, |
||||
level: number, |
||||
): string[] { |
||||
if (level < 1 || level > 6) throw new Error("Heading level must be 1-6"); |
||||
const heading = "^" + "=".repeat(level) + " "; |
||||
const regex = new RegExp(`(?=${heading})`, "gm"); |
||||
return content |
||||
.split(regex) |
||||
.map((section) => section.trim()) |
||||
.filter((section) => section.length > 0); |
||||
} |
||||
|
||||
/** |
||||
* Parses a single AsciiDoc section string into a ZettelSection object. |
||||
* @param section The section string (must start with heading). |
||||
*/ |
||||
export function parseZettelSection(section: string): ZettelSection { |
||||
const lines = section.split("\n"); |
||||
let title = "Untitled"; |
||||
let contentLines: string[] = []; |
||||
let inHeader = true; |
||||
let tags: string[][] = []; |
||||
tags = extractTags(section); |
||||
|
||||
for (const line of lines) { |
||||
const trimmed = line.trim(); |
||||
if (inHeader && trimmed.startsWith("==")) { |
||||
title = trimmed.replace(/^==+/, "").trim(); |
||||
continue; |
||||
} else if (inHeader && trimmed.startsWith(":")) { |
||||
continue; |
||||
} |
||||
|
||||
inHeader = false; |
||||
contentLines.push(line); |
||||
} |
||||
|
||||
return { |
||||
title, |
||||
content: contentLines.join("\n").trim(), |
||||
tags, |
||||
}; |
||||
} |
||||
|
||||
/** |
||||
* Parses AsciiDoc into an array of ZettelSection objects at the given heading level. |
||||
*/ |
||||
export function parseAsciiDocSections( |
||||
content: string, |
||||
level: number, |
||||
): ZettelSection[] { |
||||
return splitAsciiDocByHeadingLevel(content, level).map(parseZettelSection); |
||||
} |
||||
|
||||
/** |
||||
* Extracts tag names and values from the content. |
||||
* :tagname: tagvalue // tags are optional
|
||||
* @param content The AsciiDoc string. |
||||
* @returns Array of tags. |
||||
*/ |
||||
export function extractTags(content: string): string[][] { |
||||
const tags: string[][] = []; |
||||
const lines = content.split("\n"); |
||||
|
||||
for (const line of lines) { |
||||
const trimmed = line.trim(); |
||||
if (trimmed.startsWith(":")) { |
||||
// Parse AsciiDoc attribute format: :tagname: value
|
||||
const match = trimmed.match(/^:([^:]+):\s*(.*)$/); |
||||
if (match) { |
||||
const tagName = match[1].trim(); |
||||
const tagValue = match[2].trim(); |
||||
|
||||
// Special handling for tags attribute
|
||||
if (tagName === "tags") { |
||||
// Split comma-separated values and create individual "t" tags
|
||||
const tagValues = tagValue |
||||
.split(",") |
||||
.map((v) => v.trim()) |
||||
.filter((v) => v.length > 0); |
||||
for (const value of tagValues) { |
||||
tags.push(["t", value]); |
||||
} |
||||
} else { |
||||
// Regular attribute becomes a tag
|
||||
tags.push([tagName, tagValue]); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
console.log("Extracted tags:", tags); |
||||
return tags; |
||||
} |
||||
// You can add publishing logic here as needed, e.g.,
|
||||
// export async function publishZettelSection(...) { ... }
|
||||
@ -0,0 +1,31 @@
@@ -0,0 +1,31 @@
|
||||
/** |
||||
* Generate a dark-pastel color based on a string (like an event ID) |
||||
* @param seed - The string to generate a color from |
||||
* @returns A dark-pastel hex color |
||||
*/ |
||||
export function generateDarkPastelColor(seed: string): string { |
||||
// Create a simple hash from the seed string
|
||||
let hash = 0; |
||||
for (let i = 0; i < seed.length; i++) { |
||||
const char = seed.charCodeAt(i); |
||||
hash = ((hash << 5) - hash) + char; |
||||
hash = hash & hash; // Convert to 32-bit integer
|
||||
} |
||||
|
||||
// Use the hash to generate lighter pastel colors
|
||||
// Keep values in the 120-200 range for better pastel effect
|
||||
const r = Math.abs(hash) % 80 + 120; // 120-200 range
|
||||
const g = Math.abs(hash >> 8) % 80 + 120; // 120-200 range
|
||||
const b = Math.abs(hash >> 16) % 80 + 120; // 120-200 range
|
||||
|
||||
return `#${r.toString(16).padStart(2, '0')}${g.toString(16).padStart(2, '0')}${b.toString(16).padStart(2, '0')}`; |
||||
} |
||||
|
||||
/** |
||||
* Test function to verify color generation |
||||
* @param eventId - The event ID to test |
||||
* @returns The generated color |
||||
*/ |
||||
export function testColorGeneration(eventId: string): string { |
||||
return generateDarkPastelColor(eventId); |
||||
}
|
||||
@ -0,0 +1,189 @@
@@ -0,0 +1,189 @@
|
||||
import { deduplicateRelayUrls } from './relay_management'; |
||||
|
||||
/**
 * Network conditions for relay selection.
 * Produced by detectNetworkCondition and consumed by
 * getRelaySetForNetworkCondition / startNetworkMonitoring.
 */
export enum NetworkCondition {
  ONLINE = 'online',   // reachable with acceptable latency — full relay set
  SLOW = 'slow',       // reachable but >2s response time — reduced relay set
  OFFLINE = 'offline'  // no test endpoint reachable — local relays / cache
}
||||
|
||||
/**
 * Network connectivity test endpoints.
 * Probed in order by isNetworkOnline / testNetworkSpeed; a fetch that
 * completes without throwing counts as connectivity (no-cors responses
 * are opaque, so status codes cannot be inspected).
 */
const NETWORK_ENDPOINTS = [
  'https://www.google.com/favicon.ico',
  'https://httpbin.org/status/200',
  'https://api.github.com/zen'
];
||||
|
||||
/** |
||||
* Detects if the network is online using more reliable endpoints |
||||
* @returns Promise that resolves to true if online, false otherwise |
||||
*/ |
||||
export async function isNetworkOnline(): Promise<boolean> { |
||||
for (const endpoint of NETWORK_ENDPOINTS) { |
||||
try { |
||||
// Use a simple fetch without HEAD method to avoid CORS issues
|
||||
const response = await fetch(endpoint, { |
||||
method: 'GET', |
||||
cache: 'no-cache', |
||||
signal: AbortSignal.timeout(3000), |
||||
mode: 'no-cors' // Use no-cors mode to avoid CORS issues
|
||||
}); |
||||
// With no-cors mode, we can't check response.ok, so we assume success if no error
|
||||
return true; |
||||
} catch (error) { |
||||
console.debug(`[network_detection.ts] Failed to reach ${endpoint}:`, error); |
||||
continue; |
||||
} |
||||
} |
||||
|
||||
console.debug('[network_detection.ts] All network endpoints failed'); |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* Tests network speed by measuring response time |
||||
* @returns Promise that resolves to network speed in milliseconds |
||||
*/ |
||||
export async function testNetworkSpeed(): Promise<number> { |
||||
const startTime = performance.now(); |
||||
|
||||
for (const endpoint of NETWORK_ENDPOINTS) { |
||||
try { |
||||
await fetch(endpoint, { |
||||
method: 'GET', |
||||
cache: 'no-cache', |
||||
signal: AbortSignal.timeout(5000), |
||||
mode: 'no-cors' // Use no-cors mode to avoid CORS issues
|
||||
}); |
||||
|
||||
const endTime = performance.now(); |
||||
return endTime - startTime; |
||||
} catch (error) { |
||||
console.debug(`[network_detection.ts] Speed test failed for ${endpoint}:`, error); |
||||
continue; |
||||
} |
||||
} |
||||
|
||||
console.debug('[network_detection.ts] Network speed test failed for all endpoints'); |
||||
return Infinity; // Very slow if it fails
|
||||
} |
||||
|
||||
/** |
||||
* Determines network condition based on connectivity and speed |
||||
* @returns Promise that resolves to NetworkCondition |
||||
*/ |
||||
export async function detectNetworkCondition(): Promise<NetworkCondition> { |
||||
const isOnline = await isNetworkOnline(); |
||||
|
||||
if (!isOnline) { |
||||
console.debug('[network_detection.ts] Network condition: OFFLINE'); |
||||
return NetworkCondition.OFFLINE; |
||||
} |
||||
|
||||
const speed = await testNetworkSpeed(); |
||||
|
||||
// Consider network slow if response time > 2000ms
|
||||
if (speed > 2000) { |
||||
console.debug(`[network_detection.ts] Network condition: SLOW (${speed.toFixed(0)}ms)`); |
||||
return NetworkCondition.SLOW; |
||||
} |
||||
|
||||
console.debug(`[network_detection.ts] Network condition: ONLINE (${speed.toFixed(0)}ms)`); |
||||
return NetworkCondition.ONLINE; |
||||
} |
||||
|
||||
/** |
||||
* Gets the appropriate relay sets based on network condition |
||||
* @param networkCondition The detected network condition |
||||
* @param discoveredLocalRelays Array of discovered local relay URLs |
||||
* @param lowbandwidthRelays Array of low bandwidth relay URLs |
||||
* @param fullRelaySet The complete relay set for normal conditions |
||||
* @returns Object with inbox and outbox relay arrays |
||||
*/ |
||||
export function getRelaySetForNetworkCondition( |
||||
networkCondition: NetworkCondition, |
||||
discoveredLocalRelays: string[], |
||||
lowbandwidthRelays: string[], |
||||
fullRelaySet: { inboxRelays: string[]; outboxRelays: string[] } |
||||
): { inboxRelays: string[]; outboxRelays: string[] } { |
||||
switch (networkCondition) { |
||||
case NetworkCondition.OFFLINE: |
||||
// When offline, use local relays if available, otherwise rely on cache
|
||||
// This will be improved when IndexedDB local relay is implemented
|
||||
if (discoveredLocalRelays.length > 0) { |
||||
console.debug('[network_detection.ts] Using local relays (offline)'); |
||||
return { |
||||
inboxRelays: discoveredLocalRelays, |
||||
outboxRelays: discoveredLocalRelays |
||||
}; |
||||
} else { |
||||
console.debug('[network_detection.ts] No local relays available, will rely on cache (offline)'); |
||||
return { |
||||
inboxRelays: [], |
||||
outboxRelays: [] |
||||
}; |
||||
} |
||||
|
||||
case NetworkCondition.SLOW: |
||||
// Local relays + low bandwidth relays when slow (deduplicated)
|
||||
console.debug('[network_detection.ts] Using local + low bandwidth relays (slow network)'); |
||||
const slowInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]); |
||||
const slowOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]); |
||||
return { |
||||
inboxRelays: slowInboxRelays, |
||||
outboxRelays: slowOutboxRelays |
||||
}; |
||||
|
||||
case NetworkCondition.ONLINE: |
||||
default: |
||||
// Full relay set when online
|
||||
console.debug('[network_detection.ts] Using full relay set (online)'); |
||||
return fullRelaySet; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Starts periodic network monitoring with reduced frequency to avoid spam |
||||
* @param onNetworkChange Callback function called when network condition changes |
||||
* @param checkInterval Interval in milliseconds between network checks (default: 60 seconds) |
||||
* @returns Function to stop the monitoring |
||||
*/ |
||||
export function startNetworkMonitoring( |
||||
onNetworkChange: (condition: NetworkCondition) => void, |
||||
checkInterval: number = 60000 // Increased to 60 seconds to reduce spam
|
||||
): () => void { |
||||
let lastCondition: NetworkCondition | null = null; |
||||
let intervalId: number | null = null; |
||||
|
||||
const checkNetwork = async () => { |
||||
try { |
||||
const currentCondition = await detectNetworkCondition(); |
||||
|
||||
if (currentCondition !== lastCondition) { |
||||
console.debug(`[network_detection.ts] Network condition changed: ${lastCondition} -> ${currentCondition}`); |
||||
lastCondition = currentCondition; |
||||
onNetworkChange(currentCondition); |
||||
} |
||||
} catch (error) { |
||||
console.warn('[network_detection.ts] Network monitoring error:', error); |
||||
} |
||||
}; |
||||
|
||||
// Initial check
|
||||
checkNetwork(); |
||||
|
||||
// Set up periodic monitoring
|
||||
intervalId = window.setInterval(checkNetwork, checkInterval); |
||||
|
||||
// Return function to stop monitoring
|
||||
return () => { |
||||
if (intervalId !== null) { |
||||
clearInterval(intervalId); |
||||
intervalId = null; |
||||
} |
||||
}; |
||||
}
|
||||
@ -1,233 +1,397 @@
@@ -1,233 +1,397 @@
|
||||
import { ndkInstance } from '$lib/ndk'; |
||||
import { getUserMetadata, getNpubFromNip05 } from '$lib/utils/nostrUtils'; |
||||
import { NDKRelaySet, NDKEvent } from '@nostr-dev-kit/ndk'; |
||||
import { searchCache } from '$lib/utils/searchCache'; |
||||
import { communityRelay, profileRelay } from '$lib/consts'; |
||||
import { get } from 'svelte/store'; |
||||
import type { NostrProfile, ProfileSearchResult } from './search_types'; |
||||
import { fieldMatches, nip05Matches, normalizeSearchTerm, COMMON_DOMAINS, createProfileFromEvent } from './search_utils'; |
||||
import { checkCommunityStatus } from './community_checker'; |
||||
import { TIMEOUTS } from './search_constants'; |
||||
import { ndkInstance } from "$lib/ndk"; |
||||
import { getUserMetadata, getNpubFromNip05 } from "$lib/utils/nostrUtils"; |
||||
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; |
||||
import { searchCache } from "$lib/utils/searchCache"; |
||||
import { communityRelays, secondaryRelays } from "$lib/consts"; |
||||
import { get } from "svelte/store"; |
||||
import type { NostrProfile, ProfileSearchResult } from "./search_types"; |
||||
import { |
||||
fieldMatches, |
||||
nip05Matches, |
||||
normalizeSearchTerm, |
||||
COMMON_DOMAINS, |
||||
createProfileFromEvent, |
||||
} from "./search_utils"; |
||||
import { checkCommunityStatus } from "./community_checker"; |
||||
import { TIMEOUTS } from "./search_constants"; |
||||
|
||||
/** |
||||
* Search for profiles by various criteria (display name, name, NIP-05, npub) |
||||
*/ |
||||
export async function searchProfiles(searchTerm: string): Promise<ProfileSearchResult> { |
||||
const normalizedSearchTerm = searchTerm.toLowerCase().trim(); |
||||
|
||||
export async function searchProfiles( |
||||
searchTerm: string, |
||||
): Promise<ProfileSearchResult> { |
||||
const normalizedSearchTerm = normalizeSearchTerm(searchTerm); |
||||
|
||||
console.log( |
||||
"searchProfiles called with:", |
||||
searchTerm, |
||||
"normalized:", |
||||
normalizedSearchTerm, |
||||
); |
||||
|
||||
// Check cache first
|
||||
const cachedResult = searchCache.get('profile', normalizedSearchTerm); |
||||
const cachedResult = searchCache.get("profile", normalizedSearchTerm); |
||||
if (cachedResult) { |
||||
const profiles = cachedResult.events.map(event => { |
||||
try { |
||||
const profileData = JSON.parse(event.content); |
||||
return createProfileFromEvent(event, profileData); |
||||
} catch { |
||||
return null; |
||||
} |
||||
}).filter(Boolean) as NostrProfile[]; |
||||
|
||||
const communityStatus = await checkCommunityStatus(profiles); |
||||
return { profiles, Status: communityStatus }; |
||||
console.log("Found cached result for:", normalizedSearchTerm); |
||||
const profiles = cachedResult.events |
||||
.map((event) => { |
||||
try { |
||||
const profileData = JSON.parse(event.content); |
||||
return createProfileFromEvent(event, profileData); |
||||
} catch { |
||||
return null; |
||||
} |
||||
}) |
||||
.filter(Boolean) as NostrProfile[]; |
||||
|
||||
console.log("Cached profiles found:", profiles.length); |
||||
return { profiles, Status: {} }; |
||||
} |
||||
|
||||
const ndk = get(ndkInstance); |
||||
if (!ndk) { |
||||
throw new Error('NDK not initialized'); |
||||
console.error("NDK not initialized"); |
||||
throw new Error("NDK not initialized"); |
||||
} |
||||
|
||||
let foundProfiles: NostrProfile[] = []; |
||||
let timeoutId: ReturnType<typeof setTimeout> | null = null; |
||||
console.log("NDK initialized, starting search logic"); |
||||
|
||||
// Set a timeout to force completion after profile search timeout
|
||||
timeoutId = setTimeout(() => { |
||||
if (foundProfiles.length === 0) { |
||||
// Timeout reached, but no need to log this
|
||||
} |
||||
}, TIMEOUTS.PROFILE_SEARCH); |
||||
let foundProfiles: NostrProfile[] = []; |
||||
|
||||
try { |
||||
// Check if it's a valid npub/nprofile first
|
||||
if (normalizedSearchTerm.startsWith('npub') || normalizedSearchTerm.startsWith('nprofile')) { |
||||
if ( |
||||
normalizedSearchTerm.startsWith("npub") || |
||||
normalizedSearchTerm.startsWith("nprofile") |
||||
) { |
||||
try { |
||||
const metadata = await getUserMetadata(normalizedSearchTerm); |
||||
if (metadata) { |
||||
foundProfiles = [metadata]; |
||||
} |
||||
} catch (error) { |
||||
console.error('Error fetching metadata for npub:', error); |
||||
console.error("Error fetching metadata for npub:", error); |
||||
} |
||||
} else if (normalizedSearchTerm.includes('@')) { |
||||
// Check if it's a NIP-05 address
|
||||
} else if (normalizedSearchTerm.includes("@")) { |
||||
// Check if it's a NIP-05 address - normalize it properly
|
||||
const normalizedNip05 = normalizedSearchTerm.toLowerCase(); |
||||
try { |
||||
const npub = await getNpubFromNip05(normalizedSearchTerm); |
||||
const npub = await getNpubFromNip05(normalizedNip05); |
||||
if (npub) { |
||||
const metadata = await getUserMetadata(npub); |
||||
const profile: NostrProfile = { |
||||
...metadata, |
||||
pubkey: npub |
||||
pubkey: npub, |
||||
}; |
||||
foundProfiles = [profile]; |
||||
} |
||||
} catch (e) { |
||||
console.error('[Search] NIP-05 lookup failed:', e); |
||||
// If NIP-05 lookup fails, continue with regular search
|
||||
console.error("[Search] NIP-05 lookup failed:", e); |
||||
} |
||||
} else { |
||||
// Try searching for NIP-05 addresses that match the search term
|
||||
// Try NIP-05 search first (faster than relay search)
|
||||
console.log("Starting NIP-05 search for:", normalizedSearchTerm); |
||||
foundProfiles = await searchNip05Domains(normalizedSearchTerm, ndk); |
||||
console.log( |
||||
"NIP-05 search completed, found:", |
||||
foundProfiles.length, |
||||
"profiles", |
||||
); |
||||
|
||||
// If no NIP-05 results found, search for profiles across relays
|
||||
// If no NIP-05 results, try quick relay search
|
||||
if (foundProfiles.length === 0) { |
||||
foundProfiles = await searchProfilesAcrossRelays(normalizedSearchTerm, ndk); |
||||
console.log("No NIP-05 results, trying quick relay search"); |
||||
foundProfiles = await quickRelaySearch(normalizedSearchTerm, ndk); |
||||
console.log( |
||||
"Quick relay search completed, found:", |
||||
foundProfiles.length, |
||||
"profiles", |
||||
); |
||||
} |
||||
} |
||||
|
||||
// Wait for search to complete or timeout
|
||||
await new Promise<void>((resolve) => { |
||||
const checkComplete = () => { |
||||
if (timeoutId === null || foundProfiles.length > 0) { |
||||
resolve(); |
||||
} else { |
||||
setTimeout(checkComplete, 100); |
||||
} |
||||
}; |
||||
checkComplete(); |
||||
}); |
||||
|
||||
// Cache the results
|
||||
if (foundProfiles.length > 0) { |
||||
const events = foundProfiles.map(profile => { |
||||
const events = foundProfiles.map((profile) => { |
||||
const event = new NDKEvent(ndk); |
||||
event.content = JSON.stringify(profile); |
||||
event.pubkey = profile.pubkey || ''; |
||||
event.pubkey = profile.pubkey || ""; |
||||
return event; |
||||
}); |
||||
|
||||
|
||||
const result = { |
||||
events, |
||||
secondOrder: [], |
||||
tTagEvents: [], |
||||
eventIds: new Set<string>(), |
||||
addresses: new Set<string>(), |
||||
searchType: 'profile', |
||||
searchTerm: normalizedSearchTerm |
||||
searchType: "profile", |
||||
searchTerm: normalizedSearchTerm, |
||||
}; |
||||
searchCache.set('profile', normalizedSearchTerm, result); |
||||
searchCache.set("profile", normalizedSearchTerm, result); |
||||
} |
||||
|
||||
// Check community status for all profiles
|
||||
const communityStatus = await checkCommunityStatus(foundProfiles); |
||||
return { profiles: foundProfiles, Status: communityStatus }; |
||||
|
||||
console.log("Search completed, found profiles:", foundProfiles.length); |
||||
return { profiles: foundProfiles, Status: {} }; |
||||
} catch (error) { |
||||
console.error('Error searching profiles:', error); |
||||
console.error("Error searching profiles:", error); |
||||
return { profiles: [], Status: {} }; |
||||
} finally { |
||||
if (timeoutId) { |
||||
clearTimeout(timeoutId); |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Search for NIP-05 addresses across common domains |
||||
*/ |
||||
async function searchNip05Domains(searchTerm: string, ndk: any): Promise<NostrProfile[]> { |
||||
async function searchNip05Domains( |
||||
searchTerm: string, |
||||
ndk: any, |
||||
): Promise<NostrProfile[]> { |
||||
const foundProfiles: NostrProfile[] = []; |
||||
|
||||
// Enhanced list of common domains for NIP-05 lookups
|
||||
// Prioritize gitcitadel.com since we know it has profiles
|
||||
const commonDomains = [ |
||||
"gitcitadel.com", // Prioritize this domain
|
||||
"theforest.nostr1.com", |
||||
"nostr1.com", |
||||
"nostr.land", |
||||
"sovbit.host", |
||||
"damus.io", |
||||
"snort.social", |
||||
"iris.to", |
||||
"coracle.social", |
||||
"nostr.band", |
||||
"nostr.wine", |
||||
"purplepag.es", |
||||
"relay.noswhere.com", |
||||
"aggr.nostr.land", |
||||
"nostr.sovbit.host", |
||||
"freelay.sovbit.host", |
||||
"nostr21.com", |
||||
"greensoul.space", |
||||
"relay.damus.io", |
||||
"relay.nostr.band", |
||||
]; |
||||
|
||||
// Normalize the search term for NIP-05 lookup
|
||||
const normalizedSearchTerm = searchTerm.toLowerCase().trim(); |
||||
console.log("NIP-05 search: normalized search term:", normalizedSearchTerm); |
||||
|
||||
// Try gitcitadel.com first with extra debugging
|
||||
const gitcitadelAddress = `${normalizedSearchTerm}@gitcitadel.com`; |
||||
console.log("NIP-05 search: trying gitcitadel.com first:", gitcitadelAddress); |
||||
try { |
||||
for (const domain of COMMON_DOMAINS) { |
||||
const nip05Address = `${searchTerm}@${domain}`; |
||||
try { |
||||
const npub = await getNpubFromNip05(nip05Address); |
||||
if (npub) { |
||||
const metadata = await getUserMetadata(npub); |
||||
const profile: NostrProfile = { |
||||
...metadata, |
||||
pubkey: npub |
||||
}; |
||||
return [profile]; |
||||
} |
||||
} catch (e) { |
||||
// Continue to next domain
|
||||
} |
||||
const npub = await getNpubFromNip05(gitcitadelAddress); |
||||
if (npub) { |
||||
console.log( |
||||
"NIP-05 search: SUCCESS! found npub for gitcitadel.com:", |
||||
npub, |
||||
); |
||||
const metadata = await getUserMetadata(npub); |
||||
const profile: NostrProfile = { |
||||
...metadata, |
||||
pubkey: npub, |
||||
}; |
||||
console.log( |
||||
"NIP-05 search: created profile for gitcitadel.com:", |
||||
profile, |
||||
); |
||||
foundProfiles.push(profile); |
||||
return foundProfiles; // Return immediately if we found it on gitcitadel.com
|
||||
} else { |
||||
console.log("NIP-05 search: no npub found for gitcitadel.com"); |
||||
} |
||||
} catch (e) { |
||||
console.error('[Search] NIP-05 domain search failed:', e); |
||||
console.log("NIP-05 search: error for gitcitadel.com:", e); |
||||
} |
||||
return []; |
||||
|
||||
// If gitcitadel.com didn't work, try other domains
|
||||
console.log("NIP-05 search: gitcitadel.com failed, trying other domains..."); |
||||
const otherDomains = commonDomains.filter( |
||||
(domain) => domain !== "gitcitadel.com", |
||||
); |
||||
|
||||
// Search all other domains in parallel with timeout
|
||||
const searchPromises = otherDomains.map(async (domain) => { |
||||
const nip05Address = `${normalizedSearchTerm}@${domain}`; |
||||
console.log("NIP-05 search: trying address:", nip05Address); |
||||
try { |
||||
const npub = await getNpubFromNip05(nip05Address); |
||||
if (npub) { |
||||
console.log("NIP-05 search: found npub for", nip05Address, ":", npub); |
||||
const metadata = await getUserMetadata(npub); |
||||
const profile: NostrProfile = { |
||||
...metadata, |
||||
pubkey: npub, |
||||
}; |
||||
console.log( |
||||
"NIP-05 search: created profile for", |
||||
nip05Address, |
||||
":", |
||||
profile, |
||||
); |
||||
return profile; |
||||
} else { |
||||
console.log("NIP-05 search: no npub found for", nip05Address); |
||||
} |
||||
} catch (e) { |
||||
console.log("NIP-05 search: error for", nip05Address, ":", e); |
||||
// Continue to next domain
|
||||
} |
||||
return null; |
||||
}); |
||||
|
||||
// Wait for all searches with timeout
|
||||
const results = await Promise.allSettled(searchPromises); |
||||
|
||||
for (const result of results) { |
||||
if (result.status === "fulfilled" && result.value) { |
||||
foundProfiles.push(result.value); |
||||
} |
||||
} |
||||
|
||||
console.log("NIP-05 search: total profiles found:", foundProfiles.length); |
||||
return foundProfiles; |
||||
} |
||||
|
||||
/** |
||||
* Search for profiles across relays |
||||
* Quick relay search with short timeout |
||||
*/ |
||||
async function searchProfilesAcrossRelays(searchTerm: string, ndk: any): Promise<NostrProfile[]> { |
||||
async function quickRelaySearch( |
||||
searchTerm: string, |
||||
ndk: any, |
||||
): Promise<NostrProfile[]> { |
||||
console.log("quickRelaySearch called with:", searchTerm); |
||||
const foundProfiles: NostrProfile[] = []; |
||||
|
||||
// Prioritize community relays for better search results
|
||||
const allRelays = Array.from(ndk.pool.relays.values()) as any[]; |
||||
const prioritizedRelays = new Set([ |
||||
...allRelays.filter((relay: any) => relay.url === communityRelay), |
||||
...allRelays.filter((relay: any) => relay.url !== communityRelay) |
||||
]); |
||||
const relaySet = new NDKRelaySet(prioritizedRelays as any, ndk); |
||||
|
||||
// Subscribe to profile events
|
||||
const sub = ndk.subscribe( |
||||
{ kinds: [0] }, |
||||
{ closeOnEose: true }, |
||||
relaySet |
||||
); |
||||
|
||||
return new Promise((resolve) => { |
||||
sub.on('event', (event: NDKEvent) => { |
||||
// Normalize the search term for relay search
|
||||
const normalizedSearchTerm = normalizeSearchTerm(searchTerm); |
||||
console.log("Normalized search term for relay search:", normalizedSearchTerm); |
||||
|
||||
// Use all profile relays for better coverage
|
||||
const quickRelayUrls = [...communityRelays, ...secondaryRelays]; // Use all available relays
|
||||
console.log("Using all relays for search:", quickRelayUrls); |
||||
|
||||
// Create relay sets for parallel search
|
||||
const relaySets = quickRelayUrls |
||||
.map((url) => { |
||||
try { |
||||
if (!event.content) return; |
||||
const profileData = JSON.parse(event.content); |
||||
const displayName = profileData.displayName || profileData.display_name || ''; |
||||
const display_name = profileData.display_name || ''; |
||||
const name = profileData.name || ''; |
||||
const nip05 = profileData.nip05 || ''; |
||||
const about = profileData.about || ''; |
||||
|
||||
// Check if any field matches the search term
|
||||
const matchesDisplayName = fieldMatches(displayName, searchTerm); |
||||
const matchesDisplay_name = fieldMatches(display_name, searchTerm); |
||||
const matchesName = fieldMatches(name, searchTerm); |
||||
const matchesNip05 = nip05Matches(nip05, searchTerm); |
||||
const matchesAbout = fieldMatches(about, searchTerm); |
||||
|
||||
if (matchesDisplayName || matchesDisplay_name || matchesName || matchesNip05 || matchesAbout) { |
||||
const profile = createProfileFromEvent(event, profileData); |
||||
|
||||
// Check if we already have this profile
|
||||
const existingIndex = foundProfiles.findIndex(p => p.pubkey === event.pubkey); |
||||
if (existingIndex === -1) { |
||||
foundProfiles.push(profile); |
||||
} |
||||
} |
||||
return NDKRelaySet.fromRelayUrls([url], ndk); |
||||
} catch (e) { |
||||
// Invalid JSON or other error, skip
|
||||
console.warn(`Failed to create relay set for ${url}:`, e); |
||||
return null; |
||||
} |
||||
}); |
||||
}) |
||||
.filter(Boolean); |
||||
|
||||
// Search all relays in parallel with short timeout
|
||||
const searchPromises = relaySets.map(async (relaySet, index) => { |
||||
if (!relaySet) return []; |
||||
|
||||
return new Promise<NostrProfile[]>((resolve) => { |
||||
const foundInRelay: NostrProfile[] = []; |
||||
let eventCount = 0; |
||||
|
||||
console.log( |
||||
`Starting search on relay ${index + 1}: ${quickRelayUrls[index]}`, |
||||
); |
||||
|
||||
const sub = ndk.subscribe( |
||||
{ kinds: [0] }, |
||||
{ closeOnEose: true, relaySet }, |
||||
); |
||||
|
||||
sub.on('eose', () => { |
||||
if (foundProfiles.length > 0) { |
||||
// Deduplicate by pubkey, keep only newest
|
||||
const deduped: Record<string, { profile: NostrProfile; created_at: number }> = {}; |
||||
for (const profile of foundProfiles) { |
||||
const pubkey = profile.pubkey; |
||||
if (pubkey) { |
||||
// We don't have created_at from getUserMetadata, so just keep the first one
|
||||
if (!deduped[pubkey]) { |
||||
deduped[pubkey] = { profile, created_at: 0 }; |
||||
sub.on("event", (event: NDKEvent) => { |
||||
eventCount++; |
||||
try { |
||||
if (!event.content) return; |
||||
const profileData = JSON.parse(event.content); |
||||
const displayName = |
||||
profileData.displayName || profileData.display_name || ""; |
||||
const display_name = profileData.display_name || ""; |
||||
const name = profileData.name || ""; |
||||
const nip05 = profileData.nip05 || ""; |
||||
const about = profileData.about || ""; |
||||
|
||||
// Check if any field matches the search term using normalized comparison
|
||||
const matchesDisplayName = fieldMatches( |
||||
displayName, |
||||
normalizedSearchTerm, |
||||
); |
||||
const matchesDisplay_name = fieldMatches( |
||||
display_name, |
||||
normalizedSearchTerm, |
||||
); |
||||
const matchesName = fieldMatches(name, normalizedSearchTerm); |
||||
const matchesNip05 = nip05Matches(nip05, normalizedSearchTerm); |
||||
const matchesAbout = fieldMatches(about, normalizedSearchTerm); |
||||
|
||||
if ( |
||||
matchesDisplayName || |
||||
matchesDisplay_name || |
||||
matchesName || |
||||
matchesNip05 || |
||||
matchesAbout |
||||
) { |
||||
console.log(`Found matching profile on relay ${index + 1}:`, { |
||||
name: profileData.name, |
||||
display_name: profileData.display_name, |
||||
nip05: profileData.nip05, |
||||
pubkey: event.pubkey, |
||||
searchTerm: normalizedSearchTerm, |
||||
}); |
||||
const profile = createProfileFromEvent(event, profileData); |
||||
|
||||
// Check if we already have this profile in this relay
|
||||
const existingIndex = foundInRelay.findIndex( |
||||
(p) => p.pubkey === event.pubkey, |
||||
); |
||||
if (existingIndex === -1) { |
||||
foundInRelay.push(profile); |
||||
} |
||||
} |
||||
} catch (e) { |
||||
// Invalid JSON or other error, skip
|
||||
} |
||||
const dedupedProfiles = Object.values(deduped).map(x => x.profile); |
||||
resolve(dedupedProfiles); |
||||
} else { |
||||
resolve([]); |
||||
} |
||||
}); |
||||
|
||||
sub.on("eose", () => { |
||||
console.log( |
||||
`Relay ${index + 1} (${quickRelayUrls[index]}) search completed, processed ${eventCount} events, found ${foundInRelay.length} matches`, |
||||
); |
||||
resolve(foundInRelay); |
||||
}); |
||||
|
||||
// Short timeout for quick search
|
||||
setTimeout(() => { |
||||
console.log( |
||||
`Relay ${index + 1} (${quickRelayUrls[index]}) search timed out after 1.5s, processed ${eventCount} events, found ${foundInRelay.length} matches`, |
||||
); |
||||
sub.stop(); |
||||
resolve(foundInRelay); |
||||
}, 1500); // 1.5 second timeout per relay
|
||||
}); |
||||
}); |
||||
}
|
||||
|
||||
// Wait for all searches to complete
|
||||
const results = await Promise.allSettled(searchPromises); |
||||
|
||||
// Combine and deduplicate results
|
||||
const allProfiles: Record<string, NostrProfile> = {}; |
||||
|
||||
for (const result of results) { |
||||
if (result.status === "fulfilled") { |
||||
for (const profile of result.value) { |
||||
if (profile.pubkey) { |
||||
allProfiles[profile.pubkey] = profile; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
console.log( |
||||
`Total unique profiles found: ${Object.keys(allProfiles).length}`, |
||||
); |
||||
return Object.values(allProfiles); |
||||
} |
||||
|
||||
@ -0,0 +1,531 @@
@@ -0,0 +1,531 @@
|
||||
import NDK, { NDKRelay, NDKUser } from "@nostr-dev-kit/ndk"; |
||||
import { communityRelays, searchRelays, secondaryRelays, anonymousRelays, lowbandwidthRelays, localRelays } from "../consts"; |
||||
import { getRelaySetForNetworkCondition, NetworkCondition } from "./network_detection"; |
||||
import { networkCondition } from "../stores/networkStore"; |
||||
import { get } from "svelte/store"; |
||||
|
||||
/** |
||||
* Normalizes a relay URL to a standard format |
||||
* @param url The relay URL to normalize |
||||
* @returns The normalized relay URL |
||||
*/ |
||||
export function normalizeRelayUrl(url: string): string { |
||||
let normalized = url.toLowerCase().trim(); |
||||
|
||||
// Ensure protocol is present
|
||||
if (!normalized.startsWith('ws://') && !normalized.startsWith('wss://')) { |
||||
normalized = 'wss://' + normalized; |
||||
} |
||||
|
||||
// Remove trailing slash
|
||||
normalized = normalized.replace(/\/$/, ''); |
||||
|
||||
return normalized; |
||||
} |
||||
|
||||
/** |
||||
* Normalizes an array of relay URLs |
||||
* @param urls Array of relay URLs to normalize |
||||
* @returns Array of normalized relay URLs |
||||
*/ |
||||
export function normalizeRelayUrls(urls: string[]): string[] { |
||||
return urls.map(normalizeRelayUrl); |
||||
} |
||||
|
||||
/** |
||||
* Removes duplicates from an array of relay URLs |
||||
* @param urls Array of relay URLs |
||||
* @returns Array of unique relay URLs |
||||
*/ |
||||
export function deduplicateRelayUrls(urls: string[]): string[] { |
||||
const normalized = normalizeRelayUrls(urls); |
||||
return [...new Set(normalized)]; |
||||
} |
||||
|
||||
/** |
||||
* Tests connection to a relay and returns connection status |
||||
* @param relayUrl The relay URL to test |
||||
* @param ndk The NDK instance |
||||
* @returns Promise that resolves to connection status |
||||
*/ |
||||
export async function testRelayConnection( |
||||
relayUrl: string, |
||||
ndk: NDK, |
||||
): Promise<{ |
||||
connected: boolean; |
||||
requiresAuth: boolean; |
||||
error?: string; |
||||
actualUrl?: string; |
||||
}> { |
||||
return new Promise((resolve) => { |
||||
// Ensure the URL is using wss:// protocol
|
||||
const secureUrl = ensureSecureWebSocket(relayUrl); |
||||
|
||||
// Use the existing NDK instance instead of creating a new one
|
||||
const relay = new NDKRelay(secureUrl, undefined, ndk); |
||||
let authRequired = false; |
||||
let connected = false; |
||||
let error: string | undefined; |
||||
let actualUrl: string | undefined; |
||||
|
||||
const timeout = setTimeout(() => { |
||||
relay.disconnect(); |
||||
resolve({ |
||||
connected: false, |
||||
requiresAuth: authRequired, |
||||
error: "Connection timeout", |
||||
actualUrl, |
||||
}); |
||||
}, 3000); // Increased timeout to 3 seconds to give relays more time
|
||||
|
||||
relay.on("connect", () => { |
||||
connected = true; |
||||
actualUrl = secureUrl; |
||||
clearTimeout(timeout); |
||||
relay.disconnect(); |
||||
resolve({ |
||||
connected: true, |
||||
requiresAuth: authRequired, |
||||
error, |
||||
actualUrl, |
||||
}); |
||||
}); |
||||
|
||||
relay.on("notice", (message: string) => { |
||||
if (message.includes("auth-required")) { |
||||
authRequired = true; |
||||
} |
||||
}); |
||||
|
||||
relay.on("disconnect", () => { |
||||
if (!connected) { |
||||
error = "Connection failed"; |
||||
clearTimeout(timeout); |
||||
resolve({ |
||||
connected: false, |
||||
requiresAuth: authRequired, |
||||
error, |
||||
actualUrl, |
||||
}); |
||||
} |
||||
}); |
||||
|
||||
relay.connect(); |
||||
}); |
||||
} |
||||
|
||||
/** |
||||
* Ensures a relay URL uses secure WebSocket protocol for remote relays |
||||
* @param url The relay URL to secure |
||||
* @returns The URL with wss:// protocol (except for localhost)
|
||||
*/ |
||||
function ensureSecureWebSocket(url: string): string { |
||||
// For localhost, always use ws:// (never wss://)
|
||||
if (url.includes('localhost') || url.includes('127.0.0.1')) { |
||||
// Convert any wss://localhost to ws://localhost
|
||||
return url.replace(/^wss:\/\//, "ws://"); |
||||
} |
||||
|
||||
// Replace ws:// with wss:// for remote relays
|
||||
const secureUrl = url.replace(/^ws:\/\//, "wss://"); |
||||
|
||||
if (secureUrl !== url) { |
||||
console.warn( |
||||
`[relay_management.ts] Protocol upgrade for rem ote relay: ${url} -> ${secureUrl}`, |
||||
); |
||||
} |
||||
|
||||
return secureUrl; |
||||
} |
||||
|
||||
/** |
||||
* Tests connection to local relays |
||||
* @param localRelayUrls Array of local relay URLs to test |
||||
* @param ndk NDK instance |
||||
* @returns Promise that resolves to array of working local relay URLs |
||||
*/ |
||||
async function testLocalRelays(localRelayUrls: string[], ndk: NDK): Promise<string[]> { |
||||
const workingRelays: string[] = []; |
||||
|
||||
if (localRelayUrls.length === 0) { |
||||
return workingRelays; |
||||
} |
||||
|
||||
console.debug(`[relay_management.ts] Testing ${localRelayUrls.length} local relays...`); |
||||
|
||||
await Promise.all( |
||||
localRelayUrls.map(async (url) => { |
||||
try { |
||||
const result = await testRelayConnection(url, ndk); |
||||
if (result.connected) { |
||||
workingRelays.push(url); |
||||
console.debug(`[relay_management.ts] Local relay connected: ${url}`); |
||||
} else { |
||||
console.debug(`[relay_management.ts] Local relay failed: ${url} - ${result.error}`); |
||||
} |
||||
} catch (error) { |
||||
// Silently ignore local relay failures - they're optional
|
||||
console.debug(`[relay_management.ts] Local relay error (ignored): ${url}`); |
||||
} |
||||
}) |
||||
); |
||||
|
||||
console.debug(`[relay_management.ts] Found ${workingRelays.length} working local relays`); |
||||
return workingRelays; |
||||
} |
||||
|
||||
/** |
||||
* Discovers local relays by testing common localhost URLs |
||||
* @param ndk NDK instance |
||||
* @returns Promise that resolves to array of working local relay URLs |
||||
*/ |
||||
export async function discoverLocalRelays(ndk: NDK): Promise<string[]> { |
||||
try { |
||||
// If no local relays are configured, return empty array
|
||||
if (localRelays.length === 0) { |
||||
console.debug('[relay_management.ts] No local relays configured'); |
||||
return []; |
||||
} |
||||
|
||||
// Convert wss:// URLs from consts to ws:// for local testing
|
||||
const localRelayUrls = localRelays.map(url =>
|
||||
url.replace(/^wss:\/\//, 'ws://') |
||||
); |
||||
|
||||
const workingRelays = await testLocalRelays(localRelayUrls, ndk); |
||||
|
||||
// If no local relays are working, return empty array
|
||||
// The network detection logic will provide fallback relays
|
||||
return workingRelays; |
||||
} catch (error) { |
||||
// Silently fail and return empty array
|
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fetches user's local relays from kind 10432 event |
||||
* @param ndk NDK instance |
||||
* @param user User to fetch local relays for |
||||
* @returns Promise that resolves to array of local relay URLs |
||||
*/ |
||||
export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<string[]> { |
||||
try { |
||||
const localRelayEvent = await ndk.fetchEvent( |
||||
{ |
||||
kinds: [10432 as any], |
||||
authors: [user.pubkey], |
||||
}, |
||||
{ |
||||
groupable: false, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
} |
||||
); |
||||
|
||||
if (!localRelayEvent) { |
||||
return []; |
||||
} |
||||
|
||||
const localRelays: string[] = []; |
||||
localRelayEvent.tags.forEach((tag) => { |
||||
if (tag[0] === 'r' && tag[1]) { |
||||
localRelays.push(tag[1]); |
||||
} |
||||
}); |
||||
|
||||
return localRelays; |
||||
} catch (error) { |
||||
console.info('[relay_management.ts] Error fetching user local relays:', error); |
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fetches user's blocked relays from kind 10006 event |
||||
* @param ndk NDK instance |
||||
* @param user User to fetch blocked relays for |
||||
* @returns Promise that resolves to array of blocked relay URLs |
||||
*/ |
||||
export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<string[]> { |
||||
try { |
||||
const blockedRelayEvent = await ndk.fetchEvent( |
||||
{ |
||||
kinds: [10006], |
||||
authors: [user.pubkey], |
||||
}, |
||||
{ |
||||
groupable: false, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
} |
||||
); |
||||
|
||||
if (!blockedRelayEvent) { |
||||
return []; |
||||
} |
||||
|
||||
const blockedRelays: string[] = []; |
||||
blockedRelayEvent.tags.forEach((tag) => { |
||||
if (tag[0] === 'r' && tag[1]) { |
||||
blockedRelays.push(tag[1]); |
||||
} |
||||
}); |
||||
|
||||
return blockedRelays; |
||||
} catch (error) { |
||||
console.info('[relay_management.ts] Error fetching user blocked relays:', error); |
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fetches user's outbox relays from NIP-65 relay list |
||||
* @param ndk NDK instance |
||||
* @param user User to fetch outbox relays for |
||||
* @returns Promise that resolves to array of outbox relay URLs |
||||
*/ |
||||
export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<string[]> { |
||||
try { |
||||
console.debug('[relay_management.ts] Fetching outbox relays for user:', user.pubkey); |
||||
const relayList = await ndk.fetchEvent( |
||||
{ |
||||
kinds: [10002], |
||||
authors: [user.pubkey], |
||||
}, |
||||
{ |
||||
groupable: false, |
||||
skipVerification: false, |
||||
skipValidation: false, |
||||
} |
||||
); |
||||
|
||||
if (!relayList) { |
||||
console.debug('[relay_management.ts] No relay list found for user'); |
||||
return []; |
||||
} |
||||
|
||||
console.debug('[relay_management.ts] Found relay list event:', relayList.id); |
||||
console.debug('[relay_management.ts] Relay list tags:', relayList.tags); |
||||
|
||||
const outboxRelays: string[] = []; |
||||
relayList.tags.forEach((tag) => { |
||||
console.debug('[relay_management.ts] Processing tag:', tag); |
||||
if (tag[0] === 'w' && tag[1]) { |
||||
outboxRelays.push(tag[1]); |
||||
console.debug('[relay_management.ts] Added outbox relay:', tag[1]); |
||||
} else if (tag[0] === 'r' && tag[1]) { |
||||
// Some relay lists use 'r' for both inbox and outbox
|
||||
outboxRelays.push(tag[1]); |
||||
console.debug('[relay_management.ts] Added relay (r tag):', tag[1]); |
||||
} else { |
||||
console.debug('[relay_management.ts] Skipping tag:', tag[0], 'value:', tag[1]); |
||||
} |
||||
}); |
||||
|
||||
console.debug('[relay_management.ts] Final outbox relays:', outboxRelays); |
||||
return outboxRelays; |
||||
} catch (error) { |
||||
console.info('[relay_management.ts] Error fetching user outbox relays:', error); |
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Gets browser extension's relay configuration by querying the extension directly |
||||
* @returns Promise that resolves to array of extension relay URLs |
||||
*/ |
||||
export async function getExtensionRelays(): Promise<string[]> { |
||||
try { |
||||
// Check if we're in a browser environment with extension support
|
||||
if (typeof window === 'undefined' || !window.nostr) { |
||||
console.debug('[relay_management.ts] No window.nostr available'); |
||||
return []; |
||||
} |
||||
|
||||
console.debug('[relay_management.ts] Extension available, checking for getRelays()'); |
||||
const extensionRelays: string[] = []; |
||||
|
||||
// Try to get relays from the extension's API
|
||||
// Different extensions may expose their relay config differently
|
||||
if (window.nostr.getRelays) { |
||||
console.debug('[relay_management.ts] getRelays() method found, calling it...'); |
||||
try { |
||||
const relays = await window.nostr.getRelays(); |
||||
console.debug('[relay_management.ts] getRelays() returned:', relays); |
||||
if (relays && typeof relays === 'object') { |
||||
// Convert relay object to array of URLs
|
||||
const relayUrls = Object.keys(relays); |
||||
extensionRelays.push(...relayUrls); |
||||
console.debug('[relay_management.ts] Got relays from extension:', relayUrls); |
||||
} |
||||
} catch (error) { |
||||
console.debug('[relay_management.ts] Extension getRelays() failed:', error); |
||||
} |
||||
} else { |
||||
console.debug('[relay_management.ts] getRelays() method not found on window.nostr'); |
||||
} |
||||
|
||||
// If getRelays() didn't work, try alternative methods
|
||||
if (extensionRelays.length === 0) { |
||||
// Some extensions might expose relays through other methods
|
||||
// This is a fallback for extensions that don't expose getRelays()
|
||||
console.debug('[relay_management.ts] Extension does not expose relay configuration'); |
||||
} |
||||
|
||||
console.debug('[relay_management.ts] Final extension relays:', extensionRelays); |
||||
return extensionRelays; |
||||
} catch (error) { |
||||
console.debug('[relay_management.ts] Error getting extension relays:', error); |
||||
return []; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Tests a set of relays in batches to avoid overwhelming them |
||||
* @param relayUrls Array of relay URLs to test |
||||
* @param ndk NDK instance |
||||
* @returns Promise that resolves to array of working relay URLs |
||||
*/ |
||||
async function testRelaySet(relayUrls: string[], ndk: NDK): Promise<string[]> { |
||||
const workingRelays: string[] = []; |
||||
const maxConcurrent = 2; // Reduce to 2 relays at a time to avoid overwhelming them
|
||||
|
||||
for (let i = 0; i < relayUrls.length; i += maxConcurrent) { |
||||
const batch = relayUrls.slice(i, i + maxConcurrent); |
||||
|
||||
const batchPromises = batch.map(async (url) => { |
||||
try { |
||||
const result = await testRelayConnection(url, ndk); |
||||
return result.connected ? url : null; |
||||
} catch (error) { |
||||
console.debug(`[relay_management.ts] Failed to test relay ${url}:`, error); |
||||
return null; |
||||
} |
||||
}); |
||||
|
||||
const batchResults = await Promise.allSettled(batchPromises); |
||||
const batchWorkingRelays = batchResults |
||||
.filter((result): result is PromiseFulfilledResult<string | null> => result.status === 'fulfilled') |
||||
.map(result => result.value) |
||||
.filter((url): url is string => url !== null); |
||||
workingRelays.push(...batchWorkingRelays); |
||||
} |
||||
|
||||
return workingRelays; |
||||
} |
||||
|
||||
/** |
||||
* Builds a complete relay set for a user, including local, user-specific, and fallback relays |
||||
* @param ndk NDK instance |
||||
* @param user NDKUser or null for anonymous access |
||||
* @returns Promise that resolves to inbox and outbox relay arrays |
||||
*/ |
||||
export async function buildCompleteRelaySet( |
||||
ndk: NDK, |
||||
user: NDKUser | null |
||||
): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> { |
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: Starting with user:', user?.pubkey || 'null'); |
||||
|
||||
// Discover local relays first
|
||||
const discoveredLocalRelays = await discoverLocalRelays(ndk); |
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: Discovered local relays:', discoveredLocalRelays); |
||||
|
||||
// Get user-specific relays if available
|
||||
let userOutboxRelays: string[] = []; |
||||
let userLocalRelays: string[] = []; |
||||
let blockedRelays: string[] = []; |
||||
let extensionRelays: string[] = []; |
||||
|
||||
if (user) { |
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: Fetching user-specific relays for:', user.pubkey); |
||||
|
||||
try { |
||||
userOutboxRelays = await getUserOutboxRelays(ndk, user); |
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: User outbox relays:', userOutboxRelays); |
||||
} catch (error) { |
||||
console.debug('[relay_management.ts] Error fetching user outbox relays:', error); |
||||
} |
||||
|
||||
try { |
||||
userLocalRelays = await getUserLocalRelays(ndk, user); |
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: User local relays:', userLocalRelays); |
||||
} catch (error) { |
||||
console.debug('[relay_management.ts] Error fetching user local relays:', error); |
||||
} |
||||
|
||||
try { |
||||
blockedRelays = await getUserBlockedRelays(ndk, user); |
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: User blocked relays:', blockedRelays); |
||||
} catch (error) { |
||||
// Silently ignore blocked relay fetch errors
|
||||
} |
||||
|
||||
try { |
||||
extensionRelays = await getExtensionRelays(); |
||||
console.debug('[relay_management.ts] Extension relays gathered:', extensionRelays); |
||||
} catch (error) { |
||||
console.debug('[relay_management.ts] Error fetching extension relays:', error); |
||||
} |
||||
} else { |
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: No user provided, skipping user-specific relays'); |
||||
} |
||||
|
||||
// Build initial relay sets and deduplicate
|
||||
const finalInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userLocalRelays]); |
||||
const finalOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userOutboxRelays, ...extensionRelays]); |
||||
|
||||
// Test relays and filter out non-working ones
|
||||
let testedInboxRelays: string[] = []; |
||||
let testedOutboxRelays: string[] = []; |
||||
|
||||
if (finalInboxRelays.length > 0) { |
||||
testedInboxRelays = await testRelaySet(finalInboxRelays, ndk); |
||||
} |
||||
|
||||
if (finalOutboxRelays.length > 0) { |
||||
testedOutboxRelays = await testRelaySet(finalOutboxRelays, ndk); |
||||
} |
||||
|
||||
// If no relays passed testing, use remote relays without testing
|
||||
if (testedInboxRelays.length === 0 && testedOutboxRelays.length === 0) { |
||||
const remoteRelays = deduplicateRelayUrls([...secondaryRelays, ...searchRelays]); |
||||
return { |
||||
inboxRelays: remoteRelays, |
||||
outboxRelays: remoteRelays |
||||
}; |
||||
} |
||||
|
||||
// Use tested relays and deduplicate
|
||||
const inboxRelays = testedInboxRelays.length > 0 ? deduplicateRelayUrls(testedInboxRelays) : deduplicateRelayUrls(secondaryRelays); |
||||
const outboxRelays = testedOutboxRelays.length > 0 ? deduplicateRelayUrls(testedOutboxRelays) : deduplicateRelayUrls(secondaryRelays); |
||||
|
||||
// Apply network condition optimization
|
||||
const currentNetworkCondition = get(networkCondition); |
||||
const networkOptimizedRelaySet = getRelaySetForNetworkCondition( |
||||
currentNetworkCondition, |
||||
discoveredLocalRelays, |
||||
lowbandwidthRelays, |
||||
{ inboxRelays, outboxRelays } |
||||
); |
||||
|
||||
// Filter out blocked relays and deduplicate final sets
|
||||
const finalRelaySet = { |
||||
inboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.inboxRelays.filter(r => !blockedRelays.includes(r))), |
||||
outboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.outboxRelays.filter(r => !blockedRelays.includes(r))) |
||||
}; |
||||
|
||||
// If no relays are working, use anonymous relays as fallback
|
||||
if (finalRelaySet.inboxRelays.length === 0 && finalRelaySet.outboxRelays.length === 0) { |
||||
return { |
||||
inboxRelays: deduplicateRelayUrls(anonymousRelays), |
||||
outboxRelays: deduplicateRelayUrls(anonymousRelays) |
||||
}; |
||||
} |
||||
|
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: Final relay sets - inbox:', finalRelaySet.inboxRelays.length, 'outbox:', finalRelaySet.outboxRelays.length); |
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: Final inbox relays:', finalRelaySet.inboxRelays); |
||||
console.debug('[relay_management.ts] buildCompleteRelaySet: Final outbox relays:', finalRelaySet.outboxRelays); |
||||
|
||||
return finalRelaySet; |
||||
}
|
||||
@ -1,25 +1,25 @@
@@ -1,25 +1,25 @@
|
||||
// Re-export all search functionality from modular files
|
||||
export * from './search_types'; |
||||
export * from './search_utils'; |
||||
export * from './community_checker'; |
||||
export * from './profile_search'; |
||||
export * from './event_search'; |
||||
export * from './subscription_search'; |
||||
export * from './search_constants'; |
||||
export * from "./search_types"; |
||||
export * from "./search_utils"; |
||||
export * from "./community_checker"; |
||||
export * from "./profile_search"; |
||||
export * from "./event_search"; |
||||
export * from "./subscription_search"; |
||||
export * from "./search_constants"; |
||||
|
||||
// Legacy exports for backward compatibility
|
||||
export { searchProfiles } from './profile_search'; |
||||
export { searchBySubscription } from './subscription_search'; |
||||
export { searchEvent, searchNip05 } from './event_search'; |
||||
export { checkCommunity } from './community_checker'; |
||||
export {
|
||||
wellKnownUrl,
|
||||
lnurlpWellKnownUrl,
|
||||
export { searchProfiles } from "./profile_search"; |
||||
export { searchBySubscription } from "./subscription_search"; |
||||
export { searchEvent, searchNip05 } from "./event_search"; |
||||
export { checkCommunity } from "./community_checker"; |
||||
export { |
||||
wellKnownUrl, |
||||
lnurlpWellKnownUrl, |
||||
isValidNip05Address, |
||||
normalizeSearchTerm, |
||||
fieldMatches, |
||||
nip05Matches, |
||||
COMMON_DOMAINS, |
||||
isEmojiReaction, |
||||
createProfileFromEvent |
||||
} from './search_utils';
|
||||
createProfileFromEvent, |
||||
} from "./search_utils"; |
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,94 +1,38 @@
@@ -1,94 +1,38 @@
|
||||
<script lang="ts">
  import { Alert, Input } from "flowbite-svelte";
  import { HammerSolid } from "flowbite-svelte-icons";
  import { userStore } from "$lib/stores/userStore";
  import { activeInboxRelays, ndkSignedIn } from "$lib/ndk";
  import PublicationFeed from "$lib/components/publications/PublicationFeed.svelte";

  let searchQuery = $state("");
  let user = $derived($userStore);
  let eventCount = $state({ displayed: 0, total: 0 });

  // Receives displayed/total counts from the feed component.
  function handleEventCountUpdate(counts: { displayed: number; total: number }) {
    eventCount = counts;
  }
</script>

<Alert
  rounded={false}
  id="alert-experimental"
  class="border-t-4 border-primary-600 text-gray-900 dark:text-gray-100 dark:border-primary-500 flex justify-left mb-2"
>
  <HammerSolid class="mr-2 h-5 w-5 text-primary-500 dark:text-primary-500" />
  <span class="font-medium">
    Pardon our dust! The publication view is currently using an experimental
    loader, and may be unstable.
  </span>
</Alert>

<main class="leather flex flex-col flex-grow-0 space-y-4 p-4">
  <div
    class="leather w-full flex flex-row items-center justify-center gap-4 mb-4"
  >
    <Input
      bind:value={searchQuery}
      placeholder="Search publications by title or author..."
      class="flex-grow max-w-2xl min-w-[300px] text-base"
    />
  </div>

  {#if eventCount.total > 0}
    <div class="text-center text-sm text-gray-600 dark:text-gray-400">
      Showing {eventCount.displayed} of {eventCount.total} events.
    </div>
  {/if}

  <PublicationFeed
    {searchQuery}
    onEventCountUpdate={handleEventCountUpdate}
  />
</main>
|
||||
@ -1,29 +1,98 @@
@@ -1,29 +1,98 @@
|
||||
<script lang="ts"> |
||||
import Preview from "$lib/components/Preview.svelte"; |
||||
import { pharosInstance } from "$lib/parser"; |
||||
import { Heading } from "flowbite-svelte"; |
||||
|
||||
let treeNeedsUpdate: boolean = false; |
||||
let treeUpdateCount: number = 0; |
||||
let someIndexValue = 0; |
||||
|
||||
$: { |
||||
if (treeNeedsUpdate) { |
||||
treeUpdateCount++; |
||||
} |
||||
import { Heading, Button, Alert } from "flowbite-svelte"; |
||||
import { PaperPlaneOutline } from "flowbite-svelte-icons"; |
||||
import ZettelEditor from "$lib/components/ZettelEditor.svelte"; |
||||
import { goto } from "$app/navigation"; |
||||
import { nip19 } from "nostr-tools"; |
||||
import { publishZettel } from "$lib/services/publisher"; |
||||
|
||||
let content = $state(""); |
||||
let showPreview = $state(false); |
||||
let isPublishing = $state(false); |
||||
let publishResult = $state<{ |
||||
success: boolean; |
||||
eventId?: string; |
||||
error?: string; |
||||
} | null>(null); |
||||
|
||||
// Handle content changes from ZettelEditor |
||||
function handleContentChange(newContent: string) { |
||||
content = newContent; |
||||
} |
||||
|
||||
// Handle preview toggle from ZettelEditor |
||||
function handlePreviewToggle(show: boolean) { |
||||
showPreview = show; |
||||
} |
||||
|
||||
async function handlePublish() { |
||||
isPublishing = true; |
||||
publishResult = null; |
||||
|
||||
const result = await publishZettel({ |
||||
content, |
||||
onSuccess: (eventId) => { |
||||
publishResult = { success: true, eventId }; |
||||
const nevent = nip19.neventEncode({ id: eventId }); |
||||
goto(`/events?id=${nevent}`); |
||||
}, |
||||
onError: (error) => { |
||||
publishResult = { success: false, error }; |
||||
}, |
||||
}); |
||||
|
||||
isPublishing = false; |
||||
} |
||||
</script> |
||||
|
||||
<div class="w-full flex justify-center"> |
||||
<main class="main-leather flex flex-col space-y-4 max-w-2xl w-full mt-4 mb-4"> |
||||
<Heading tag="h1" class="h-leather mb-2">Compose</Heading> |
||||
{#key treeUpdateCount} |
||||
<Preview |
||||
rootId={$pharosInstance.getRootIndexId()} |
||||
allowEditing={true} |
||||
bind:needsUpdate={treeNeedsUpdate} |
||||
index={someIndexValue} |
||||
/> |
||||
{/key} |
||||
</main> |
||||
<svelte:head> |
||||
<title>Compose Note - Alexandria</title> |
||||
</svelte:head> |
||||
|
||||
<!-- Main container with 75% width and centered --> |
||||
<div class="w-3/4 mx-auto"> |
||||
<div class="flex flex-col space-y-4"> |
||||
<Heading |
||||
tag="h1" |
||||
class="text-2xl font-bold text-gray-900 dark:text-gray-100" |
||||
> |
||||
Compose Notes |
||||
</Heading> |
||||
|
||||
<ZettelEditor |
||||
{content} |
||||
{showPreview} |
||||
onContentChange={handleContentChange} |
||||
onPreviewToggle={handlePreviewToggle} |
||||
/> |
||||
|
||||
<!-- Publish Button --> |
||||
<Button |
||||
on:click={handlePublish} |
||||
disabled={isPublishing || !content.trim()} |
||||
class="w-full" |
||||
> |
||||
{#if isPublishing} |
||||
Publishing... |
||||
{:else} |
||||
<PaperPlaneOutline class="w-4 h-4 mr-2" /> |
||||
Publish |
||||
{/if} |
||||
</Button> |
||||
|
||||
<!-- Status Messages --> |
||||
{#if publishResult} |
||||
{#if publishResult.success} |
||||
<Alert color="green" dismissable> |
||||
<span class="font-medium">Success!</span> |
||||
Event published successfully. Event ID: {publishResult.eventId} |
||||
</Alert> |
||||
{:else} |
||||
<Alert color="red" dismissable> |
||||
<span class="font-medium">Error!</span> |
||||
{publishResult.error} |
||||
</Alert> |
||||
{/if} |
||||
{/if} |
||||
</div> |
||||
</div> |
||||
|
||||
@ -0,0 +1,53 @@
@@ -0,0 +1,53 @@
|
||||
/* AsciiDoc Content Styling */ |
||||
/* These styles are for rendered AsciiDoc content in previews and publications */ |
||||
|
||||
.asciidoc-content h1, |
||||
.asciidoc-content h2, |
||||
.asciidoc-content h3, |
||||
.asciidoc-content h4, |
||||
.asciidoc-content h5, |
||||
.asciidoc-content h6 { |
||||
font-weight: bold; |
||||
margin-top: 1.5em; |
||||
margin-bottom: 0.5em; |
||||
line-height: 1.25; |
||||
color: inherit; |
||||
} |
||||
|
||||
.asciidoc-content h1 { |
||||
font-size: 1.875rem; |
||||
} |
||||
|
||||
.asciidoc-content h2 { |
||||
font-size: 1.5rem; |
||||
} |
||||
|
||||
.asciidoc-content h3 { |
||||
font-size: 1.25rem; |
||||
} |
||||
|
||||
.asciidoc-content h4 { |
||||
font-size: 1.125rem; |
||||
} |
||||
|
||||
.asciidoc-content h5 { |
||||
font-size: 1rem; |
||||
} |
||||
|
||||
.asciidoc-content h6 { |
||||
font-size: 0.875rem; |
||||
} |
||||
|
||||
.asciidoc-content p { |
||||
margin-bottom: 1em; |
||||
} |
||||
|
||||
/* Dark mode support */ |
||||
.dark .asciidoc-content h1, |
||||
.dark .asciidoc-content h2, |
||||
.dark .asciidoc-content h3, |
||||
.dark .asciidoc-content h4, |
||||
.dark .asciidoc-content h5, |
||||
.dark .asciidoc-content h6 { |
||||
color: inherit; |
||||
} |
||||
File diff suppressed because one or more lines are too long
Loading…
Reference in new issue