diff --git a/postcss.config.js b/postcss.config.js
index 1d5a3b2..c2ddf74 100644
--- a/postcss.config.js
+++ b/postcss.config.js
@@ -1,5 +1,5 @@
export default {
plugins: {
- '@tailwindcss/postcss': {}
- }
-}
+ "@tailwindcss/postcss": {},
+ },
+};
diff --git a/src/app.css b/src/app.css
index 1a86416..7e837d6 100644
--- a/src/app.css
+++ b/src/app.css
@@ -226,7 +226,8 @@
div.note-leather,
p.note-leather,
section.note-leather {
- @apply bg-primary-50 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 p-2 rounded;
+ @apply bg-primary-50 dark:bg-primary-1000 text-gray-900 dark:text-gray-100
+ p-2 rounded;
}
.edit div.note-leather:hover:not(:has(.note-leather:hover)),
@@ -278,7 +279,8 @@
}
div.modal-leather > div {
- @apply bg-primary-50 dark:bg-primary-950 border-b-[1px] border-primary-100 dark:border-primary-600;
+ @apply bg-primary-50 dark:bg-primary-950 border-b-[1px] border-primary-100
+ dark:border-primary-600;
}
div.modal-leather > div > h1,
@@ -292,7 +294,9 @@
}
div.modal-leather button {
- @apply bg-primary-50 hover:bg-primary-50 dark:bg-primary-950 dark:hover:bg-primary-950 text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400;
+ @apply bg-primary-50 hover:bg-primary-50 dark:bg-primary-950
+ dark:hover:bg-primary-950 text-gray-900 hover:text-primary-600
+ dark:text-gray-100 dark:hover:text-primary-400;
}
/* Navbar */
@@ -491,11 +495,11 @@
/* Legend */
.leather-legend {
@apply relative m-4 sm:m-0 sm:absolute sm:top-1 sm:left-1 flex-shrink-0 p-2
- rounded;
+ rounded;
@apply shadow-none text-primary-1000 border border-s-4 bg-highlight
- border-primary-200 has-[:hover]:border-primary-700;
+ border-primary-200 has-[:hover]:border-primary-700;
@apply dark:bg-primary-1000 dark:border-primary-800
- dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500;
+ dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500;
max-width: 450px;
min-width: 300px;
overflow-x: auto;
@@ -504,7 +508,9 @@
/* Tooltip */
.tooltip-leather {
- @apply fixed p-4 rounded shadow-lg bg-primary-50 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 border border-gray-200 dark:border-gray-700 transition-colors duration-200;
+ @apply fixed p-4 rounded shadow-lg bg-primary-50 dark:bg-primary-1000
+ text-gray-900 dark:text-gray-100 border border-gray-200
+ dark:border-gray-700 transition-colors duration-200;
max-width: 400px;
z-index: 1000;
}
@@ -544,11 +550,11 @@
}
h4 {
- @apply text-xl font-bold;
+ @apply text-xl font-bold;
}
h5 {
- @apply text-lg font-semibold;
+ @apply text-lg font-semibold;
}
h6 {
@@ -585,7 +591,8 @@
}
a {
- @apply underline cursor-pointer hover:text-primary-600 dark:hover:text-primary-400;
+ @apply underline cursor-pointer hover:text-primary-600
+ dark:hover:text-primary-400;
}
.imageblock {
@@ -728,14 +735,15 @@
input[type="tel"],
input[type="url"],
textarea {
- @apply bg-primary-50 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 border-s-4 border-primary-200 rounded shadow-none;
+ @apply bg-primary-50 dark:bg-primary-1000 text-gray-900 dark:text-gray-100
+ border-s-4 border-primary-200 rounded shadow-none;
@apply focus:border-primary-600 dark:focus:border-primary-400;
}
/* Table of Contents highlighting */
.toc-highlight {
- @apply bg-primary-200 dark:bg-primary-700 border-s-4 border-primary-600 rounded
- dark:border-primary-400 font-medium;
+ @apply bg-primary-200 dark:bg-primary-700 border-s-4 border-primary-600
+ rounded dark:border-primary-400 font-medium;
transition: all 0.2s ease-in-out;
}
diff --git a/src/app.html b/src/app.html
index c646c7a..0ccb5a2 100644
--- a/src/app.html
+++ b/src/app.html
@@ -1,4 +1,4 @@
-
+
@@ -19,7 +19,9 @@
-
+
-
+
%sveltekit.head%
diff --git a/src/lib/a/README.md b/src/lib/a/README.md
index d2ea9e3..943b4be 100644
--- a/src/lib/a/README.md
+++ b/src/lib/a/README.md
@@ -1,10 +1,11 @@
# Component Library
-This folder contains a component library.
-The idea is to have project-scoped reusable components that centralize theming and style rules,
-so that main pages and layouts focus on the functionalities.
+This folder contains a component library. The idea is to have project-scoped
+reusable components that centralize theming and style rules, so that main pages
+and layouts focus on the functionalities.
-All components are based on Flowbite Svelte components,
-which are built on top of Tailwind CSS.
+All components are based on Flowbite Svelte components, which are built on top
+of Tailwind CSS.
-Keeping all the styles in one place allows us to easily change the look and feel of the application by switching themes.
+Keeping all the styles in one place allows us to easily change the look and feel
+of the application by switching themes.
diff --git a/src/lib/components/event_input/eventServices.ts b/src/lib/components/event_input/eventServices.ts
index 570501b..e46f7c8 100644
--- a/src/lib/components/event_input/eventServices.ts
+++ b/src/lib/components/event_input/eventServices.ts
@@ -14,35 +14,47 @@ import { anonymousRelays } from "$lib/consts";
import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk";
import { removeMetadataFromContent } from "$lib/utils/asciidoc_metadata";
import { build30040EventSet } from "$lib/utils/event_input_utils";
-import type { EventData, TagData, PublishResult, LoadEventResult } from "./types";
+import type {
+ EventData,
+ LoadEventResult,
+ PublishResult,
+ TagData,
+} from "./types";
/**
* Converts TagData array to NDK-compatible format
*/
function convertTagsToNDKFormat(tags: TagData[]): string[][] {
return tags
- .filter(tag => tag.key.trim() !== "")
- .map(tag => [tag.key, ...tag.values]);
+ .filter((tag) => tag.key.trim() !== "")
+ .map((tag) => [tag.key, ...tag.values]);
}
/**
* Publishes an event to relays
*/
-export async function publishEvent(ndk: any, eventData: EventData, tags: TagData[]): Promise {
+export async function publishEvent(
+ ndk: any,
+ eventData: EventData,
+ tags: TagData[],
+): Promise<PublishResult> {
if (!ndk) {
return { success: false, error: "NDK context not available" };
}
-
+
const userState = get(userStore);
const pubkey = userState.pubkey;
-
+
if (!pubkey) {
return { success: false, error: "User not logged in." };
}
-
+
const pubkeyString = String(pubkey);
if (!/^[a-fA-F0-9]{64}$/.test(pubkeyString)) {
- return { success: false, error: "Invalid public key: must be a 64-character hex string." };
+ return {
+ success: false,
+ error: "Invalid public key: must be a 64-character hex string.",
+ };
}
const baseEvent = { pubkey: pubkeyString, created_at: eventData.createdAt };
@@ -56,48 +68,56 @@ export async function publishEvent(ndk: any, eventData: EventData, tags: TagData
if (Number(eventData.kind) === 30040) {
console.log("=== 30040 EVENT CREATION START ===");
console.log("Creating 30040 event set with content:", eventData.content);
-
+
try {
// Get the current d and title values from the UI
- const dTagValue = tags.find(tag => tag.key === "d")?.values[0] || "";
- const titleTagValue = tags.find(tag => tag.key === "title")?.values[0] || "";
-
+ const dTagValue = tags.find((tag) => tag.key === "d")?.values[0] || "";
+ const titleTagValue = tags.find((tag) =>
+ tag.key === "title"
+ )?.values[0] || "";
+
// Convert multi-value tags to the format expected by build30040EventSet
// Filter out d and title tags since we'll add them manually
const compatibleTags: [string, string][] = tags
- .filter(tag => tag.key.trim() !== "" && tag.key !== "d" && tag.key !== "title")
- .map(tag => [tag.key, tag.values[0] || ""] as [string, string]);
-
+ .filter((tag) =>
+ tag.key.trim() !== "" && tag.key !== "d" && tag.key !== "title"
+ )
+ .map((tag) => [tag.key, tag.values[0] || ""] as [string, string]);
+
const { indexEvent, sectionEvents } = build30040EventSet(
eventData.content,
compatibleTags,
baseEvent,
ndk,
);
-
+
// Override the d and title tags with the UI values if they exist
- const finalTags = indexEvent.tags.filter(tag => tag[0] !== "d" && tag[0] !== "title");
+ const finalTags = indexEvent.tags.filter((tag) =>
+ tag[0] !== "d" && tag[0] !== "title"
+ );
if (dTagValue) {
finalTags.push(["d", dTagValue]);
}
if (titleTagValue) {
finalTags.push(["title", titleTagValue]);
}
-
+
// Update the index event with the correct tags
indexEvent.tags = finalTags;
console.log("Index event:", indexEvent);
console.log("Section events:", sectionEvents);
-
+
// Publish all 30041 section events first, then the 30040 index event
events = [...sectionEvents, indexEvent];
console.log("Total events to publish:", events.length);
console.log("=== 30040 EVENT CREATION END ===");
} catch (error) {
console.error("Error in build30040EventSet:", error);
- return {
- success: false,
- error: `Failed to build 30040 event set: ${error instanceof Error ? error.message : "Unknown error"}`
+ return {
+ success: false,
+ error: `Failed to build 30040 event set: ${
+ error instanceof Error ? error.message : "Unknown error"
+ }`,
};
}
} else {
@@ -109,7 +129,7 @@ export async function publishEvent(ndk: any, eventData: EventData, tags: TagData
if (eventData.kind === 30040 || eventData.kind === 30041) {
finalContent = removeMetadataFromContent(eventData.content);
}
-
+
// Prefix Nostr addresses before publishing
const prefixedContent = prefixNostrAddresses(finalContent);
@@ -150,7 +170,7 @@ export async function publishEvent(ndk: any, eventData: EventData, tags: TagData
tags: event.tags.map((tag) => tag.map(String)),
content: String(event.content),
};
-
+
if (
typeof window !== "undefined" &&
window.nostr &&
@@ -178,12 +198,15 @@ export async function publishEvent(ndk: any, eventData: EventData, tags: TagData
...get(activeOutboxRelays),
...get(activeInboxRelays),
];
-
+
console.log("publishEvent: Publishing to relays:", relays);
console.log("publishEvent: Anonymous relays:", anonymousRelays);
- console.log("publishEvent: Active outbox relays:", get(activeOutboxRelays));
+ console.log(
+ "publishEvent: Active outbox relays:",
+ get(activeOutboxRelays),
+ );
console.log("publishEvent: Active inbox relays:", get(activeInboxRelays));
-
+
let published = false;
for (const relayUrl of relays) {
@@ -234,18 +257,20 @@ export async function publishEvent(ndk: any, eventData: EventData, tags: TagData
}
} catch (signError) {
console.error("Error signing/publishing event:", signError);
- return {
- success: false,
- error: `Failed to sign event: ${signError instanceof Error ? signError.message : "Unknown error"}`
+ return {
+ success: false,
+ error: `Failed to sign event: ${
+ signError instanceof Error ? signError.message : "Unknown error"
+ }`,
};
}
}
if (atLeastOne) {
- return {
- success: true,
+ return {
+ success: true,
eventId: lastEventId || undefined,
- relays: relaysPublished
+ relays: relaysPublished,
};
} else {
return { success: false, error: "Failed to publish to any relay." };
@@ -255,16 +280,22 @@ export async function publishEvent(ndk: any, eventData: EventData, tags: TagData
/**
* Loads an event by its hex ID
*/
-export async function loadEvent(ndk: any, eventId: string): Promise {
+export async function loadEvent(
+ ndk: any,
+ eventId: string,
+): Promise<LoadEventResult> {
if (!ndk) {
throw new Error("NDK context not available");
}
-
+
console.log("loadEvent: Starting search for event ID:", eventId);
- console.log("loadEvent: NDK pool relays:", Array.from(ndk.pool.relays.values()).map((r: any) => r.url));
+ console.log(
+ "loadEvent: NDK pool relays:",
+ Array.from(ndk.pool.relays.values()).map((r: any) => r.url),
+ );
console.log("loadEvent: Active inbox relays:", get(activeInboxRelays));
console.log("loadEvent: Active outbox relays:", get(activeOutboxRelays));
-
+
const foundEvent = await fetchEventWithFallback(ndk, eventId, 10000);
if (foundEvent) {
@@ -279,7 +310,7 @@ export async function loadEvent(ndk: any, eventId: string): Promise ({
key: tag[0] || "",
- values: tag.slice(1)
+ values: tag.slice(1),
}));
return { eventData, tags };
diff --git a/src/lib/components/event_input/types.ts b/src/lib/components/event_input/types.ts
index df7e8f9..8a10429 100644
--- a/src/lib/components/event_input/types.ts
+++ b/src/lib/components/event_input/types.ts
@@ -32,7 +32,7 @@ export interface LoadEventResult {
}
export interface EventPreview {
- type: 'standard_event' | '30040_index_event' | 'error';
+ type: "standard_event" | "30040_index_event" | "error";
event?: {
id: string;
pubkey: string;
diff --git a/src/lib/components/event_input/validation.ts b/src/lib/components/event_input/validation.ts
index 7fb6609..e983800 100644
--- a/src/lib/components/event_input/validation.ts
+++ b/src/lib/components/event_input/validation.ts
@@ -6,22 +6,25 @@ import { get } from "svelte/store";
import { userStore } from "$lib/stores/userStore";
import type { EventData, TagData, ValidationResult } from "./types";
import {
- validateNotAsciidoc,
- validateAsciiDoc,
validate30040EventSet,
+ validateAsciiDoc,
+ validateNotAsciidoc,
} from "$lib/utils/event_input_utils";
/**
* Validates an event and its tags
*/
-export function validateEvent(eventData: EventData, tags: TagData[]): ValidationResult {
+export function validateEvent(
+ eventData: EventData,
+ tags: TagData[],
+): ValidationResult {
const userState = get(userStore);
-
+
const pubkey = userState.pubkey;
if (!pubkey) {
return { valid: false, reason: "Not logged in." };
}
-
+
// Content validation - 30040 events don't require content
if (eventData.kind !== 30040 && !eventData.content.trim()) {
return { valid: false, reason: "Content required." };
@@ -32,25 +35,27 @@ export function validateEvent(eventData: EventData, tags: TagData[]): Validation
const v = validateNotAsciidoc(eventData.content);
if (!v.valid) return v;
}
-
+
if (eventData.kind === 30040) {
// Check for required tags
- const versionTag = tags.find(t => t.key === "version");
- const dTag = tags.find(t => t.key === "d");
- const titleTag = tags.find(t => t.key === "title");
-
- if (!versionTag || !versionTag.values[0] || versionTag.values[0].trim() === "") {
+ const versionTag = tags.find((t) => t.key === "version");
+ const dTag = tags.find((t) => t.key === "d");
+ const titleTag = tags.find((t) => t.key === "title");
+
+ if (
+ !versionTag || !versionTag.values[0] || versionTag.values[0].trim() === ""
+ ) {
return { valid: false, reason: "30040 events require a 'version' tag." };
}
-
+
if (!dTag || !dTag.values[0] || dTag.values[0].trim() === "") {
return { valid: false, reason: "30040 events require a 'd' tag." };
}
-
+
if (!titleTag || !titleTag.values[0] || titleTag.values[0].trim() === "") {
return { valid: false, reason: "30040 events require a 'title' tag." };
}
-
+
// Validate content format if present
if (eventData.content.trim()) {
const v = validate30040EventSet(eventData.content);
@@ -58,7 +63,7 @@ export function validateEvent(eventData: EventData, tags: TagData[]): Validation
if (v.warning) return { valid: true, warning: v.warning };
}
}
-
+
if (eventData.kind === 30041 || eventData.kind === 30818) {
const v = validateAsciiDoc(eventData.content);
if (!v.valid) return v;
@@ -86,5 +91,5 @@ export function isValidTagKey(key: string): boolean {
* Validates that a tag has at least one value
*/
export function isValidTag(tag: TagData): boolean {
- return isValidTagKey(tag.key) && tag.values.some(v => v.trim().length > 0);
+ return isValidTagKey(tag.key) && tag.values.some((v) => v.trim().length > 0);
}
diff --git a/src/lib/data_structures/publication_tree.ts b/src/lib/data_structures/publication_tree.ts
index 8f8dd48..72125e7 100644
--- a/src/lib/data_structures/publication_tree.ts
+++ b/src/lib/data_structures/publication_tree.ts
@@ -253,7 +253,10 @@ export class PublicationTree implements AsyncIterable {
// Clear all nodes except the root to force fresh loading
const rootAddress = this.#root.address;
this.#nodes.clear();
- this.#nodes.set(rootAddress, new Lazy(() => Promise.resolve(this.#root)));
+ this.#nodes.set(
+ rootAddress,
+ new Lazy(() => Promise.resolve(this.#root)),
+ );
// Clear events cache to ensure fresh data
this.#events.clear();
this.#eventCache.clear();
@@ -496,18 +499,20 @@ export class PublicationTree implements AsyncIterable {
if (!this.#cursor.target) {
return { done, value: null };
}
-
+
const address = this.#cursor.target.address;
-
+
// AI-NOTE: Check if this node has already been visited
if (this.#visitedNodes.has(address)) {
- console.debug(`[PublicationTree] Skipping already visited node: ${address}`);
+ console.debug(
+ `[PublicationTree] Skipping already visited node: ${address}`,
+ );
return { done: false, value: null };
}
-
+
// Mark this node as visited
this.#visitedNodes.add(address);
-
+
const value = (await this.getEvent(address)) ?? null;
return { done, value };
}
@@ -762,8 +767,10 @@ export class PublicationTree implements AsyncIterable {
#addNode(address: string, parentNode: PublicationTreeNode) {
// AI-NOTE: Add debugging to track node addition
- console.debug(`[PublicationTree] Adding node ${address} to parent ${parentNode.address}`);
-
+ console.debug(
+ `[PublicationTree] Adding node ${address} to parent ${parentNode.address}`,
+ );
+
const lazyNode = new Lazy(() =>
this.#resolveNode(address, parentNode)
);
@@ -902,7 +909,11 @@ export class PublicationTree implements AsyncIterable {
this.#eventCache.set(address, fetchedEvent);
this.#events.set(address, fetchedEvent);
- return await this.#buildNodeFromEvent(fetchedEvent, address, parentNode);
+ return await this.#buildNodeFromEvent(
+ fetchedEvent,
+ address,
+ parentNode,
+ );
}
} catch (error) {
console.debug(
@@ -1017,7 +1028,9 @@ export class PublicationTree implements AsyncIterable {
// AI-NOTE: Remove e-tag processing from synchronous method
// E-tags should be resolved asynchronously in #resolveNode method
// Adding raw event IDs here causes duplicate processing
- console.debug(`[PublicationTree] Found ${eTags.length} e-tags but skipping processing in buildNodeFromEvent`);
+ console.debug(
+ `[PublicationTree] Found ${eTags.length} e-tags but skipping processing in buildNodeFromEvent`,
+ );
}
const node: PublicationTreeNode = {
@@ -1033,13 +1046,18 @@ export class PublicationTree implements AsyncIterable {
// Now directly adds child nodes to current node's children array
// Add children in the order they appear in the a-tags to preserve section order
// Use sequential processing to ensure order is maintained
- console.log(`[PublicationTree] Adding ${childAddresses.length} children in order:`, childAddresses);
+ console.log(
+ `[PublicationTree] Adding ${childAddresses.length} children in order:`,
+ childAddresses,
+ );
for (const childAddress of childAddresses) {
console.log(`[PublicationTree] Adding child: ${childAddress}`);
try {
// Add the child node directly to the current node's children
this.#addNode(childAddress, node);
- console.log(`[PublicationTree] Successfully added child: ${childAddress}`);
+ console.log(
+ `[PublicationTree] Successfully added child: ${childAddress}`,
+ );
} catch (error) {
console.warn(
`[PublicationTree] Error adding child ${childAddress} for ${node.address}:`,
@@ -1060,24 +1078,44 @@ export class PublicationTree implements AsyncIterable {
if (event.kind === 30040) {
// Check if this 30040 has any children (a-tags only, since e-tags are handled separately)
const hasChildren = event.tags.some((tag) => tag[0] === "a");
-
- console.debug(`[PublicationTree] Node type for ${event.kind}:${event.pubkey}:${event.tags.find(t => t[0] === 'd')?.[1]} - hasChildren: ${hasChildren}, type: ${hasChildren ? 'Branch' : 'Leaf'}`);
-
- return hasChildren ? PublicationTreeNodeType.Branch : PublicationTreeNodeType.Leaf;
+
+ console.debug(
+ `[PublicationTree] Node type for ${event.kind}:${event.pubkey}:${
+ event.tags.find((t) => t[0] === "d")?.[1]
+ } - hasChildren: ${hasChildren}, type: ${
+ hasChildren ? "Branch" : "Leaf"
+ }`,
+ );
+
+ return hasChildren
+ ? PublicationTreeNodeType.Branch
+ : PublicationTreeNodeType.Leaf;
}
// Zettel kinds are always leaves
if ([30041, 30818, 30023].includes(event.kind)) {
- console.debug(`[PublicationTree] Node type for ${event.kind}:${event.pubkey}:${event.tags.find(t => t[0] === 'd')?.[1]} - Zettel kind, type: Leaf`);
+ console.debug(
+ `[PublicationTree] Node type for ${event.kind}:${event.pubkey}:${
+ event.tags.find((t) => t[0] === "d")?.[1]
+ } - Zettel kind, type: Leaf`,
+ );
return PublicationTreeNodeType.Leaf;
}
// For other kinds, check if they have children (a-tags only)
const hasChildren = event.tags.some((tag) => tag[0] === "a");
-
- console.debug(`[PublicationTree] Node type for ${event.kind}:${event.pubkey}:${event.tags.find(t => t[0] === 'd')?.[1]} - hasChildren: ${hasChildren}, type: ${hasChildren ? 'Branch' : 'Leaf'}`);
-
- return hasChildren ? PublicationTreeNodeType.Branch : PublicationTreeNodeType.Leaf;
+
+ console.debug(
+ `[PublicationTree] Node type for ${event.kind}:${event.pubkey}:${
+ event.tags.find((t) => t[0] === "d")?.[1]
+ } - hasChildren: ${hasChildren}, type: ${
+ hasChildren ? "Branch" : "Leaf"
+ }`,
+ );
+
+ return hasChildren
+ ? PublicationTreeNodeType.Branch
+ : PublicationTreeNodeType.Leaf;
}
// #endregion
diff --git a/src/lib/nostr/event.ts b/src/lib/nostr/event.ts
index 77ff7b4..be76c27 100644
--- a/src/lib/nostr/event.ts
+++ b/src/lib/nostr/event.ts
@@ -1 +1,9 @@
-export type NostrEvent = { id:string; kind:number; pubkey:string; created_at:number; tags:string[][]; content:string; }; export type AddressPointer = string;
+export type NostrEvent = {
+ id: string;
+ kind: number;
+ pubkey: string;
+ created_at: number;
+ tags: string[][];
+ content: string;
+};
+export type AddressPointer = string;
diff --git a/src/lib/nostr/format.ts b/src/lib/nostr/format.ts
index 6b25ac0..08e7bf3 100644
--- a/src/lib/nostr/format.ts
+++ b/src/lib/nostr/format.ts
@@ -1 +1,22 @@
-export function shortenBech32(id:string, keepPrefix=true, head=8, tail=6){ if(!id) return ''; const i=id.indexOf('1'); const prefix=i>0? id.slice(0,i):''; const data=i>0? id.slice(i+1): id; const short = data.length>head+tail ? `${'${'}data.slice(0,head)}…${'${'}data.slice(-tail)}` : data; return keepPrefix && prefix ? `${'${'}prefix}1${'${'}short}` : short; } export function displayNameFrom(npub:string, p?:{ name?:string; display_name?:string; nip05?:string }){ return (p?.display_name?.trim() || p?.name?.trim() || (p?.nip05 && p.nip05.split('@')[0]) || shortenBech32(npub,true)); }
+export function shortenBech32(
+ id: string,
+ keepPrefix = true,
+ head = 8,
+ tail = 6,
+) {
+ if (!id) return "";
+ const i = id.indexOf("1");
+ const prefix = i > 0 ? id.slice(0, i) : "";
+ const data = i > 0 ? id.slice(i + 1) : id;
+ const short = data.length > head + tail
+    ? `${data.slice(0, head)}…${data.slice(-tail)}`
+ : data;
+  return keepPrefix && prefix ? `${prefix}1${short}` : short;
+}
+export function displayNameFrom(
+ npub: string,
+ p?: { name?: string; display_name?: string; nip05?: string },
+) {
+ return (p?.display_name?.trim() || p?.name?.trim() ||
+ (p?.nip05 && p.nip05.split("@")[0]) || shortenBech32(npub, true));
+}
diff --git a/src/lib/nostr/nip05.ts b/src/lib/nostr/nip05.ts
index e6f6177..05938b3 100644
--- a/src/lib/nostr/nip05.ts
+++ b/src/lib/nostr/nip05.ts
@@ -1 +1,20 @@
-export async function verifyNip05(nip05:string, pubkeyHex:string):Promise{ try{ if(!nip05||!pubkeyHex) return false; const [name,domain]=nip05.toLowerCase().split('@'); if(!name||!domain) return false; const url=`https://${'${'}domain}/.well-known/nostr.json?name=${'${'}encodeURIComponent(name)}`; const res=await fetch(url,{ headers:{ Accept:'application/json' } }); if(!res.ok) return false; const json=await res.json(); const found=json?.names?.[name]; return typeof found==='string' && found.toLowerCase()===pubkeyHex.toLowerCase(); }catch{ return false; } }
+export async function verifyNip05(
+ nip05: string,
+ pubkeyHex: string,
+): Promise<boolean> {
+ try {
+ if (!nip05 || !pubkeyHex) return false;
+ const [name, domain] = nip05.toLowerCase().split("@");
+ if (!name || !domain) return false;
+ const url =
+    `https://${domain}/.well-known/nostr.json?name=${encodeURIComponent(name)}`;
+ const res = await fetch(url, { headers: { Accept: "application/json" } });
+ if (!res.ok) return false;
+ const json = await res.json();
+ const found = json?.names?.[name];
+ return typeof found === "string" &&
+ found.toLowerCase() === pubkeyHex.toLowerCase();
+ } catch {
+ return false;
+ }
+}
diff --git a/src/lib/nostr/nip58.ts b/src/lib/nostr/nip58.ts
index fa0a9fa..feb3c18 100644
--- a/src/lib/nostr/nip58.ts
+++ b/src/lib/nostr/nip58.ts
@@ -1 +1,147 @@
-import type { NostrEvent, AddressPointer } from './event'; export type BadgeDefinition={ kind:30009; id:string; pubkey:string; d:string; a:AddressPointer; name?:string; description?:string; image?:{ url:string; size?:string }|null; thumbs:{ url:string; size?:string }[]; }; export type BadgeAward={ kind:8; id:string; pubkey:string; a:AddressPointer; recipients:{ pubkey:string; relay?:string }[]; }; export type ProfileBadges={ kind:30008; id:string; pubkey:string; pairs:{ a:AddressPointer; awardId:string; relay?:string }[]; }; export const isKind=(e:NostrEvent,k:number)=>e.kind===k; const val=(tags:string[][],name:string)=>tags.find(t=>t[0]===name)?.[1]; const vals=(tags:string[][],name:string)=>tags.filter(t=>t[0]===name).map(t=>t.slice(1)); export function parseBadgeDefinition(e:NostrEvent):BadgeDefinition|null{ if(e.kind!==30009) return null; const d=val(e.tags,'d'); if(!d) return null; const a:AddressPointer=`30009:${'${'}e.pubkey}:${'${'}d}`; const name=val(e.tags,'name')||undefined; const description=val(e.tags,'description')||undefined; const imageTag=vals(e.tags,'image')[0]; const image=imageTag? 
{ url:imageTag[0], size:imageTag[1] }: null; const thumbs=vals(e.tags,'thumb').map(([url,size])=>({ url, size })); return { kind:30009, id:e.id, pubkey:e.pubkey, d, a, name, description, image, thumbs }; } export function parseBadgeAward(e:NostrEvent):BadgeAward|null{ if(e.kind!==8) return null; const atag=vals(e.tags,'a')[0]; if(!atag) return null; const a:AddressPointer=atag[0]; const recipients=vals(e.tags,'p').map(([pubkey,relay])=>({ pubkey, relay })); return { kind:8, id:e.id, pubkey:e.pubkey, a, recipients }; } export function parseProfileBadges(e:NostrEvent):ProfileBadges|null{ if(e.kind!==30008) return null; const d=val(e.tags,'d'); if(d!=='profile_badges') return null; const pairs: { a:AddressPointer; awardId:string; relay?:string }[]=[]; for(let i=0;i(t.size||'').startsWith(p+'x')); if(t) return t.url; } return def.image?.url || null; } export function buildDisplayBadgesForUser(userPubkey:string, defs:BadgeDefinition[], awards:BadgeAward[], profileBadges?:ProfileBadges|null, opts:{ issuerWhitelist?:Set; max?:number }={}):DisplayBadge[]{ const byA=new Map(defs.map(d=>[d.a,d])); const byAwardId=new Map(awards.map(a=>[a.id,a])); const isWhitelisted=(issuer:string)=>!opts.issuerWhitelist || opts.issuerWhitelist.has(issuer); let out:DisplayBadge[]=[]; if(profileBadges && profileBadges.pubkey===userPubkey){ for(const {a,awardId} of profileBadges.pairs){ const def=byA.get(a); if(!def) continue; const award=byAwardId.get(awardId)||null; if(award && (award.a!==a || !award.recipients.find(r=>r.pubkey===userPubkey))) continue; if(!isWhitelisted(def.pubkey)) continue; out.push({ def, award, issuer:def.pubkey, thumbUrl: pickThumb(def), title: def.name || def.d }); } } else { for(const aw of awards){ if(!aw.recipients.find(r=>r.pubkey===userPubkey)) continue; const def=byA.get(aw.a); if(!def) continue; if(!isWhitelisted(def.pubkey)) continue; out.push({ def, award:aw, issuer:def.pubkey, thumbUrl: pickThumb(def), title: def.name || def.d }); } } if(opts.max && 
out.length>opts.max) out=out.slice(0,opts.max); return out; }
+import type { AddressPointer, NostrEvent } from "./event";
+export type BadgeDefinition = {
+ kind: 30009;
+ id: string;
+ pubkey: string;
+ d: string;
+ a: AddressPointer;
+ name?: string;
+ description?: string;
+ image?: { url: string; size?: string } | null;
+ thumbs: { url: string; size?: string }[];
+};
+export type BadgeAward = {
+ kind: 8;
+ id: string;
+ pubkey: string;
+ a: AddressPointer;
+ recipients: { pubkey: string; relay?: string }[];
+};
+export type ProfileBadges = {
+ kind: 30008;
+ id: string;
+ pubkey: string;
+ pairs: { a: AddressPointer; awardId: string; relay?: string }[];
+};
+export const isKind = (e: NostrEvent, k: number) => e.kind === k;
+const val = (tags: string[][], name: string) =>
+ tags.find((t) => t[0] === name)?.[1];
+const vals = (tags: string[][], name: string) =>
+ tags.filter((t) => t[0] === name).map((t) => t.slice(1));
+export function parseBadgeDefinition(e: NostrEvent): BadgeDefinition | null {
+ if (e.kind !== 30009) return null;
+ const d = val(e.tags, "d");
+ if (!d) return null;
+  const a: AddressPointer = `30009:${e.pubkey}:${d}`;
+ const name = val(e.tags, "name") || undefined;
+ const description = val(e.tags, "description") || undefined;
+ const imageTag = vals(e.tags, "image")[0];
+ const image = imageTag ? { url: imageTag[0], size: imageTag[1] } : null;
+ const thumbs = vals(e.tags, "thumb").map(([url, size]) => ({ url, size }));
+ return {
+ kind: 30009,
+ id: e.id,
+ pubkey: e.pubkey,
+ d,
+ a,
+ name,
+ description,
+ image,
+ thumbs,
+ };
+}
+export function parseBadgeAward(e: NostrEvent): BadgeAward | null {
+ if (e.kind !== 8) return null;
+ const atag = vals(e.tags, "a")[0];
+ if (!atag) return null;
+ const a: AddressPointer = atag[0];
+ const recipients = vals(e.tags, "p").map(([pubkey, relay]) => ({
+ pubkey,
+ relay,
+ }));
+ return { kind: 8, id: e.id, pubkey: e.pubkey, a, recipients };
+}
+export function parseProfileBadges(e: NostrEvent): ProfileBadges | null {
+ if (e.kind !== 30008) return null;
+ const d = val(e.tags, "d");
+ if (d !== "profile_badges") return null;
+ const pairs: { a: AddressPointer; awardId: string; relay?: string }[] = [];
+ for (let i = 0; i < e.tags.length; i++) {
+ const t = e.tags[i];
+ if (t[0] === "a") {
+ const a = t[1];
+ const nxt = e.tags[i + 1];
+ if (nxt && nxt[0] === "e") {
+ pairs.push({ a, awardId: nxt[1], relay: nxt[2] });
+ i++;
+ }
+ }
+ }
+ return { kind: 30008, id: e.id, pubkey: e.pubkey, pairs };
+}
+export type DisplayBadge = {
+ def: BadgeDefinition;
+ award: BadgeAward | null;
+ issuer: string;
+ thumbUrl: string | null;
+ title: string;
+};
+export function pickThumb(
+ def: BadgeDefinition,
+ prefer: ("16" | "32" | "64" | "256" | "512")[] = ["32", "64", "256"],
+): string | null {
+ for (const p of prefer) {
+ const t = def.thumbs.find((t) => (t.size || "").startsWith(p + "x"));
+ if (t) return t.url;
+ }
+ return def.image?.url || null;
+}
+export function buildDisplayBadgesForUser(
+ userPubkey: string,
+ defs: BadgeDefinition[],
+ awards: BadgeAward[],
+ profileBadges?: ProfileBadges | null,
+  opts: { issuerWhitelist?: Set<string>; max?: number } = {},
+): DisplayBadge[] {
+ const byA = new Map(defs.map((d) => [d.a, d]));
+ const byAwardId = new Map(awards.map((a) => [a.id, a]));
+ const isWhitelisted = (issuer: string) =>
+ !opts.issuerWhitelist || opts.issuerWhitelist.has(issuer);
+ let out: DisplayBadge[] = [];
+ if (profileBadges && profileBadges.pubkey === userPubkey) {
+ for (const { a, awardId } of profileBadges.pairs) {
+ const def = byA.get(a);
+ if (!def) {
+ continue;
+ }
+ const award = byAwardId.get(awardId) || null;
+ if (
+ award &&
+ (award.a !== a ||
+ !award.recipients.find((r) => r.pubkey === userPubkey))
+ ) continue;
+ if (!isWhitelisted(def.pubkey)) continue;
+ out.push({
+ def,
+ award,
+ issuer: def.pubkey,
+ thumbUrl: pickThumb(def),
+ title: def.name || def.d,
+ });
+ }
+ } else {for (const aw of awards) {
+ if (!aw.recipients.find((r) => r.pubkey === userPubkey)) continue;
+ const def = byA.get(aw.a);
+ if (!def) continue;
+ if (!isWhitelisted(def.pubkey)) continue;
+ out.push({
+ def,
+ award: aw,
+ issuer: def.pubkey,
+ thumbUrl: pickThumb(def),
+ title: def.name || def.d,
+ });
+ }}
+ if (opts.max && out.length > opts.max) out = out.slice(0, opts.max);
+ return out;
+}
diff --git a/src/lib/nostr/types.ts b/src/lib/nostr/types.ts
index 911f04b..151e46b 100644
--- a/src/lib/nostr/types.ts
+++ b/src/lib/nostr/types.ts
@@ -1 +1,9 @@
-export type NostrProfile = { name?:string; display_name?:string; picture?:string; about?:string; nip05?:string; lud16?:string; badges?: Array<{ label:string; color?:string }>; };
+export type NostrProfile = {
+ name?: string;
+ display_name?: string;
+ picture?: string;
+ about?: string;
+ nip05?: string;
+ lud16?: string;
+ badges?: Array<{ label: string; color?: string }>;
+};
diff --git a/src/lib/stores/techStore.ts b/src/lib/stores/techStore.ts
index f147164..09b9093 100644
--- a/src/lib/stores/techStore.ts
+++ b/src/lib/stores/techStore.ts
@@ -1,16 +1,16 @@
-import { writable } from 'svelte/store';
-const KEY = 'alexandria/showTech';
+import { writable } from "svelte/store";
+const KEY = "alexandria/showTech";
// Default false unless explicitly set to 'true' in localStorage
-const initial = typeof localStorage !== 'undefined'
- ? localStorage.getItem(KEY) === 'true'
+const initial = typeof localStorage !== "undefined"
+ ? localStorage.getItem(KEY) === "true"
: false;
export const showTech = writable(initial);
-showTech.subscribe(v => {
- if (typeof document !== 'undefined') {
- document.documentElement.dataset.tech = v ? 'on' : 'off';
+showTech.subscribe((v) => {
+ if (typeof document !== "undefined") {
+ document.documentElement.dataset.tech = v ? "on" : "off";
localStorage.setItem(KEY, String(v));
}
});
diff --git a/src/lib/stores/themeStore.ts b/src/lib/stores/themeStore.ts
index 64d18b6..742c31c 100644
--- a/src/lib/stores/themeStore.ts
+++ b/src/lib/stores/themeStore.ts
@@ -1,18 +1,18 @@
-import { writable } from 'svelte/store';
+import { writable } from "svelte/store";
-const KEY = 'alexandria/theme';
+const KEY = "alexandria/theme";
const initial =
- (typeof localStorage !== 'undefined' && localStorage.getItem(KEY)) ||
- 'light';
+ (typeof localStorage !== "undefined" && localStorage.getItem(KEY)) ||
+ "light";
export const theme = writable(initial);
-theme.subscribe(v => {
- if (typeof document !== 'undefined') {
+theme.subscribe((v) => {
+ if (typeof document !== "undefined") {
document.documentElement.dataset.theme = String(v);
localStorage.setItem(KEY, String(v));
}
});
-export const setTheme = (t: string) => theme.set(t);
\ No newline at end of file
+export const setTheme = (t: string) => theme.set(t);
diff --git a/src/lib/stores/userStore.ts b/src/lib/stores/userStore.ts
index 93bc4a8..9ad19be 100644
--- a/src/lib/stores/userStore.ts
+++ b/src/lib/stores/userStore.ts
@@ -16,7 +16,6 @@ import {
import { loginStorageKey } from "../consts.ts";
import { nip19 } from "nostr-tools";
-
export interface UserState {
pubkey: string | null;
npub: string | null;
@@ -248,7 +247,11 @@ export async function loginWithExtension(ndk: NDK) {
/**
* Login with Amber (NIP-46)
*/
-export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser, ndk: NDK) {
+export async function loginWithAmber(
+ amberSigner: NDKSigner,
+ user: NDKUser,
+ ndk: NDK,
+) {
if (!ndk) throw new Error("NDK not initialized");
// Only clear previous login state after successful login
const npub = user.npub;
diff --git a/src/lib/styles/cva.ts b/src/lib/styles/cva.ts
index 53fe0ca..6975b77 100644
--- a/src/lib/styles/cva.ts
+++ b/src/lib/styles/cva.ts
@@ -1,3 +1,3 @@
-import { cva, type VariantProps } from 'class-variance-authority';
-import { twMerge } from 'tailwind-merge';
+import { cva, type VariantProps } from "class-variance-authority";
+import { twMerge } from "tailwind-merge";
export { cva, twMerge, type VariantProps };
diff --git a/src/lib/utils/asciidoc_metadata.ts b/src/lib/utils/asciidoc_metadata.ts
index 810c965..0d9ccec 100644
--- a/src/lib/utils/asciidoc_metadata.ts
+++ b/src/lib/utils/asciidoc_metadata.ts
@@ -135,7 +135,7 @@ function mapAttributesToMetadata(
function extractDocumentAuthors(sourceContent: string): string[] {
const authors: string[] = [];
const lines = sourceContent.split(/\r?\n/);
-
+
// Find the document title line
let titleLineIndex = -1;
for (let i = 0; i < lines.length; i++) {
@@ -144,21 +144,21 @@ function extractDocumentAuthors(sourceContent: string): string[] {
break;
}
}
-
+
if (titleLineIndex === -1) {
return authors;
}
-
+
// Look for authors in the lines immediately following the title
let i = titleLineIndex + 1;
while (i < lines.length) {
const line = lines[i];
-
+
// Stop if we hit a blank line, section header, or content that's not an author
if (line.trim() === "" || line.match(/^==\s+/)) {
break;
}
-
+
if (line.includes("<") && !line.startsWith(":")) {
// This is an author line like "John Doe <john@example.com>"
const authorName = line.split("<")[0].trim();
@@ -172,10 +172,10 @@ function extractDocumentAuthors(sourceContent: string): string[] {
// Not an author line, stop looking
break;
}
-
+
i++;
}
-
+
return authors;
}
@@ -185,7 +185,7 @@ function extractDocumentAuthors(sourceContent: string): string[] {
function extractSectionAuthors(sectionContent: string): string[] {
const authors: string[] = [];
const lines = sectionContent.split(/\r?\n/);
-
+
// Find the section title line
let titleLineIndex = -1;
for (let i = 0; i < lines.length; i++) {
@@ -194,21 +194,21 @@ function extractSectionAuthors(sectionContent: string): string[] {
break;
}
}
-
+
if (titleLineIndex === -1) {
return authors;
}
-
+
// Look for authors in the lines immediately following the section title
let i = titleLineIndex + 1;
while (i < lines.length) {
const line = lines[i];
-
+
// Stop if we hit a blank line, another section header, or content that's not an author
if (line.trim() === "" || line.match(/^==\s+/)) {
break;
}
-
+
if (line.includes("<") && !line.startsWith(":")) {
// This is an author line like "John Doe <john@example.com>"
const authorName = line.split("<")[0].trim();
@@ -217,7 +217,7 @@ function extractSectionAuthors(sectionContent: string): string[] {
}
} else if (
line.match(/^[A-Za-z\s]+$/) &&
- line.trim() !== "" &&
+ line.trim() !== "" &&
line.trim().split(/\s+/).length <= 2 &&
!line.startsWith(":")
) {
@@ -230,10 +230,10 @@ function extractSectionAuthors(sectionContent: string): string[] {
// Not an author line, stop looking
break;
}
-
+
i++;
}
-
+
return authors;
}
@@ -243,23 +243,23 @@ function extractSectionAuthors(sectionContent: string): string[] {
function stripDocumentHeader(content: string): string {
const lines = content.split(/\r?\n/);
let contentStart = 0;
-
+
// Find where the document header ends
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
// Skip title line, author line, revision line, and attribute lines
if (
- !line.match(/^=\s+/) &&
+ !line.match(/^=\s+/) &&
!line.includes("<") &&
!line.match(/^.+,\s*.+:\s*.+$/) &&
- !line.match(/^:[^:]+:\s*.+$/) &&
+ !line.match(/^:[^:]+:\s*.+$/) &&
line.trim() !== ""
) {
contentStart = i;
break;
}
}
-
+
// Filter out all attribute lines and author lines from the content
const contentLines = lines.slice(contentStart);
const filteredLines = contentLines.filter((line) => {
@@ -269,7 +269,7 @@ function stripDocumentHeader(content: string): string {
}
return true;
});
-
+
// Remove extra blank lines and normalize newlines
return filteredLines.join("\n").replace(/\n\s*\n\s*\n/g, "\n\n").replace(
/\n\s*\n/g,
@@ -283,23 +283,24 @@ function stripDocumentHeader(content: string): string {
function stripSectionHeader(sectionContent: string): string {
const lines = sectionContent.split(/\r?\n/);
let contentStart = 0;
-
+
// Find where the section header ends
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
// Skip section title line, author line, and attribute lines
if (
- !line.match(/^==\s+/) &&
+ !line.match(/^==\s+/) &&
!line.includes("<") &&
- !line.match(/^:[^:]+:\s*.+$/) &&
+ !line.match(/^:[^:]+:\s*.+$/) &&
line.trim() !== "" &&
- !(line.match(/^[A-Za-z\s]+$/) && line.trim() !== "" && line.trim().split(/\s+/).length <= 2)
+ !(line.match(/^[A-Za-z\s]+$/) && line.trim() !== "" &&
+ line.trim().split(/\s+/).length <= 2)
) {
contentStart = i;
break;
}
}
-
+
// Filter out all attribute lines, author lines, and section headers from the content
const contentLines = lines.slice(contentStart);
const filteredLines = contentLines.filter((line) => {
@@ -309,7 +310,7 @@ function stripSectionHeader(sectionContent: string): string {
}
// Skip author lines (simple names without email)
if (
- line.match(/^[A-Za-z\s]+$/) &&
+ line.match(/^[A-Za-z\s]+$/) &&
line.trim() !== "" &&
line.trim().split(/\s+/).length <= 2
) {
@@ -321,7 +322,7 @@ function stripSectionHeader(sectionContent: string): string {
}
return true;
});
-
+
// Remove extra blank lines and normalize newlines
return filteredLines.join("\n").replace(/\n\s*\n\s*\n/g, "\n\n").replace(
/\n\s*\n/g,
@@ -387,7 +388,7 @@ export function extractDocumentMetadata(inputContent: string): {
inDocumentHeader = false;
break;
}
-
+
// Process :author: attributes regardless of other content
if (inDocumentHeader) {
const match = line.match(/^:author:\s*(.+)$/);
@@ -464,7 +465,7 @@ export function extractSectionMetadata(inputSectionContent: string): {
// Extract authors from section content
const authors = extractSectionAuthors(inputSectionContent);
-
+
// Get authors from attributes (including multiple :author: lines)
const lines = inputSectionContent.split(/\r?\n/);
for (const line of lines) {
@@ -476,7 +477,7 @@ export function extractSectionMetadata(inputSectionContent: string): {
}
}
}
-
+
if (authors.length > 0) {
metadata.authors = authors;
}
diff --git a/src/lib/utils/cache_manager.ts b/src/lib/utils/cache_manager.ts
index 813e149..bce2a6b 100644
--- a/src/lib/utils/cache_manager.ts
+++ b/src/lib/utils/cache_manager.ts
@@ -1,11 +1,11 @@
-import { unifiedProfileCache } from './npubCache';
-import { searchCache } from './searchCache';
-import { indexEventCache } from './indexEventCache';
-import { clearRelaySetCache } from '../ndk';
+import { unifiedProfileCache } from "./npubCache";
+import { searchCache } from "./searchCache";
+import { indexEventCache } from "./indexEventCache";
+import { clearRelaySetCache } from "../ndk";
/**
* Clears all application caches
- *
+ *
* Clears:
* - unifiedProfileCache (profile metadata)
* - searchCache (search results)
@@ -13,18 +13,18 @@ import { clearRelaySetCache } from '../ndk';
* - relaySetCache (relay configuration)
*/
export function clearAllCaches(): void {
- console.log('[CacheManager] Clearing all application caches...');
-
+ console.log("[CacheManager] Clearing all application caches...");
+
// Clear in-memory caches
unifiedProfileCache.clear();
searchCache.clear();
indexEventCache.clear();
clearRelaySetCache();
-
+
// Clear localStorage caches
clearLocalStorageCaches();
-
- console.log('[CacheManager] All caches cleared successfully');
+
+ console.log("[CacheManager] All caches cleared successfully");
}
/**
@@ -32,41 +32,43 @@ export function clearAllCaches(): void {
* This is useful when profile pictures or metadata are stale
*/
export function clearProfileCaches(): void {
- console.log('[CacheManager] Clearing profile-specific caches...');
-
+ console.log("[CacheManager] Clearing profile-specific caches...");
+
// Clear unified profile cache
unifiedProfileCache.clear();
-
+
// Clear profile-related search results
// Note: searchCache doesn't have a way to clear specific types, so we clear all
// This is acceptable since profile searches are the most common
searchCache.clear();
-
- console.log('[CacheManager] Profile caches cleared successfully');
+
+ console.log("[CacheManager] Profile caches cleared successfully");
}
/**
* Clears localStorage caches
*/
function clearLocalStorageCaches(): void {
- if (typeof window === 'undefined') return;
-
+ if (typeof window === "undefined") return;
+
const keysToRemove: string[] = [];
-
+
// Find all localStorage keys that start with 'alexandria'
for (let i = 0; i < localStorage.length; i++) {
const key = localStorage.key(i);
- if (key && key.startsWith('alexandria')) {
+ if (key && key.startsWith("alexandria")) {
keysToRemove.push(key);
}
}
-
+
// Remove the keys
- keysToRemove.forEach(key => {
+ keysToRemove.forEach((key) => {
localStorage.removeItem(key);
});
-
- console.log(`[CacheManager] Cleared ${keysToRemove.length} localStorage items`);
+
+ console.log(
+ `[CacheManager] Cleared ${keysToRemove.length} localStorage items`,
+ );
}
/**
diff --git a/src/lib/utils/event_input_utils.ts b/src/lib/utils/event_input_utils.ts
index cdeb501..d0dc1fa 100644
--- a/src/lib/utils/event_input_utils.ts
+++ b/src/lib/utils/event_input_utils.ts
@@ -204,7 +204,6 @@ function extractMarkdownTopHeader(content: string): string | null {
// Event Construction
// =========================
-
/**
* Builds a set of events for a 30040 publication: one 30040 index event and one 30041 event per section.
* Each 30041 gets a d-tag (normalized section header) and a title tag (raw section header).
@@ -261,31 +260,33 @@ export function build30040EventSet(
console.log("Index event:", { documentTitle, indexDTag });
// Create section events with their metadata
- const sectionEvents: NDKEvent[] = parsed.sections.map((section: any, i: number) => {
- const sectionDTag = `${indexDTag}-${normalizeDTagValue(section.title)}`;
- console.log(`Creating section ${i}:`, {
- title: section.title,
- dTag: sectionDTag,
- content: section.content,
- metadata: section.metadata,
- });
-
- // Convert section metadata to tags
- const sectionMetadataTags = metadataToTags(section.metadata);
-
- return new NDKEventClass(ndk, {
- kind: 30041,
- content: section.content,
- tags: [
- ...tags,
- ...sectionMetadataTags,
- ["d", sectionDTag],
- ["title", section.title],
- ],
- pubkey: baseEvent.pubkey,
- created_at: baseEvent.created_at,
- });
- });
+ const sectionEvents: NDKEvent[] = parsed.sections.map(
+ (section: any, i: number) => {
+ const sectionDTag = `${indexDTag}-${normalizeDTagValue(section.title)}`;
+ console.log(`Creating section ${i}:`, {
+ title: section.title,
+ dTag: sectionDTag,
+ content: section.content,
+ metadata: section.metadata,
+ });
+
+ // Convert section metadata to tags
+ const sectionMetadataTags = metadataToTags(section.metadata);
+
+ return new NDKEventClass(ndk, {
+ kind: 30041,
+ content: section.content,
+ tags: [
+ ...tags,
+ ...sectionMetadataTags,
+ ["d", sectionDTag],
+ ["title", section.title],
+ ],
+ pubkey: baseEvent.pubkey,
+ created_at: baseEvent.created_at,
+ });
+ },
+ );
// Create proper a tags with format: kind:pubkey:d-tag
const aTags = sectionEvents.map((event) => {
diff --git a/src/lib/utils/event_search.ts b/src/lib/utils/event_search.ts
index 4ce3cd6..aed0471 100644
--- a/src/lib/utils/event_search.ts
+++ b/src/lib/utils/event_search.ts
@@ -10,7 +10,10 @@ import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
/**
* Search for a single event by ID or filter
*/
-export async function searchEvent(query: string, ndk: NDK): Promise<NDKEvent | null> {
+export async function searchEvent(
+ query: string,
+ ndk: NDK,
+): Promise<NDKEvent | null> {
if (!ndk) {
console.warn("[Search] No NDK instance available");
return null;
diff --git a/src/lib/utils/image_utils.ts b/src/lib/utils/image_utils.ts
index 054df41..e8b1797 100644
--- a/src/lib/utils/image_utils.ts
+++ b/src/lib/utils/image_utils.ts
@@ -21,4 +21,4 @@ export function generateDarkPastelColor(seed: string): string {
return `#${r.toString(16).padStart(2, "0")}${
g.toString(16).padStart(2, "0")
}${b.toString(16).padStart(2, "0")}`;
-}
\ No newline at end of file
+}
diff --git a/src/lib/utils/markup/advancedMarkupParser.ts b/src/lib/utils/markup/advancedMarkupParser.ts
index 480539b..fe519f5 100644
--- a/src/lib/utils/markup/advancedMarkupParser.ts
+++ b/src/lib/utils/markup/advancedMarkupParser.ts
@@ -399,31 +399,37 @@ function processInlineCodeMath(content: string): string {
// Check if the code content contains math expressions
const hasInlineMath = /\$((?:[^$\\]|\\.)*?)\$/.test(codeContent);
const hasDisplayMath = /\$\$[\s\S]*?\$\$/.test(codeContent);
-
+
if (!hasInlineMath && !hasDisplayMath) {
// No math found, return the original inline code
return match;
}
-
- // Process display math ($$...$$) first to avoid conflicts with inline math
- let processedContent = codeContent.replace(/\$\$([\s\S]*?)\$\$/g, (mathMatch: string, mathContent: string) => {
- // Skip empty math expressions
- if (!mathContent.trim()) {
- return mathMatch;
- }
- return `\\[${mathContent}\\]`;
- });
-
- // Process inline math ($...$) after display math
- // Use a more sophisticated regex that handles escaped dollar signs
- processedContent = processedContent.replace(/\$((?:[^$\\]|\\.)*?)\$/g, (mathMatch: string, mathContent: string) => {
- // Skip empty math expressions
- if (!mathContent.trim()) {
- return mathMatch;
- }
- return `\\(${mathContent}\\)`;
- });
-
+
+ // Process display math ($$...$$) first to avoid conflicts with inline math
+ let processedContent = codeContent.replace(
+ /\$\$([\s\S]*?)\$\$/g,
+ (mathMatch: string, mathContent: string) => {
+ // Skip empty math expressions
+ if (!mathContent.trim()) {
+ return mathMatch;
+ }
+ return `\\[${mathContent}\\]`;
+ },
+ );
+
+ // Process inline math ($...$) after display math
+ // Use a more sophisticated regex that handles escaped dollar signs
+ processedContent = processedContent.replace(
+ /\$((?:[^$\\]|\\.)*?)\$/g,
+ (mathMatch: string, mathContent: string) => {
+ // Skip empty math expressions
+ if (!mathContent.trim()) {
+ return mathMatch;
+ }
+ return `\\(${mathContent}\\)`;
+ },
+ );
+
return `\`${processedContent}\``;
});
}
diff --git a/src/lib/utils/markup/basicMarkupParser.ts b/src/lib/utils/markup/basicMarkupParser.ts
index be96f2f..472693d 100644
--- a/src/lib/utils/markup/basicMarkupParser.ts
+++ b/src/lib/utils/markup/basicMarkupParser.ts
@@ -7,8 +7,6 @@ import {
processWikilinks,
} from "./markupUtils.ts";
-
-
export function preProcessBasicMarkup(text: string): string {
try {
// Process basic text formatting first
@@ -26,7 +24,10 @@ export function preProcessBasicMarkup(text: string): string {
}
}
-export async function postProcessBasicMarkup(text: string, ndk?: NDK): Promise<string> {
+export async function postProcessBasicMarkup(
+ text: string,
+ ndk?: NDK,
+): Promise<string> {
try {
// Process Nostr identifiers last
let processedText = await processNostrIdentifiersInText(text, ndk);
@@ -40,7 +41,10 @@ export async function postProcessBasicMarkup(text: string, ndk?: NDK): Promise {
+export async function parseBasicMarkup(
+ text: string,
+ ndk?: NDK,
+): Promise<string> {
if (!text) return "";
try {
diff --git a/src/lib/utils/markup/embeddedMarkupParser.ts b/src/lib/utils/markup/embeddedMarkupParser.ts
index 581df49..b8191aa 100644
--- a/src/lib/utils/markup/embeddedMarkupParser.ts
+++ b/src/lib/utils/markup/embeddedMarkupParser.ts
@@ -1,4 +1,7 @@
-import { postProcessBasicMarkup, preProcessBasicMarkup } from "./basicMarkupParser.ts";
+import {
+ postProcessBasicMarkup,
+ preProcessBasicMarkup,
+} from "./basicMarkupParser.ts";
import { processNostrIdentifiersWithEmbeddedEvents } from "./markupUtils.ts";
/**
diff --git a/src/lib/utils/markup/markupUtils.ts b/src/lib/utils/markup/markupUtils.ts
index 9417386..de38c23 100644
--- a/src/lib/utils/markup/markupUtils.ts
+++ b/src/lib/utils/markup/markupUtils.ts
@@ -80,7 +80,10 @@ export function replaceAlexandriaNostrLinks(text: string): string {
return text;
}
-export function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string {
+export function renderListGroup(
+ lines: string[],
+ typeHint?: "ol" | "ul",
+): string {
function parseList(
start: number,
indent: number,
@@ -322,7 +325,9 @@ export async function processNostrIdentifiersInText(
metadata = await getUserMetadata(identifier, ndk);
} else {
// Fallback when NDK is not available - just use the identifier
- metadata = { name: identifier.slice(0, 8) + "..." + identifier.slice(-4) };
+ metadata = {
+ name: identifier.slice(0, 8) + "..." + identifier.slice(-4),
+ };
}
const displayText = metadata.displayName || metadata.name;
const link = createProfileLink(identifier, displayText);
@@ -391,15 +396,19 @@ export function processAllNostrIdentifiers(text: string): string {
// Pattern for prefixed nostr identifiers (nostr:npub1, nostr:note1, etc.)
// This handles both full identifiers and partial ones that might appear in content
- const prefixedNostrPattern = /nostr:(npub|nprofile|note|nevent|naddr)[a-zA-Z0-9]{20,}/g;
-
+ const prefixedNostrPattern =
+ /nostr:(npub|nprofile|note|nevent|naddr)[a-zA-Z0-9]{20,}/g;
+
// Pattern for bare nostr identifiers (npub1, note1, nevent1, naddr1)
// Exclude matches that are part of URLs to avoid breaking existing links
- const bareNostrPattern = /(?= 0; i--) {
const match = prefixedMatches[i];
@@ -407,11 +416,12 @@ export function processAllNostrIdentifiers(text: string): string {
const matchIndex = match.index ?? 0;
// Create shortened display text
- const identifier = fullMatch.replace('nostr:', '');
+ const identifier = fullMatch.replace("nostr:", "");
const displayText = `${identifier.slice(0, 8)}...${identifier.slice(-4)}`;
-
+
// Create clickable link
- const replacement = `${displayText}`;
+ const replacement =
+ `${displayText}`;
// Replace the match in the text
processedText = processedText.slice(0, matchIndex) + replacement +
@@ -420,7 +430,7 @@ export function processAllNostrIdentifiers(text: string): string {
// Process bare nostr identifiers
const bareMatches = Array.from(processedText.matchAll(bareNostrPattern));
-
+
// Process them in reverse order to avoid index shifting issues
for (let i = bareMatches.length - 1; i >= 0; i--) {
const match = bareMatches[i];
@@ -429,9 +439,10 @@ export function processAllNostrIdentifiers(text: string): string {
// Create shortened display text
const displayText = `${fullMatch.slice(0, 8)}...${fullMatch.slice(-4)}`;
-
+
// Create clickable link with nostr: prefix for the href
- const replacement = `${displayText}`;
+ const replacement =
+ `${displayText}`;
// Replace the match in the text
processedText = processedText.slice(0, matchIndex) + replacement +
@@ -439,9 +450,12 @@ export function processAllNostrIdentifiers(text: string): string {
}
// Also handle any remaining truncated prefixed identifiers that might be cut off or incomplete
- const truncatedPrefixedPattern = /nostr:(npub|nprofile|note|nevent|naddr)[a-zA-Z0-9]{8,}/g;
- const truncatedPrefixedMatches = Array.from(processedText.matchAll(truncatedPrefixedPattern));
-
+ const truncatedPrefixedPattern =
+ /nostr:(npub|nprofile|note|nevent|naddr)[a-zA-Z0-9]{8,}/g;
+ const truncatedPrefixedMatches = Array.from(
+ processedText.matchAll(truncatedPrefixedPattern),
+ );
+
for (let i = truncatedPrefixedMatches.length - 1; i >= 0; i--) {
const match = truncatedPrefixedMatches[i];
const [fullMatch] = match;
@@ -451,11 +465,14 @@ export function processAllNostrIdentifiers(text: string): string {
if (fullMatch.length >= 30) continue; // Full identifiers are at least 30 chars
// Create display text for truncated identifiers
- const identifier = fullMatch.replace('nostr:', '');
- const displayText = identifier.length > 12 ? `${identifier.slice(0, 8)}...${identifier.slice(-4)}` : identifier;
-
+ const identifier = fullMatch.replace("nostr:", "");
+ const displayText = identifier.length > 12
+ ? `${identifier.slice(0, 8)}...${identifier.slice(-4)}`
+ : identifier;
+
// Create clickable link
- const replacement = `${displayText}`;
+ const replacement =
+ `${displayText}`;
// Replace the match in the text
processedText = processedText.slice(0, matchIndex) + replacement +
@@ -463,9 +480,12 @@ export function processAllNostrIdentifiers(text: string): string {
}
// Handle truncated bare identifiers
- const truncatedBarePattern = /(?= 0; i--) {
const match = truncatedBareMatches[i];
const [fullMatch] = match;
@@ -475,10 +495,13 @@ export function processAllNostrIdentifiers(text: string): string {
if (fullMatch.length >= 30) continue; // Full identifiers are at least 30 chars
// Create display text for truncated identifiers
- const displayText = fullMatch.length > 12 ? `${fullMatch.slice(0, 8)}...${fullMatch.slice(-4)}` : fullMatch;
-
+ const displayText = fullMatch.length > 12
+ ? `${fullMatch.slice(0, 8)}...${fullMatch.slice(-4)}`
+ : fullMatch;
+
// Create clickable link
- const replacement = `${displayText}`;
+ const replacement =
+ `${displayText}`;
// Replace the match in the text
processedText = processedText.slice(0, matchIndex) + replacement +
diff --git a/src/lib/utils/nostrUtils.ts b/src/lib/utils/nostrUtils.ts
index 256fd56..b97d567 100644
--- a/src/lib/utils/nostrUtils.ts
+++ b/src/lib/utils/nostrUtils.ts
@@ -7,9 +7,9 @@ import type { Filter } from "./search_types.ts";
import {
anonymousRelays,
communityRelays,
+ localRelays,
searchRelays,
secondaryRelays,
- localRelays,
} from "../consts.ts";
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
import { NDKRelaySet as NDKRelaySetFromNDK } from "@nostr-dev-kit/ndk";
@@ -205,22 +205,22 @@ export async function processNostrIdentifiers(
if (/https?:\/\/$|www\.$/i.test(before)) {
return true;
}
-
+
// Check if the match is part of a larger URL structure
// Look for common URL patterns that might contain nostr identifiers
const beforeContext = text.slice(Math.max(0, index - 50), index);
const afterContext = text.slice(index, Math.min(text.length, index + 50));
-
+
// Check if there's a URL-like structure around the match
const urlPatterns = [
- /https?:\/\/[^\s]*$/i, // URL starting with http(s)://
- /www\.[^\s]*$/i, // URL starting with www.
- /[^\s]*\.(com|org|net|io|eu|co|me|app|dev)[^\s]*$/i, // Common TLDs
- /[^\s]*\/[^\s]*$/i, // Path-like structures
+ /https?:\/\/[^\s]*$/i, // URL starting with http(s)://
+ /www\.[^\s]*$/i, // URL starting with www.
+ /[^\s]*\.(com|org|net|io|eu|co|me|app|dev)[^\s]*$/i, // Common TLDs
+ /[^\s]*\/[^\s]*$/i, // Path-like structures
];
-
+
const combinedContext = beforeContext + afterContext;
- return urlPatterns.some(pattern => pattern.test(combinedContext));
+ return urlPatterns.some((pattern) => pattern.test(combinedContext));
}
// Process profiles (npub and nprofile)
@@ -440,8 +440,8 @@ export async function fetchEventWithFallback(
// AI-NOTE: Include ALL available relays for comprehensive event discovery
// This ensures we don't miss events that might be on any available relay
allRelays = [
- ...secondaryRelays,
- ...searchRelays,
+ ...secondaryRelays,
+ ...searchRelays,
...anonymousRelays,
...inboxRelays, // Include user's inbox relays
...outboxRelays, // Include user's outbox relays
@@ -528,7 +528,7 @@ export async function fetchEventWithFallback(
* Converts various Nostr identifiers to npub format.
* Handles hex pubkeys, npub strings, and nprofile strings.
*/
- export function toNpub(pubkey: string | undefined): string | null {
+export function toNpub(pubkey: string | undefined): string | null {
if (!pubkey) return null;
try {
// If it's already an npub, return it
diff --git a/src/lib/utils/npubCache.ts b/src/lib/utils/npubCache.ts
index 4fcadfa..0b63a8c 100644
--- a/src/lib/utils/npubCache.ts
+++ b/src/lib/utils/npubCache.ts
@@ -59,14 +59,18 @@ class UnifiedProfileCache {
/**
* Get profile data, fetching fresh data if needed
*/
- async getProfile(identifier: string, ndk?: NDK, force = false): Promise<NpubMetadata> {
+ async getProfile(
+ identifier: string,
+ ndk?: NDK,
+ force = false,
+ ): Promise<NpubMetadata> {
const cleanId = identifier.replace(/^nostr:/, "");
-
+
// Check cache first (unless forced)
if (!force && this.cache.has(cleanId)) {
const entry = this.cache.get(cleanId)!;
const now = Date.now();
-
+
// Return cached data if not expired
if ((now - entry.timestamp) < this.maxAge) {
console.log("UnifiedProfileCache: Returning cached profile:", cleanId);
@@ -81,8 +85,13 @@ class UnifiedProfileCache {
/**
* Fetch profile from all available relays and cache it
*/
- private async fetchAndCacheProfile(identifier: string, ndk?: NDK): Promise<NpubMetadata> {
+ private async fetchAndCacheProfile(
+ identifier: string,
+ ndk?: NDK,
+ ): Promise<NpubMetadata> {
+ const fallback = {
+ name: `${identifier.slice(0, 8)}...${identifier.slice(-4)}`,
+ };
try {
if (!ndk) {
@@ -92,7 +101,10 @@ class UnifiedProfileCache {
const decoded = nip19.decode(identifier);
if (!decoded) {
- console.warn("UnifiedProfileCache: Failed to decode identifier:", identifier);
+ console.warn(
+ "UnifiedProfileCache: Failed to decode identifier:",
+ identifier,
+ );
return fallback;
}
@@ -103,11 +115,17 @@ class UnifiedProfileCache {
} else if (decoded.type === "nprofile") {
pubkey = decoded.data.pubkey;
} else {
- console.warn("UnifiedProfileCache: Unsupported identifier type:", decoded.type);
+ console.warn(
+ "UnifiedProfileCache: Unsupported identifier type:",
+ decoded.type,
+ );
return fallback;
}
- console.log("UnifiedProfileCache: Fetching fresh profile for pubkey:", pubkey);
+ console.log(
+ "UnifiedProfileCache: Fetching fresh profile for pubkey:",
+ pubkey,
+ );
// Use fetchEventWithFallback to search ALL available relays
const profileEvent = await fetchEventWithFallback(ndk, {
@@ -116,7 +134,10 @@ class UnifiedProfileCache {
});
if (!profileEvent || !profileEvent.content) {
- console.warn("UnifiedProfileCache: No profile event found for:", pubkey);
+ console.warn(
+ "UnifiedProfileCache: No profile event found for:",
+ pubkey,
+ );
return fallback;
}
@@ -147,7 +168,6 @@ class UnifiedProfileCache {
console.log("UnifiedProfileCache: Cached fresh profile:", metadata);
return metadata;
-
} catch (e) {
console.error("UnifiedProfileCache: Error fetching profile:", e);
return fallback;
@@ -160,7 +180,7 @@ class UnifiedProfileCache {
getCached(identifier: string): NpubMetadata | undefined {
const cleanId = identifier.replace(/^nostr:/, "");
const entry = this.cache.get(cleanId);
-
+
if (entry) {
const now = Date.now();
if ((now - entry.timestamp) < this.maxAge) {
@@ -170,14 +190,19 @@ class UnifiedProfileCache {
this.cache.delete(cleanId);
}
}
-
+
return undefined;
}
/**
* Set profile data in cache
*/
- set(identifier: string, profile: NpubMetadata, pubkey?: string, relaySource?: string): void {
+ set(
+ identifier: string,
+ profile: NpubMetadata,
+ pubkey?: string,
+ relaySource?: string,
+ ): void {
const cleanId = identifier.replace(/^nostr:/, "");
const entry: CacheEntry = {
profile,
@@ -199,7 +224,7 @@ class UnifiedProfileCache {
has(identifier: string): boolean {
const cleanId = identifier.replace(/^nostr:/, "");
const entry = this.cache.get(cleanId);
-
+
if (entry) {
const now = Date.now();
if ((now - entry.timestamp) < this.maxAge) {
@@ -209,7 +234,7 @@ class UnifiedProfileCache {
this.cache.delete(cleanId);
}
}
-
+
return false;
}
@@ -219,7 +244,7 @@ class UnifiedProfileCache {
delete(identifier: string): boolean {
const cleanId = identifier.replace(/^nostr:/, "");
const entry = this.cache.get(cleanId);
-
+
if (entry) {
this.cache.delete(cleanId);
if (entry.pubkey && entry.pubkey !== cleanId) {
@@ -228,7 +253,7 @@ class UnifiedProfileCache {
this.saveToStorage();
return true;
}
-
+
return false;
}
@@ -264,18 +289,20 @@ class UnifiedProfileCache {
cleanup(): void {
const now = Date.now();
const expiredKeys: string[] = [];
-
+
for (const [key, entry] of this.cache.entries()) {
if ((now - entry.timestamp) >= this.maxAge) {
expiredKeys.push(key);
}
}
-
- expiredKeys.forEach(key => this.cache.delete(key));
-
+
+ expiredKeys.forEach((key) => this.cache.delete(key));
+
if (expiredKeys.length > 0) {
this.saveToStorage();
- console.log(`UnifiedProfileCache: Cleaned up ${expiredKeys.length} expired entries`);
+ console.log(
+ `UnifiedProfileCache: Cleaned up ${expiredKeys.length} expired entries`,
+ );
}
}
}
@@ -294,7 +321,8 @@ if (typeof window !== "undefined") {
// but make it use the unified cache internally
export const npubCache = {
get: (key: string) => unifiedProfileCache.getCached(key),
- set: (key: string, value: NpubMetadata) => unifiedProfileCache.set(key, value),
+ set: (key: string, value: NpubMetadata) =>
+ unifiedProfileCache.set(key, value),
has: (key: string) => unifiedProfileCache.has(key),
delete: (key: string) => unifiedProfileCache.delete(key),
clear: () => unifiedProfileCache.clear(),
@@ -303,14 +331,19 @@ export const npubCache = {
};
// Legacy compatibility for old profileCache functions
-export async function getDisplayName(pubkey: string, ndk: NDK): Promise {
+export async function getDisplayName(
+ pubkey: string,
+ ndk: NDK,
+): Promise {
const profile = await unifiedProfileCache.getProfile(pubkey, ndk);
- return profile.displayName || profile.name || `${pubkey.slice(0, 8)}...${pubkey.slice(-4)}`;
+ return profile.displayName || profile.name ||
+ `${pubkey.slice(0, 8)}...${pubkey.slice(-4)}`;
}
export function getDisplayNameSync(pubkey: string): string {
const profile = unifiedProfileCache.getCached(pubkey);
- return profile?.displayName || profile?.name || `${pubkey.slice(0, 8)}...${pubkey.slice(-4)}`;
+ return profile?.displayName || profile?.name ||
+ `${pubkey.slice(0, 8)}...${pubkey.slice(-4)}`;
}
export async function batchFetchProfiles(
@@ -319,15 +352,15 @@ export async function batchFetchProfiles(
onProgress?: (fetched: number, total: number) => void,
): Promise {
const allProfileEvents: NDKEvent[] = [];
-
+
if (onProgress) onProgress(0, pubkeys.length);
-
+
// Fetch profiles in parallel using the unified cache
const fetchPromises = pubkeys.map(async (pubkey, index) => {
try {
const profile = await unifiedProfileCache.getProfile(pubkey, ndk);
if (onProgress) onProgress(index + 1, pubkeys.length);
-
+
// Create a mock NDKEvent for compatibility
const event = new NDKEvent(ndk);
event.content = JSON.stringify(profile);
@@ -338,14 +371,14 @@ export async function batchFetchProfiles(
return null;
}
});
-
+
const results = await Promise.allSettled(fetchPromises);
- results.forEach(result => {
- if (result.status === 'fulfilled' && result.value) {
+ results.forEach((result) => {
+ if (result.status === "fulfilled" && result.value) {
allProfileEvents.push(result.value);
}
});
-
+
return allProfileEvents;
}
diff --git a/src/lib/utils/profile_search.ts b/src/lib/utils/profile_search.ts
index 09067ce..d475f50 100644
--- a/src/lib/utils/profile_search.ts
+++ b/src/lib/utils/profile_search.ts
@@ -1,8 +1,17 @@
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
-import { getNpubFromNip05, getUserMetadata, fetchEventWithFallback } from "./nostrUtils.ts";
+import {
+ fetchEventWithFallback,
+ getNpubFromNip05,
+ getUserMetadata,
+} from "./nostrUtils.ts";
import NDK, { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk";
import { searchCache } from "./searchCache.ts";
-import { communityRelays, searchRelays, secondaryRelays, anonymousRelays } from "../consts.ts";
+import {
+ anonymousRelays,
+ communityRelays,
+ searchRelays,
+ secondaryRelays,
+} from "../consts.ts";
import { get } from "svelte/store";
import type { NostrProfile, ProfileSearchResult } from "./search_types.ts";
import {
@@ -78,7 +87,7 @@ export async function searchProfiles(
const npub = await getNpubFromNip05(normalizedNip05);
if (npub) {
const metadata = await getUserMetadata(npub, ndk);
-
+
// AI-NOTE: Fetch the original event timestamp to preserve created_at
let created_at: number | undefined = undefined;
try {
@@ -94,9 +103,12 @@ export async function searchProfiles(
}
}
} catch (e) {
- console.warn("profile_search: Failed to fetch original event timestamp:", e);
+ console.warn(
+ "profile_search: Failed to fetch original event timestamp:",
+ e,
+ );
}
-
+
const profile: NostrProfile & { created_at?: number } = {
...metadata,
pubkey: npub,
@@ -207,7 +219,7 @@ async function searchNip05Domains(
npub,
);
const metadata = await getUserMetadata(npub, ndk);
-
+
// AI-NOTE: Fetch the original event timestamp to preserve created_at
let created_at: number | undefined = undefined;
try {
@@ -223,9 +235,12 @@ async function searchNip05Domains(
}
}
} catch (e) {
- console.warn("profile_search: Failed to fetch original event timestamp:", e);
+ console.warn(
+ "profile_search: Failed to fetch original event timestamp:",
+ e,
+ );
}
-
+
const profile: NostrProfile & { created_at?: number } = {
...metadata,
pubkey: npub,
@@ -259,7 +274,7 @@ async function searchNip05Domains(
if (npub) {
console.log("NIP-05 search: found npub for", nip05Address, ":", npub);
const metadata = await getUserMetadata(npub, ndk);
-
+
// AI-NOTE: Fetch the original event timestamp to preserve created_at
let created_at: number | undefined = undefined;
try {
@@ -275,9 +290,12 @@ async function searchNip05Domains(
}
}
} catch (e) {
- console.warn("profile_search: Failed to fetch original event timestamp:", e);
+ console.warn(
+ "profile_search: Failed to fetch original event timestamp:",
+ e,
+ );
}
-
+
const profile: NostrProfile & { created_at?: number } = {
...metadata,
pubkey: npub,
@@ -328,12 +346,14 @@ async function quickRelaySearch(
// AI-NOTE: Use ALL available relays for comprehensive profile discovery
// This ensures we don't miss profiles due to stale cache or limited relay coverage
-
+
// Get all available relays from NDK pool (most comprehensive)
- const poolRelays = Array.from(ndk.pool.relays.values()).map((r: any) => r.url) as string[];
+ const poolRelays = Array.from(ndk.pool.relays.values()).map((r: any) =>
+ r.url
+ ) as string[];
const userInboxRelays = get(activeInboxRelays);
const userOutboxRelays = get(activeOutboxRelays);
-
+
// Combine ALL available relays for maximum coverage
const allRelayUrls = [
...poolRelays, // All NDK pool relays
@@ -347,7 +367,10 @@ async function quickRelaySearch(
// Deduplicate relay URLs
const uniqueRelayUrls = [...new Set(allRelayUrls)];
- console.log("Using ALL available relays for profile search:", uniqueRelayUrls);
+ console.log(
+ "Using ALL available relays for profile search:",
+ uniqueRelayUrls,
+ );
console.log("Total relays for profile search:", uniqueRelayUrls.length);
// Create relay sets for parallel search
diff --git a/src/lib/utils/subscription_search.ts b/src/lib/utils/subscription_search.ts
index bb9a415..48e3bf1 100644
--- a/src/lib/utils/subscription_search.ts
+++ b/src/lib/utils/subscription_search.ts
@@ -27,12 +27,12 @@ const normalizeUrl = (url: string): string => {
// AI-NOTE: Define prioritized event kinds for subscription search
const PRIORITIZED_EVENT_KINDS = new Set([
- 1, // Text notes
+ 1, // Text notes
1111, // Comments
9802, // Highlights
- 20, // Article
- 21, // Article
- 22, // Article
+ 20, // Article
+ 21, // Article
+ 22, // Article
1222, // Long-form content
1244, // Long-form content
30023, // Long-form content
@@ -47,7 +47,7 @@ const PRIORITIZED_EVENT_KINDS = new Set([
* @param maxResults Maximum number of results to return
* @param ndk NDK instance for user list and community checks
* @returns Prioritized array of events
- *
+ *
* Priority tiers:
* 1. Prioritized event kinds (1, 1111, 9802, 20, 21, 22, 1222, 1244, 30023, 30040, 30041) + target pubkey events (n: searches only)
* 2. Events from user's follows (if logged in)
@@ -58,7 +58,7 @@ async function prioritizeSearchEvents(
events: NDKEvent[],
targetPubkey?: string,
maxResults: number = SEARCH_LIMITS.GENERAL_CONTENT,
- ndk?: NDK
+ ndk?: NDK,
): Promise {
if (events.length === 0) {
return [];
@@ -67,58 +67,75 @@ async function prioritizeSearchEvents(
// AI-NOTE: Get user lists and community status for prioritization
let userFollowPubkeys = new Set();
let communityMemberPubkeys = new Set();
-
+
// Only attempt user list and community checks if NDK is provided
if (ndk) {
try {
// Import user list functions dynamically to avoid circular dependencies
- const { fetchCurrentUserLists, getPubkeysFromListKind } = await import("./user_lists.ts");
+ const { fetchCurrentUserLists, getPubkeysFromListKind } = await import(
+ "./user_lists.ts"
+ );
const { checkCommunity } = await import("./community_checker.ts");
-
+
// Get current user's follow lists (if logged in)
const userLists = await fetchCurrentUserLists(undefined, ndk);
userFollowPubkeys = getPubkeysFromListKind(userLists, 3); // Kind 3 = follow list
-
+
// Check community status for unique pubkeys in events (limit to prevent hanging)
- const uniquePubkeys = new Set(events.map(e => e.pubkey).filter(Boolean));
+ const uniquePubkeys = new Set(
+ events.map((e) => e.pubkey).filter(Boolean),
+ );
const pubkeysToCheck = Array.from(uniquePubkeys).slice(0, 20); // Limit to first 20 pubkeys
-
- console.log(`subscription_search: Checking community status for ${pubkeysToCheck.length} pubkeys out of ${uniquePubkeys.size} total`);
-
+
+ console.log(
+ `subscription_search: Checking community status for ${pubkeysToCheck.length} pubkeys out of ${uniquePubkeys.size} total`,
+ );
+
const communityChecks = await Promise.allSettled(
pubkeysToCheck.map(async (pubkey) => {
try {
const isCommunityMember = await Promise.race([
checkCommunity(pubkey),
- new Promise((_, reject) =>
- setTimeout(() => reject(new Error('Community check timeout')), 2000)
- )
+ new Promise((_, reject) =>
+ setTimeout(
+ () => reject(new Error("Community check timeout")),
+ 2000,
+ )
+ ),
]);
return { pubkey, isCommunityMember };
} catch (error) {
- console.warn(`subscription_search: Community check failed for ${pubkey}:`, error);
+ console.warn(
+ `subscription_search: Community check failed for ${pubkey}:`,
+ error,
+ );
return { pubkey, isCommunityMember: false };
}
- })
+ }),
);
-
+
// Build set of community member pubkeys
- communityChecks.forEach(result => {
+ communityChecks.forEach((result) => {
if (result.status === "fulfilled" && result.value.isCommunityMember) {
communityMemberPubkeys.add(result.value.pubkey);
}
});
-
+
console.log("subscription_search: Prioritization data loaded:", {
userFollows: userFollowPubkeys.size,
communityMembers: communityMemberPubkeys.size,
- totalEvents: events.length
+ totalEvents: events.length,
});
} catch (error) {
- console.warn("subscription_search: Failed to load prioritization data:", error);
+ console.warn(
+ "subscription_search: Failed to load prioritization data:",
+ error,
+ );
}
} else {
- console.log("subscription_search: No NDK provided, skipping user list and community checks");
+ console.log(
+ "subscription_search: No NDK provided, skipping user list and community checks",
+ );
}
// Separate events into priority tiers
@@ -131,8 +148,10 @@ async function prioritizeSearchEvents(
const isFromTarget = targetPubkey && event.pubkey === targetPubkey;
const isPrioritizedKind = PRIORITIZED_EVENT_KINDS.has(event.kind || 0);
const isFromFollow = userFollowPubkeys.has(event.pubkey || "");
- const isFromCommunityMember = communityMemberPubkeys.has(event.pubkey || "");
-
+ const isFromCommunityMember = communityMemberPubkeys.has(
+ event.pubkey || "",
+ );
+
// AI-NOTE: Prioritized kinds are always in tier 1
// Target pubkey priority only applies to n: searches (when targetPubkey is provided)
if (isPrioritizedKind || isFromTarget) {
@@ -154,22 +173,22 @@ async function prioritizeSearchEvents(
// Combine tiers in priority order, respecting the limit
const result: NDKEvent[] = [];
-
+
// Add tier 1 events (highest priority)
result.push(...tier1);
-
+
// Add tier 2 events (follows) if we haven't reached the limit
const remainingAfterTier1 = maxResults - result.length;
if (remainingAfterTier1 > 0) {
result.push(...tier2.slice(0, remainingAfterTier1));
}
-
+
// Add tier 3 events (community members) if we haven't reached the limit
const remainingAfterTier2 = maxResults - result.length;
if (remainingAfterTier2 > 0) {
result.push(...tier3.slice(0, remainingAfterTier2));
}
-
+
// Add tier 4 events (others) if we haven't reached the limit
const remainingAfterTier3 = maxResults - result.length;
if (remainingAfterTier3 > 0) {
@@ -181,7 +200,7 @@ async function prioritizeSearchEvents(
tier2: tier2.length, // User follows
tier3: tier3.length, // Community members
tier4: tier4.length, // Others
- total: result.length
+ total: result.length,
});
return result;
@@ -221,61 +240,74 @@ export async function searchBySubscription(
const cachedResult = searchCache.get(searchType, normalizedSearchTerm);
if (cachedResult) {
console.log("subscription_search: Found cached result:", cachedResult);
-
+
// AI-NOTE: Ensure cached events have created_at property preserved
// This fixes the "Unknown date" issue when events are retrieved from cache
- const eventsWithCreatedAt = cachedResult.events.map(event => {
- if (event && typeof event === 'object' && !event.created_at) {
- console.warn("subscription_search: Event missing created_at, setting to 0:", event.id);
+ const eventsWithCreatedAt = cachedResult.events.map((event) => {
+ if (event && typeof event === "object" && !event.created_at) {
+ console.warn(
+ "subscription_search: Event missing created_at, setting to 0:",
+ event.id,
+ );
(event as any).created_at = 0;
}
return event;
});
-
- const secondOrderWithCreatedAt = cachedResult.secondOrder.map(event => {
- if (event && typeof event === 'object' && !event.created_at) {
- console.warn("subscription_search: Second order event missing created_at, setting to 0:", event.id);
+
+ const secondOrderWithCreatedAt = cachedResult.secondOrder.map((event) => {
+ if (event && typeof event === "object" && !event.created_at) {
+ console.warn(
+ "subscription_search: Second order event missing created_at, setting to 0:",
+ event.id,
+ );
(event as any).created_at = 0;
}
return event;
});
-
- const tTagEventsWithCreatedAt = cachedResult.tTagEvents.map(event => {
- if (event && typeof event === 'object' && !event.created_at) {
- console.warn("subscription_search: T-tag event missing created_at, setting to 0:", event.id);
+
+ const tTagEventsWithCreatedAt = cachedResult.tTagEvents.map((event) => {
+ if (event && typeof event === "object" && !event.created_at) {
+ console.warn(
+ "subscription_search: T-tag event missing created_at, setting to 0:",
+ event.id,
+ );
(event as any).created_at = 0;
}
return event;
});
-
+
const resultWithCreatedAt = {
...cachedResult,
events: eventsWithCreatedAt,
secondOrder: secondOrderWithCreatedAt,
- tTagEvents: tTagEventsWithCreatedAt
+ tTagEvents: tTagEventsWithCreatedAt,
};
-
+
// AI-NOTE: Return cached results immediately but trigger second-order search in background
// This ensures we get fast results while still updating second-order data
- console.log("subscription_search: Returning cached result immediately, triggering background second-order search");
-
- // Trigger second-order search in background for all search types
- if (ndk) {
- // Start second-order search in background for n and d searches only
- if (searchType === "n" || searchType === "d") {
- console.log("subscription_search: Triggering background second-order search for cached result");
- performSecondOrderSearchInBackground(
- searchType as "n" | "d",
- eventsWithCreatedAt,
- cachedResult.eventIds || new Set(),
- cachedResult.addresses || new Set(),
- ndk,
- searchType === "n" ? eventsWithCreatedAt[0]?.pubkey : undefined,
- callbacks
- );
- }
+ console.log(
+ "subscription_search: Returning cached result immediately, triggering background second-order search",
+ );
+
+ // Trigger second-order search in background for all search types
+ if (ndk) {
+ // Start second-order search in background for n and d searches only
+ if (searchType === "n" || searchType === "d") {
+ console.log(
+ "subscription_search: Triggering background second-order search for cached result",
+ );
+ performSecondOrderSearchInBackground(
+ searchType as "n" | "d",
+ eventsWithCreatedAt,
+ cachedResult.eventIds || new Set(),
+ cachedResult.addresses || new Set(),
+ ndk,
+ searchType === "n" ? eventsWithCreatedAt[0]?.pubkey : undefined,
+ callbacks,
+ );
}
-
+ }
+
return resultWithCreatedAt;
}
@@ -316,7 +348,10 @@ export async function searchBySubscription(
// AI-NOTE: Check for preloaded events first (for profile searches)
if (searchFilter.preloadedEvents && searchFilter.preloadedEvents.length > 0) {
- console.log("subscription_search: Using preloaded events:", searchFilter.preloadedEvents.length);
+ console.log(
+ "subscription_search: Using preloaded events:",
+ searchFilter.preloadedEvents.length,
+ );
processPrimaryRelayResults(
new Set(searchFilter.preloadedEvents),
searchType,
@@ -326,9 +361,11 @@ export async function searchBySubscription(
abortSignal,
cleanup,
);
-
+
if (hasResults(searchState, searchType)) {
- console.log("subscription_search: Found results from preloaded events, returning immediately");
+ console.log(
+ "subscription_search: Found results from preloaded events, returning immediately",
+ );
const immediateResult = createSearchResult(
searchState,
searchType,
@@ -367,19 +404,25 @@ export async function searchBySubscription(
"subscription_search: Searching primary relay with filter:",
searchFilter.filter,
);
-
+
// Add timeout to primary relay search
const primaryEventsPromise = ndk.fetchEvents(
searchFilter.filter,
{ closeOnEose: true },
primaryRelaySet,
);
-
+
const timeoutPromise = new Promise((_, reject) => {
- setTimeout(() => reject(new Error("Primary relay search timeout")), TIMEOUTS.SUBSCRIPTION_SEARCH);
+ setTimeout(
+ () => reject(new Error("Primary relay search timeout")),
+ TIMEOUTS.SUBSCRIPTION_SEARCH,
+ );
});
-
- const primaryEvents = await Promise.race([primaryEventsPromise, timeoutPromise]) as any;
+
+ const primaryEvents = await Promise.race([
+ primaryEventsPromise,
+ timeoutPromise,
+ ]) as any;
console.log(
"subscription_search: Primary relay returned",
@@ -429,7 +472,7 @@ export async function searchBySubscription(
console.log(
`subscription_search: Profile search completed in ${elapsed}ms`,
);
-
+
// Clear the main timeout since we're returning early
cleanup();
return immediateResult;
@@ -471,12 +514,18 @@ export async function searchBySubscription(
{ closeOnEose: true },
allRelaySet,
);
-
+
const fallbackTimeoutPromise = new Promise((_, reject) => {
- setTimeout(() => reject(new Error("Fallback search timeout")), TIMEOUTS.SUBSCRIPTION_SEARCH);
+ setTimeout(
+ () => reject(new Error("Fallback search timeout")),
+ TIMEOUTS.SUBSCRIPTION_SEARCH,
+ );
});
-
- const fallbackEvents = await Promise.race([fallbackEventsPromise, fallbackTimeoutPromise]) as any;
+
+ const fallbackEvents = await Promise.race([
+ fallbackEventsPromise,
+ fallbackTimeoutPromise,
+ ]) as any;
console.log(
"subscription_search: Fallback search returned",
@@ -508,7 +557,7 @@ export async function searchBySubscription(
console.log(
`subscription_search: Profile search completed in ${elapsed}ms (fallback)`,
);
-
+
// Clear the main timeout since we're returning early
cleanup();
return fallbackResult;
@@ -518,10 +567,15 @@ export async function searchBySubscription(
"subscription_search: Fallback search failed:",
fallbackError,
);
-
+
// If it's a timeout error, continue to return empty result
- if (fallbackError instanceof Error && fallbackError.message.includes("timeout")) {
- console.log("subscription_search: Fallback search timed out, returning empty result");
+ if (
+ fallbackError instanceof Error &&
+ fallbackError.message.includes("timeout")
+ ) {
+ console.log(
+ "subscription_search: Fallback search timed out, returning empty result",
+ );
}
}
@@ -538,7 +592,7 @@ export async function searchBySubscription(
console.log(
`subscription_search: Profile search completed in ${elapsed}ms (not found)`,
);
-
+
// Clear the main timeout since we're returning early
cleanup();
return emptyResult;
@@ -553,10 +607,12 @@ export async function searchBySubscription(
`subscription_search: Error searching primary relay:`,
error,
);
-
+
// If it's a timeout error, continue to Phase 2 instead of failing
if (error instanceof Error && error.message.includes("timeout")) {
- console.log("subscription_search: Primary relay search timed out, continuing to Phase 2");
+ console.log(
+ "subscription_search: Primary relay search timed out, continuing to Phase 2",
+ );
} else {
// For other errors, we might want to fail the search
throw error;
@@ -669,12 +725,12 @@ async function createSearchFilter(
// This properly handles NIP-05 lookups and name searches
const { searchProfiles } = await import("./profile_search.ts");
const profileResult = await searchProfiles(normalizedSearchTerm, ndk);
-
+
// Convert profile results to events for compatibility
const events = profileResult.profiles.map((profile) => {
const event = new NDKEvent(ndk);
event.content = JSON.stringify(profile);
-
+
// AI-NOTE: Convert npub to hex public key for compatibility with nprofileEncode
// The profile.pubkey is an npub (bech32-encoded), but nprofileEncode expects hex-encoded public key
let hexPubkey = profile.pubkey || "";
@@ -685,26 +741,36 @@ async function createSearchFilter(
hexPubkey = decoded.data as string;
}
} catch (e) {
- console.warn("subscription_search: Failed to decode npub:", profile.pubkey, e);
+ console.warn(
+ "subscription_search: Failed to decode npub:",
+ profile.pubkey,
+ e,
+ );
}
}
event.pubkey = hexPubkey;
event.kind = 0;
-
+
// AI-NOTE: Use the preserved created_at timestamp from the profile
// This ensures the profile cards show the actual creation date instead of "Unknown date"
if ((profile as any).created_at) {
event.created_at = (profile as any).created_at;
- console.log("subscription_search: Using preserved timestamp:", event.created_at);
+ console.log(
+ "subscription_search: Using preserved timestamp:",
+ event.created_at,
+ );
} else {
// Fallback to current timestamp if no preserved timestamp
event.created_at = Math.floor(Date.now() / 1000);
- console.log("subscription_search: Using fallback timestamp:", event.created_at);
+ console.log(
+ "subscription_search: Using fallback timestamp:",
+ event.created_at,
+ );
}
-
+
return event;
});
-
+
// Return a mock filter since we're using the profile search directly
const nFilter = {
filter: { kinds: [0], limit: 1 }, // Dummy filter
@@ -712,7 +778,10 @@ async function createSearchFilter(
searchTerm: normalizedSearchTerm,
preloadedEvents: events, // AI-NOTE: Pass preloaded events
};
- console.log("subscription_search: Created profile filter with preloaded events:", nFilter);
+ console.log(
+ "subscription_search: Created profile filter with preloaded events:",
+ nFilter,
+ );
return nFilter;
}
default: {
@@ -721,8 +790,6 @@ async function createSearchFilter(
}
}
-
-
/**
* Create primary relay set for search operations
* AI-NOTE: Updated to use all available relays to prevent search failures
@@ -816,7 +883,9 @@ function processPrimaryRelayResults(
for (const event of events) {
// Check if we've reached the event limit
if (processedCount >= maxEvents) {
- console.log(`subscription_search: Reached event limit of ${maxEvents} in primary relay processing`);
+ console.log(
+ `subscription_search: Reached event limit of ${maxEvents} in primary relay processing`,
+ );
break;
}
@@ -1029,13 +1098,15 @@ function searchOtherRelaysInBackground(
sub.on("event", (event: NDKEvent) => {
// Check if we've reached the event limit
if (eventCount >= maxEvents) {
- console.log(`subscription_search: Reached event limit of ${maxEvents}, stopping event processing`);
+ console.log(
+ `subscription_search: Reached event limit of ${maxEvents}, stopping event processing`,
+ );
sub.stop();
return;
}
eventCount++;
-
+
try {
if (searchType === "n") {
processProfileEvent(
@@ -1054,11 +1125,13 @@ function searchOtherRelaysInBackground(
return new Promise((resolve) => {
let resolved = false;
-
+
// Add timeout to prevent hanging
const timeoutId = setTimeout(async () => {
if (!resolved) {
- console.log("subscription_search: Background search timeout, resolving with current results");
+ console.log(
+ "subscription_search: Background search timeout, resolving with current results",
+ );
resolved = true;
sub.stop();
const result = await processEoseResults(
@@ -1073,7 +1146,7 @@ function searchOtherRelaysInBackground(
resolve(result);
}
}, TIMEOUTS.SUBSCRIPTION_SEARCH);
-
+
sub.on("eose", async () => {
if (!resolved) {
resolved = true;
@@ -1106,7 +1179,12 @@ async function processEoseResults(
if (searchType === "n") {
return processProfileEoseResults(searchState, searchFilter, ndk, callbacks);
} else if (searchType === "d") {
- return await processContentEoseResults(searchState, searchType, ndk, callbacks);
+ return await processContentEoseResults(
+ searchState,
+ searchType,
+ ndk,
+ callbacks,
+ );
} else if (searchType === "t") {
return await processTTagEoseResults(searchState, ndk);
}
@@ -1242,7 +1320,7 @@ async function processContentEoseResults(
dedupedEvents,
undefined, // No specific target pubkey for d-tag searches
SEARCH_LIMITS.GENERAL_CONTENT,
- ndk
+ ndk,
);
// AI-NOTE: Attach profile data to first-order events for display
@@ -1276,7 +1354,10 @@ async function processContentEoseResults(
/**
* Process t-tag EOSE results
*/
-async function processTTagEoseResults(searchState: any, ndk?: NDK): Promise {
+async function processTTagEoseResults(
+ searchState: any,
+ ndk?: NDK,
+): Promise {
if (searchState.tTagEvents.length === 0) {
return createEmptySearchResult("t", searchState.normalizedSearchTerm);
}
@@ -1287,7 +1368,7 @@ async function processTTagEoseResults(searchState: any, ndk?: NDK): Promise();
allSecondOrderEvents.forEach((event) => {
@@ -1484,18 +1567,18 @@ async function performSecondOrderSearchInBackground(
deduplicatedSecondOrder,
targetPubkey,
SEARCH_LIMITS.SECOND_ORDER_RESULTS,
- ndk
+ ndk,
);
-
+
const prioritizationTimeoutPromise = new Promise((_, reject) => {
- setTimeout(() => reject(new Error('Prioritization timeout')), 15000); // 15 second timeout
+ setTimeout(() => reject(new Error("Prioritization timeout")), 15000); // 15 second timeout
});
-
+
let prioritizedSecondOrder: NDKEvent[];
try {
prioritizedSecondOrder = await Promise.race([
prioritizationPromise,
- prioritizationTimeoutPromise
+ prioritizationTimeoutPromise,
]) as NDKEvent[];
console.log(
@@ -1504,7 +1587,10 @@ async function performSecondOrderSearchInBackground(
"prioritized results",
);
} catch (error) {
- console.warn("subscription_search: Prioritization failed, using simple sorting:", error);
+ console.warn(
+ "subscription_search: Prioritization failed, using simple sorting:",
+ error,
+ );
// Fallback to simple sorting if prioritization fails
prioritizedSecondOrder = deduplicatedSecondOrder.sort((a, b) => {
// Prioritize events from target pubkey first (for n: searches)
@@ -1514,17 +1600,17 @@ async function performSecondOrderSearchInBackground(
if (aIsTarget && !bIsTarget) return -1;
if (!aIsTarget && bIsTarget) return 1;
}
-
+
// Prioritize by event kind (for t: searches and general prioritization)
const aIsPrioritized = PRIORITIZED_EVENT_KINDS.has(a.kind || 0);
const bIsPrioritized = PRIORITIZED_EVENT_KINDS.has(b.kind || 0);
if (aIsPrioritized && !bIsPrioritized) return -1;
if (!aIsPrioritized && bIsPrioritized) return 1;
-
+
// Then sort by creation time (newest first)
return (b.created_at || 0) - (a.created_at || 0);
}).slice(0, SEARCH_LIMITS.SECOND_ORDER_RESULTS);
-
+
console.log(
"subscription_search: Using fallback sorting with",
prioritizedSecondOrder.length,
@@ -1577,20 +1663,27 @@ async function performSecondOrderSearchInBackground(
* @param ndk NDK instance for fetching profile data
* @returns Promise that resolves when profile data is attached
*/
-async function attachProfileDataToEvents(events: NDKEvent[], ndk: NDK): Promise {
+async function attachProfileDataToEvents(
+ events: NDKEvent[],
+ ndk: NDK,
+): Promise {
if (events.length === 0) {
return;
}
- console.log(`subscription_search: Attaching profile data to ${events.length} events`);
+ console.log(
+ `subscription_search: Attaching profile data to ${events.length} events`,
+ );
try {
// Import user list functions dynamically to avoid circular dependencies
- const { fetchCurrentUserLists, isPubkeyInUserLists } = await import("./user_lists.ts");
-
+ const { fetchCurrentUserLists, isPubkeyInUserLists } = await import(
+ "./user_lists.ts"
+ );
+
// Get current user's lists for user list status
const userLists = await fetchCurrentUserLists(undefined, ndk);
-
+
// Get unique pubkeys from events
const uniquePubkeys = new Set();
events.forEach((event) => {
@@ -1599,39 +1692,46 @@ async function attachProfileDataToEvents(events: NDKEvent[], ndk: NDK): Promise<
}
});
- console.log(`subscription_search: Found ${uniquePubkeys.size} unique pubkeys to fetch profiles for`);
+ console.log(
+ `subscription_search: Found ${uniquePubkeys.size} unique pubkeys to fetch profiles for`,
+ );
// Fetch profile data for each unique pubkey
const profilePromises = Array.from(uniquePubkeys).map(async (pubkey) => {
try {
// Import getUserMetadata dynamically to avoid circular dependencies
const { getUserMetadata } = await import("./nostrUtils.ts");
- const npub = await import("./nostrUtils.ts").then(m => m.toNpub(pubkey));
-
+ const npub = await import("./nostrUtils.ts").then((m) =>
+ m.toNpub(pubkey)
+ );
+
if (npub) {
const profileData = await getUserMetadata(npub, ndk, true);
if (profileData) {
// Check if this pubkey is in user's lists
const isInLists = isPubkeyInUserLists(pubkey, userLists);
-
+
// Return profile data with user list status
return {
pubkey,
profileData: {
...profileData,
- isInUserLists: isInLists
- }
+ isInUserLists: isInLists,
+ },
};
}
}
} catch (error) {
- console.warn(`subscription_search: Failed to fetch profile for ${pubkey}:`, error);
+ console.warn(
+ `subscription_search: Failed to fetch profile for ${pubkey}:`,
+ error,
+ );
}
return null;
});
const profileResults = await Promise.allSettled(profilePromises);
-
+
// Create a map of pubkey to profile data
const profileMap = new Map();
profileResults.forEach((result) => {
@@ -1640,7 +1740,9 @@ async function attachProfileDataToEvents(events: NDKEvent[], ndk: NDK): Promise<
}
});
- console.log(`subscription_search: Successfully fetched ${profileMap.size} profiles`);
+ console.log(
+ `subscription_search: Successfully fetched ${profileMap.size} profiles`,
+ );
// Attach profile data to each event
events.forEach((event) => {
diff --git a/src/lib/utils/user_lists.ts b/src/lib/utils/user_lists.ts
index dc71be9..4eb3344 100644
--- a/src/lib/utils/user_lists.ts
+++ b/src/lib/utils/user_lists.ts
@@ -1,4 +1,4 @@
-import { getNdkContext, activeInboxRelays } from "../ndk.ts";
+import { activeInboxRelays, getNdkContext } from "../ndk.ts";
import { get } from "svelte/store";
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type NDK from "@nostr-dev-kit/ndk";
@@ -11,15 +11,15 @@ import { npubCache } from "./npubCache.ts";
* @see https://github.com/nostr-protocol/nips/blob/master/51.md
*/
export const NIP51_LIST_KINDS = {
- FOLLOWS: 3, // Follow list
- MUTED: 10000, // Mute list
- PINNED: 10001, // Pin list
- RELAYS: 10002, // Relay list
- PEOPLE: 30000, // Categorized people list
- BOOKMARKS: 30001, // Categorized bookmark list
- COMMUNITIES: 34550, // Community definition
- STARTER_PACKS: 39089, // Starter packs
- MEDIA_STARTER_PACKS: 39092, // Media starter packs
+ FOLLOWS: 3, // Follow list
+ MUTED: 10000, // Mute list
+ PINNED: 10001, // Pin list
+ RELAYS: 10002, // Relay list
+ PEOPLE: 30000, // Categorized people list
+ BOOKMARKS: 30001, // Categorized bookmark list
+ COMMUNITIES: 34550, // Community definition
+ STARTER_PACKS: 39089, // Starter packs
+ MEDIA_STARTER_PACKS: 39092, // Media starter packs
} as const;
/**
@@ -52,7 +52,7 @@ export interface UserListEvent {
export async function fetchUserLists(
pubkey: string,
listKinds: number[] = [...PEOPLE_LIST_KINDS],
- ndk?: NDK
+ ndk?: NDK,
): Promise {
const ndkInstance = ndk || getNdkContext();
if (!ndkInstance) {
@@ -60,7 +60,10 @@ export async function fetchUserLists(
return [];
}
- console.log(`fetchUserLists: Fetching lists for ${pubkey}, kinds:`, listKinds);
+ console.log(
+ `fetchUserLists: Fetching lists for ${pubkey}, kinds:`,
+ listKinds,
+ );
try {
const events = await ndkInstance.fetchEvents({
@@ -72,10 +75,10 @@ export async function fetchUserLists(
for (const event of events) {
const pubkeys: string[] = [];
-
+
// Extract pubkeys from p-tags
- event.tags.forEach(tag => {
- if (tag[0] === 'p' && tag[1]) {
+ event.tags.forEach((tag) => {
+ if (tag[0] === "p" && tag[1]) {
pubkeys.push(tag[1]);
}
});
@@ -83,7 +86,7 @@ export async function fetchUserLists(
// Extract list metadata from content if available
let listName: string | undefined;
let listDescription: string | undefined;
-
+
if (event.content) {
try {
const content = JSON.parse(event.content);
@@ -96,7 +99,7 @@ export async function fetchUserLists(
// Get list name from d-tag if available (for addressable lists)
if (!listName && event.kind >= 30000 && event.kind < 40000) {
- const dTag = event.getMatchingTags('d')[0]?.[1];
+ const dTag = event.getMatchingTags("d")[0]?.[1];
if (dTag) {
listName = dTag;
}
@@ -111,7 +114,11 @@ export async function fetchUserLists(
});
}
- console.log(`fetchUserLists: Found ${userLists.length} lists with ${userLists.reduce((sum, list) => sum + list.pubkeys.length, 0)} total pubkeys`);
+ console.log(
+ `fetchUserLists: Found ${userLists.length} lists with ${
+ userLists.reduce((sum, list) => sum + list.pubkeys.length, 0)
+ } total pubkeys`,
+ );
return userLists;
} catch (error) {
console.error("fetchUserLists: Error fetching user lists:", error);
@@ -127,10 +134,10 @@ export async function fetchUserLists(
*/
export async function fetchCurrentUserLists(
listKinds: number[] = [...PEOPLE_LIST_KINDS],
- ndk?: NDK
+ ndk?: NDK,
): Promise {
const userState = get(userStore);
-
+
if (!userState.signedIn || !userState.pubkey) {
console.warn("fetchCurrentUserLists: No active user found in userStore");
return [];
@@ -145,11 +152,13 @@ export async function fetchCurrentUserLists(
* @param userLists - Array of UserListEvent objects
* @returns Set of unique pubkeys
*/
-export function getPubkeysFromUserLists(userLists: UserListEvent[]): Set {
+export function getPubkeysFromUserLists(
+ userLists: UserListEvent[],
+): Set {
const pubkeys = new Set();
-
- userLists.forEach(list => {
- list.pubkeys.forEach(pubkey => {
+
+ userLists.forEach((list) => {
+ list.pubkeys.forEach((pubkey) => {
pubkeys.add(pubkey);
});
});
@@ -163,12 +172,15 @@ export function getPubkeysFromUserLists(userLists: UserListEvent[]): Set
* @param kind - The list kind to filter by
* @returns Set of unique pubkeys from the specified list kind
*/
-export function getPubkeysFromListKind(userLists: UserListEvent[], kind: number): Set {
+export function getPubkeysFromListKind(
+ userLists: UserListEvent[],
+ kind: number,
+): Set {
const pubkeys = new Set();
-
- userLists.forEach(list => {
+
+ userLists.forEach((list) => {
if (list.kind === kind) {
- list.pubkeys.forEach(pubkey => {
+ list.pubkeys.forEach((pubkey) => {
pubkeys.add(pubkey);
});
}
@@ -183,11 +195,22 @@ export function getPubkeysFromListKind(userLists: UserListEvent[], kind: number)
* @param userLists - Array of UserListEvent objects
* @returns True if the pubkey is in any list
*/
-export function isPubkeyInUserLists(pubkey: string, userLists: UserListEvent[]): boolean {
- const result = userLists.some(list => list.pubkeys.includes(pubkey));
- console.log(`isPubkeyInUserLists: Checking ${pubkey} against ${userLists.length} lists, result: ${result}`);
+export function isPubkeyInUserLists(
+ pubkey: string,
+ userLists: UserListEvent[],
+): boolean {
+ const result = userLists.some((list) => list.pubkeys.includes(pubkey));
+ console.log(
+ `isPubkeyInUserLists: Checking ${pubkey} against ${userLists.length} lists, result: ${result}`,
+ );
if (result) {
- console.log(`isPubkeyInUserLists: Found ${pubkey} in lists:`, userLists.filter(list => list.pubkeys.includes(pubkey)).map(list => ({ kind: list.kind, name: list.listName })));
+ console.log(
+ `isPubkeyInUserLists: Found ${pubkey} in lists:`,
+ userLists.filter((list) => list.pubkeys.includes(pubkey)).map((list) => ({
+ kind: list.kind,
+ name: list.listName,
+ })),
+ );
}
return result;
}
@@ -198,10 +221,13 @@ export function isPubkeyInUserLists(pubkey: string, userLists: UserListEvent[]):
* @param userLists - Array of UserListEvent objects
* @returns Array of list kinds that contain the pubkey
*/
-export function getListKindsForPubkey(pubkey: string, userLists: UserListEvent[]): number[] {
+export function getListKindsForPubkey(
+ pubkey: string,
+ userLists: UserListEvent[],
+): number[] {
return userLists
- .filter(list => list.pubkeys.includes(pubkey))
- .map(list => list.kind);
+ .filter((list) => list.pubkeys.includes(pubkey))
+ .map((list) => list.kind);
}
/**
@@ -209,29 +235,32 @@ export function getListKindsForPubkey(pubkey: string, userLists: UserListEvent[]
* This ensures follows are always cached and prioritized
* @param pubkeys - Array of pubkeys to cache profiles for
*/
-export async function updateProfileCacheForPubkeys(pubkeys: string[], ndk?: NDK): Promise {
+export async function updateProfileCacheForPubkeys(
+ pubkeys: string[],
+ ndk?: NDK,
+): Promise {
if (pubkeys.length === 0) return;
-
+
try {
console.log(`Updating profile cache for ${pubkeys.length} pubkeys`);
-
+
const ndkInstance = ndk || getNdkContext();
if (!ndkInstance) {
console.warn("updateProfileCacheForPubkeys: No NDK instance available");
return;
}
-
+
// Fetch profiles for all pubkeys in batches
const batchSize = 20;
for (let i = 0; i < pubkeys.length; i += batchSize) {
const batch = pubkeys.slice(i, i + batchSize);
-
+
try {
const events = await ndkInstance.fetchEvents({
kinds: [0],
authors: batch,
});
-
+
// Cache each profile
for (const event of events) {
if (event.content) {
@@ -249,7 +278,7 @@ export async function updateProfileCacheForPubkeys(pubkeys: string[], ndk?: NDK)
console.warn("Failed to fetch batch of profiles:", error);
}
}
-
+
console.log("Profile cache update completed");
} catch (error) {
console.warn("Failed to update profile cache:", error);
diff --git a/src/routes/publication/[type]/[identifier]/+layout.server.ts b/src/routes/publication/[type]/[identifier]/+layout.server.ts
index 4670248..19c6e48 100644
--- a/src/routes/publication/[type]/[identifier]/+layout.server.ts
+++ b/src/routes/publication/[type]/[identifier]/+layout.server.ts
@@ -1,6 +1,5 @@
import type { LayoutServerLoad } from "./$types";
-
export const load: LayoutServerLoad = ({ url }: { url: URL }) => {
const currentUrl = `${url.origin}${url.pathname}`;
diff --git a/src/routes/publication/[type]/[identifier]/+page.ts b/src/routes/publication/[type]/[identifier]/+page.ts
index 5a4a288..1fd63d1 100644
--- a/src/routes/publication/[type]/[identifier]/+page.ts
+++ b/src/routes/publication/[type]/[identifier]/+page.ts
@@ -49,7 +49,8 @@ export const load: PageLoad = async (
// AI-NOTE: Return null for indexEvent during SSR or when fetch fails
// The component will handle client-side loading and error states
- const publicationType = indexEvent?.tags.find((tag) => tag[0] === "type")?.[1] ?? "";
+ const publicationType =
+ indexEvent?.tags.find((tag) => tag[0] === "type")?.[1] ?? "";
const result = {
publicationType,
diff --git a/src/styles/notifications.css b/src/styles/notifications.css
index 8fcedca..c11a0ea 100644
--- a/src/styles/notifications.css
+++ b/src/styles/notifications.css
@@ -107,15 +107,11 @@
}
.message-container:hover {
- box-shadow:
- 0 4px 6px -1px rgb(0 0 0 / 0.1),
- 0 2px 4px -2px rgb(0 0 0 / 0.1);
+ box-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1);
}
.dark .message-container:hover {
- box-shadow:
- 0 4px 6px -1px rgb(0 0 0 / 0.3),
- 0 2px 4px -2px rgb(0 0 0 / 0.2);
+ box-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.3), 0 2px 4px -2px rgb(0 0 0 / 0.2);
}
/* Filter indicator styling */
diff --git a/src/styles/scrollbar.css b/src/styles/scrollbar.css
index 6fa2e85..c337549 100644
--- a/src/styles/scrollbar.css
+++ b/src/styles/scrollbar.css
@@ -1,7 +1,8 @@
@layer components {
/* Global scrollbar styles */
* {
- scrollbar-color: rgba(87, 66, 41, 0.8) transparent; /* Transparent track, default scrollbar thumb */
+ scrollbar-color: rgba(87, 66, 41, 0.8)
+ transparent; /* Transparent track, default scrollbar thumb */
}
/* Webkit Browsers (Chrome, Safari, Edge) */
diff --git a/tests/unit/mathProcessing.test.ts b/tests/unit/mathProcessing.test.ts
index acf7378..12123dd 100644
--- a/tests/unit/mathProcessing.test.ts
+++ b/tests/unit/mathProcessing.test.ts
@@ -3,29 +3,38 @@ import { parseAdvancedmarkup } from "../../src/lib/utils/markup/advancedMarkupPa
describe("Math Processing in Advanced Markup Parser", () => {
it("should process inline math inside code blocks", async () => {
- const input = "Here is some inline math: `$x^2 + y^2 = z^2$` in a sentence.";
+ const input =
+ "Here is some inline math: `$x^2 + y^2 = z^2$` in a sentence.";
const result = await parseAdvancedmarkup(input);
-
- expect(result).toContain('\\(x^2 + y^2 = z^2\\)');
+
+ expect(result).toContain(
+ '\\(x^2 + y^2 = z^2\\)',
+ );
expect(result).toContain("Here is some inline math:");
expect(result).toContain("in a sentence.");
});
it("should process display math inside code blocks", async () => {
- const input = "Here is a display equation:\n\n`$$\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n$$`\n\nThis is after the equation.";
+ const input =
+ "Here is a display equation:\n\n`$$\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n$$`\n\nThis is after the equation.";
const result = await parseAdvancedmarkup(input);
-
- expect(result).toContain('\\[\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n\\]');
+
+ expect(result).toContain(
+ '\\[\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n\\]',
+ );
expect(result).toContain('Here is a display equation:
');
expect(result).toContain('This is after the equation.
');
});
it("should process both inline and display math in the same code block", async () => {
- const input = "Mixed math: `$\\alpha$ and $$\\beta = \\frac{1}{2}$$` in one block.";
+ const input =
+ "Mixed math: `$\\alpha$ and $$\\beta = \\frac{1}{2}$$` in one block.";
const result = await parseAdvancedmarkup(input);
-
+
expect(result).toContain('\\(\\alpha\\)');
- expect(result).toContain('\\[\\beta = \\frac{1}{2}\\]');
+ expect(result).toContain(
+ '\\[\\beta = \\frac{1}{2}\\]',
+ );
expect(result).toContain("Mixed math:");
expect(result).toContain("in one block.");
});
@@ -33,34 +42,39 @@ describe("Math Processing in Advanced Markup Parser", () => {
it("should NOT process math outside of code blocks", async () => {
const input = "This math $x^2 + y^2 = z^2$ should not be processed.";
const result = await parseAdvancedmarkup(input);
-
+
expect(result).toContain("$x^2 + y^2 = z^2$");
expect(result).not.toContain('');
expect(result).not.toContain('');
});
it("should NOT process display math outside of code blocks", async () => {
- const input = "This display math $$\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n$$ should not be processed.";
+ const input =
+ "This display math $$\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n$$ should not be processed.";
const result = await parseAdvancedmarkup(input);
-
- expect(result).toContain("$$\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n$$");
+
+ expect(result).toContain(
+ "$$\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n$$",
+ );
expect(result).not.toContain('');
expect(result).not.toContain('');
});
it("should handle code blocks without math normally", async () => {
- const input = "Here is some code: `console.log('hello world')` that should not be processed.";
+ const input =
+ "Here is some code: `console.log('hello world')` that should not be processed.";
const result = await parseAdvancedmarkup(input);
-
+
expect(result).toContain("`console.log('hello world')`");
expect(result).not.toContain('');
expect(result).not.toContain('');
});
it("should handle complex math expressions with nested structures", async () => {
- const input = "Complex math: `$$\\begin{pmatrix} a & b \\\\ c & d \\end{pmatrix} \\cdot \\begin{pmatrix} x \\\\ y \\end{pmatrix} = \\begin{pmatrix} ax + by \\\\ cx + dy \\end{pmatrix}$$`";
+ const input =
+ "Complex math: `$$\\begin{pmatrix} a & b \\\\ c & d \\end{pmatrix} \\cdot \\begin{pmatrix} x \\\\ y \\end{pmatrix} = \\begin{pmatrix} ax + by \\\\ cx + dy \\end{pmatrix}$$`";
const result = await parseAdvancedmarkup(input);
-
+
expect(result).toContain('');
expect(result).toContain("\\begin{pmatrix}");
expect(result).toContain("\\end{pmatrix}");
@@ -68,25 +82,34 @@ describe("Math Processing in Advanced Markup Parser", () => {
});
it("should handle inline math with special characters", async () => {
- const input = "Special chars: `$\\alpha, \\beta, \\gamma, \\delta$` and `$\\sum_{i=1}^{n} x_i$`";
+ const input =
+ "Special chars: `$\\alpha, \\beta, \\gamma, \\delta$` and `$\\sum_{i=1}^{n} x_i$`";
const result = await parseAdvancedmarkup(input);
-
- expect(result).toContain('\\(\\alpha, \\beta, \\gamma, \\delta\\)');
- expect(result).toContain('\\(\\sum_{i=1}^{n} x_i\\)');
+
+ expect(result).toContain(
+ '\\(\\alpha, \\beta, \\gamma, \\delta\\)',
+ );
+ expect(result).toContain(
+ '\\(\\sum_{i=1}^{n} x_i\\)',
+ );
});
it("should handle multiple math expressions in separate code blocks", async () => {
- const input = "First: `$E = mc^2$` and second: `$$F = G\\frac{m_1 m_2}{r^2}$$`";
+ const input =
+ "First: `$E = mc^2$` and second: `$$F = G\\frac{m_1 m_2}{r^2}$$`";
const result = await parseAdvancedmarkup(input);
-
+
expect(result).toContain('\\(E = mc^2\\)');
- expect(result).toContain('\\[F = G\\frac{m_1 m_2}{r^2}\\]');
+ expect(result).toContain(
+ '\\[F = G\\frac{m_1 m_2}{r^2}\\]',
+ );
});
it("should handle math expressions with line breaks in display mode", async () => {
- const input = "Multi-line: `$$\n\\begin{align}\nx &= a + b \\\\\ny &= c + d\n\\end{align}\n$$`";
+ const input =
+ "Multi-line: `$$\n\\begin{align}\nx &= a + b \\\\\ny &= c + d\n\\end{align}\n$$`";
const result = await parseAdvancedmarkup(input);
-
+
expect(result).toContain('');
expect(result).toContain("\\begin{align}");
expect(result).toContain("\\end{align}");
@@ -97,7 +120,7 @@ describe("Math Processing in Advanced Markup Parser", () => {
it("should handle edge case with empty math expressions", async () => {
const input = "Empty math: `$$` and `$`";
const result = await parseAdvancedmarkup(input);
-
+
// Should not crash and should preserve the original content
expect(result).toContain("`$$`");
expect(result).toContain("`$`");
@@ -115,16 +138,18 @@ And display math: \`$$\n\\int_0^1 x^2 dx = \\frac{1}{3}\n$$\`
And more regular text.`;
const result = await parseAdvancedmarkup(input);
-
+
// Should preserve regular text
expect(result).toContain("This is a paragraph with regular text.");
expect(result).toContain("And more regular text.");
-
+
// Should preserve regular code blocks
expect(result).toContain("`console.log('hello')`");
-
+
// Should process math
- expect(result).toContain('\\(\\pi \\approx 3.14159\\)');
+ expect(result).toContain(
+ '\\(\\pi \\approx 3.14159\\)',
+ );
expect(result).toContain('');
expect(result).toContain("\\int_0^1 x^2 dx = \\frac{1}{3}");
});
@@ -132,15 +157,16 @@ And more regular text.`;
it("should handle math expressions with dollar signs in the content", async () => {
const input = "Price math: `$\\text{Price} = \\$19.99$`";
const result = await parseAdvancedmarkup(input);
-
+
expect(result).toContain('');
expect(result).toContain("\\text{Price} = \\$19.99");
});
it("should handle display math with dollar signs in the content", async () => {
- const input = "Price display: `$$\n\\text{Total} = \\$19.99 + \\$5.99 = \\$25.98\n$$`";
+ const input =
+ "Price display: `$$\n\\text{Total} = \\$19.99 + \\$5.99 = \\$25.98\n$$`";
const result = await parseAdvancedmarkup(input);
-
+
expect(result).toContain('');
expect(result).toContain("\\text{Total} = \\$19.99 + \\$5.99 = \\$25.98");
});
@@ -149,34 +175,37 @@ And more regular text.`;
// Simulate content from JSON where backslashes are escaped
const jsonContent = "Math from JSON: `$\\\\alpha + \\\\beta = \\\\gamma$`";
const result = await parseAdvancedmarkup(jsonContent);
-
+
expect(result).toContain('');
expect(result).toContain("\\\\alpha + \\\\beta = \\\\gamma");
});
it("should handle JSON content with escaped display math", async () => {
// Simulate content from JSON where backslashes are escaped
- const jsonContent = "Display math from JSON: `$$\\\\int_0^1 x^2 dx = \\\\frac{1}{3}$$`";
+ const jsonContent =
+ "Display math from JSON: `$$\\\\int_0^1 x^2 dx = \\\\frac{1}{3}$$`";
const result = await parseAdvancedmarkup(jsonContent);
-
+
expect(result).toContain('');
expect(result).toContain("\\\\int_0^1 x^2 dx = \\\\frac{1}{3}");
});
it("should handle JSON content with escaped dollar signs", async () => {
// Simulate content from JSON where dollar signs are escaped
- const jsonContent = "Price math from JSON: `$\\\\text{Price} = \\\\\\$19.99$`";
+ const jsonContent =
+ "Price math from JSON: `$\\\\text{Price} = \\\\\\$19.99$`";
const result = await parseAdvancedmarkup(jsonContent);
-
+
expect(result).toContain('');
expect(result).toContain("\\\\text{Price} = \\\\\\$19.99");
});
it("should handle complex JSON content with multiple escaped characters", async () => {
// Simulate complex content from JSON
- const jsonContent = "Complex JSON math: `$$\\\\begin{pmatrix} a & b \\\\\\\\ c & d \\\\end{pmatrix} \\\\cdot \\\\begin{pmatrix} x \\\\\\\\ y \\\\end{pmatrix}$$`";
+ const jsonContent =
+ "Complex JSON math: `$$\\\\begin{pmatrix} a & b \\\\\\\\ c & d \\\\end{pmatrix} \\\\cdot \\\\begin{pmatrix} x \\\\\\\\ y \\\\end{pmatrix}$$`";
const result = await parseAdvancedmarkup(jsonContent);
-
+
expect(result).toContain('');
expect(result).toContain("\\\\begin{pmatrix}");
expect(result).toContain("\\\\end{pmatrix}");
diff --git a/tests/unit/tagExpansion.test.ts b/tests/unit/tagExpansion.test.ts
index e47f74b..16283a9 100644
--- a/tests/unit/tagExpansion.test.ts
+++ b/tests/unit/tagExpansion.test.ts
@@ -335,7 +335,9 @@ describe("Tag Expansion Tests", () => {
);
// Should not include events without tags
- expect(result.publications.map((p: any) => p.id)).not.toContain("no-tags");
+ expect(result.publications.map((p: any) => p.id)).not.toContain(
+ "no-tags",
+ );
});
});
@@ -512,7 +514,9 @@ describe("Tag Expansion Tests", () => {
// Should handle d-tags with colons correctly
expect(result.publications).toHaveLength(3);
- expect(result.contentEvents.map((c: any) => c.id)).toContain("colon-content");
+ expect(result.contentEvents.map((c: any) => c.id)).toContain(
+ "colon-content",
+ );
});
});
});