diff --git a/Dockerfile b/Dockerfile index c8ecacc..12ad673 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,7 @@ -FROM denoland/deno:alpine AS build +FROM denoland/deno:alpine-2.4.2 AS build WORKDIR /app/src COPY . . + RUN deno install RUN deno task build @@ -14,4 +15,4 @@ ENV ORIGIN=http://localhost:3000 RUN deno cache --import-map=import_map.json ./build/index.js EXPOSE 3000 -CMD [ "deno", "run", "--allow-env", "--allow-read", "--allow-net", "--import-map=import_map.json", "./build/index.js" ] +CMD [ "deno", "run", "--allow-env", "--allow-read", "--allow-net", "--allow-sys", "--import-map=import_map.json", "./build/index.js" ] diff --git a/README.md b/README.md index 274657e..3273302 100644 --- a/README.md +++ b/README.md @@ -3,19 +3,31 @@ # Alexandria Alexandria is a reader and writer for curated publications, including e-books. -For a thorough introduction, please refer to our [project documention](https://next-alexandria.gitcitadel.eu/publication?d=gitcitadel-project-documentation-by-stella-v-1), viewable on Alexandria, or to the Alexandria [About page](https://next-alexandria.gitcitadel.eu/about). +For a thorough introduction, please refer to our +[project documentation](https://next-alexandria.gitcitadel.eu/publication?d=gitcitadel-project-documentation-by-stella-v-1), +viewable on Alexandria, or to the Alexandria +[About page](https://next-alexandria.gitcitadel.eu/about). -It also contains a [universal event viewer](https://next-alexandria.gitcitadel.eu/events), with which you can search our relays, some aggregator relays, and your own relay list, to find and view event data. +It also contains a +[universal event viewer](https://next-alexandria.gitcitadel.eu/events), with +which you can search our relays, some aggregator relays, and your own relay +list, to find and view event data. ## Issues and Patches -If you would like to suggest a feature or report a bug, please use the [Alexandria Contact page](https://next-alexandria.gitcitadel.eu/contact). 
+If you would like to suggest a feature or report a bug, please use the +[Alexandria Contact page](https://next-alexandria.gitcitadel.eu/contact). -You can also contact us [on Nostr](https://next-alexandria.gitcitadel.eu/events?id=nprofile1qqsggm4l0xs23qfjwnkfwf6fqcs66s3lz637gaxhl4nwd2vtle8rnfqprfmhxue69uhhg6r9vehhyetnwshxummnw3erztnrdaks5zhueg), directly. +You can also contact us +[on Nostr](https://next-alexandria.gitcitadel.eu/events?id=nprofile1qqsggm4l0xs23qfjwnkfwf6fqcs66s3lz637gaxhl4nwd2vtle8rnfqprfmhxue69uhhg6r9vehhyetnwshxummnw3erztnrdaks5zhueg), +directly. ## Developing -Make sure that you have [Node.js](https://nodejs.org/en/download/package-manager) (v22 or above) or [Deno](https://docs.deno.com/runtime/getting_started/installation/) (v2) installed. +Make sure that you have +[Node.js](https://nodejs.org/en/download/package-manager) (v22 or above) or +[Deno](https://docs.deno.com/runtime/getting_started/installation/) (v2) +installed. Once you've cloned this repo, install dependencies with NPM: @@ -43,7 +55,8 @@ deno task dev ## Building -Alexandria is configured to run on a Node server. The [Node adapter](https://svelte.dev/docs/kit/adapter-node) works on Deno as well. +Alexandria is configured to run on a Node server. The +[Node adapter](https://svelte.dev/docs/kit/adapter-node) works on Deno as well. To build a production version of your app with Node, use: @@ -71,7 +84,8 @@ deno task preview ## Docker + Deno -This application is configured to use the Deno runtime. A Docker container is provided to handle builds and deployments. +This application is configured to use the Deno runtime. A Docker container is +provided to handle builds and deployments. To build the app for local development: @@ -87,9 +101,11 @@ docker run -d -p 3000:3000 local-alexandria ## Testing -_These tests are under development, but will run. They will later be added to the container._ +_These tests are under development, but will run. 
They will later be added to +the container._ -To run the Vitest suite we've built, install the program locally and run the tests. +To run the Vitest suite we've built, install the program locally and run the +tests. ```bash npm run test @@ -103,4 +119,8 @@ npx playwright test ## Markup Support -Alexandria supports both Markdown and AsciiDoc markup for different content types. For a detailed list of supported tags and features in the basic and advanced markdown parsers, as well as information about AsciiDoc usage for publications and wikis, see [MarkupInfo.md](./src/lib/utils/markup/MarkupInfo.md). +Alexandria supports both Markdown and AsciiDoc markup for different content +types. For a detailed list of supported tags and features in the basic and +advanced markdown parsers, as well as information about AsciiDoc usage for +publications and wikis, see +[MarkupInfo.md](./src/lib/utils/markup/MarkupInfo.md). diff --git a/deno.json b/deno.json index 9e2ecc6..350316e 100644 --- a/deno.json +++ b/deno.json @@ -2,5 +2,15 @@ "importMap": "./import_map.json", "compilerOptions": { "lib": ["dom", "dom.iterable", "dom.asynciterable", "deno.ns"] + }, + "tasks": { + "dev": "vite dev", + "build": "vite build", + "preview": "vite preview", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", + "lint": "prettier --plugin-search-dir . --check . && eslint .", + "format": "prettier --plugin-search-dir . 
--write .", + "test": "vitest" } } diff --git a/deno.lock b/deno.lock index ef86772..201902a 100644 --- a/deno.lock +++ b/deno.lock @@ -4,10 +4,14 @@ "npm:@noble/curves@^1.9.4": "1.9.4", "npm:@noble/hashes@^1.8.0": "1.8.0", "npm:@nostr-dev-kit/ndk-cache-dexie@2.6": "2.6.33_nostr-tools@2.15.1__typescript@5.8.3_typescript@5.8.3", - "npm:@nostr-dev-kit/ndk-cache-dexie@^2.6.33": "2.6.33_nostr-tools@2.15.1__typescript@5.8.3_typescript@5.8.3", "npm:@nostr-dev-kit/ndk@^2.14.32": "2.14.32_nostr-tools@2.15.1__typescript@5.8.3_typescript@5.8.3", "npm:@playwright/test@^1.54.1": "1.54.1", "npm:@popperjs/core@2.11": "2.11.8", + "npm:@sveltejs/adapter-auto@^6.0.1": "6.0.1_@sveltejs+kit@2.25.1__@sveltejs+vite-plugin-svelte@6.1.0___svelte@5.36.8____acorn@8.15.0___vite@6.3.5____@types+node@24.0.15____picomatch@4.0.3___@types+node@24.0.15__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__acorn@8.15.0__@types+node@24.0.15_@sveltejs+vite-plugin-svelte@6.1.0__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__@types+node@24.0.15_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15", + "npm:@sveltejs/adapter-node@^5.2.13": "5.2.13_@sveltejs+kit@2.25.1__@sveltejs+vite-plugin-svelte@6.1.0___svelte@5.36.8____acorn@8.15.0___vite@6.3.5____@types+node@24.0.15____picomatch@4.0.3___@types+node@24.0.15__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__acorn@8.15.0__@types+node@24.0.15_rollup@4.45.1_@sveltejs+vite-plugin-svelte@6.1.0__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__@types+node@24.0.15_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15", + "npm:@sveltejs/adapter-static@3": 
"3.0.8_@sveltejs+kit@2.25.1__@sveltejs+vite-plugin-svelte@6.1.0___svelte@5.36.8____acorn@8.15.0___vite@6.3.5____@types+node@24.0.15____picomatch@4.0.3___@types+node@24.0.15__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__acorn@8.15.0__@types+node@24.0.15_@sveltejs+vite-plugin-svelte@6.1.0__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__@types+node@24.0.15_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15", + "npm:@sveltejs/kit@^2.25.0": "2.25.1_@sveltejs+vite-plugin-svelte@6.1.0__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__@types+node@24.0.15_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_acorn@8.15.0_@types+node@24.0.15", + "npm:@sveltejs/vite-plugin-svelte@^6.1.0": "6.1.0_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15", "npm:@tailwindcss/forms@0.5": "0.5.10_tailwindcss@3.4.17__postcss@8.5.6", "npm:@tailwindcss/typography@0.5": "0.5.16_tailwindcss@3.4.17__postcss@8.5.6", "npm:@types/d3@^7.4.3": "7.4.3", @@ -18,20 +22,15 @@ "npm:asciidoctor@3.0": "3.0.4_@asciidoctor+core@3.0.4", "npm:autoprefixer@^10.4.21": "10.4.21_postcss@8.5.6", "npm:bech32@2": "2.0.0", - "npm:d3@7.9": "7.9.0_d3-selection@3.0.0", "npm:d3@^7.9.0": "7.9.0_d3-selection@3.0.0", "npm:eslint-plugin-svelte@^3.11.0": "3.11.0_eslint@9.31.0_svelte@5.36.8__acorn@8.15.0_postcss@8.5.6", "npm:flowbite-svelte-icons@2.1": "2.1.1_svelte@5.36.8__acorn@8.15.0_tailwind-merge@3.3.1", - "npm:flowbite-svelte-icons@^2.2.1": "2.2.1_svelte@5.36.8__acorn@8.15.0", "npm:flowbite-svelte@0.48": "0.48.6_svelte@5.36.8__acorn@8.15.0", - "npm:flowbite-svelte@^1.10.10": "1.10.10_svelte@5.36.8__acorn@8.15.0_tailwindcss@3.4.17__postcss@8.5.6", "npm:flowbite@2": "2.5.2", - "npm:flowbite@^3.1.2": "3.1.2", "npm:he@1.2": "1.2.0", "npm:highlight.js@^11.11.1": "11.11.1", "npm:node-emoji@^2.2.0": "2.2.0", 
"npm:nostr-tools@2.15": "2.15.1_typescript@5.8.3", - "npm:nostr-tools@^2.15.1": "2.15.1_typescript@5.8.3", "npm:plantuml-encoder@^1.4.0": "1.4.0", "npm:playwright@^1.50.1": "1.54.1", "npm:playwright@^1.54.1": "1.54.1", @@ -45,7 +44,9 @@ "npm:tailwind-merge@^3.3.1": "3.3.1", "npm:tailwindcss@^3.4.17": "3.4.17_postcss@8.5.6", "npm:tslib@2.8": "2.8.1", - "npm:typescript@^5.8.3": "5.8.3" + "npm:typescript@^5.8.3": "5.8.3", + "npm:vite@^6.3.5": "6.3.5_@types+node@24.0.15_picomatch@4.0.3", + "npm:vitest@^3.1.3": "3.2.4_@types+node@24.0.15_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3" }, "npm": { "@alloc/quick-lru@5.2.0": { @@ -434,9 +435,38 @@ ], "bin": true }, + "@polka/url@1.0.0-next.29": { + "integrity": "sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==" + }, "@popperjs/core@2.11.8": { "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==" }, + "@rollup/plugin-commonjs@28.0.6_rollup@4.45.1_picomatch@4.0.3": { + "integrity": "sha512-XSQB1K7FUU5QP+3lOQmVCE3I0FcbbNvmNT4VJSj93iUjayaARrTQeoRdiYQoftAJBLrR9t2agwAd3ekaTgHNlw==", + "dependencies": [ + "@rollup/pluginutils", + "commondir", + "estree-walker@2.0.2", + "fdir", + "is-reference@1.2.1", + "magic-string", + "picomatch@4.0.3", + "rollup" + ], + "optionalPeers": [ + "rollup" + ] + }, + "@rollup/plugin-json@6.1.0_rollup@4.45.1": { + "integrity": "sha512-EGI2te5ENk1coGeADSIwZ7G2Q8CJS2sF120T7jLw4xFw9n7wIOXHo+kIYRAoVpJAN+kmqZSoO3Fp4JtoNF4ReA==", + "dependencies": [ + "@rollup/pluginutils", + "rollup" + ], + "optionalPeers": [ + "rollup" + ] + }, "@rollup/plugin-node-resolve@15.3.1": { "integrity": "sha512-tgg6b91pAybXHJQMAAwW9VuWBO6Thi+q7BCNARLwSqlmsHz0XYURtGvh/AuwSADXSI4h/2uHbs7s4FzlZDGSGA==", "dependencies": [ @@ -447,11 +477,25 @@ "resolve" ] }, + "@rollup/plugin-node-resolve@16.0.1_rollup@4.45.1": { + "integrity": "sha512-tk5YCxJWIG81umIvNkSod2qK5KyQW19qcBF/B78n1bjtOON6gzKoVeSzAE8yHCZEDmqkHKkxplExA8KzdJLJpA==", + 
"dependencies": [ + "@rollup/pluginutils", + "@types/resolve", + "deepmerge", + "is-module", + "resolve", + "rollup" + ], + "optionalPeers": [ + "rollup" + ] + }, "@rollup/pluginutils@5.2.0_rollup@4.45.1": { "integrity": "sha512-qWJ2ZTbmumwiLFomfzTyt5Kng4hwPi9rwCYN4SHb6eaRU1KNO4ccxINHr/VhH4GgPlt1XfSTLX2LBTme8ne4Zw==", "dependencies": [ "@types/estree", - "estree-walker", + "estree-walker@2.0.2", "picomatch@4.0.3", "rollup" ], @@ -589,32 +633,69 @@ "acorn@8.15.0" ] }, - "@svgdotjs/svg.draggable.js@3.0.6_@svgdotjs+svg.js@3.2.4": { - "integrity": "sha512-7iJFm9lL3C40HQcqzEfezK2l+dW2CpoVY3b77KQGqc8GXWa6LhhmX5Ckv7alQfUXBuZbjpICZ+Dvq1czlGx7gA==", + "@sveltejs/adapter-auto@6.0.1_@sveltejs+kit@2.25.1__@sveltejs+vite-plugin-svelte@6.1.0___svelte@5.36.8____acorn@8.15.0___vite@6.3.5____@types+node@24.0.15____picomatch@4.0.3___@types+node@24.0.15__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__acorn@8.15.0__@types+node@24.0.15_@sveltejs+vite-plugin-svelte@6.1.0__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__@types+node@24.0.15_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15": { + "integrity": "sha512-mcWud3pYGPWM2Pphdj8G9Qiq24nZ8L4LB7coCUckUEy5Y7wOWGJ/enaZ4AtJTcSm5dNK1rIkBRoqt+ae4zlxcQ==", "dependencies": [ - "@svgdotjs/svg.js" + "@sveltejs/kit" ] }, - "@svgdotjs/svg.filter.js@3.0.9": { - "integrity": "sha512-/69XMRCDoam2HgC4ldHIaDgeQf1ViHIsa0Ld4uWgiXtZ+E24DWHe/9Ib6kbNiZ7WRIdlVokUDR1Fg0kjIpkfbw==", + 
"@sveltejs/adapter-node@5.2.13_@sveltejs+kit@2.25.1__@sveltejs+vite-plugin-svelte@6.1.0___svelte@5.36.8____acorn@8.15.0___vite@6.3.5____@types+node@24.0.15____picomatch@4.0.3___@types+node@24.0.15__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__acorn@8.15.0__@types+node@24.0.15_rollup@4.45.1_@sveltejs+vite-plugin-svelte@6.1.0__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__@types+node@24.0.15_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15": { + "integrity": "sha512-yS2TVFmIrxjGhYaV5/iIUrJ3mJl6zjaYn0lBD70vTLnYvJeqf3cjvLXeXCUCuYinhSBoyF4DpfGla49BnIy7sQ==", "dependencies": [ - "@svgdotjs/svg.js" + "@rollup/plugin-commonjs", + "@rollup/plugin-json", + "@rollup/plugin-node-resolve@16.0.1_rollup@4.45.1", + "@sveltejs/kit", + "rollup" ] }, - "@svgdotjs/svg.js@3.2.4": { - "integrity": "sha512-BjJ/7vWNowlX3Z8O4ywT58DqbNRyYlkk6Yz/D13aB7hGmfQTvGX4Tkgtm/ApYlu9M7lCQi15xUEidqMUmdMYwg==" + "@sveltejs/adapter-static@3.0.8_@sveltejs+kit@2.25.1__@sveltejs+vite-plugin-svelte@6.1.0___svelte@5.36.8____acorn@8.15.0___vite@6.3.5____@types+node@24.0.15____picomatch@4.0.3___@types+node@24.0.15__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__acorn@8.15.0__@types+node@24.0.15_@sveltejs+vite-plugin-svelte@6.1.0__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__@types+node@24.0.15_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15": { + "integrity": "sha512-YaDrquRpZwfcXbnlDsSrBQNCChVOT9MGuSg+dMAyfsAa1SmiAhrA5jUYUiIMC59G92kIbY/AaQOWcBdq+lh+zg==", + "dependencies": [ + "@sveltejs/kit" + ] }, - "@svgdotjs/svg.resize.js@2.0.5_@svgdotjs+svg.js@3.2.4_@svgdotjs+svg.select.js@4.0.3__@svgdotjs+svg.js@3.2.4": { - "integrity": "sha512-4heRW4B1QrJeENfi7326lUPYBCevj78FJs8kfeDxn5st0IYPIRXoTtOSYvTzFWgaWWXd3YCDE6ao4fmv91RthA==", + 
"@sveltejs/kit@2.25.1_@sveltejs+vite-plugin-svelte@6.1.0__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__@types+node@24.0.15_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_acorn@8.15.0_@types+node@24.0.15": { + "integrity": "sha512-8H+fxDEp7Xq6tLFdrGdS5fLu6ONDQQ9DgyjboXpChubuFdfH9QoFX09ypssBpyNkJNZFt9eW3yLmXIc9CesPCA==", "dependencies": [ - "@svgdotjs/svg.js", - "@svgdotjs/svg.select.js" + "@sveltejs/acorn-typescript", + "@sveltejs/vite-plugin-svelte", + "@types/cookie", + "acorn@8.15.0", + "cookie", + "devalue", + "esm-env", + "kleur", + "magic-string", + "mrmime", + "sade", + "set-cookie-parser", + "sirv", + "svelte", + "vite" + ], + "bin": true + }, + "@sveltejs/vite-plugin-svelte-inspector@5.0.0_@sveltejs+vite-plugin-svelte@6.1.0__svelte@5.36.8___acorn@8.15.0__vite@6.3.5___@types+node@24.0.15___picomatch@4.0.3__@types+node@24.0.15_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15": { + "integrity": "sha512-iwQ8Z4ET6ZFSt/gC+tVfcsSBHwsqc6RumSaiLUkAurW3BCpJam65cmHw0oOlDMTO0u+PZi9hilBRYN+LZNHTUQ==", + "dependencies": [ + "@sveltejs/vite-plugin-svelte", + "debug", + "svelte", + "vite" ] }, - "@svgdotjs/svg.select.js@4.0.3_@svgdotjs+svg.js@3.2.4": { - "integrity": "sha512-qkMgso1sd2hXKd1FZ1weO7ANq12sNmQJeGDjs46QwDVsxSRcHmvWKL2NDF7Yimpwf3sl5esOLkPqtV2bQ3v/Jg==", + "@sveltejs/vite-plugin-svelte@6.1.0_svelte@5.36.8__acorn@8.15.0_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15": { + "integrity": "sha512-+U6lz1wvGEG/BvQyL4z/flyNdQ9xDNv5vrh+vWBWTHaebqT0c9RNggpZTo/XSPoHsSCWBlYaTlRX8pZ9GATXCw==", "dependencies": [ - "@svgdotjs/svg.js" + "@sveltejs/vite-plugin-svelte-inspector", + "debug", + "deepmerge", + "kleur", + "magic-string", + "svelte", + "vite", + "vitefu" ] }, "@tailwindcss/forms@0.5.10_tailwindcss@3.4.17__postcss@8.5.6": { @@ -634,6 +715,15 @@ "tailwindcss" ] }, + "@types/chai@5.2.2": { + "integrity": 
"sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "dependencies": [ + "@types/deep-eql" + ] + }, + "@types/cookie@0.6.0": { + "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==" + }, "@types/d3-array@3.2.1": { "integrity": "sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==" }, @@ -794,6 +884,9 @@ "@types/d3-zoom" ] }, + "@types/deep-eql@4.0.2": { + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==" + }, "@types/estree@1.0.8": { "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==" }, @@ -830,6 +923,64 @@ "@types/resolve@1.20.2": { "integrity": "sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==" }, + "@vitest/expect@3.2.4": { + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "dependencies": [ + "@types/chai", + "@vitest/spy", + "@vitest/utils", + "chai", + "tinyrainbow" + ] + }, + "@vitest/mocker@3.2.4_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15": { + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "dependencies": [ + "@vitest/spy", + "estree-walker@3.0.3", + "magic-string", + "vite" + ], + "optionalPeers": [ + "vite" + ] + }, + "@vitest/pretty-format@3.2.4": { + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "dependencies": [ + "tinyrainbow" + ] + }, + "@vitest/runner@3.2.4": { + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "dependencies": [ + "@vitest/utils", + "pathe", + "strip-literal" + ] + }, + "@vitest/snapshot@3.2.4": { + "integrity": 
"sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "dependencies": [ + "@vitest/pretty-format", + "magic-string", + "pathe" + ] + }, + "@vitest/spy@3.2.4": { + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "dependencies": [ + "tinyspy" + ] + }, + "@vitest/utils@3.2.4": { + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "dependencies": [ + "@vitest/pretty-format", + "loupe", + "tinyrainbow" + ] + }, "@yr/monotone-cubic-spline@1.0.3": { "integrity": "sha512-FQXkOta0XBSUPHndIKON2Y9JeQz5ZeMqLYZVVK93FliNBFm7LNMIZmY6FrMEB9XPcDbE2bekMbZD6kzDkxwYjA==" }, @@ -896,17 +1047,6 @@ "svg.select.js@3.0.1" ] }, - "apexcharts@4.7.0_@svgdotjs+svg.js@3.2.4_@svgdotjs+svg.select.js@4.0.3__@svgdotjs+svg.js@3.2.4": { - "integrity": "sha512-iZSrrBGvVlL+nt2B1NpqfDuBZ9jX61X9I2+XV0hlYXHtTwhwLTHDKGXjNXAgFBDLuvSYCB/rq2nPWVPRv2DrGA==", - "dependencies": [ - "@svgdotjs/svg.draggable.js", - "@svgdotjs/svg.filter.js", - "@svgdotjs/svg.js", - "@svgdotjs/svg.resize.js", - "@svgdotjs/svg.select.js", - "@yr/monotone-cubic-spline" - ] - }, "arg@5.0.2": { "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==" }, @@ -934,6 +1074,9 @@ "assert-never@1.4.0": { "integrity": "sha512-5oJg84os6NMQNl27T9LnZkvvqzvAnHu03ShCnoj6bsJwS7L8AO4lf+C/XjK/nvzEqQB744moC6V128RucQd1jA==" }, + "assertion-error@2.0.1": { + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==" + }, "async@3.2.6": { "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==" }, @@ -997,6 +1140,9 @@ ], "bin": true }, + "cac@6.7.14": { + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==" + }, "call-bind-apply-helpers@1.0.2": { "integrity": 
"sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", "dependencies": [ @@ -1023,6 +1169,16 @@ "caniuse-lite@1.0.30001727": { "integrity": "sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==" }, + "chai@5.2.1": { + "integrity": "sha512-5nFxhUrX0PqtyogoYOA8IPswy5sZFTOsBFl/9bNsmDLgsxYTzSZQJDPppDnZPTQbzSEm0hqGjWPzRemQCYbD6A==", + "dependencies": [ + "assertion-error", + "check-error", + "deep-eql", + "loupe", + "pathval" + ] + }, "chalk@4.1.2": { "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": [ @@ -1039,6 +1195,9 @@ "is-regex" ] }, + "check-error@2.1.1": { + "integrity": "sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==" + }, "chokidar@3.6.0": { "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", "dependencies": [ @@ -1097,6 +1256,9 @@ "commander@7.2.0": { "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" }, + "commondir@1.0.1": { + "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==" + }, "concat-map@0.0.1": { "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, @@ -1107,6 +1269,9 @@ "@babel/types" ] }, + "cookie@0.6.0": { + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==" + }, "cross-spawn@7.0.6": { "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", "dependencies": [ @@ -1322,9 +1487,6 @@ "d3-zoom" ] }, - "date-fns@4.1.0": { - "integrity": "sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==" - }, "debug@4.4.1": { "integrity": 
"sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dependencies": [ @@ -1334,6 +1496,9 @@ "decamelize@1.2.0": { "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==" }, + "deep-eql@5.0.2": { + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==" + }, "deep-is@0.1.4": { "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" }, @@ -1346,6 +1511,9 @@ "robust-predicates" ] }, + "devalue@5.1.1": { + "integrity": "sha512-maua5KUiapvEwiEAe+XnlZ3Rh0GD+qI1J/nb9vrJc3muPXvcF/8gXYTWF76+5DAqHyDUtOIImEuo0YKE9mshVw==" + }, "dexie@4.0.11": { "integrity": "sha512-SOKO002EqlvBYYKQSew3iymBoN2EQ4BDw/3yprjh7kAfFzjBYkaMNa/pZvcA7HSWlcKSQb9XhPe3wKyQ0x4A8A==" }, @@ -1397,12 +1565,48 @@ "es-errors@1.3.0": { "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==" }, + "es-module-lexer@1.7.0": { + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==" + }, "es-object-atoms@1.1.1": { "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", "dependencies": [ "es-errors" ] }, + "esbuild@0.25.7": { + "integrity": "sha512-daJB0q2dmTzo90L9NjRaohhRWrCzYxWNFTjEi72/h+p5DcY3yn4MacWfDakHmaBaDzDiuLJsCh0+6LK/iX+c+Q==", + "optionalDependencies": [ + "@esbuild/aix-ppc64", + "@esbuild/android-arm", + "@esbuild/android-arm64", + "@esbuild/android-x64", + "@esbuild/darwin-arm64", + "@esbuild/darwin-x64", + "@esbuild/freebsd-arm64", + "@esbuild/freebsd-x64", + "@esbuild/linux-arm", + "@esbuild/linux-arm64", + "@esbuild/linux-ia32", + "@esbuild/linux-loong64", + "@esbuild/linux-mips64el", + "@esbuild/linux-ppc64", + "@esbuild/linux-riscv64", + "@esbuild/linux-s390x", + "@esbuild/linux-x64", + "@esbuild/netbsd-arm64", + 
"@esbuild/netbsd-x64", + "@esbuild/openbsd-arm64", + "@esbuild/openbsd-x64", + "@esbuild/openharmony-arm64", + "@esbuild/sunos-x64", + "@esbuild/win32-arm64", + "@esbuild/win32-ia32", + "@esbuild/win32-x64" + ], + "scripts": true, + "bin": true + }, "escalade@3.2.0": { "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==" }, @@ -1518,9 +1722,18 @@ "estree-walker@2.0.2": { "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==" }, + "estree-walker@3.0.3": { + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": [ + "@types/estree" + ] + }, "esutils@2.0.3": { "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" }, + "expect-type@1.2.2": { + "integrity": "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==" + }, "fast-deep-equal@3.1.3": { "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" }, @@ -1600,7 +1813,7 @@ "flowbite-datepicker@1.3.2": { "integrity": "sha512-6Nfm0MCVX3mpaR7YSCjmEO2GO8CDt6CX8ZpQnGdeu03WUCWtEPQ/uy0PUiNtIJjJZWnX0Cm3H55MOhbD1g+E/g==", "dependencies": [ - "@rollup/plugin-node-resolve", + "@rollup/plugin-node-resolve@15.3.1", "flowbite@2.5.2" ] }, @@ -1608,40 +1821,17 @@ "integrity": "sha512-VNNMcekjbM1bQEGgbdGsdYR9mRdTj/L0A5ba0P1tiFv5QB9GvbvJMABJoiD80eqpZUkfR2QVOmiZfgCwHicT/Q==", "dependencies": [ "svelte", - "tailwind-merge@3.3.1" - ] - }, - "flowbite-svelte-icons@2.2.1_svelte@5.36.8__acorn@8.15.0": { - "integrity": "sha512-SH59319zN4TFpmvFMD7+0ETyDxez4Wyw3mgz7hkjhvrx8HawNAS3Fp7au84pZEs1gniX4hvXIg54U+4YybV2rA==", - "dependencies": [ - "clsx", - "svelte", - "tailwind-merge@3.3.1" + "tailwind-merge" ] }, "flowbite-svelte@0.48.6_svelte@5.36.8__acorn@8.15.0": { "integrity": 
"sha512-/PmeR3ipHHvda8vVY9MZlymaRoJsk8VddEeoLzIygfYwJV68ey8gHuQPC1dq9J6NDCTE5+xOPtBiYUtVjCfvZw==", "dependencies": [ "@floating-ui/dom", - "apexcharts@3.54.1", + "apexcharts", "flowbite@3.1.2", "svelte", - "tailwind-merge@3.3.1" - ] - }, - "flowbite-svelte@1.10.10_svelte@5.36.8__acorn@8.15.0_tailwindcss@3.4.17__postcss@8.5.6": { - "integrity": "sha512-9YCB3EqQKlu7in9pxE46eeA+zt98vhUK1nb0eR2o5wpRfsWj60u9v43lMtfhpxSTsh2Jebh+wVLNYyyrYa0UGA==", - "dependencies": [ - "@floating-ui/dom", - "@floating-ui/utils", - "apexcharts@4.7.0_@svgdotjs+svg.js@3.2.4_@svgdotjs+svg.select.js@4.0.3__@svgdotjs+svg.js@3.2.4", - "clsx", - "date-fns", - "flowbite@3.1.2", - "svelte", - "tailwind-merge@3.3.1", - "tailwind-variants", - "tailwindcss" + "tailwind-merge" ] }, "flowbite@2.5.2": { @@ -1867,6 +2057,12 @@ "is-promise@2.2.2": { "integrity": "sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==" }, + "is-reference@1.2.1": { + "integrity": "sha512-U82MsXXiFIrjCK4otLT+o2NA2Cd2g5MLoOVXUZjIOhLurrRxpEXzI8O0KZHr3IjLvlAH1kTPYSuqer5T9ZVBKQ==", + "dependencies": [ + "@types/estree" + ] + }, "is-reference@3.0.3": { "integrity": "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw==", "dependencies": [ @@ -1911,6 +2107,9 @@ "js-stringify@1.0.2": { "integrity": "sha512-rtS5ATOo2Q5k1G+DADISilDA6lv79zIiwFd6CcjuIxGKLFm5C+RLImRscVap9k55i+MOZwgliw+NejvkLuGD5g==" }, + "js-tokens@9.0.1": { + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==" + }, "js-yaml@4.1.0": { "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dependencies": [ @@ -1940,6 +2139,9 @@ "json-buffer" ] }, + "kleur@4.1.5": { + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==" + }, "known-css-properties@0.37.0": { "integrity": 
"sha512-JCDrsP4Z1Sb9JwG0aJ8Eo2r7k4Ou5MwmThS/6lcIe1ICyb7UBJKGRIUUdqc2ASdE/42lgz6zFUnzAIhtXnBVrQ==" }, @@ -1989,6 +2191,9 @@ "lodash.merge@4.6.2": { "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" }, + "loupe@3.1.4": { + "integrity": "sha512-wJzkKwJrheKtknCOKNEtDK4iqg/MxmZheEMtSTYvnzRdEYaZzmgH976nenp8WdJRdx5Vc1X/9MO0Oszl6ezeXg==" + }, "lru-cache@10.4.3": { "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==" }, @@ -2042,6 +2247,9 @@ "mri@1.2.0": { "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==" }, + "mrmime@2.0.1": { + "integrity": "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==" + }, "ms@2.1.3": { "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, @@ -2184,6 +2392,12 @@ "minipass" ] }, + "pathe@2.0.3": { + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==" + }, + "pathval@2.0.1": { + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==" + }, "picocolors@1.1.1": { "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==" }, @@ -2535,6 +2749,9 @@ "set-blocking@2.0.0": { "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" }, + "set-cookie-parser@2.7.1": { + "integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ==" + }, "shebang-command@2.0.0": { "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dependencies": [ @@ -2544,9 +2761,20 @@ "shebang-regex@3.0.0": { "integrity": 
"sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" }, + "siginfo@2.0.0": { + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==" + }, "signal-exit@4.1.0": { "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" }, + "sirv@3.0.1": { + "integrity": "sha512-FoqMu0NCGBLCcAkS1qA+XJIQTR6/JHfQXl+uGteNCQ76T91DMUjPa9xfmeqMY3z80nLSg9yQmNjK0Px6RWsH/A==", + "dependencies": [ + "@polka/url", + "mrmime", + "totalist" + ] + }, "skin-tone@2.0.0": { "integrity": "sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA==", "dependencies": [ @@ -2559,6 +2787,12 @@ "source-map@0.6.1": { "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, + "stackback@0.0.2": { + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==" + }, + "std-env@3.9.0": { + "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==" + }, "string-width@4.2.3": { "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dependencies": [ @@ -2590,6 +2824,12 @@ "strip-json-comments@3.1.1": { "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==" }, + "strip-literal@3.0.0": { + "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "dependencies": [ + "js-tokens" + ] + }, "sucrase@3.35.0": { "integrity": "sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA==", "dependencies": [ @@ -2653,7 +2893,7 @@ "clsx", "esm-env", "esrap", - "is-reference", + "is-reference@3.0.3", "locate-character", "magic-string", "zimmerframe" @@ -2705,19 +2945,9 @@ "svg.js" 
] }, - "tailwind-merge@3.0.2": { - "integrity": "sha512-l7z+OYZ7mu3DTqrL88RiKrKIqO3NcpEO8V/Od04bNpvk0kiIFndGEoqfuzvj4yuhRkHKjRkII2z+KS2HfPcSxw==" - }, "tailwind-merge@3.3.1": { "integrity": "sha512-gBXpgUm/3rp1lMZZrM/w7D8GKqshif0zAymAhbCyIt8KMe+0v9DQ7cdYLR4FHH/cKpdTXb+A/tKKU3eolfsI+g==" }, - "tailwind-variants@1.0.0_tailwindcss@3.4.17__postcss@8.5.6": { - "integrity": "sha512-2WSbv4ulEEyuBKomOunut65D8UZwxrHoRfYnxGcQNnHqlSCp2+B7Yz2W+yrNDrxRodOXtGD/1oCcKGNBnUqMqA==", - "dependencies": [ - "tailwind-merge@3.0.2", - "tailwindcss" - ] - }, "tailwindcss@3.4.17_postcss@8.5.6": { "integrity": "sha512-w33E2aCvSDP0tW9RZuNXadXlkHXqFzSkQew/aIa2i/Sj8fThxwovwlXHSPXTbAHwEIhBFXAedUhP2tueAKP8Og==", "dependencies": [ @@ -2758,6 +2988,28 @@ "any-promise" ] }, + "tinybench@2.9.0": { + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==" + }, + "tinyexec@0.3.2": { + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==" + }, + "tinyglobby@0.2.14_picomatch@4.0.3": { + "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "dependencies": [ + "fdir", + "picomatch@4.0.3" + ] + }, + "tinypool@1.1.1": { + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==" + }, + "tinyrainbow@2.0.0": { + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==" + }, + "tinyspy@4.0.3": { + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==" + }, "to-regex-range@5.0.1": { "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dependencies": [ @@ -2767,6 +3019,9 @@ "token-stream@1.0.0": { "integrity": "sha512-VSsyNPPW74RpHwR8Fc21uubwHY7wMDeJLys2IX5zJNih+OnAnaifKHo+1LHT7DAdloQ7apeaaWg8l7qnf/TnEg==" }, + 
"totalist@3.0.1": { + "integrity": "sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==" + }, "ts-interface-checker@0.1.13": { "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==" }, @@ -2823,6 +3078,78 @@ "util-deprecate@1.0.2": { "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, + "vite-node@3.2.4_@types+node@24.0.15": { + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "dependencies": [ + "cac", + "debug", + "es-module-lexer", + "pathe", + "vite" + ], + "bin": true + }, + "vite@6.3.5_@types+node@24.0.15_picomatch@4.0.3": { + "integrity": "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==", + "dependencies": [ + "@types/node@24.0.15", + "esbuild", + "fdir", + "picomatch@4.0.3", + "postcss", + "rollup", + "tinyglobby" + ], + "optionalDependencies": [ + "fsevents@2.3.3" + ], + "optionalPeers": [ + "@types/node@24.0.15" + ], + "bin": true + }, + "vitefu@1.1.1_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3_@types+node@24.0.15": { + "integrity": "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ==", + "dependencies": [ + "vite" + ], + "optionalPeers": [ + "vite" + ] + }, + "vitest@3.2.4_@types+node@24.0.15_vite@6.3.5__@types+node@24.0.15__picomatch@4.0.3": { + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dependencies": [ + "@types/chai", + "@types/node@24.0.15", + "@vitest/expect", + "@vitest/mocker", + "@vitest/pretty-format", + "@vitest/runner", + "@vitest/snapshot", + "@vitest/spy", + "@vitest/utils", + "chai", + "debug", + "expect-type", + "magic-string", + "pathe", + "picomatch@4.0.3", + "std-env", + "tinybench", + "tinyexec", + "tinyglobby", + "tinypool", + "tinyrainbow", + 
"vite", + "vite-node", + "why-is-node-running" + ], + "optionalPeers": [ + "@types/node@24.0.15" + ], + "bin": true + }, "void-elements@3.1.0": { "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==" }, @@ -2836,6 +3163,14 @@ ], "bin": true }, + "why-is-node-running@2.3.0": { + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dependencies": [ + "siginfo", + "stackback" + ], + "bin": true + }, "with@7.0.2": { "integrity": "sha512-RNGKj82nUPg3g5ygxkQl0R937xLyho1J24ItRCBTr/m1YnZkzJy1hUiHUJrc/VlsDQzsCnInEGSg3bci0Lmd4w==", "dependencies": [ @@ -2945,18 +3280,25 @@ }, "workspace": { "dependencies": [ - "npm:@nostr-dev-kit/ndk-cache-dexie@^2.6.33", + "npm:@noble/curves@^1.9.4", + "npm:@noble/hashes@^1.8.0", + "npm:@nostr-dev-kit/ndk-cache-dexie@2.6", "npm:@nostr-dev-kit/ndk@^2.14.32", "npm:@popperjs/core@2.11", "npm:@tailwindcss/forms@0.5", "npm:@tailwindcss/typography@0.5", "npm:asciidoctor@3.0", - "npm:d3@7.9", - "npm:flowbite-svelte-icons@^2.2.1", - "npm:flowbite-svelte@^1.10.10", - "npm:flowbite@^3.1.2", + "npm:bech32@2", + "npm:d3@^7.9.0", + "npm:flowbite-svelte-icons@2.1", + "npm:flowbite-svelte@0.48", + "npm:flowbite@2", "npm:he@1.2", - "npm:nostr-tools@^2.15.1", + "npm:highlight.js@^11.11.1", + "npm:node-emoji@^2.2.0", + "npm:nostr-tools@2.15", + "npm:plantuml-encoder@^1.4.0", + "npm:qrcode@^1.5.4", "npm:svelte@^5.36.8", "npm:tailwind-merge@^3.3.1" ], diff --git a/import_map.json b/import_map.json index b5aa95c..3c9c287 100644 --- a/import_map.json +++ b/import_map.json @@ -2,18 +2,29 @@ "imports": { "he": "npm:he@1.2.x", "@nostr-dev-kit/ndk": "npm:@nostr-dev-kit/ndk@^2.14.32", - "@nostr-dev-kit/ndk-cache-dexie": "npm:@nostr-dev-kit/ndk-cache-dexie@^2.6.33", + "@nostr-dev-kit/ndk-cache-dexie": "npm:@nostr-dev-kit/ndk-cache-dexie@2.6.x", "@popperjs/core": "npm:@popperjs/core@2.11.x", "@tailwindcss/forms": "npm:@tailwindcss/forms@0.5.x", 
"@tailwindcss/typography": "npm:@tailwindcss/typography@0.5.x", "asciidoctor": "npm:asciidoctor@3.0.x", - "d3": "npm:d3@7.9.x", - "nostr-tools": "npm:nostr-tools@^2.15.1", + "d3": "npm:d3@^7.9.0", + "nostr-tools": "npm:nostr-tools@2.15.x", "tailwind-merge": "npm:tailwind-merge@^3.3.1", "svelte": "npm:svelte@^5.36.8", - "flowbite": "npm:flowbite@^3.1.2", - "flowbite-svelte": "npm:flowbite-svelte@^1.10.10", - "flowbite-svelte-icons": "npm:flowbite-svelte-icons@^2.2.1", - "child_process": "node:child_process" + "flowbite": "npm:flowbite@2.x", + "flowbite-svelte": "npm:flowbite-svelte@0.48.x", + "flowbite-svelte-icons": "npm:flowbite-svelte-icons@2.1.x", + "@noble/curves": "npm:@noble/curves@^1.9.4", + "@noble/curves/secp256k1": "npm:@noble/curves@^1.9.4/secp256k1", + "@noble/hashes": "npm:@noble/hashes@^1.8.0", + "@noble/hashes/sha2.js": "npm:@noble/hashes@^1.8.0/sha2.js", + "@noble/hashes/utils": "npm:@noble/hashes@^1.8.0/utils", + "bech32": "npm:bech32@^2.0.0", + "highlight.js": "npm:highlight.js@^11.11.1", + "node-emoji": "npm:node-emoji@^2.2.0", + "plantuml-encoder": "npm:plantuml-encoder@^1.4.0", + "qrcode": "npm:qrcode@^1.5.4", + "child_process": "node:child_process", + "process": "node:process" } } diff --git a/package-lock.json b/package-lock.json index 003bf33..efc32c1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -69,20 +69,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", - "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, "node_modules/@asciidoctor/cli": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/@asciidoctor/cli/-/cli-4.0.0.tgz", @@ -149,12 +135,12 @@ } }, "node_modules/@babel/parser": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.0.tgz", - "integrity": "sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==", + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.3.tgz", + "integrity": "sha512-7+Ey1mAgYqFAx2h0RuoxcQT5+MlG3GTV0TQrgr7/ZliKsm/MNDxVVutlWaziMq7wJNAz8MTqz55XLpWvva6StA==", "license": "MIT", "dependencies": { - "@babel/types": "^7.28.0" + "@babel/types": "^7.28.2" }, "bin": { "parser": "bin/babel-parser.js" @@ -177,9 +163,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.8.tgz", - "integrity": "sha512-urAvrUedIqEiFR3FYSLTWQgLu5tb+m0qZw0NBEasUeo6wuqatkMDaRT+1uABiGXEu5vqgPd7FGE1BhsAIy9QVA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.9.tgz", + "integrity": "sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==", "cpu": [ "ppc64" ], @@ -194,9 +180,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.8.tgz", - "integrity": "sha512-RONsAvGCz5oWyePVnLdZY/HHwA++nxYWIX1atInlaW6SEkwq6XkP3+cb825EUcRs5Vss/lGh/2YxAb5xqc07Uw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.9.tgz", + "integrity": "sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==", "cpu": [ "arm" ], @@ -211,9 +197,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.8.tgz", - "integrity": 
"sha512-OD3p7LYzWpLhZEyATcTSJ67qB5D+20vbtr6vHlHWSQYhKtzUYrETuWThmzFpZtFsBIxRvhO07+UgVA9m0i/O1w==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.9.tgz", + "integrity": "sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==", "cpu": [ "arm64" ], @@ -228,9 +214,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.8.tgz", - "integrity": "sha512-yJAVPklM5+4+9dTeKwHOaA+LQkmrKFX96BM0A/2zQrbS6ENCmxc4OVoBs5dPkCCak2roAD+jKCdnmOqKszPkjA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.9.tgz", + "integrity": "sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==", "cpu": [ "x64" ], @@ -245,9 +231,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.8.tgz", - "integrity": "sha512-Jw0mxgIaYX6R8ODrdkLLPwBqHTtYHJSmzzd+QeytSugzQ0Vg4c5rDky5VgkoowbZQahCbsv1rT1KW72MPIkevw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.9.tgz", + "integrity": "sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==", "cpu": [ "arm64" ], @@ -262,9 +248,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.8.tgz", - "integrity": "sha512-Vh2gLxxHnuoQ+GjPNvDSDRpoBCUzY4Pu0kBqMBDlK4fuWbKgGtmDIeEC081xi26PPjn+1tct+Bh8FjyLlw1Zlg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.9.tgz", + "integrity": "sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==", "cpu": [ "x64" ], @@ -279,9 +265,9 @@ } }, 
"node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.8.tgz", - "integrity": "sha512-YPJ7hDQ9DnNe5vxOm6jaie9QsTwcKedPvizTVlqWG9GBSq+BuyWEDazlGaDTC5NGU4QJd666V0yqCBL2oWKPfA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.9.tgz", + "integrity": "sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==", "cpu": [ "arm64" ], @@ -296,9 +282,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.8.tgz", - "integrity": "sha512-MmaEXxQRdXNFsRN/KcIimLnSJrk2r5H8v+WVafRWz5xdSVmWLoITZQXcgehI2ZE6gioE6HirAEToM/RvFBeuhw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.9.tgz", + "integrity": "sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==", "cpu": [ "x64" ], @@ -313,9 +299,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.8.tgz", - "integrity": "sha512-FuzEP9BixzZohl1kLf76KEVOsxtIBFwCaLupVuk4eFVnOZfU+Wsn+x5Ryam7nILV2pkq2TqQM9EZPsOBuMC+kg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.9.tgz", + "integrity": "sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==", "cpu": [ "arm" ], @@ -330,9 +316,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.8.tgz", - "integrity": "sha512-WIgg00ARWv/uYLU7lsuDK00d/hHSfES5BzdWAdAig1ioV5kaFNrtK8EqGcUBJhYqotlUByUKz5Qo6u8tt7iD/w==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.9.tgz", + 
"integrity": "sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==", "cpu": [ "arm64" ], @@ -347,9 +333,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.8.tgz", - "integrity": "sha512-A1D9YzRX1i+1AJZuFFUMP1E9fMaYY+GnSQil9Tlw05utlE86EKTUA7RjwHDkEitmLYiFsRd9HwKBPEftNdBfjg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.9.tgz", + "integrity": "sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==", "cpu": [ "ia32" ], @@ -364,9 +350,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.8.tgz", - "integrity": "sha512-O7k1J/dwHkY1RMVvglFHl1HzutGEFFZ3kNiDMSOyUrB7WcoHGf96Sh+64nTRT26l3GMbCW01Ekh/ThKM5iI7hQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.9.tgz", + "integrity": "sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==", "cpu": [ "loong64" ], @@ -381,9 +367,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.8.tgz", - "integrity": "sha512-uv+dqfRazte3BzfMp8PAQXmdGHQt2oC/y2ovwpTteqrMx2lwaksiFZ/bdkXJC19ttTvNXBuWH53zy/aTj1FgGw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.9.tgz", + "integrity": "sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==", "cpu": [ "mips64el" ], @@ -398,9 +384,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.8.tgz", - "integrity": 
"sha512-GyG0KcMi1GBavP5JgAkkstMGyMholMDybAf8wF5A70CALlDM2p/f7YFE7H92eDeH/VBtFJA5MT4nRPDGg4JuzQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.9.tgz", + "integrity": "sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==", "cpu": [ "ppc64" ], @@ -415,9 +401,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.8.tgz", - "integrity": "sha512-rAqDYFv3yzMrq7GIcen3XP7TUEG/4LK86LUPMIz6RT8A6pRIDn0sDcvjudVZBiiTcZCY9y2SgYX2lgK3AF+1eg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.9.tgz", + "integrity": "sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==", "cpu": [ "riscv64" ], @@ -432,9 +418,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.8.tgz", - "integrity": "sha512-Xutvh6VjlbcHpsIIbwY8GVRbwoviWT19tFhgdA7DlenLGC/mbc3lBoVb7jxj9Z+eyGqvcnSyIltYUrkKzWqSvg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.9.tgz", + "integrity": "sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==", "cpu": [ "s390x" ], @@ -449,9 +435,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.8.tgz", - "integrity": "sha512-ASFQhgY4ElXh3nDcOMTkQero4b1lgubskNlhIfJrsH5OKZXDpUAKBlNS0Kx81jwOBp+HCeZqmoJuihTv57/jvQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.9.tgz", + "integrity": "sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==", "cpu": [ "x64" ], @@ -466,9 +452,9 @@ } }, 
"node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.8.tgz", - "integrity": "sha512-d1KfruIeohqAi6SA+gENMuObDbEjn22olAR7egqnkCD9DGBG0wsEARotkLgXDu6c4ncgWTZJtN5vcgxzWRMzcw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.9.tgz", + "integrity": "sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==", "cpu": [ "arm64" ], @@ -483,9 +469,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.8.tgz", - "integrity": "sha512-nVDCkrvx2ua+XQNyfrujIG38+YGyuy2Ru9kKVNyh5jAys6n+l44tTtToqHjino2My8VAY6Lw9H7RI73XFi66Cg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.9.tgz", + "integrity": "sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==", "cpu": [ "x64" ], @@ -500,9 +486,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.8.tgz", - "integrity": "sha512-j8HgrDuSJFAujkivSMSfPQSAa5Fxbvk4rgNAS5i3K+r8s1X0p1uOO2Hl2xNsGFppOeHOLAVgYwDVlmxhq5h+SQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.9.tgz", + "integrity": "sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==", "cpu": [ "arm64" ], @@ -517,9 +503,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.8.tgz", - "integrity": "sha512-1h8MUAwa0VhNCDp6Af0HToI2TJFAn1uqT9Al6DJVzdIBAd21m/G0Yfc77KDM3uF3T/YaOgQq3qTJHPbTOInaIQ==", + "version": "0.25.9", + "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.9.tgz", + "integrity": "sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==", "cpu": [ "x64" ], @@ -534,9 +520,9 @@ } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.8.tgz", - "integrity": "sha512-r2nVa5SIK9tSWd0kJd9HCffnDHKchTGikb//9c7HX+r+wHYCpQrSgxhlY6KWV1nFo1l4KFbsMlHk+L6fekLsUg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.9.tgz", + "integrity": "sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==", "cpu": [ "arm64" ], @@ -551,9 +537,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.8.tgz", - "integrity": "sha512-zUlaP2S12YhQ2UzUfcCuMDHQFJyKABkAjvO5YSndMiIkMimPmxA+BYSBikWgsRpvyxuRnow4nS5NPnf9fpv41w==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.9.tgz", + "integrity": "sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==", "cpu": [ "x64" ], @@ -568,9 +554,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.8.tgz", - "integrity": "sha512-YEGFFWESlPva8hGL+zvj2z/SaK+pH0SwOM0Nc/d+rVnW7GSTFlLBGzZkuSU9kFIGIo8q9X3ucpZhu8PDN5A2sQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.9.tgz", + "integrity": "sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==", "cpu": [ "arm64" ], @@ -585,9 +571,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.25.8", - "resolved": 
"https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.8.tgz", - "integrity": "sha512-hiGgGC6KZ5LZz58OL/+qVVoZiuZlUYlYHNAmczOm7bs2oE1XriPFi5ZHHrS8ACpV5EjySrnoCKmcbQMN+ojnHg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.9.tgz", + "integrity": "sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==", "cpu": [ "ia32" ], @@ -602,9 +588,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.8.tgz", - "integrity": "sha512-cn3Yr7+OaaZq1c+2pe+8yxC8E144SReCQjN6/2ynubzYjvyqZjTXfQJpAcQpsdJq3My7XADANiYGHoFC69pLQw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.9.tgz", + "integrity": "sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==", "cpu": [ "x64" ], @@ -678,9 +664,9 @@ } }, "node_modules/@eslint/config-helpers": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.0.tgz", - "integrity": "sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==", + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.1.tgz", + "integrity": "sha512-xR93k9WhrDYpXHORXpxVL5oHj3Era7wo6k/Wd8/IsQNnZUTzkGS29lyn3nAT05v6ltUuTFVCCYDEGfy2Or/sPA==", "dev": true, "license": "Apache-2.0", "peer": true, @@ -689,9 +675,9 @@ } }, "node_modules/@eslint/core": { - "version": "0.15.1", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.1.tgz", - "integrity": "sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==", + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.2.tgz", + "integrity": 
"sha512-78Md3/Rrxh83gCxoUc0EiciuOHsIITzLy53m3d9UyiW8y9Dj2D29FeETqyKA+BRK76tnTp6RXWb3pCay8Oyomg==", "dev": true, "license": "Apache-2.0", "peer": true, @@ -728,9 +714,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.32.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.32.0.tgz", - "integrity": "sha512-BBpRFZK3eX6uMLKz8WxFOBIFFcGFJ/g8XuwjTHCqHROSIsopI+ddn/d5Cfh36+7+e5edVS8dbSHnBNhrLEX0zg==", + "version": "9.33.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.33.0.tgz", + "integrity": "sha512-5K1/mKhWaMfreBGJTwval43JJmkip0RmM+3+IuqupeSKNC/Th2Kc7ucaq5ovTSra/OOKB9c58CGSz3QMVbWt0A==", "dev": true, "license": "MIT", "peer": true, @@ -753,14 +739,14 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.3.4", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.4.tgz", - "integrity": "sha512-Ul5l+lHEcw3L5+k8POx6r74mxEYKG5kOb6Xpy2gCRW6zweT6TEhAf8vhxGgjhqrd/VO/Dirhsb+1hNpD1ue9hw==", + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.5.tgz", + "integrity": "sha512-Z5kJ+wU3oA7MMIqVR9tyZRtjYPr4OC004Q4Rw7pgOKUOKkJfZ3O24nz3WYfGRpMDNmcOi3TwQOmgm7B7Tpii0w==", "dev": true, "license": "Apache-2.0", "peer": true, "dependencies": { - "@eslint/core": "^0.15.1", + "@eslint/core": "^0.15.2", "levn": "^0.4.1" }, "engines": { @@ -884,9 +870,9 @@ } }, "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", - "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.0.tgz", + "integrity": "sha512-TKY5pyBkHyADOPYlRT9Lx6F544mPl0vS5Ew7BJ45hA08Q+t3GjbueLliBWN3sMICk6+y7HdyxSzC4bWS8baBdg==", "license": "MIT", "engines": { "node": ">=12" @@ -963,15 +949,26 @@ } }, "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.12", - "resolved": 
"https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz", - "integrity": "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==", + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", "license": "MIT", "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, "node_modules/@jridgewell/resolve-uri": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", @@ -982,15 +979,15 @@ } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.4", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz", - "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==", + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.29", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz", - "integrity": "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==", + "version": "0.3.30", + "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.30.tgz", + "integrity": "sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==", "license": "MIT", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", @@ -1007,9 +1004,9 @@ } }, "node_modules/@noble/curves": { - "version": "1.9.6", - "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.9.6.tgz", - "integrity": "sha512-GIKz/j99FRthB8icyJQA51E8Uk5hXmdyThjgQXRKiv9h0zeRlzSCLIzFw6K1LotZ3XuB7yzlf76qk7uBmTdFqA==", + "version": "1.9.7", + "resolved": "https://registry.npmjs.org/@noble/curves/-/curves-1.9.7.tgz", + "integrity": "sha512-gbKGcRUYIjA3/zCCNaWDciTMFI0dCkvou3TL8Zmy5Nc7sJ47a0jtOeZoTaMxkuqRo9cRhjOdZJXegxYE5FN/xw==", "license": "MIT", "dependencies": { "@noble/hashes": "1.8.0" @@ -1123,13 +1120,13 @@ } }, "node_modules/@playwright/test": { - "version": "1.54.2", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.54.2.tgz", - "integrity": "sha512-A+znathYxPf+72riFd1r1ovOLqsIIB0jKIoPjyK2kqEIe30/6jF6BC7QNluHuwUmsD2tv1XZVugN8GqfTMOxsA==", + "version": "1.55.0", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.55.0.tgz", + "integrity": "sha512-04IXzPwHrW69XusN/SIdDdKZBzMfOT9UNT/YiJit/xpy2VuAoB8NHc8Aplb96zsWDddLnbkPL3TsmrS04ZU2xQ==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright": "1.54.2" + "playwright": "1.55.0" }, "bin": { "playwright": "cli.js" @@ -1252,9 +1249,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.46.2.tgz", - "integrity": "sha512-Zj3Hl6sN34xJtMv7Anwb5Gu01yujyE/cLBDB2gnHTAHaWS1Z38L7kuSG+oAh0giZMqG060f/YBStXtMH6FvPMA==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.46.4.tgz", + "integrity": 
"sha512-B2wfzCJ+ps/OBzRjeds7DlJumCU3rXMxJJS1vzURyj7+KBHGONm7c9q1TfdBl4vCuNMkDvARn3PBl2wZzuR5mw==", "cpu": [ "arm" ], @@ -1266,9 +1263,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.46.2.tgz", - "integrity": "sha512-nTeCWY83kN64oQ5MGz3CgtPx8NSOhC5lWtsjTs+8JAJNLcP3QbLCtDDgUKQc/Ro/frpMq4SHUaHN6AMltcEoLQ==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.46.4.tgz", + "integrity": "sha512-FGJYXvYdn8Bs6lAlBZYT5n+4x0ciEp4cmttsvKAZc/c8/JiPaQK8u0c/86vKX8lA7OY/+37lIQSe0YoAImvBAA==", "cpu": [ "arm64" ], @@ -1280,9 +1277,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.46.2.tgz", - "integrity": "sha512-HV7bW2Fb/F5KPdM/9bApunQh68YVDU8sO8BvcW9OngQVN3HHHkw99wFupuUJfGR9pYLLAjcAOA6iO+evsbBaPQ==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.46.4.tgz", + "integrity": "sha512-/9qwE/BM7ATw/W/OFEMTm3dmywbJyLQb4f4v5nmOjgYxPIGpw7HaxRi6LnD4Pjn/q7k55FGeHe1/OD02w63apA==", "cpu": [ "arm64" ], @@ -1294,9 +1291,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.46.2.tgz", - "integrity": "sha512-SSj8TlYV5nJixSsm/y3QXfhspSiLYP11zpfwp6G/YDXctf3Xkdnk4woJIF5VQe0of2OjzTt8EsxnJDCdHd2xMA==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.46.4.tgz", + "integrity": "sha512-QkWfNbeRuzFnv2d0aPlrzcA3Ebq2mE8kX/5Pl7VdRShbPBjSnom7dbT8E3Jmhxo2RL784hyqGvR5KHavCJQciw==", "cpu": [ "x64" ], @@ -1308,9 +1305,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.46.2", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.46.2.tgz", - "integrity": "sha512-ZyrsG4TIT9xnOlLsSSi9w/X29tCbK1yegE49RYm3tu3wF1L/B6LVMqnEWyDB26d9Ecx9zrmXCiPmIabVuLmNSg==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.46.4.tgz", + "integrity": "sha512-+ToyOMYnSfV8D+ckxO6NthPln/PDNp1P6INcNypfZ7muLmEvPKXqduUiD8DlJpMMT8LxHcE5W0dK9kXfJke9Zw==", "cpu": [ "arm64" ], @@ -1322,9 +1319,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.46.2.tgz", - "integrity": "sha512-pCgHFoOECwVCJ5GFq8+gR8SBKnMO+xe5UEqbemxBpCKYQddRQMgomv1104RnLSg7nNvgKy05sLsY51+OVRyiVw==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.46.4.tgz", + "integrity": "sha512-cGT6ey/W+sje6zywbLiqmkfkO210FgRz7tepWAzzEVgQU8Hn91JJmQWNqs55IuglG8sJdzk7XfNgmGRtcYlo1w==", "cpu": [ "x64" ], @@ -1336,9 +1333,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.46.2.tgz", - "integrity": "sha512-EtP8aquZ0xQg0ETFcxUbU71MZlHaw9MChwrQzatiE8U/bvi5uv/oChExXC4mWhjiqK7azGJBqU0tt5H123SzVA==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.46.4.tgz", + "integrity": "sha512-9fhTJyOb275w5RofPSl8lpr4jFowd+H4oQKJ9XTYzD1JWgxdZKE8bA6d4npuiMemkecQOcigX01FNZNCYnQBdA==", "cpu": [ "arm" ], @@ -1350,9 +1347,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.46.2.tgz", - "integrity": "sha512-qO7F7U3u1nfxYRPM8HqFtLd+raev2K137dsV08q/LRKRLEc7RsiDWihUnrINdsWQxPR9jqZ8DIIZ1zJJAm5PjQ==", + 
"version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.46.4.tgz", + "integrity": "sha512-+6kCIM5Zjvz2HwPl/udgVs07tPMIp1VU2Y0c72ezjOvSvEfAIWsUgpcSDvnC7g9NrjYR6X9bZT92mZZ90TfvXw==", "cpu": [ "arm" ], @@ -1364,9 +1361,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.46.2.tgz", - "integrity": "sha512-3dRaqLfcOXYsfvw5xMrxAk9Lb1f395gkoBYzSFcc/scgRFptRXL9DOaDpMiehf9CO8ZDRJW2z45b6fpU5nwjng==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.46.4.tgz", + "integrity": "sha512-SWuXdnsayCZL4lXoo6jn0yyAj7TTjWE4NwDVt9s7cmu6poMhtiras5c8h6Ih6Y0Zk6Z+8t/mLumvpdSPTWub2Q==", "cpu": [ "arm64" ], @@ -1378,9 +1375,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.46.2.tgz", - "integrity": "sha512-fhHFTutA7SM+IrR6lIfiHskxmpmPTJUXpWIsBXpeEwNgZzZZSg/q4i6FU4J8qOGyJ0TR+wXBwx/L7Ho9z0+uDg==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.46.4.tgz", + "integrity": "sha512-vDknMDqtMhrrroa5kyX6tuC0aRZZlQ+ipDfbXd2YGz5HeV2t8HOl/FDAd2ynhs7Ki5VooWiiZcCtxiZ4IjqZwQ==", "cpu": [ "arm64" ], @@ -1392,9 +1389,9 @@ ] }, "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.46.2.tgz", - "integrity": "sha512-i7wfGFXu8x4+FRqPymzjD+Hyav8l95UIZ773j7J7zRYc3Xsxy2wIn4x+llpunexXe6laaO72iEjeeGyUFmjKeA==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.46.4.tgz", + "integrity": 
"sha512-mCBkjRZWhvjtl/x+Bd4fQkWZT8canStKDxGrHlBiTnZmJnWygGcvBylzLVCZXka4dco5ymkWhZlLwKCGFF4ivw==", "cpu": [ "loong64" ], @@ -1406,9 +1403,9 @@ ] }, "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.46.2.tgz", - "integrity": "sha512-B/l0dFcHVUnqcGZWKcWBSV2PF01YUt0Rvlurci5P+neqY/yMKchGU8ullZvIv5e8Y1C6wOn+U03mrDylP5q9Yw==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.46.4.tgz", + "integrity": "sha512-YMdz2phOTFF+Z66dQfGf0gmeDSi5DJzY5bpZyeg9CPBkV9QDzJ1yFRlmi/j7WWRf3hYIWrOaJj5jsfwgc8GTHQ==", "cpu": [ "ppc64" ], @@ -1420,9 +1417,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.46.2.tgz", - "integrity": "sha512-32k4ENb5ygtkMwPMucAb8MtV8olkPT03oiTxJbgkJa7lJ7dZMr0GCFJlyvy+K8iq7F/iuOr41ZdUHaOiqyR3iQ==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.46.4.tgz", + "integrity": "sha512-r0WKLSfFAK8ucG024v2yiLSJMedoWvk8yWqfNICX28NHDGeu3F/wBf8KG6mclghx4FsLePxJr/9N8rIj1PtCnw==", "cpu": [ "riscv64" ], @@ -1434,9 +1431,9 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.46.2.tgz", - "integrity": "sha512-t5B2loThlFEauloaQkZg9gxV05BYeITLvLkWOkRXogP4qHXLkWSbSHKM9S6H1schf/0YGP/qNKtiISlxvfmmZw==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.46.4.tgz", + "integrity": "sha512-IaizpPP2UQU3MNyPH1u0Xxbm73D+4OupL0bjo4Hm0496e2wg3zuvoAIhubkD1NGy9fXILEExPQy87mweujEatA==", "cpu": [ "riscv64" ], @@ -1448,9 +1445,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": 
{ - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.46.2.tgz", - "integrity": "sha512-YKjekwTEKgbB7n17gmODSmJVUIvj8CX7q5442/CK80L8nqOUbMtf8b01QkG3jOqyr1rotrAnW6B/qiHwfcuWQA==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.46.4.tgz", + "integrity": "sha512-aCM29orANR0a8wk896p6UEgIfupReupnmISz6SUwMIwTGaTI8MuKdE0OD2LvEg8ondDyZdMvnaN3bW4nFbATPA==", "cpu": [ "s390x" ], @@ -1462,9 +1459,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.46.2.tgz", - "integrity": "sha512-Jj5a9RUoe5ra+MEyERkDKLwTXVu6s3aACP51nkfnK9wJTraCC8IMe3snOfALkrjTYd2G1ViE1hICj0fZ7ALBPA==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.46.4.tgz", + "integrity": "sha512-0Xj1vZE3cbr/wda8d/m+UeuSL+TDpuozzdD4QaSzu/xSOMK0Su5RhIkF7KVHFQsobemUNHPLEcYllL7ZTCP/Cg==", "cpu": [ "x64" ], @@ -1476,9 +1473,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.46.2.tgz", - "integrity": "sha512-7kX69DIrBeD7yNp4A5b81izs8BqoZkCIaxQaOpumcJ1S/kmqNFjPhDu1LHeVXv0SexfHQv5cqHsxLOjETuqDuA==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.46.4.tgz", + "integrity": "sha512-kM/orjpolfA5yxsx84kI6bnK47AAZuWxglGKcNmokw2yy9i5eHY5UAjcX45jemTJnfHAWo3/hOoRqEeeTdL5hw==", "cpu": [ "x64" ], @@ -1490,9 +1487,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.46.2.tgz", - "integrity": 
"sha512-wiJWMIpeaak/jsbaq2HMh/rzZxHVW1rU6coyeNNpMwk5isiPjSTx0a4YLSlYDwBH/WBvLz+EtsNqQScZTLJy3g==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.46.4.tgz", + "integrity": "sha512-cNLH4psMEsWKILW0isbpQA2OvjXLbKvnkcJFmqAptPQbtLrobiapBJVj6RoIvg6UXVp5w0wnIfd/Q56cNpF+Ew==", "cpu": [ "arm64" ], @@ -1504,9 +1501,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.46.2.tgz", - "integrity": "sha512-gBgaUDESVzMgWZhcyjfs9QFK16D8K6QZpwAaVNJxYDLHWayOta4ZMjGm/vsAEy3hvlS2GosVFlBlP9/Wb85DqQ==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.46.4.tgz", + "integrity": "sha512-OiEa5lRhiANpv4SfwYVgQ3opYWi/QmPDC5ve21m8G9pf6ZO+aX1g2EEF1/IFaM1xPSP7mK0msTRXlPs6mIagkg==", "cpu": [ "ia32" ], @@ -1518,9 +1515,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.46.2.tgz", - "integrity": "sha512-CvUo2ixeIQGtF6WvuB87XWqPQkoFAFqW+HUo/WzHwuHDvIwZCtjdWXoYCcr06iKGydiqTclC4jU/TNObC/xKZg==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.46.4.tgz", + "integrity": "sha512-IKL9mewGZ5UuuX4NQlwOmxPyqielvkAPUS2s1cl6yWjjQvyN3h5JTdVFGD5Jr5xMjRC8setOfGQDVgX8V+dkjg==", "cpu": [ "x64" ], @@ -1663,9 +1660,9 @@ } }, "node_modules/@sveltejs/adapter-auto": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@sveltejs/adapter-auto/-/adapter-auto-6.0.1.tgz", - "integrity": "sha512-mcWud3pYGPWM2Pphdj8G9Qiq24nZ8L4LB7coCUckUEy5Y7wOWGJ/enaZ4AtJTcSm5dNK1rIkBRoqt+ae4zlxcQ==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-auto/-/adapter-auto-6.1.0.tgz", + "integrity": 
"sha512-shOuLI5D2s+0zTv2ab5M5PqfknXqWbKi+0UwB9yLTRIdzsK1R93JOO8jNhIYSHdW+IYXIYnLniu+JZqXs7h9Wg==", "dev": true, "license": "MIT", "peerDependencies": { @@ -1673,9 +1670,9 @@ } }, "node_modules/@sveltejs/adapter-node": { - "version": "5.2.13", - "resolved": "https://registry.npmjs.org/@sveltejs/adapter-node/-/adapter-node-5.2.13.tgz", - "integrity": "sha512-yS2TVFmIrxjGhYaV5/iIUrJ3mJl6zjaYn0lBD70vTLnYvJeqf3cjvLXeXCUCuYinhSBoyF4DpfGla49BnIy7sQ==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-node/-/adapter-node-5.3.1.tgz", + "integrity": "sha512-PSoGfa9atkmuixe7jvuS2tsUohVZF20So87ASzfMRGTTNqEd8s48KAodlv3CzHwq9XO/BM8KsQLpqqsr/6dmuA==", "dev": true, "license": "MIT", "dependencies": { @@ -1689,9 +1686,9 @@ } }, "node_modules/@sveltejs/adapter-static": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@sveltejs/adapter-static/-/adapter-static-3.0.8.tgz", - "integrity": "sha512-YaDrquRpZwfcXbnlDsSrBQNCChVOT9MGuSg+dMAyfsAa1SmiAhrA5jUYUiIMC59G92kIbY/AaQOWcBdq+lh+zg==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@sveltejs/adapter-static/-/adapter-static-3.0.9.tgz", + "integrity": "sha512-aytHXcMi7lb9ljsWUzXYQ0p5X1z9oWud2olu/EpmH7aCu4m84h7QLvb5Wp+CFirKcwoNnYvYWhyP/L8Vh1ztdw==", "dev": true, "license": "MIT", "peerDependencies": { @@ -1699,9 +1696,9 @@ } }, "node_modules/@sveltejs/kit": { - "version": "2.27.0", - "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.27.0.tgz", - "integrity": "sha512-pEX1Z2Km8tqmkni+ykIIou+ojp/7gb3M9tpllN5nDWNo9zlI0dI8/hDKFyBwQvb4jYR+EyLriFtrmgJ6GvbnBA==", + "version": "2.35.0", + "resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.35.0.tgz", + "integrity": "sha512-9QV1Wcr+tLW8/9qjG+Bd7Wg0jbxh3n/tjmaLggMeHxhinDzG4Z2kNFkNr8MuKoDHhQt50aFEcMiEMwAuAjhgUg==", "dev": true, "license": "MIT", "dependencies": { @@ -1726,19 +1723,25 @@ "node": ">=18.13" }, "peerDependencies": { + "@opentelemetry/api": "^1.0.0", "@sveltejs/vite-plugin-svelte": "^3.0.0 || 
^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0", "svelte": "^4.0.0 || ^5.0.0-next.0", "vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + } } }, "node_modules/@sveltejs/vite-plugin-svelte": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-6.1.0.tgz", - "integrity": "sha512-+U6lz1wvGEG/BvQyL4z/flyNdQ9xDNv5vrh+vWBWTHaebqT0c9RNggpZTo/XSPoHsSCWBlYaTlRX8pZ9GATXCw==", + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-6.1.3.tgz", + "integrity": "sha512-3pppgIeIZs6nrQLazzKcdnTJ2IWiui/UucEPXKyFG35TKaHQrfkWBnv6hyJcLxFuR90t+LaoecrqTs8rJKWfSQ==", "dev": true, "license": "MIT", "dependencies": { - "@sveltejs/vite-plugin-svelte-inspector": "^5.0.0-next.1", + "@sveltejs/vite-plugin-svelte-inspector": "^5.0.0", "debug": "^4.4.1", "deepmerge": "^4.3.1", "kleur": "^4.1.5", @@ -1754,9 +1757,9 @@ } }, "node_modules/@sveltejs/vite-plugin-svelte-inspector": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-5.0.0.tgz", - "integrity": "sha512-iwQ8Z4ET6ZFSt/gC+tVfcsSBHwsqc6RumSaiLUkAurW3BCpJam65cmHw0oOlDMTO0u+PZi9hilBRYN+LZNHTUQ==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-5.0.1.tgz", + "integrity": "sha512-ubWshlMk4bc8mkwWbg6vNvCeT7lGQojE3ijDh3QTR6Zr/R+GXxsGbyH4PExEPpiFmqPhYiVSVmHBjUcVc1JIrA==", "dev": true, "license": "MIT", "dependencies": { @@ -2143,13 +2146,13 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "24.1.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.1.0.tgz", - "integrity": "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w==", + "version": "24.3.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.3.0.tgz", + 
"integrity": "sha512-aPTXCrfwnDLj4VvXrm+UUCQjNEvJgNA8s5F1cvwQU+3KNltTOkBm1j30uNLyqqPNe7gE3KFzImYoZEfLhp4Yow==", "dev": true, "license": "MIT", "dependencies": { - "undici-types": "~7.8.0" + "undici-types": "~7.10.0" } }, "node_modules/@types/qrcode": { @@ -2581,7 +2584,9 @@ "version": "1.1.12", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, "license": "MIT", + "peer": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -2600,9 +2605,9 @@ } }, "node_modules/browserslist": { - "version": "4.25.1", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz", - "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==", + "version": "4.25.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.3.tgz", + "integrity": "sha512-cDGv1kkDI4/0e5yON9yM5G/0A5u8sf5TnmdX5C9qHzI9PPu++sQ9zjm1k9NiOrf3riY4OkK0zSGqfvJyJsgCBQ==", "dev": true, "funding": [ { @@ -2620,8 +2625,8 @@ ], "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001726", - "electron-to-chromium": "^1.5.173", + "caniuse-lite": "^1.0.30001735", + "electron-to-chromium": "^1.5.204", "node-releases": "^2.0.19", "update-browserslist-db": "^1.1.3" }, @@ -2701,9 +2706,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001731", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001731.tgz", - "integrity": "sha512-lDdp2/wrOmTRWuoB5DpfNkC0rJDU8DqRa6nYL6HK6sytw70QMopt/NIc/9SM7ylItlBWfACXk0tEn37UWM/+mg==", + "version": "1.0.30001735", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001735.tgz", + "integrity": "sha512-EV/laoX7Wq2J9TQlyIXRxTJqIw4sxfXS4OYgudGxBYRuTv0q7AM6yMEpU/Vo1I94thg9U6EZ2NfZx9GJq83u7w==", "dev": true, "funding": [ { @@ -2722,9 +2727,9 @@ "license": 
"CC-BY-4.0" }, "node_modules/chai": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/chai/-/chai-5.2.1.tgz", - "integrity": "sha512-5nFxhUrX0PqtyogoYOA8IPswy5sZFTOsBFl/9bNsmDLgsxYTzSZQJDPppDnZPTQbzSEm0hqGjWPzRemQCYbD6A==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.1.tgz", + "integrity": "sha512-48af6xm9gQK8rhIcOxWwdGzIervm8BVTin+yRp9HEvU20BtVZ2lBywlIJBzwaDtvo0FvjeL7QdCADoUoqIbV3A==", "dev": true, "license": "MIT", "dependencies": { @@ -2742,7 +2747,9 @@ "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, "license": "MIT", + "peer": true, "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" @@ -2877,7 +2884,9 @@ "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "license": "MIT" + "dev": true, + "license": "MIT", + "peer": true }, "node_modules/constantinople": { "version": "4.0.1", @@ -3397,9 +3406,9 @@ "license": "MIT" }, "node_modules/dexie": { - "version": "4.0.11", - "resolved": "https://registry.npmjs.org/dexie/-/dexie-4.0.11.tgz", - "integrity": "sha512-SOKO002EqlvBYYKQSew3iymBoN2EQ4BDw/3yprjh7kAfFzjBYkaMNa/pZvcA7HSWlcKSQb9XhPe3wKyQ0x4A8A==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/dexie/-/dexie-4.2.0.tgz", + "integrity": "sha512-OSeyyWOUetDy9oFWeddJgi83OnRA3hSFh3RrbltmPgqHszE9f24eUCVLI4mPg0ifsWk0lQTdnS+jyGNrPMvhDA==", "license": "Apache-2.0" }, "node_modules/didyoumean": { @@ -3462,9 +3471,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.5.194", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.194.tgz", - "integrity": 
"sha512-SdnWJwSUot04UR51I2oPD8kuP2VI37/CADR1OHsFOUzZIvfWJBO6q11k5P/uKNyTT3cdOsnyjkrZ+DDShqYqJA==", + "version": "1.5.207", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.207.tgz", + "integrity": "sha512-mryFrrL/GXDTmAtIVMVf+eIXM09BBPlO5IQ7lUyKmK8d+A4VpRGG+M3ofoVef6qyF8s60rJei8ymlJxjUA8Faw==", "dev": true, "license": "ISC" }, @@ -3518,9 +3527,9 @@ } }, "node_modules/esbuild": { - "version": "0.25.8", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.8.tgz", - "integrity": "sha512-vVC0USHGtMi8+R4Kz8rt6JhEWLxsv9Rnu/lGYbPR8u47B+DCBksq9JarW0zOO7bs37hyOK1l2/oqtbciutL5+Q==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.9.tgz", + "integrity": "sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -3531,32 +3540,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.8", - "@esbuild/android-arm": "0.25.8", - "@esbuild/android-arm64": "0.25.8", - "@esbuild/android-x64": "0.25.8", - "@esbuild/darwin-arm64": "0.25.8", - "@esbuild/darwin-x64": "0.25.8", - "@esbuild/freebsd-arm64": "0.25.8", - "@esbuild/freebsd-x64": "0.25.8", - "@esbuild/linux-arm": "0.25.8", - "@esbuild/linux-arm64": "0.25.8", - "@esbuild/linux-ia32": "0.25.8", - "@esbuild/linux-loong64": "0.25.8", - "@esbuild/linux-mips64el": "0.25.8", - "@esbuild/linux-ppc64": "0.25.8", - "@esbuild/linux-riscv64": "0.25.8", - "@esbuild/linux-s390x": "0.25.8", - "@esbuild/linux-x64": "0.25.8", - "@esbuild/netbsd-arm64": "0.25.8", - "@esbuild/netbsd-x64": "0.25.8", - "@esbuild/openbsd-arm64": "0.25.8", - "@esbuild/openbsd-x64": "0.25.8", - "@esbuild/openharmony-arm64": "0.25.8", - "@esbuild/sunos-x64": "0.25.8", - "@esbuild/win32-arm64": "0.25.8", - "@esbuild/win32-ia32": "0.25.8", - "@esbuild/win32-x64": "0.25.8" + "@esbuild/aix-ppc64": "0.25.9", + "@esbuild/android-arm": "0.25.9", + 
"@esbuild/android-arm64": "0.25.9", + "@esbuild/android-x64": "0.25.9", + "@esbuild/darwin-arm64": "0.25.9", + "@esbuild/darwin-x64": "0.25.9", + "@esbuild/freebsd-arm64": "0.25.9", + "@esbuild/freebsd-x64": "0.25.9", + "@esbuild/linux-arm": "0.25.9", + "@esbuild/linux-arm64": "0.25.9", + "@esbuild/linux-ia32": "0.25.9", + "@esbuild/linux-loong64": "0.25.9", + "@esbuild/linux-mips64el": "0.25.9", + "@esbuild/linux-ppc64": "0.25.9", + "@esbuild/linux-riscv64": "0.25.9", + "@esbuild/linux-s390x": "0.25.9", + "@esbuild/linux-x64": "0.25.9", + "@esbuild/netbsd-arm64": "0.25.9", + "@esbuild/netbsd-x64": "0.25.9", + "@esbuild/openbsd-arm64": "0.25.9", + "@esbuild/openbsd-x64": "0.25.9", + "@esbuild/openharmony-arm64": "0.25.9", + "@esbuild/sunos-x64": "0.25.9", + "@esbuild/win32-arm64": "0.25.9", + "@esbuild/win32-ia32": "0.25.9", + "@esbuild/win32-x64": "0.25.9" } }, "node_modules/escalade": { @@ -3583,9 +3592,9 @@ } }, "node_modules/eslint": { - "version": "9.32.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.32.0.tgz", - "integrity": "sha512-LSehfdpgMeWcTZkWZVIJl+tkZ2nuSkyyB9C27MZqFWXuph7DvaowgcTvKqxvpLW1JZIk8PN7hFY3Rj9LQ7m7lg==", + "version": "9.33.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.33.0.tgz", + "integrity": "sha512-TS9bTNIryDzStCpJN93aC5VRSW3uTx9sClUn4B87pwiCaJh220otoI0X8mJKr+VcPtniMdN8GKjlwgWGUv5ZKA==", "dev": true, "license": "MIT", "peer": true, @@ -3593,11 +3602,11 @@ "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.21.0", - "@eslint/config-helpers": "^0.3.0", - "@eslint/core": "^0.15.0", + "@eslint/config-helpers": "^0.3.1", + "@eslint/core": "^0.15.2", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.32.0", - "@eslint/plugin-kit": "^0.3.4", + "@eslint/js": "9.33.0", + "@eslint/plugin-kit": "^0.3.5", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", @@ -3932,11 +3941,14 @@ } }, "node_modules/fdir": { 
- "version": "6.4.6", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz", - "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==", + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", "dev": true, "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, "peerDependencies": { "picomatch": "^3 || ^4" }, @@ -4340,7 +4352,9 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=8" } @@ -4631,15 +4645,14 @@ } }, "node_modules/jake": { - "version": "10.9.2", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", - "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", + "version": "10.9.4", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.4.tgz", + "integrity": "sha512-wpHYzhxiVQL+IV05BLE2Xn34zW1S223hvjtqk0+gsPrwd/8JNLXJgZZM/iPFsYc1xyphF+6M6EvdE5E9MBGkDA==", "license": "Apache-2.0", "dependencies": { - "async": "^3.2.3", - "chalk": "^4.0.2", + "async": "^3.2.6", "filelist": "^1.0.4", - "minimatch": "^3.1.2" + "picocolors": "^1.1.1" }, "bin": { "jake": "bin/cli.js" @@ -4921,7 +4934,9 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, "license": "ISC", + "peer": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -5378,13 +5393,13 @@ "license": "MIT" }, "node_modules/playwright": { - "version": "1.54.2", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.54.2.tgz", 
- "integrity": "sha512-Hu/BMoA1NAdRUuulyvQC0pEqZ4vQbGfn8f7wPXcnqQmM+zct9UliKxsIkLNmz/ku7LElUNqmaiv1TG/aL5ACsw==", + "version": "1.55.0", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.55.0.tgz", + "integrity": "sha512-sdCWStblvV1YU909Xqx0DhOjPZE4/5lJsIS84IfN9dAZfcl/CIZ5O8l3o0j7hPMjDvqoTF8ZUcc+i/GL5erstA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "playwright-core": "1.54.2" + "playwright-core": "1.55.0" }, "bin": { "playwright": "cli.js" @@ -5397,9 +5412,9 @@ } }, "node_modules/playwright-core": { - "version": "1.54.2", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.54.2.tgz", - "integrity": "sha512-n5r4HFbMmWsB4twG7tJLDN9gmBUeSPcsBZiWSE4DnYz9mJMAFqr2ID7+eGC9kpEnxExJ1epttwR59LEWCk8mtA==", + "version": "1.55.0", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.55.0.tgz", + "integrity": "sha512-GvZs4vU3U5ro2nZpeiwyb0zuFaqb9sUiAJuyrWpcGouD8y9/HLgGbNRjIph7zU9D3hnPaisMl9zG9CgFi/biIg==", "dev": true, "license": "Apache-2.0", "bin": { @@ -6069,9 +6084,9 @@ "license": "Unlicense" }, "node_modules/rollup": { - "version": "4.46.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.46.2.tgz", - "integrity": "sha512-WMmLFI+Boh6xbop+OAGo9cQ3OgX9MIg7xOQjn+pTCwOkk+FNDAeAemXkJ3HzDJrVXleLOFVa1ipuc1AmEx1Dwg==", + "version": "4.46.4", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.46.4.tgz", + "integrity": "sha512-YbxoxvoqNg9zAmw4+vzh1FkGAiZRK+LhnSrbSrSXMdZYsRPDWoshcSd/pldKRO6lWzv/e9TiJAVQyirYIeSIPQ==", "dev": true, "license": "MIT", "dependencies": { @@ -6085,26 +6100,26 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.46.2", - "@rollup/rollup-android-arm64": "4.46.2", - "@rollup/rollup-darwin-arm64": "4.46.2", - "@rollup/rollup-darwin-x64": "4.46.2", - "@rollup/rollup-freebsd-arm64": "4.46.2", - "@rollup/rollup-freebsd-x64": "4.46.2", - "@rollup/rollup-linux-arm-gnueabihf": "4.46.2", - 
"@rollup/rollup-linux-arm-musleabihf": "4.46.2", - "@rollup/rollup-linux-arm64-gnu": "4.46.2", - "@rollup/rollup-linux-arm64-musl": "4.46.2", - "@rollup/rollup-linux-loongarch64-gnu": "4.46.2", - "@rollup/rollup-linux-ppc64-gnu": "4.46.2", - "@rollup/rollup-linux-riscv64-gnu": "4.46.2", - "@rollup/rollup-linux-riscv64-musl": "4.46.2", - "@rollup/rollup-linux-s390x-gnu": "4.46.2", - "@rollup/rollup-linux-x64-gnu": "4.46.2", - "@rollup/rollup-linux-x64-musl": "4.46.2", - "@rollup/rollup-win32-arm64-msvc": "4.46.2", - "@rollup/rollup-win32-ia32-msvc": "4.46.2", - "@rollup/rollup-win32-x64-msvc": "4.46.2", + "@rollup/rollup-android-arm-eabi": "4.46.4", + "@rollup/rollup-android-arm64": "4.46.4", + "@rollup/rollup-darwin-arm64": "4.46.4", + "@rollup/rollup-darwin-x64": "4.46.4", + "@rollup/rollup-freebsd-arm64": "4.46.4", + "@rollup/rollup-freebsd-x64": "4.46.4", + "@rollup/rollup-linux-arm-gnueabihf": "4.46.4", + "@rollup/rollup-linux-arm-musleabihf": "4.46.4", + "@rollup/rollup-linux-arm64-gnu": "4.46.4", + "@rollup/rollup-linux-arm64-musl": "4.46.4", + "@rollup/rollup-linux-loongarch64-gnu": "4.46.4", + "@rollup/rollup-linux-ppc64-gnu": "4.46.4", + "@rollup/rollup-linux-riscv64-gnu": "4.46.4", + "@rollup/rollup-linux-riscv64-musl": "4.46.4", + "@rollup/rollup-linux-s390x-gnu": "4.46.4", + "@rollup/rollup-linux-x64-gnu": "4.46.4", + "@rollup/rollup-linux-x64-musl": "4.46.4", + "@rollup/rollup-win32-arm64-msvc": "4.46.4", + "@rollup/rollup-win32-ia32-msvc": "4.46.4", + "@rollup/rollup-win32-x64-msvc": "4.46.4", "fsevents": "~2.3.2" } }, @@ -6441,7 +6456,9 @@ "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, "license": "MIT", + "peer": true, "dependencies": { "has-flag": "^4.0.0" }, @@ -6462,13 +6479,13 @@ } }, "node_modules/svelte": { - "version": "5.37.3", - "resolved": 
"https://registry.npmjs.org/svelte/-/svelte-5.37.3.tgz", - "integrity": "sha512-7t/ejshehHd+95z3Z7ebS7wsqHDQxi/8nBTuTRwpMgNegfRBfuitCSKTUDKIBOExqfT2+DhQ2VLG8Xn+cBXoaQ==", + "version": "5.38.2", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-5.38.2.tgz", + "integrity": "sha512-iAcp/oFAWauVSGILdD67n7DiwgLHXZzWZIdzl7araRxu72jUr7PFAo2Iie7gXt0IbnlYvhxCb9GT3ZJUquO3PA==", "dev": true, "license": "MIT", "dependencies": { - "@ampproject/remapping": "^2.3.0", + "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", @@ -6488,9 +6505,9 @@ } }, "node_modules/svelte-check": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-4.3.0.tgz", - "integrity": "sha512-Iz8dFXzBNAM7XlEIsUjUGQhbEE+Pvv9odb9+0+ITTgFWZBGeJRRYqHUUglwe2EkLD5LIsQaAc4IUJyvtKuOO5w==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/svelte-check/-/svelte-check-4.3.1.tgz", + "integrity": "sha512-lkh8gff5gpHLjxIV+IaApMxQhTGnir2pNUAqcNgeKkvK5bT/30Ey/nzBxNLDlkztCH4dP7PixkMt9SWEKFPBWg==", "dev": true, "license": "MIT", "dependencies": { @@ -6966,9 +6983,9 @@ } }, "node_modules/undici-types": { - "version": "7.8.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.8.0.tgz", - "integrity": "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==", + "version": "7.10.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", + "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==", "dev": true, "license": "MIT" }, diff --git a/package.json b/package.json index 2225a7a..5426173 100644 --- a/package.json +++ b/package.json @@ -5,6 +5,7 @@ "type": "module", "scripts": { "dev": "vite dev", + "dev:debug": "DEBUG_RELAYS=true vite dev", "dev:node": "node --version && vite dev", "build": "vite build", "preview": "vite preview", diff 
--git a/playwright.config.ts b/playwright.config.ts index 5779001..bd4b2c4 100644 --- a/playwright.config.ts +++ b/playwright.config.ts @@ -27,7 +27,7 @@ export default defineConfig({ /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ use: { /* Base URL to use in actions like `await page.goto('/')`. */ - baseURL: 'http://localhost:5173', + baseURL: "http://localhost:5173", /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */ trace: "on-first-retry", @@ -49,7 +49,6 @@ export default defineConfig({ name: "webkit", use: { ...devices["Desktop Safari"] }, }, - /* Test against mobile viewports. */ // { // name: 'Mobile Chrome', @@ -73,8 +72,8 @@ export default defineConfig({ /* Run your local dev server before starting the tests */ webServer: { - command: 'npm run dev', - url: 'http://localhost:5173', + command: "npm run dev", + url: "http://localhost:5173", reuseExistingServer: !process.env.CI, }, diff --git a/src/app.css b/src/app.css index b5169ae..c036bb1 100644 --- a/src/app.css +++ b/src/app.css @@ -2,7 +2,6 @@ @import "./styles/scrollbar.css"; @import "./styles/publications.css"; @import "./styles/visualize.css"; -@import "./styles/events.css"; @import "./styles/asciidoc.css"; /* Custom styles */ @@ -28,7 +27,9 @@ } div[role="tooltip"] button.btn-leather { - @apply hover:text-primary-600 dark:hover:text-primary-400 hover:border-primary-600 dark:hover:border-primary-400 hover:bg-gray-200 dark:hover:bg-gray-700; + @apply hover:text-primary-600 dark:hover:text-primary-400 + hover:border-primary-600 dark:hover:border-primary-400 hover:bg-gray-200 + dark:hover:bg-gray-700; } .image-border { @@ -36,8 +37,10 @@ } div.card-leather { - @apply shadow-none text-primary-1000 border-s-4 bg-highlight border-primary-200 has-[:hover]:border-primary-700; - @apply dark:bg-primary-1000 dark:border-primary-800 dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500; + @apply 
shadow-none text-primary-1000 border-s-4 bg-highlight + border-primary-200 has-[:hover]:border-primary-700; + @apply dark:bg-primary-1000 dark:border-primary-800 + dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500; } div.card-leather h1, @@ -46,11 +49,13 @@ div.card-leather h4, div.card-leather h5, div.card-leather h6 { - @apply text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400; + @apply text-gray-900 hover:text-primary-600 dark:text-gray-100 + dark:hover:text-primary-400; } div.card-leather .font-thin { - @apply text-gray-900 hover:text-primary-700 dark:text-gray-100 dark:hover:text-primary-300; + @apply text-gray-900 hover:text-primary-700 dark:text-gray-100 + dark:hover:text-primary-300; } main { @@ -74,7 +79,8 @@ div.note-leather, p.note-leather, section.note-leather { - @apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 p-2 rounded; + @apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 + p-2 rounded; } .edit div.note-leather:hover:not(:has(.note-leather:hover)), @@ -117,7 +123,8 @@ } div.modal-leather > div { - @apply bg-primary-0 dark:bg-primary-950 border-b-[1px] border-primary-100 dark:border-primary-600; + @apply bg-primary-0 dark:bg-primary-950 border-b-[1px] border-primary-100 + dark:border-primary-600; } div.modal-leather > div > h1, @@ -126,11 +133,14 @@ div.modal-leather > div > h4, div.modal-leather > div > h5, div.modal-leather > div > h6 { - @apply text-gray-900 hover:text-gray-900 dark:text-gray-100 dark:hover:text-gray-100; + @apply text-gray-900 hover:text-gray-900 dark:text-gray-100 + dark:hover:text-gray-100; } div.modal-leather button { - @apply bg-primary-0 hover:bg-primary-0 dark:bg-primary-950 dark:hover:bg-primary-950 text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400; + @apply bg-primary-0 hover:bg-primary-0 dark:bg-primary-950 + dark:hover:bg-primary-950 text-gray-900 hover:text-primary-600 + 
dark:text-gray-100 dark:hover:text-primary-400; } /* Navbar */ @@ -143,7 +153,8 @@ } nav.navbar-leather svg { - @apply fill-gray-900 hover:fill-primary-600 dark:fill-gray-100 dark:hover:fill-primary-400; + @apply fill-gray-900 hover:fill-primary-600 dark:fill-gray-100 + dark:hover:fill-primary-400; } nav.navbar-leather h1, @@ -152,7 +163,8 @@ nav.navbar-leather h4, nav.navbar-leather h5, nav.navbar-leather h6 { - @apply text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400; + @apply text-gray-900 hover:text-primary-600 dark:text-gray-100 + dark:hover:text-primary-400; } div.skeleton-leather div { @@ -201,16 +213,16 @@ .network-node-content { @apply fill-primary-100; } - + /* Person link colors */ .person-link-signed { @apply stroke-green-500; } - + .person-link-referenced { @apply stroke-blue-400; } - + /* Person anchor node */ .person-anchor-node { @apply fill-green-400 stroke-green-600; @@ -272,11 +284,13 @@ /* Lists */ .ol-leather li a, .ul-leather li a { - @apply text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400; + @apply text-gray-900 hover:text-primary-600 dark:text-gray-100 + dark:hover:text-primary-400; } .link { - @apply underline cursor-pointer hover:text-primary-600 dark:hover:text-primary-400; + @apply underline cursor-pointer hover:text-primary-600 + dark:hover:text-primary-400; } /* Card with transition */ @@ -290,11 +304,14 @@ } .tags span { - @apply bg-primary-50 text-primary-800 text-sm font-medium me-2 px-2.5 py-0.5 rounded-sm dark:bg-primary-900 dark:text-primary-200; + @apply bg-primary-50 text-primary-800 text-sm font-medium me-2 px-2.5 py-0.5 + rounded-sm dark:bg-primary-900 dark:text-primary-200; } .npub-badge { - @apply inline-flex space-x-1 items-center text-primary-600 dark:text-primary-500 hover:underline me-2 px-2 py-0.5 rounded-sm border border-primary-600 dark:border-primary-500; + @apply inline-flex space-x-1 items-center text-primary-600 + dark:text-primary-500 
hover:underline me-2 px-2 py-0.5 rounded-sm border + border-primary-600 dark:border-primary-500; svg { @apply fill-primary-600 dark:fill-primary-500; @@ -303,16 +320,28 @@ } @layer components { + canvas.qr-code { + @apply block mx-auto my-4; + } + /* Legend */ .leather-legend { - @apply relative m-4 sm:m-0 sm:absolute sm:top-1 sm:left-1 flex-shrink-0 p-2 rounded; - @apply shadow-none text-primary-1000 border border-s-4 bg-highlight border-primary-200 has-[:hover]:border-primary-700; - @apply dark:bg-primary-1000 dark:border-primary-800 dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500; + @apply relative m-4 sm:m-0 sm:absolute sm:top-1 sm:left-1 flex-shrink-0 p-2 + rounded; + @apply shadow-none text-primary-1000 border border-s-4 bg-highlight + border-primary-200 has-[:hover]:border-primary-700; + @apply dark:bg-primary-1000 dark:border-primary-800 + dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500; + max-width: 300px; + min-width: 200px; + overflow: hidden; } /* Tooltip */ .tooltip-leather { - @apply fixed p-4 rounded shadow-lg bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 border border-gray-200 dark:border-gray-700 transition-colors duration-200; + @apply fixed p-4 rounded shadow-lg bg-primary-0 dark:bg-primary-1000 + text-gray-900 dark:text-gray-100 border border-gray-200 + dark:border-gray-700 transition-colors duration-200; max-width: 400px; z-index: 1000; } @@ -536,17 +565,26 @@ input[type="tel"], input[type="url"], textarea { - @apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 border-s-4 border-primary-200 rounded shadow-none px-4 py-2; + @apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 + border-s-4 border-primary-200 rounded shadow-none px-4 py-2; @apply focus:border-primary-600 dark:focus:border-primary-400; } /* Table of Contents highlighting */ .toc-highlight { - @apply bg-primary-200 dark:bg-primary-700 border-l-4 border-primary-600 
dark:border-primary-400 font-medium; + @apply bg-primary-200 dark:bg-primary-700 border-l-4 border-primary-600 + dark:border-primary-400 font-medium; transition: all 0.2s ease-in-out; } .toc-highlight:hover { @apply bg-primary-300 dark:bg-primary-600; } + + /* Override prose first-line bold styling */ + .prose p:first-line, + .prose-sm p:first-line, + .prose-invert p:first-line { + font-weight: normal !important; + } } diff --git a/src/app.d.ts b/src/app.d.ts index 25c13d3..3f8a7f6 100644 --- a/src/app.d.ts +++ b/src/app.d.ts @@ -13,6 +13,10 @@ declare global { publicationType?: string; indexEvent?: NDKEvent; url?: URL; + identifierInfo?: { + type: string; + identifier: string; + }; } // interface Platform {} } @@ -23,7 +27,9 @@ declare global { var MathJax: any; var nostr: NDKNip07Signer & { - getRelays: () => Promise>>; + getRelays: () => Promise< + Record> + >; // deno-lint-ignore no-explicit-any signEvent: (event: any) => Promise; }; diff --git a/src/app.html b/src/app.html index 97127be..345607e 100644 --- a/src/app.html +++ b/src/app.html @@ -1,4 +1,4 @@ - + @@ -26,14 +26,18 @@ }, }; - + - + %sveltekit.head% diff --git a/src/lib/components/CommentBox.svelte b/src/lib/components/CommentBox.svelte index fbff0f3..cf08468 100644 --- a/src/lib/components/CommentBox.svelte +++ b/src/lib/components/CommentBox.svelte @@ -1,5 +1,6 @@ + + +{#snippet CommentItem(node: CommentNode)} +
+
+
+
+ +
+ + + {formatRelativeDate(node.event.created_at || 0)} • Kind: {node.event.kind} + +
+
+
+ + {shortenNevent(getNeventUrl(node.event))} + + +
+
+ +
+ {#if node.event.kind === 9802} + +
+ {#if hasHighlightComment(node.event)} + +
+
+ Highlighted content: +
+ {#if node.event.getMatchingTags("context")[0]?.[1]} +
+ {@html node.event.getMatchingTags("context")[0]?.[1]} +
+ {:else} +
+ {node.event.content || ""} +
+ {/if} + {#if getHighlightSource(node.event)} +
+ Source: {getHighlightSource(node.event)?.type === 'nostr_event' ? 'Nostr Event' : 'URL'} +
+ {/if} +
+
+
+ Comment: +
+ +
+ {:else} + + {#if node.event.getMatchingTags("context")[0]?.[1]} +
+ {@html node.event.getMatchingTags("context")[0]?.[1]} +
+ {:else} +
+ {node.event.content || ""} +
+ {/if} + + {#if getHighlightSource(node.event)} +
+ Source: {getHighlightSource(node.event)?.type === 'nostr_event' ? 'Nostr Event' : 'URL'} +
+ {/if} + {/if} + + {#if getHighlightAttribution(node.event).length > 0} +
+ Attribution: + {#each getHighlightAttribution(node.event) as attribution} + + {/each} +
+ {/if} +
+ {:else} + + + {/if} +
+
+ + {#if node.children.length > 0} +
+ {#each node.children as childNode, index (childNode.event.id + '-' + index)} + {@render CommentItem(childNode)} + {/each} +
+ {/if} +
+{/snippet} + +
+ + Comments & Highlights ({threadedComments.length}) + + + {#if loading} +
+

Loading comments...

+
+ {:else if error} +
+

{error}

+
+ {:else if threadedComments.length === 0} +
+

No comments or highlights yet. Be the first to engage!

+
+ {:else} +
+ {#each threadedComments as node, index (node.event.id + '-root-' + index)} + {@render CommentItem(node)} + {/each} +
+ {/if} +
+ + \ No newline at end of file diff --git a/src/lib/components/EventDetails.svelte b/src/lib/components/EventDetails.svelte index 4bd78e4..2f26fe6 100644 --- a/src/lib/components/EventDetails.svelte +++ b/src/lib/components/EventDetails.svelte @@ -1,10 +1,9 @@ -
+
{#if event.kind !== 0 && getEventTitle(event)} -

+

{getEventTitle(event)}

{/if} -
+ + {#if event.kind === 0} + + {/if} + +
{#if toNpub(event.pubkey)} - Author: {@render userBadge( toNpub(event.pubkey) as string, - profile?.display_name || event.pubkey, + profile?.display_name || undefined, )} {:else} - Author: {profile?.display_name || event.pubkey} {/if}
-
- Kind: - {event.kind} - + Kind: + {event.kind} + ({getEventTypeDisplay(event)})
{#if getEventSummary(event)} -
+
Summary: -

{getEventSummary(event)}

-
- {/if} - - {#if getEventHashtags(event).length} -
- Tags: -
- {#each getEventHashtags(event) as tag} - - {/each} -
+

{getEventSummary(event)}

{/if} @@ -449,98 +304,139 @@ -
- {#if event.kind !== 0} - Content: -
- {@html showFullContent ? parsedContent : contentPreview} - {#if !showFullContent && parsedContent.length > 250} + {#if event.kind !== 0} +
+
+ Content: +
+
+ +
+ {#if shouldTruncate} {/if} +
- {/if} -
+
+ {/if} {#if event.kind === 0} {/if} - - {#if event.tags && event.tags.length} -
- Event Tags: -
- {#each event.tags as tag} - {@const tagInfo = getTagButtonInfo(tag)} - {#if tagInfo.text && tagInfo.gotoValue} - - {/if} - {/each} -
-
- {/if} -
- Show Raw Event JSON + Show details -
- + + +
+

Identifiers:

+
+ {#each getIdentifiers(event, profile) as identifier} +
+ {identifier.label}: +
+ {#if identifier.link} + + {identifier.value.slice(0, 20)}...{identifier.value.slice(-8)} + + {:else} + + {identifier.value.slice(0, 20)}...{identifier.value.slice(-8)} + + {/if} + +
+
+ {/each} +
-
+
+    
+    {#if event.tags && event.tags.length}
+      
+

Event Tags:

+
+ {#each event.tags as tag} + {@const tagInfo = getTagButtonInfo(tag)} + {#if tagInfo.text && tagInfo.gotoValue} + + {/if} + {/each} +
+
+ {/if} + + +
+

Raw Event JSON:

+
+
+ +
+
 {JSON.stringify(event.rawEvent(), null, 2)}
-    
+
+
+
diff --git a/src/lib/components/EventInput.svelte b/src/lib/components/EventInput.svelte index 0519692..0e1b4e5 100644 --- a/src/lib/components/EventInput.svelte +++ b/src/lib/components/EventInput.svelte @@ -1,54 +1,48 @@ -
-

Publish Nostr Event

-
-
- - - {#if !isValidKind(kind)} -
- Kind must be an integer between 0 and 65535 (NIP-01). -
- {/if} - {#if Number(kind) === 30040} -
- 30040 - Publication Index: - {get30040EventDescription()} -
- {/if} -
-
- - - - {#if extractedMetadata.length > 0} -
-

- Extracted Metadata (from AsciiDoc header) -

-
- {#each extractedMetadata as [key, value], i} -
- {key}: - - -
- {/each} -
-
- {/if} - -
- {#each tags as [key, value], i} -
- - updateTag(i, (e.target as HTMLInputElement).value, tags[i][1])} - /> - - updateTag(i, tags[i][0], (e.target as HTMLInputElement).value)} - /> - -
- {/each} -
- -
-
-
-
- - -
-
- - -
-
- - - {#if dTagError} -
{dTagError}
- {/if} -
-
+
+
+

Publish Nostr Event

+
+ { + // Trigger validation by submitting the form + const form = document.querySelector('form'); + if (form) { + form.dispatchEvent(new Event('submit', { bubbles: true })); + } + }} > + Validate Form +
- {#if loading} - Publishing... - {/if} - {#if error} -
{error}
- {/if} - {#if success} -
{success}
-
- Relays: {publishedRelays.join(", ")} -
- {#if lastPublishedEventId} -
- Event ID: {lastPublishedEventId} - -
- {/if} - {/if} -
+ + +
+

Load Existing Event

+ + +
+ + +
- {#if showWarning} -
-
-

Warning

-

{warningMessage}

-
+ {#if loadMethod === 'hex'} + +
+ { + if (e.key === 'Enter' && !loadingEvent && eventIdSearch.trim()) { + e.preventDefault(); + loadEventById(); + } + }} + /> + +
+

+ Load an existing event from relays by its hex ID. +

+ {:else} + +
+ +
+

+ Paste a complete event JSON to load it into the form. Fields like id, pubkey, created_at, and sig will be regenerated. +

+ {/if} +
+ + + + + + + + +
+ +
+ + + {#if loading} +
Publishing...
+ {/if} + {#if error} +
{error}
+ {/if} + {#if success} +
{success}
+
+ Relays: {publishedRelays.join(", ")}
+ {#if lastPublishedEventId} +
+ Event ID: {lastPublishedEventId} + +
+ {/if} {/if} + + + showJsonPreview = !showJsonPreview} + /> +
diff --git a/src/lib/components/EventSearch.svelte b/src/lib/components/EventSearch.svelte index 10f888b..fc35baf 100644 --- a/src/lib/components/EventSearch.svelte +++ b/src/lib/components/EventSearch.svelte @@ -1,6 +1,5 @@
diff --git a/src/lib/components/LoginModal.svelte b/src/lib/components/LoginModal.svelte index 085f049..10bd699 100644 --- a/src/lib/components/LoginModal.svelte +++ b/src/lib/components/LoginModal.svelte @@ -2,6 +2,7 @@ import { Button, Modal } from "flowbite-svelte"; import { loginWithExtension } from "$lib/stores/userStore"; import { userStore } from "$lib/stores/userStore"; + import { getNdkContext } from "$lib/ndk"; const { show = false, @@ -13,6 +14,8 @@ onLoginSuccess?: () => void; }>(); + const ndk = getNdkContext(); + let signInFailed = $state(false); let errorMessage = $state(""); let user = $state($userStore); @@ -42,7 +45,7 @@ signInFailed = false; errorMessage = ""; - await loginWithExtension(); + await loginWithExtension(ndk); } catch (e: unknown) { console.error(e); signInFailed = true; diff --git a/src/lib/components/Navigation.svelte b/src/lib/components/Navigation.svelte index e155c03..675ab46 100644 --- a/src/lib/components/Navigation.svelte +++ b/src/lib/components/Navigation.svelte @@ -18,11 +18,14 @@
-

Alexandria

+
+

Alexandria

+

READ THE ORIGINAL. MAKE CONNECTIONS. CULTIVATE KNOWLEDGE.

+
- +
@@ -31,7 +34,9 @@ Visualize Getting Started Events - My Notes + {#if userState.signedIn} + My Notes + {/if} About Contact diff --git a/src/lib/components/Notifications.svelte b/src/lib/components/Notifications.svelte new file mode 100644 index 0000000..7e1a5bd --- /dev/null +++ b/src/lib/components/Notifications.svelte @@ -0,0 +1,1154 @@ + + +{#if isOwnProfile && $userStore.signedIn} +
+
+ Notifications + +
+ + + + +
+ {#each ["to-me", "from-me", "public-messages"] as mode} + {@const modeLabel = mode === "to-me" ? "To Me" : mode === "from-me" ? "From Me" : "Public Messages"} + + {/each} +
+
+
+ + {#if loading} +
+
+ + Loading {notificationMode === "public-messages" ? "public messages" : "notifications"}... + +
+ {:else if error} +
+

Error loading {notificationMode === "public-messages" ? "public messages" : "notifications"}: {error}

+
+ {:else if notificationMode === "public-messages"} + {#if publicMessages.length === 0} +
+

No public messages found.

+
+ {:else} +
+ {#if filteredByUser} +
+
+ + Filtered by user: @{authorProfiles.get(filteredByUser)?.displayName || authorProfiles.get(filteredByUser)?.name || "anon"} + + +
+
+ {/if} +
+ {#each filteredMessages.slice(0, 100) as message} + {@const authorProfile = authorProfiles.get(message.pubkey)} + {@const isFromUser = message.pubkey === $userStore.pubkey} +
+
+ +
+
+ {#if authorProfile?.picture} + Author avatar (e.target as HTMLImageElement).style.display = 'none'} + /> + {:else} +
+ +
+ {/if} +
+ + @{authorProfile?.displayName || authorProfile?.name || "anon"} + +
+
+ + + {#if !isFromUser} +
+ + + + +
+ {/if} +
+ + +
+
+ + {isFromUser ? 'Your Message' : 'Public Message'} + + + {message.created_at ? formatDate(message.created_at) : "Unknown date"} + + +
+ + + + {#if message.getMatchingTags("q").length > 0} +
+ {@render quotedContent(message, publicMessages, ndk)} +
+ {/if} + {#if message.content} +
+
+ +
+
+ {/if} + + +
+
+ +
+ {/each} +
+ + {#if filteredMessages.length > 100} +
+ Showing 100 of {filteredMessages.length} messages {filteredByUser ? `(filtered)` : ''}. Scroll to see more. +
+ {/if} +
+ {/if} + {:else} + {#if notifications.length === 0} +
+

No notifications {notificationMode === "to-me" ? "received" : "sent"} found.

+
+ {:else} +
+ {#each notifications.slice(0, 100) as notification} + {@const authorProfile = authorProfiles.get(notification.pubkey)} +
+
+ +
+
+ {#if authorProfile?.picture} + Author avatar (e.target as HTMLImageElement).style.display = 'none'} + /> + {:else} +
+ +
+ {/if} +
+ + @{authorProfile?.displayName || authorProfile?.name || "anon"} + +
+
+
+ + +
+
+ + {getNotificationType(notification)} + + + {notification.created_at ? formatDate(notification.created_at) : "Unknown date"} + + +
+ + + + {#if notification.content} +
+
+ +
+
+ {/if} + + +
+
+
+ {/each} + + {#if notifications.length > 100} +
+ Showing 100 of {notifications.length} notifications {notificationMode === "to-me" ? "received" : "sent"}. Scroll to see more. +
+ {/if} +
+ {/if} + {/if} +
+ + + + + + + + + + +{/if} \ No newline at end of file diff --git a/src/lib/components/Preview.svelte b/src/lib/components/Preview.svelte index 036098a..72a02ab 100644 --- a/src/lib/components/Preview.svelte +++ b/src/lib/components/Preview.svelte @@ -22,6 +22,7 @@ import BlogHeader from "$components/cards/BlogHeader.svelte"; import { getMatchingTags } from "$lib/utils/nostrUtils"; import { onMount } from "svelte"; + import LazyImage from "$components/util/LazyImage.svelte"; // TODO: Fix move between parents. @@ -250,8 +251,14 @@ {#snippet coverImage(rootId: string, index: number, depth: number)} {#if hasCoverImage(rootId, index)} + {@const event = blogEntries[index][1]}
- {title} +
{/if} {/snippet} diff --git a/src/lib/components/RelayActions.svelte b/src/lib/components/RelayActions.svelte index e7f289c..2b71255 100644 --- a/src/lib/components/RelayActions.svelte +++ b/src/lib/components/RelayActions.svelte @@ -1,35 +1,26 @@ -
- -
- -{#if foundRelays.length > 0} -
- Found on {foundRelays.length} relay(s): -
- {#each foundRelays as relay} - - {/each} -
-
-{/if} -
Found on:
diff --git a/src/lib/components/RelayDisplay.svelte b/src/lib/components/RelayDisplay.svelte index 02ff24b..941e697 100644 --- a/src/lib/components/RelayDisplay.svelte +++ b/src/lib/components/RelayDisplay.svelte @@ -1,7 +1,7 @@ + +
+ {#if showIcon && relayIcon} + Relay icon (e.target as HTMLImageElement).style.display = 'none'} + /> + {:else if showIcon} + +
+ + + +
+ {/if} + +
+ {#if showName} + + {isLoading ? 'Loading...' : displayName} + + {/if} + + {#if showType} + + {relayType} + + {/if} +
+ + {#if error} + + ⚠️ + + {/if} +
diff --git a/src/lib/components/RelayInfoList.svelte b/src/lib/components/RelayInfoList.svelte new file mode 100644 index 0000000..62d6b8b --- /dev/null +++ b/src/lib/components/RelayInfoList.svelte @@ -0,0 +1,143 @@ + + +
+ {#if showLabels && !compact} + {@const categorizedCount = categorizedRelays().length} +
+ Publishing to {categorizedCount} relay(s): +
+ {/if} + + {#if isLoading} +
+
+ Loading relay info... +
+ {:else} + {@const categorized = categorizedRelays()} + +
+ {#each categorized as { relay, category, label }} +
+
+ + {relay} + + {#if category === 'both'} + + common relay + + {/if} +
+
+ {/each} +
+ {/if} +
diff --git a/src/lib/components/RelayStatus.svelte b/src/lib/components/RelayStatus.svelte index fba24c3..cf4d069 100644 --- a/src/lib/components/RelayStatus.svelte +++ b/src/lib/components/RelayStatus.svelte @@ -1,14 +1,15 @@ {#if profile} - +
{#if profile.banner} @@ -80,25 +114,35 @@
{/if}
-
+
{#if profile.picture} Profile avatar { - (e.target as HTMLImageElement).src = "/favicon.png"; + (e.target as HTMLImageElement).style.display = 'none'; + (e.target as HTMLImageElement).nextElementSibling?.classList.remove('hidden'); }} /> + + {:else} +
+ +
{/if} -
- {@render userBadge( - toNpub(event.pubkey) as string, - profile.displayName || - profile.display_name || - profile.name || - event.pubkey, - )} +
+
+ {@render userBadge( + toNpub(event.pubkey) as string, + profile.displayName || + profile.display_name || + profile.name || + event.pubkey, + )} +
{#if communityStatus === true}
{/if} + {#if isInUserLists === true} +
+ + + +
+ {:else if isInUserLists === false} +
+ {/if}
-
+
{#if profile.name} -
-
Name:
-
{profile.name}
+
+
Name:
+
{profile.name}
{/if} {#if profile.displayName} -
-
Display Name:
-
{profile.displayName}
+
+
Display Name:
+
{profile.displayName}
{/if} {#if profile.about} -
-
About:
-
{profile.about}
+
+
About:
+
{profile.about}
{/if} {#if profile.website} -

Scan the QR code or copy the address

{#if lnurl} -

+

diff --git a/src/lib/components/embedded_events/EmbeddedEvent.svelte b/src/lib/components/embedded_events/EmbeddedEvent.svelte new file mode 100644 index 0000000..30ef2dd --- /dev/null +++ b/src/lib/components/embedded_events/EmbeddedEvent.svelte @@ -0,0 +1,738 @@ + + +{#if nestingLevel >= MAX_NESTING_LEVEL} + +
+{:else if loading} + +
+
+
+ Loading event... +
+
+{:else if error} + + +{:else if event} + +
+ +
+
+ + Kind {event.kind} + + + ({getEventType(event.kind || 0)}) + + {#if event.pubkey} + + Author: +
+ {#if toNpub(event.pubkey)} + {@render userBadge( + toNpub(event.pubkey) as string, + authorDisplayName, + )} + {:else} + + {authorDisplayName || event.pubkey.slice(0, 8)}...{event.pubkey.slice(-4)} + + {/if} +
+ {/if} +
+
+ + + {#if getEventTitle(event)} +

+ {getEventTitle(event)} +

+ {/if} + + + {#if event.kind !== 1 && getEventSummary(event)} +
+

+ {getEventSummary(event)} +

+
+ {/if} + + + {#if event.kind === 1 || repostKinds.includes(event.kind)} +
+ {#if repostKinds.includes(event.kind)} + +
+
+ Reposted content: +
+ {@render parsedContent(event.content.slice(0, 300))} + {#if event.content.length > 300} + ... + {/if} +
+ {:else} + + {@render parsedContent(event.content.slice(0, 300))} + {#if event.content.length > 300} + ... + {/if} + {/if} +
+ + {:else if event.kind === 3} +
+ {#if event.content} + {@const contactData = (() => { + try { + return JSON.parse(event.content); + } catch { + return null; + } + })()} + {#if contactData} +
+
+ Contact List + {#if contactData.relays} +
+ Relays: {Object.keys(contactData.relays).length} +
+ {/if} +
+ {#if contactData.follows} +
+ Following: {contactData.follows.length} users +
+ {/if} +
+ {:else} +
+ Invalid contact list data +
+ {/if} + {:else} +
+ Empty contact list +
+ {/if} +
+ + {:else if event.kind === 30040} +
+ {#if event.content} + {@const indexData = (() => { + try { + return JSON.parse(event.content); + } catch { + return null; + } + })()} + {#if indexData} +
+
+ Publication Index + {#if indexData.title} +
+ Title: {indexData.title} +
+ {/if} + {#if indexData.summary} +
+ Summary: {indexData.summary} +
+ {/if} + {#if indexData.authors} +
+ Authors: {indexData.authors.length} +
+ {/if} +
+
+ {:else} +
+ Invalid publication index data +
+ {/if} + {:else} +
+ Empty publication index +
+ {/if} +
+ + {:else if event.kind === 30041 || event.kind === 30818} +
+ {#if event.content} +
+
+ + {event.kind === 30041 ? 'Publication Content' : 'Wiki Content'} + +
+
+
+                {event.content.slice(0, 300)}
+                {#if event.content.length > 300}
+                  ...
+                {/if}
+              
+
+
+ {:else} +
+ Empty {event.kind === 30041 ? 'publication' : 'wiki'} content +
+ {/if} +
+ + {:else if event.kind === 30023} +
+ {#if event.content} +
+
+ Long-form Content +
+
+
+                {event.content.slice(0, 300)}
+                {#if event.content.length > 300}
+                  ...
+                {/if}
+              
+
+
+ {:else} +
+ Empty long-form content +
+ {/if} +
+ + {:else if event.kind === 1111} +
+
+
+ Reply/Comment +
+ {#if event.content && event.content.trim()} +
+ {@render parsedContent(event.content)} +
+ {:else} +
+ Empty reply +
+ {/if} +
+
+ + {:else if event.kind === 1621} +
+
+
+ Git Issue + {#if event.tags} + {@const subjectTag = event.tags.find(tag => tag[0] === 'subject')} + {#if subjectTag && subjectTag[1]} +
+ Subject: {subjectTag[1]} +
+ {/if} + {/if} +
+ {#if event.content && event.content.trim()} +
+ {@render parsedContent(event.content)} +
+ {:else} +
+ Empty issue description +
+ {/if} +
+
+ + {:else if event.kind === 1622} +
+
+
+ Git Comment +
+ {#if event.content && event.content.trim()} +
+ {@render parsedContent(event.content)} +
+ {:else} +
+ Empty comment +
+ {/if} +
+
+ + {:else if event.kind === 7} +
+
+
+ Reaction +
+ {#if event.content && event.content.trim()} +
+ {event.content} +
+ {:else} +
+ Empty reaction +
+ {/if} +
+
+ + {:else if event.kind === 9735} +
+
+
+ Zap Receipt +
+ {#if event.content && event.content.trim()} + {@const zapData = (() => { + try { + return JSON.parse(event.content); + } catch { + return null; + } + })()} + {#if zapData} +
+ {#if zapData.amount} +
Amount: {zapData.amount} sats
+ {/if} + {#if zapData.preimage} +
Preimage: {zapData.preimage.slice(0, 8)}...
+ {/if} + {#if zapData.bolt11} +
Invoice: {zapData.bolt11.slice(0, 20)}...
+ {/if} +
+ {:else} +
+
+                  {event.content.slice(0, 200)}
+                  {#if event.content.length > 200}
+                    ...
+                  {/if}
+                
+
+ {/if} + {:else} +
+ Empty zap receipt +
+ {/if} +
+
+ + {:else if event.kind === 20} +
+
+
+ Image/Media Post +
+ + + {#if event.tags} + {@const imetaTags = event.tags.filter(tag => tag[0] === 'imeta')} + {#if imetaTags.length > 0} +
+ {#each imetaTags as imetaTag} + {@const imetaData = (() => { + const data: any = {}; + for (let i = 1; i < imetaTag.length; i++) { + const item = imetaTag[i]; + if (item.startsWith('url ')) { + data.url = item.substring(4); + } else if (item.startsWith('dim ')) { + data.dimensions = item.substring(4); + } else if (item.startsWith('m ')) { + data.mimeType = item.substring(2); + } else if (item.startsWith('size ')) { + data.size = item.substring(5); + } else if (item.startsWith('blurhash ')) { + data.blurhash = item.substring(9); + } else if (item.startsWith('x ')) { + data.x = item.substring(2); + } + } + return data; + })()} + + {#if imetaData.url && imetaData.mimeType?.startsWith('image/')} +
+ imeta { + (e.target as HTMLImageElement).style.display = 'none'; + const fallback = (e.target as HTMLImageElement).nextElementSibling; + if (fallback) fallback.classList.remove('hidden'); + }} + /> + + + +
+ {#if imetaData.dimensions} + Size: {imetaData.dimensions} + {/if} + {#if imetaData.size} + File: {Math.round(parseInt(imetaData.size) / 1024)}KB + {/if} + {#if imetaData.mimeType} + Type: {imetaData.mimeType} + {/if} +
+
+ {:else if imetaData.url} + +
+ + {#if imetaData.size} +
+ Size: {Math.round(parseInt(imetaData.size) / 1024)}KB +
+ {/if} +
+ {/if} + {/each} +
+ {/if} + {/if} + + + {#if event.content && event.content.trim()} +
+ {@render parsedContent(event.content)} +
+ {/if} + + + {#if event.tags} + {@const altTag = event.tags.find(tag => tag[0] === 'alt')} + {#if altTag && altTag[1]} +
+ Alt: {altTag[1]} +
+ {/if} + {/if} +
+
+ + {:else if event.kind === 0 && profile} +
+ {#if profile.picture} + Profile { + (e.target as HTMLImageElement).style.display = 'none'; + (e.target as HTMLImageElement).nextElementSibling?.classList.remove('hidden'); + }} + /> + + {:else} +
+ +
+ {/if} + {#if profile.about} +

+ {profile.about.slice(0, 200)} + {#if profile.about.length > 200} + ... + {/if} +

+ {/if} +
+ + {:else if event.content} +
+
+          {event.content.slice(0, 300)}
+          {#if event.content.length > 300}
+            ...
+          {/if}
+        
+
+ {:else} +
+ No content +
+ {/if} + + +
+
+ ID: + { + e.preventDefault(); + goto(`/events?id=${event!.id}`); + }} + > + {event!.id.slice(0, 8)}...{event!.id.slice(-4)} + + {#if isAddressableEvent(event!)} + Address: + + {getNaddrUrl(event!).slice(0, 12)}...{getNaddrUrl(event!).slice(-8)} + + {/if} +
+
+
+{/if} diff --git a/src/lib/components/embedded_events/EmbeddedSnippets.svelte b/src/lib/components/embedded_events/EmbeddedSnippets.svelte new file mode 100644 index 0000000..48f103d --- /dev/null +++ b/src/lib/components/embedded_events/EmbeddedSnippets.svelte @@ -0,0 +1,311 @@ + + +{#snippet parsedContent(content: string)} + {#await parseEmbeddedMarkup(content, 0) then parsed} + {@html parsed} + {/await} +{/snippet} + +{#snippet repostContent(content: string)} + {@const originalEvent = (() => { + try { + return JSON.parse(content); + } catch { + return null; + } + })()} + + {#if originalEvent} + {@const originalContent = originalEvent.content || ""} + {@const originalAuthor = originalEvent.pubkey || ""} + {@const originalCreatedAt = originalEvent.created_at || 0} + {@const originalKind = originalEvent.kind || 1} + {@const formattedDate = originalCreatedAt ? new Date(originalCreatedAt * 1000).toLocaleDateString() : "Unknown date"} + {@const shortAuthor = originalAuthor ? `${originalAuthor.slice(0, 8)}...${originalAuthor.slice(-4)}` : "Unknown"} + +
+ +
+
+ + Kind {originalKind} + + + (repost) + + + Author: + + {shortAuthor} + + + + {formattedDate} + +
+
+ + +
+ {#await parseEmbeddedMarkup(originalContent, 0) then parsedOriginalContent} + {@html parsedOriginalContent} + {/await} +
+
+ {:else} + {#await parseEmbeddedMarkup(content, 0) then parsedContent} + {@html parsedContent} + {/await} + {/if} +{/snippet} + +{#snippet quotedContent(message: NDKEvent, publicMessages: NDKEvent[], ndk: NDK)} + {@const qTags = message.getMatchingTags("q")} + {#if qTags.length > 0} + {@const qTag = qTags[0]} + {@const eventId = qTag[1]} + + {#if eventId} + {#await findQuotedMessage(eventId, publicMessages, ndk) then quotedMessage} + {#if quotedMessage} + {@const quotedContent = quotedMessage.content ? quotedMessage.content.slice(0, 200) : "No content"} + {#await parseEmbeddedMarkup(quotedContent, 0) then parsedContent} + + {/await} + {:else} + {@const isValidEventId = /^[a-fA-F0-9]{64}$/.test(eventId)} + {#if isValidEventId} + {@const nevent = (() => { + try { + return nip19.neventEncode({ id: eventId }); + } catch (error) { + console.warn(`[quotedContent] Failed to encode nevent for ${eventId}:`, error); + return null; + } + })()} + {#if nevent} + + {:else} +
+ Quoted message not found. Event ID: {eventId.slice(0, 8)}... +
+ {/if} + {:else} +
+ Invalid quoted message reference +
+ {/if} + {/if} + {/await} + {/if} + {/if} +{/snippet} diff --git a/src/lib/components/event_input/EventForm.svelte b/src/lib/components/event_input/EventForm.svelte new file mode 100644 index 0000000..94bf99f --- /dev/null +++ b/src/lib/components/event_input/EventForm.svelte @@ -0,0 +1,162 @@ + + +
+ +
+ + + {#if !isValidKind(eventData.kind)} +
+ Kind must be an integer between 0 and 65535 (NIP-01). +
+ {/if} + {#if isValidKind(eventData.kind)} +
+ + {getKindDescription(eventData.kind)} + + {#if eventData.kind === 30040} + + +
+ 30040 - Publication Index: Events that organize AsciiDoc content into structured publications with metadata tags and section references. +
+
+ {/if} +
+ {/if} +
+ + +
+ + + + + {#if eventData.kind === 30023} +
+ Use Markdown format for long-form content. Do not use AsciiDoc headers (=). +
+ {:else if eventData.kind === 30040 || eventData.kind === 30041 || eventData.kind === 30818} +
+ Use AsciiDoc format. Start with a document title (=) and include section headers (==). +
+ {/if} +
+ + + {#if validationError} +
+ {validationError} +
+ {/if} + {#if validationWarning} +
+ Warning: {validationWarning} +
+ {/if} + + +
diff --git a/src/lib/components/event_input/EventPreview.svelte b/src/lib/components/event_input/EventPreview.svelte new file mode 100644 index 0000000..55742fb --- /dev/null +++ b/src/lib/components/event_input/EventPreview.svelte @@ -0,0 +1,172 @@ + + + +
+
+

Event Preview

+ +
+ + {#if showJsonPreview} + {#if eventPreview} +
+ {#if eventPreview.type === 'error'} +
+ {eventPreview.message} +
+ {:else} +
+ + Event Type: {eventPreview.type === '30040_index_event' ? '30040 Publication Index' : 'Standard Event'} + +
+
{JSON.stringify(eventPreview.event, null, 2)}
+ {/if} +
+ {:else} +
+
+ Please log in to see the event preview. +
+
+ {/if} + {/if} +
diff --git a/src/lib/components/event_input/TagManager.svelte b/src/lib/components/event_input/TagManager.svelte new file mode 100644 index 0000000..648fe56 --- /dev/null +++ b/src/lib/components/event_input/TagManager.svelte @@ -0,0 +1,342 @@ + + +
+ + + + {#if extractedMetadata.length > 0} +
+

+ Extracted Metadata (from AsciiDoc header) +

+
+ {extractedMetadata.map(([key, value]) => `${key}: ${value}`).join(', ')} +
+
+ {/if} + + +
+ {#each tags as tag, i} +
+ +
+ Tag: + updateTagKey(i, (e.target as HTMLInputElement).value)} + /> + {#if isPresetTag(tag.key)} + + Preset + + {/if} + +
+ + + {#if isPresetTag(tag.key)} + {@const presetInfo = getPresetTagInfo(tag.key)} + {#if presetInfo} +
+ {presetInfo.description} + {#if presetInfo.autoUpdate} + (auto-updates from content) + {/if} +
+ {/if} + {/if} + + +
+
+ Values: + +
+ + {#each tag.values as value, valueIndex} +
+ + {valueIndex + 1}: + + updateTagValue(i, valueIndex, (e.target as HTMLInputElement).value)} + /> + {#if tag.values.length > 1} + + {/if} +
+ {/each} +
+
+ {/each} + + +
+ +
+
+
diff --git a/src/lib/components/event_input/eventServices.ts b/src/lib/components/event_input/eventServices.ts new file mode 100644 index 0000000..3a6db85 --- /dev/null +++ b/src/lib/components/event_input/eventServices.ts @@ -0,0 +1,277 @@ +/** + * Event publishing and loading services + */ + +import { get } from "svelte/store"; +import { userStore } from "$lib/stores/userStore"; +import NDK, { NDKEvent as NDKEventClass } from "@nostr-dev-kit/ndk"; +import type { NDKEvent } from "$lib/utils/nostrUtils"; +import { prefixNostrAddresses } from "$lib/utils/nostrUtils"; +import { fetchEventWithFallback } from "$lib/utils/nostrUtils"; + +import { WebSocketPool } from "$lib/data_structures/websocket_pool"; +import { anonymousRelays } from "$lib/consts"; +import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk"; +import { removeMetadataFromContent } from "$lib/utils/asciidoc_metadata"; +import { build30040EventSet } from "$lib/utils/event_input_utils"; +import type { EventData, TagData, PublishResult, LoadEventResult } from "./types"; + +/** + * Converts TagData array to NDK-compatible format + */ +function convertTagsToNDKFormat(tags: TagData[]): string[][] { + return tags + .filter(tag => tag.key.trim() !== "") + .map(tag => [tag.key, ...tag.values]); +} + +/** + * Publishes an event to relays + */ +export async function publishEvent(ndk: any, eventData: EventData, tags: TagData[]): Promise { + if (!ndk) { + return { success: false, error: "NDK context not available" }; + } + + const userState = get(userStore); + const pubkey = userState.pubkey; + + if (!pubkey) { + return { success: false, error: "User not logged in." }; + } + + const pubkeyString = String(pubkey); + if (!/^[a-fA-F0-9]{64}$/.test(pubkeyString)) { + return { success: false, error: "Invalid public key: must be a 64-character hex string." 
}; + } + + const baseEvent = { pubkey: pubkeyString, created_at: eventData.createdAt }; + let events: NDKEvent[] = []; + + console.log("Publishing event with kind:", eventData.kind); + console.log("Content length:", eventData.content.length); + console.log("Content preview:", eventData.content.substring(0, 100)); + console.log("Tags:", tags); + + if (Number(eventData.kind) === 30040) { + console.log("=== 30040 EVENT CREATION START ==="); + console.log("Creating 30040 event set with content:", eventData.content); + + try { + // Get the current d and title values from the UI + const dTagValue = tags.find(tag => tag.key === "d")?.values[0] || ""; + const titleTagValue = tags.find(tag => tag.key === "title")?.values[0] || ""; + + // Convert multi-value tags to the format expected by build30040EventSet + // Filter out d and title tags since we'll add them manually + const compatibleTags: [string, string][] = tags + .filter(tag => tag.key.trim() !== "" && tag.key !== "d" && tag.key !== "title") + .map(tag => [tag.key, tag.values[0] || ""] as [string, string]); + + const { indexEvent, sectionEvents } = build30040EventSet( + eventData.content, + compatibleTags, + baseEvent, + ndk, + ); + + // Override the d and title tags with the UI values if they exist + const finalTags = indexEvent.tags.filter(tag => tag[0] !== "d" && tag[0] !== "title"); + if (dTagValue) { + finalTags.push(["d", dTagValue]); + } + if (titleTagValue) { + finalTags.push(["title", titleTagValue]); + } + + // Update the index event with the correct tags + indexEvent.tags = finalTags; + console.log("Index event:", indexEvent); + console.log("Section events:", sectionEvents); + + // Publish all 30041 section events first, then the 30040 index event + events = [...sectionEvents, indexEvent]; + console.log("Total events to publish:", events.length); + console.log("=== 30040 EVENT CREATION END ==="); + } catch (error) { + console.error("Error in build30040EventSet:", error); + return { + success: false, + 
error: `Failed to build 30040 event set: ${error instanceof Error ? error.message : "Unknown error"}` + }; + } + } else { + // Convert multi-value tags to the format expected by NDK + let eventTags = convertTagsToNDKFormat(tags); + + // For AsciiDoc events, remove metadata from content + let finalContent = eventData.content; + if (eventData.kind === 30040 || eventData.kind === 30041) { + finalContent = removeMetadataFromContent(eventData.content); + } + + // Prefix Nostr addresses before publishing + const prefixedContent = prefixNostrAddresses(finalContent); + + // Create event with proper serialization + const eventDataForNDK = { + kind: eventData.kind, + content: prefixedContent, + tags: eventTags, + pubkey: pubkeyString, + created_at: eventData.createdAt, + }; + + events = [new NDKEventClass(ndk, eventDataForNDK)]; + } + + let atLeastOne = false; + let relaysPublished: string[] = []; + let lastEventId: string | null = null; + + for (let i = 0; i < events.length; i++) { + const event = events[i]; + try { + console.log("Publishing event:", { + kind: event.kind, + content: event.content, + tags: event.tags, + hasContent: event.content && event.content.length > 0, + }); + + // Always sign with a plain object if window.nostr is available + // Create a completely plain object to avoid proxy cloning issues + const plainEvent = { + kind: Number(event.kind), + pubkey: String(event.pubkey), + created_at: Number( + event.created_at ?? 
Math.floor(Date.now() / 1000), + ), + tags: event.tags.map((tag) => tag.map(String)), + content: String(event.content), + }; + + if ( + typeof window !== "undefined" && + window.nostr && + window.nostr.signEvent + ) { + const signed = await window.nostr.signEvent(plainEvent); + event.sig = signed.sig; + if ("id" in signed) { + event.id = signed.id as string; + } + } else { + await event.sign(); + } + + // Use direct WebSocket publishing like CommentBox does + const signedEvent = { + ...plainEvent, + id: event.id, + sig: event.sig, + }; + + // Try to publish to relays directly + const relays = [ + ...anonymousRelays, + ...get(activeOutboxRelays), + ...get(activeInboxRelays), + ]; + let published = false; + + for (const relayUrl of relays) { + try { + const ws = await WebSocketPool.instance.acquire(relayUrl); + + await new Promise((resolve, reject) => { + const timeout = setTimeout(() => { + WebSocketPool.instance.release(ws); + reject(new Error("Timeout")); + }, 5000); + + ws.onmessage = (e) => { + const [type, id, ok, message] = JSON.parse(e.data); + if (type === "OK" && id === signedEvent.id) { + clearTimeout(timeout); + if (ok) { + published = true; + relaysPublished.push(relayUrl); + WebSocketPool.instance.release(ws); + resolve(); + } else { + WebSocketPool.instance.release(ws); + reject(new Error(message)); + } + } + }; + + // Send the event to the relay + ws.send(JSON.stringify(["EVENT", signedEvent])); + }); + if (published) break; + } catch (e) { + console.error(`Failed to publish to ${relayUrl}:`, e); + } + } + + if (published) { + atLeastOne = true; + // For 30040, set lastEventId to the index event (last in array) + if (Number(eventData.kind) === 30040) { + if (i === events.length - 1) { + lastEventId = event.id; + } + } else { + lastEventId = event.id; + } + } + } catch (signError) { + console.error("Error signing/publishing event:", signError); + return { + success: false, + error: `Failed to sign event: ${signError instanceof Error ? 
signError.message : "Unknown error"}` + }; + } + } + + if (atLeastOne) { + return { + success: true, + eventId: lastEventId || undefined, + relays: relaysPublished + }; + } else { + return { success: false, error: "Failed to publish to any relay." }; + } +} + +/** + * Loads an event by its hex ID + */ +export async function loadEvent(ndk: any, eventId: string): Promise { + if (!ndk) { + throw new Error("NDK context not available"); + } + + const foundEvent = await fetchEventWithFallback(ndk, eventId, 10000); + + if (foundEvent) { + // Convert NDK event format to our format + const eventData: EventData = { + kind: foundEvent.kind, // Use the actual kind from the event + content: foundEvent.content || "", // Preserve content exactly as-is + createdAt: Math.floor(Date.now() / 1000), // Use current time for replacement + }; + + // Convert NDK tags format to our format + const tags: TagData[] = foundEvent.tags.map((tag: string[]) => ({ + key: tag[0] || "", + values: tag.slice(1) + })); + + return { eventData, tags }; + } + + return null; +} diff --git a/src/lib/components/event_input/types.ts b/src/lib/components/event_input/types.ts new file mode 100644 index 0000000..df7e8f9 --- /dev/null +++ b/src/lib/components/event_input/types.ts @@ -0,0 +1,63 @@ +/** + * Type definitions for the EventInput component system + */ + +export interface EventData { + kind: number; + content: string; + createdAt: number; +} + +export interface TagData { + key: string; + values: string[]; +} + +export interface ValidationResult { + valid: boolean; + reason?: string; + warning?: string; +} + +export interface PublishResult { + success: boolean; + eventId?: string; + relays?: string[]; + error?: string; +} + +export interface LoadEventResult { + eventData: EventData; + tags: TagData[]; +} + +export interface EventPreview { + type: 'standard_event' | '30040_index_event' | 'error'; + event?: { + id: string; + pubkey: string; + created_at: number; + kind: number; + tags: string[][]; + 
content: string; + sig: string; + }; + message?: string; +} + +export interface PresetTag { + key: string; + defaultValue: string; + required: boolean; + autoUpdate: boolean; + description: string; +} + +export interface KindConfig { + kind: number; + name: string; + description: string; + presetTags: PresetTag[]; + requiresContent: boolean; + contentValidation?: (content: string) => ValidationResult; +} diff --git a/src/lib/components/event_input/validation.ts b/src/lib/components/event_input/validation.ts new file mode 100644 index 0000000..7fb6609 --- /dev/null +++ b/src/lib/components/event_input/validation.ts @@ -0,0 +1,90 @@ +/** + * Event validation utilities + */ + +import { get } from "svelte/store"; +import { userStore } from "$lib/stores/userStore"; +import type { EventData, TagData, ValidationResult } from "./types"; +import { + validateNotAsciidoc, + validateAsciiDoc, + validate30040EventSet, +} from "$lib/utils/event_input_utils"; + +/** + * Validates an event and its tags + */ +export function validateEvent(eventData: EventData, tags: TagData[]): ValidationResult { + const userState = get(userStore); + + const pubkey = userState.pubkey; + if (!pubkey) { + return { valid: false, reason: "Not logged in." }; + } + + // Content validation - 30040 events don't require content + if (eventData.kind !== 30040 && !eventData.content.trim()) { + return { valid: false, reason: "Content required." }; + } + + // Kind-specific validation + if (eventData.kind === 30023) { + const v = validateNotAsciidoc(eventData.content); + if (!v.valid) return v; + } + + if (eventData.kind === 30040) { + // Check for required tags + const versionTag = tags.find(t => t.key === "version"); + const dTag = tags.find(t => t.key === "d"); + const titleTag = tags.find(t => t.key === "title"); + + if (!versionTag || !versionTag.values[0] || versionTag.values[0].trim() === "") { + return { valid: false, reason: "30040 events require a 'version' tag." 
}; + } + + if (!dTag || !dTag.values[0] || dTag.values[0].trim() === "") { + return { valid: false, reason: "30040 events require a 'd' tag." }; + } + + if (!titleTag || !titleTag.values[0] || titleTag.values[0].trim() === "") { + return { valid: false, reason: "30040 events require a 'title' tag." }; + } + + // Validate content format if present + if (eventData.content.trim()) { + const v = validate30040EventSet(eventData.content); + if (!v.valid) return v; + if (v.warning) return { valid: true, warning: v.warning }; + } + } + + if (eventData.kind === 30041 || eventData.kind === 30818) { + const v = validateAsciiDoc(eventData.content); + if (!v.valid) return v; + } + + return { valid: true }; +} + +/** + * Validates that a kind is within valid range + */ +export function isValidKind(kind: number | string): boolean { + const n = Number(kind); + return Number.isInteger(n) && n >= 0 && n <= 65535; +} + +/** + * Validates that a tag has a valid key + */ +export function isValidTagKey(key: string): boolean { + return key.trim().length > 0; +} + +/** + * Validates that a tag has at least one value + */ +export function isValidTag(tag: TagData): boolean { + return isValidTagKey(tag.key) && tag.values.some(v => v.trim().length > 0); +} diff --git a/src/lib/components/publications/Publication.svelte b/src/lib/components/publications/Publication.svelte index 52489e5..6170e15 100644 --- a/src/lib/components/publications/Publication.svelte +++ b/src/lib/components/publications/Publication.svelte @@ -24,43 +24,67 @@ import TableOfContents from "./TableOfContents.svelte"; import type { TableOfContents as TocType } from "./table_of_contents.svelte"; - let { rootAddress, publicationType, indexEvent } = $props<{ + let { rootAddress, publicationType, indexEvent, publicationTree, toc } = $props<{ rootAddress: string; publicationType: string; indexEvent: NDKEvent; + publicationTree: SveltePublicationTree; + toc: TocType; }>(); - const publicationTree = getContext( - 
"publicationTree", - ) as SveltePublicationTree; - const toc = getContext("toc") as TocType; - // #region Loading - let leaves = $state>([]); - let isLoading = $state(false); - let isDone = $state(false); + let isLoading = $state(false); + let isDone = $state(false); let lastElementRef = $state(null); let activeAddress = $state(null); + let loadedAddresses = $state>(new Set()); + let hasInitialized = $state(false); let observer: IntersectionObserver; async function loadMore(count: number) { + if (!publicationTree) { + console.warn("[Publication] publicationTree is not available"); + return; + } + + console.log(`[Publication] Loading ${count} more events. Current leaves: ${leaves.length}, loaded addresses: ${loadedAddresses.size}`); + isLoading = true; - for (let i = 0; i < count; i++) { - const iterResult = await publicationTree.next(); - const { done, value } = iterResult; - - if (done) { - isDone = true; - break; + try { + for (let i = 0; i < count; i++) { + const iterResult = await publicationTree.next(); + const { done, value } = iterResult; + + if (done) { + console.log("[Publication] Iterator done, no more events"); + isDone = true; + break; + } + + if (value) { + const address = value.tagAddress(); + console.log(`[Publication] Got event: ${address} (${value.id})`); + if (!loadedAddresses.has(address)) { + loadedAddresses.add(address); + leaves.push(value); + console.log(`[Publication] Added event: ${address}`); + } else { + console.warn(`[Publication] Duplicate event detected: ${address}`); + } + } else { + console.log("[Publication] Got null event"); + leaves.push(null); + } } - - leaves.push(value); + } catch (error) { + console.error("[Publication] Error loading more content:", error); + } finally { + isLoading = false; + console.log(`[Publication] Finished loading. 
Total leaves: ${leaves.length}, loaded addresses: ${loadedAddresses.size}`); } - - isLoading = false; } function setLastElementRef(el: HTMLElement, i: number) { @@ -85,6 +109,34 @@ // #endregion + // AI-NOTE: 2025-01-24 - Combined effect to handle publicationTree changes and initial loading + // This prevents conflicts between separate effects that could cause duplicate loading + $effect(() => { + if (publicationTree) { + // Reset state when publicationTree changes + leaves = []; + isLoading = false; + isDone = false; + lastElementRef = null; + loadedAddresses = new Set(); + hasInitialized = false; + + // Reset the publication tree iterator to prevent duplicate events + if (typeof publicationTree.resetIterator === 'function') { + publicationTree.resetIterator(); + } + + // AI-NOTE: 2025-01-24 - Use setTimeout to ensure iterator reset completes before loading + // This prevents race conditions where loadMore is called before the iterator is fully reset + setTimeout(() => { + // Load initial content after reset + console.log("[Publication] Loading initial content after reset"); + hasInitialized = true; + loadMore(12); + }, 0); + } + }); + // #region Columns visibility let currentBlog: null | string = $state(null); @@ -175,14 +227,17 @@ observer = new IntersectionObserver( (entries) => { entries.forEach((entry) => { - if (entry.isIntersecting && !isLoading && !isDone) { + if (entry.isIntersecting && !isLoading && !isDone && publicationTree) { loadMore(1); } }); }, { threshold: 0.5 }, ); - loadMore(12); + + // AI-NOTE: 2025-01-24 - Removed duplicate loadMore call + // Initial content loading is handled by the $effect that watches publicationTree + // This prevents duplicate loading when both onMount and $effect trigger return () => { observer.disconnect(); @@ -207,11 +262,12 @@ /> publicationTree.setBookmark(address)} onLoadMore={() => { - if (!isLoading && !isDone) { + if (!isLoading && !isDone && publicationTree) { loadMore(4); } }} @@ -241,6 +297,8 @@ {rootAddress} 
{leaves} {address} + {publicationTree} + {toc} ref={(el) => onPublicationSectionMounted(el, address)} /> {/if} @@ -249,7 +307,7 @@ {#if isLoading} {:else if !isDone} - + {:else}

You've reached the end of the publication. @@ -300,6 +358,8 @@ {rootAddress} {leaves} address={leaf.tagAddress()} + {publicationTree} + {toc} ref={(el) => setLastElementRef(el, i)} /> diff --git a/src/lib/components/publications/PublicationFeed.svelte b/src/lib/components/publications/PublicationFeed.svelte index 48e4eba..c353627 100644 --- a/src/lib/components/publications/PublicationFeed.svelte +++ b/src/lib/components/publications/PublicationFeed.svelte @@ -1,24 +1,30 @@

{#if loading && eventsInView.length === 0} diff --git a/src/lib/components/publications/PublicationHeader.svelte b/src/lib/components/publications/PublicationHeader.svelte index c1c6222..5cab792 100644 --- a/src/lib/components/publications/PublicationHeader.svelte +++ b/src/lib/components/publications/PublicationHeader.svelte @@ -35,7 +35,7 @@ let title: string = $derived(event.getMatchingTags("title")[0]?.[1]); let author: string = $derived( - event.getMatchingTags(event, "author")[0]?.[1] ?? "unknown", + event.getMatchingTags("author")[0]?.[1] ?? "unknown", ); let version: string = $derived( event.getMatchingTags("version")[0]?.[1] ?? "1", diff --git a/src/lib/components/publications/PublicationSection.svelte b/src/lib/components/publications/PublicationSection.svelte index 6c5b6be..2b9aace 100644 --- a/src/lib/components/publications/PublicationSection.svelte +++ b/src/lib/components/publications/PublicationSection.svelte @@ -1,5 +1,4 @@ @@ -21,14 +14,14 @@ {@const npub = toNpub(identifier)} {#if npub} {#if !displayText || displayText.trim().toLowerCase() === "unknown"} - {#await getUserMetadata(npub) then profile} - {@const p = profile as NostrProfileWithLegacy} + {#await getUserMetadata(npub, undefined, false) then profile} + {@const p = profile as UserProfile} +
`; +} + +/** + * Shared service for processing media URLs + */ +export function processMediaUrl(url: string, alt?: string): string { + const clean = stripTrackingParams(url); + + if (YOUTUBE_URL_REGEX.test(clean)) { + const videoId = extractYouTubeVideoId(clean); + if (videoId) { + return ``; + } + } + + if (VIDEO_URL_REGEX.test(clean)) { + return ``; + } + + if (AUDIO_URL_REGEX.test(clean)) { + return ``; + } + + if (IMAGE_EXTENSIONS.test(clean.split("?")[0])) { + return processImageWithReveal(clean, alt || "Embedded media"); + } + + // Default to clickable link + return `${clean}`; +} + +/** + * Shared service for processing nostr identifiers + */ +export async function processNostrIdentifiersInText( + text: string, + ndk?: NDK, +): Promise { + let processedText = text; + + // Find all profile-related nostr addresses (only npub and nprofile) + const matches = Array.from(processedText.matchAll(NOSTR_PROFILE_REGEX)); + + // Process them in reverse order to avoid index shifting issues + for (let i = matches.length - 1; i >= 0; i--) { + const match = matches[i]; + const [fullMatch] = match; + const matchIndex = match.index ?? 0; + + // Skip if part of a URL + const before = processedText.slice( + Math.max(0, matchIndex - 12), + matchIndex, + ); + if (/https?:\/\/$|www\.$/i.test(before)) { + continue; + } + + // Process the nostr identifier directly + let identifier = fullMatch; + if (!identifier.startsWith("nostr:")) { + identifier = "nostr:" + identifier; + } + + // Get user metadata and create link + let metadata; + if (ndk) { + metadata = await getUserMetadata(identifier, ndk); + } else { + // Fallback when NDK is not available - just use the identifier + metadata = { name: identifier.slice(0, 8) + "..." 
+ identifier.slice(-4) }; + } + const displayText = metadata.displayName || metadata.name; + const link = createProfileLink(identifier, displayText); + + // Replace the match in the text + processedText = processedText.slice(0, matchIndex) + link + + processedText.slice(matchIndex + fullMatch.length); + } + + return processedText; +} + +/** + * Shared service for processing nostr identifiers with embedded events + * Replaces nostr: links with embedded event placeholders + * Only processes event-related identifiers (nevent, naddr, note), not profile identifiers (npub, nprofile) + */ +export function processNostrIdentifiersWithEmbeddedEvents( + text: string, + nestingLevel: number = 0, +): string { + const eventPattern = /nostr:(note|nevent|naddr)[a-zA-Z0-9]{20,}/g; + let processedText = text; + + // Maximum nesting level allowed + const MAX_NESTING_LEVEL = 3; + + // Find all event-related nostr addresses + const matches = Array.from(processedText.matchAll(eventPattern)); + + // Process them in reverse order to avoid index shifting issues + for (let i = matches.length - 1; i >= 0; i--) { + const match = matches[i]; + const [fullMatch] = match; + const matchIndex = match.index ?? 0; + + let replacement: string; + + if (nestingLevel >= MAX_NESTING_LEVEL) { + // At max nesting level, just show the link + replacement = + `${fullMatch}`; + } else { + // Create a placeholder for embedded event + const componentId = `embedded-event-${ + Math.random().toString(36).substr(2, 9) + }`; + replacement = + `
`; + } + + // Replace the match in the text + processedText = processedText.slice(0, matchIndex) + replacement + + processedText.slice(matchIndex + fullMatch.length); + } + + return processedText; +} + +/** + * Shared service for processing emoji shortcodes + */ +export function processEmojiShortcodes(text: string): string { + return emoji.emojify(text); +} + +/** + * Shared service for processing WebSocket URLs + */ +export function processWebSocketUrls(text: string): string { + const wssUrlRegex = /wss:\/\/[^\s<>"]+/g; + return text.replace(wssUrlRegex, (match) => { + const cleanUrl = match.slice(6).replace(/\/+$/, ""); + return `${match}`; + }); +} + +/** + * Shared service for processing hashtags + */ +export function processHashtags(text: string): string { + const hashtagRegex = /(?#$1', + ); +} + +/** + * Shared service for processing basic text formatting + */ +export function processBasicTextFormatting(text: string): string { + // Bold: **text** or *text* + text = text.replace( + /(\*\*|[*])((?:[^*\n]|\*(?!\*))+)\1/g, + "$2", + ); + + // Italic: _text_ or __text__ + text = text.replace(/\b(_[^_\n]+_|\b__[^_\n]+__)\b/g, (match) => { + const text = match.replace(/^_+|_+$/g, ""); + return `${text}`; + }); + + // Strikethrough: ~~text~~ or ~text~ + text = text.replace( + /~~([^~\n]+)~~|~([^~\n]+)~/g, + (_match, doubleText, singleText) => { + const text = doubleText || singleText; + return `${text}`; + }, + ); + + return text; +} + +/** + * Shared service for processing blockquotes + */ +export function processBlockquotes(text: string): string { + const blockquoteRegex = /^([ \t]*>[ \t]?.*)(?:\n\1[ \t]*(?!>).*)*$/gm; + return text.replace(blockquoteRegex, (match) => { + const lines = match.split("\n").map((line) => { + return line.replace(/^[ \t]*>[ \t]?/, "").trim(); + }); + return `
${ + lines.join("\n") + }
`; + }); +} + +// Helper functions +export function stripTrackingParams(url: string): string { + try { + const urlObj = new URL(url); + // Remove common tracking parameters + const trackingParams = [ + "utm_source", + "utm_medium", + "utm_campaign", + "utm_term", + "utm_content", + "fbclid", + "gclid", + ]; + trackingParams.forEach((param) => urlObj.searchParams.delete(param)); + return urlObj.toString(); + } catch { + return url; + } +} + +function extractYouTubeVideoId(url: string): string | null { + const match = url.match( + /(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/|youtube-nocookie\.com\/embed\/)([a-zA-Z0-9_-]{11})/, + ); + return match ? match[1] : null; +} + +/** + * Normalizes a string for use as a d-tag by converting to lowercase, + * replacing non-alphanumeric characters with dashes, and removing + * leading/trailing dashes. + */ +function normalizeDTag(input: string): string { + return input + .toLowerCase() + .replace(/[^\p{L}\p{N}]/gu, "-") + .replace(/-+/g, "-") + .replace(/^-|-$/g, ""); +} + +/** + * Shared service for processing wikilinks in the format [[target]] or [[target|display]] + */ +export function processWikilinks(text: string): string { + // [[target page]] or [[target page|display text]] + return text.replace( + /\[\[([^\]|]+)(?:\|([^\]]+))?\]\]/g, + (_match, target, label) => { + const normalized = normalizeDTag(target.trim()); + const display = (label || target).trim(); + const url = `/events?d=${normalized}`; + return `${display}`; + }, + ); +} + +/** + * Shared service for processing AsciiDoc anchor tags + */ +export function processAsciiDocAnchors(text: string): string { + return text.replace(/<\/a>/g, (_match, id) => { + const normalized = normalizeDTag(id.trim()); + const url = `/events?d=${normalized}`; + return `${id}`; + }); +} diff --git a/src/lib/utils/markup/tikzRenderer.ts b/src/lib/utils/markup/tikzRenderer.ts index 3e194b6..3be3932 100644 --- a/src/lib/utils/markup/tikzRenderer.ts +++ 
b/src/lib/utils/markup/tikzRenderer.ts @@ -44,7 +44,9 @@ function createBasicSVG(tikzCode: string): string {
-
${escapeHtml(tikzCode)}
+
${
+    escapeHtml(tikzCode)
+  }
`; diff --git a/src/lib/utils/mime.ts b/src/lib/utils/mime.ts index b4326db..a8714c3 100644 --- a/src/lib/utils/mime.ts +++ b/src/lib/utils/mime.ts @@ -104,7 +104,7 @@ export function getMimeTags(kind: number): [string, string][] { MTag = ["M", `article/long-form/${replaceability}`]; break; - // Add more cases as needed... + // Add more cases as needed... } return [mTag, MTag]; diff --git a/src/lib/utils/network_detection.ts b/src/lib/utils/network_detection.ts index b7a7315..c1821b8 100644 --- a/src/lib/utils/network_detection.ts +++ b/src/lib/utils/network_detection.ts @@ -4,18 +4,18 @@ import { deduplicateRelayUrls } from "./relay_management.ts"; * Network conditions for relay selection */ export enum NetworkCondition { - ONLINE = 'online', - SLOW = 'slow', - OFFLINE = 'offline' + ONLINE = "online", + SLOW = "slow", + OFFLINE = "offline", } /** * Network connectivity test endpoints */ const NETWORK_ENDPOINTS = [ - 'https://www.google.com/favicon.ico', - 'https://httpbin.org/status/200', - 'https://api.github.com/zen' + "https://www.google.com/favicon.ico", + "https://httpbin.org/status/200", + "https://api.github.com/zen", ]; /** @@ -27,20 +27,23 @@ export async function isNetworkOnline(): Promise { try { // Use a simple fetch without HEAD method to avoid CORS issues await fetch(endpoint, { - method: 'GET', - cache: 'no-cache', + method: "GET", + cache: "no-cache", signal: AbortSignal.timeout(3000), - mode: 'no-cors' // Use no-cors mode to avoid CORS issues + mode: "no-cors", // Use no-cors mode to avoid CORS issues }); // With no-cors mode, we can't check response.ok, so we assume success if no error return true; } catch (error) { - console.debug(`[network_detection.ts] Failed to reach ${endpoint}:`, error); + console.debug( + `[network_detection.ts] Failed to reach ${endpoint}:`, + error, + ); continue; } } - - console.debug('[network_detection.ts] All network endpoints failed'); + + console.debug("[network_detection.ts] All network endpoints failed"); return 
false; } @@ -50,25 +53,30 @@ export async function isNetworkOnline(): Promise { */ export async function testNetworkSpeed(): Promise { const startTime = performance.now(); - + for (const endpoint of NETWORK_ENDPOINTS) { try { await fetch(endpoint, { - method: 'GET', - cache: 'no-cache', + method: "GET", + cache: "no-cache", signal: AbortSignal.timeout(5000), - mode: 'no-cors' // Use no-cors mode to avoid CORS issues + mode: "no-cors", // Use no-cors mode to avoid CORS issues }); - + const endTime = performance.now(); return endTime - startTime; } catch (error) { - console.debug(`[network_detection.ts] Speed test failed for ${endpoint}:`, error); + console.debug( + `[network_detection.ts] Speed test failed for ${endpoint}:`, + error, + ); continue; } } - - console.debug('[network_detection.ts] Network speed test failed for all endpoints'); + + console.debug( + "[network_detection.ts] Network speed test failed for all endpoints", + ); return Infinity; // Very slow if it fails } @@ -78,21 +86,25 @@ export async function testNetworkSpeed(): Promise { */ export async function detectNetworkCondition(): Promise { const isOnline = await isNetworkOnline(); - + if (!isOnline) { - console.debug('[network_detection.ts] Network condition: OFFLINE'); + console.debug("[network_detection.ts] Network condition: OFFLINE"); return NetworkCondition.OFFLINE; } - + const speed = await testNetworkSpeed(); - + // Consider network slow if response time > 2000ms if (speed > 2000) { - console.debug(`[network_detection.ts] Network condition: SLOW (${speed.toFixed(0)}ms)`); + console.debug( + `[network_detection.ts] Network condition: SLOW (${speed.toFixed(0)}ms)`, + ); return NetworkCondition.SLOW; } - - console.debug(`[network_detection.ts] Network condition: ONLINE (${speed.toFixed(0)}ms)`); + + console.debug( + `[network_detection.ts] Network condition: ONLINE (${speed.toFixed(0)}ms)`, + ); return NetworkCondition.ONLINE; } @@ -108,39 +120,49 @@ export function 
getRelaySetForNetworkCondition( networkCondition: NetworkCondition, discoveredLocalRelays: string[], lowbandwidthRelays: string[], - fullRelaySet: { inboxRelays: string[]; outboxRelays: string[] } + fullRelaySet: { inboxRelays: string[]; outboxRelays: string[] }, ): { inboxRelays: string[]; outboxRelays: string[] } { switch (networkCondition) { case NetworkCondition.OFFLINE: // When offline, use local relays if available, otherwise rely on cache // This will be improved when IndexedDB local relay is implemented if (discoveredLocalRelays.length > 0) { - console.debug('[network_detection.ts] Using local relays (offline)'); + console.debug("[network_detection.ts] Using local relays (offline)"); return { inboxRelays: discoveredLocalRelays, - outboxRelays: discoveredLocalRelays + outboxRelays: discoveredLocalRelays, }; } else { - console.debug('[network_detection.ts] No local relays available, will rely on cache (offline)'); + console.debug( + "[network_detection.ts] No local relays available, will rely on cache (offline)", + ); return { inboxRelays: [], - outboxRelays: [] + outboxRelays: [], }; } case NetworkCondition.SLOW: { // Local relays + low bandwidth relays when slow (deduplicated) - console.debug('[network_detection.ts] Using local + low bandwidth relays (slow network)'); - const slowInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]); - const slowOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]); + console.debug( + "[network_detection.ts] Using local + low bandwidth relays (slow network)", + ); + const slowInboxRelays = deduplicateRelayUrls([ + ...discoveredLocalRelays, + ...lowbandwidthRelays, + ]); + const slowOutboxRelays = deduplicateRelayUrls([ + ...discoveredLocalRelays, + ...lowbandwidthRelays, + ]); return { inboxRelays: slowInboxRelays, - outboxRelays: slowOutboxRelays + outboxRelays: slowOutboxRelays, }; } case NetworkCondition.ONLINE: default: // Full relay set when online - 
console.debug('[network_detection.ts] Using full relay set (online)'); + console.debug("[network_detection.ts] Using full relay set (online)"); return fullRelaySet; } } @@ -161,14 +183,16 @@ export function startNetworkMonitoring( const checkNetwork = async () => { try { const currentCondition = await detectNetworkCondition(); - + if (currentCondition !== lastCondition) { - console.debug(`[network_detection.ts] Network condition changed: ${lastCondition} -> ${currentCondition}`); + console.debug( + `[network_detection.ts] Network condition changed: ${lastCondition} -> ${currentCondition}`, + ); lastCondition = currentCondition; onNetworkChange(currentCondition); } } catch (error) { - console.warn('[network_detection.ts] Network monitoring error:', error); + console.warn("[network_detection.ts] Network monitoring error:", error); } }; @@ -185,4 +209,4 @@ export function startNetworkMonitoring( intervalId = null; } }; -} \ No newline at end of file +} diff --git a/src/lib/utils/nostrEventService.ts b/src/lib/utils/nostrEventService.ts index cdea5e1..403eeed 100644 --- a/src/lib/utils/nostrEventService.ts +++ b/src/lib/utils/nostrEventService.ts @@ -1,10 +1,9 @@ import { nip19 } from "nostr-tools"; -import { getEventHash, signEvent, prefixNostrAddresses } from "./nostrUtils.ts"; -import { get } from "svelte/store"; +import { getEventHash, prefixNostrAddresses, signEvent } from "./nostrUtils.ts"; import { goto } from "$app/navigation"; import { EVENT_KINDS, TIME_CONSTANTS } from "./search_constants.ts"; -import { ndkInstance } from "../ndk.ts"; -import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; +import { EXPIRATION_DURATION } from "../consts.ts"; +import NDK, { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk"; export interface RootEventInfo { rootId: string; @@ -95,21 +94,21 @@ export function extractRootEventInfo(parent: NDKEvent): RootEventInfo { rootInfo.rootId = rootE[1]; rootInfo.rootRelay = getRelayString(rootE[2]); rootInfo.rootPubkey = 
getPubkeyString(rootE[3] || rootInfo.rootPubkey); - rootInfo.rootKind = - Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind; + rootInfo.rootKind = Number(getTagValue(parent.tags, "K")) || + rootInfo.rootKind; } else if (rootA) { rootInfo.rootAddress = rootA[1]; rootInfo.rootRelay = getRelayString(rootA[2]); rootInfo.rootPubkey = getPubkeyString( getTagValue(parent.tags, "P") || rootInfo.rootPubkey, ); - rootInfo.rootKind = - Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind; + rootInfo.rootKind = Number(getTagValue(parent.tags, "K")) || + rootInfo.rootKind; } else if (rootI) { rootInfo.rootIValue = rootI[1]; rootInfo.rootIRelay = getRelayString(rootI[2]); - rootInfo.rootKind = - Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind; + rootInfo.rootKind = Number(getTagValue(parent.tags, "K")) || + rootInfo.rootKind; } return rootInfo; @@ -223,7 +222,8 @@ export function buildReplyTags( if (isParentReplaceable) { const dTag = getTagValue(parent.tags || [], "d"); if (dTag) { - const parentAddress = `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`; + const parentAddress = + `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`; addTags(tags, createTag("a", parentAddress, "", "root")); } } @@ -232,7 +232,8 @@ export function buildReplyTags( if (isParentReplaceable) { const dTag = getTagValue(parent.tags || [], "d"); if (dTag) { - const parentAddress = `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`; + const parentAddress = + `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`; if (isReplyToComment) { // Root scope (uppercase) - use the original article @@ -316,16 +317,25 @@ export async function createSignedEvent( pubkey: string, kind: number, tags: string[][], -// deno-lint-ignore no-explicit-any + // deno-lint-ignore no-explicit-any ): Promise<{ id: string; sig: string; event: any }> { const prefixedContent = prefixNostrAddresses(content); + // Add expiration tag for kind 24 events (NIP-40) + const 
finalTags = [...tags]; + if (kind === 24) { + const expirationTimestamp = + Math.floor(Date.now() / TIME_CONSTANTS.UNIX_TIMESTAMP_FACTOR) + + EXPIRATION_DURATION; + finalTags.push(["expiration", String(expirationTimestamp)]); + } + const eventToSign = { kind: Number(kind), created_at: Number( Math.floor(Date.now() / TIME_CONSTANTS.UNIX_TIMESTAMP_FACTOR), ), - tags: tags.map((tag) => [ + tags: finalTags.map((tag: any) => [ String(tag[0]), String(tag[1]), String(tag[2] || ""), @@ -336,7 +346,10 @@ export async function createSignedEvent( }; let sig, id; - if (typeof window !== "undefined" && globalThis.nostr && globalThis.nostr.signEvent) { + if ( + typeof window !== "undefined" && globalThis.nostr && + globalThis.nostr.signEvent + ) { const signed = await globalThis.nostr.signEvent(eventToSign); sig = signed.sig as string; id = "id" in signed ? (signed.id as string) : getEventHash(eventToSign); @@ -365,9 +378,9 @@ export async function createSignedEvent( export async function publishEvent( event: NDKEvent, relayUrls: string[], + ndk: NDK, ): Promise { const successfulRelays: string[] = []; - const ndk = get(ndkInstance); if (!ndk) { throw new Error("NDK instance not available"); @@ -379,7 +392,7 @@ export async function publishEvent( try { // If event is a plain object, create an NDKEvent from it let ndkEvent: NDKEvent; - if (event.publish && typeof event.publish === 'function') { + if (event.publish && typeof event.publish === "function") { // It's already an NDKEvent ndkEvent = event; } else { @@ -389,15 +402,15 @@ export async function publishEvent( // Publish with timeout await ndkEvent.publish(relaySet).withTimeout(5000); - + // For now, assume all relays were successful // In a more sophisticated implementation, you'd track individual relay responses successfulRelays.push(...relayUrls); - + console.debug("[nostrEventService] Published event successfully:", { eventId: ndkEvent.id, relayCount: relayUrls.length, - successfulRelays + successfulRelays, }); } catch 
(error) { console.error("[nostrEventService] Failed to publish event:", error); diff --git a/src/lib/utils/nostrUtils.ts b/src/lib/utils/nostrUtils.ts index c36108f..ef3e8ca 100644 --- a/src/lib/utils/nostrUtils.ts +++ b/src/lib/utils/nostrUtils.ts @@ -1,11 +1,16 @@ import { get } from "svelte/store"; import { nip19 } from "nostr-tools"; -import { ndkInstance } from "../ndk.ts"; -import { npubCache } from "./npubCache.ts"; +import { unifiedProfileCache } from "./npubCache.ts"; import NDK, { NDKEvent, NDKRelaySet, NDKUser } from "@nostr-dev-kit/ndk"; -import type { NDKKind, NostrEvent } from "@nostr-dev-kit/ndk"; +import type { NostrEvent } from "@nostr-dev-kit/ndk"; import type { Filter } from "./search_types.ts"; -import { communityRelays, secondaryRelays } from "../consts.ts"; +import { + anonymousRelays, + communityRelays, + searchRelays, + secondaryRelays, + localRelays, +} from "../consts.ts"; import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts"; import { NDKRelaySet as NDKRelaySetFromNDK } from "@nostr-dev-kit/ndk"; import { sha256 } from "@noble/hashes/sha2.js"; @@ -51,88 +56,23 @@ function escapeHtml(text: string): string { return text.replace(/[&<>"']/g, (char) => htmlEscapes[char]); } +/** + * Escape regex special characters + */ +function escapeRegExp(string: string): string { + return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); +} + /** * Get user metadata for a nostr identifier (npub or nprofile) */ export async function getUserMetadata( identifier: string, + ndk?: NDK, force = false, ): Promise { - // Remove nostr: prefix if present - const cleanId = identifier.replace(/^nostr:/, ""); - - console.log("getUserMetadata called with identifier:", identifier, "force:", force); - - if (!force && npubCache.has(cleanId)) { - const cached = npubCache.get(cleanId)!; - console.log("getUserMetadata returning cached profile:", cached); - return cached; - } - - const fallback = { name: `${cleanId.slice(0, 8)}...${cleanId.slice(-4)}` }; - - try { - 
const ndk = get(ndkInstance); - if (!ndk) { - console.warn("getUserMetadata: No NDK instance available"); - npubCache.set(cleanId, fallback); - return fallback; - } - - const decoded = nip19.decode(cleanId); - if (!decoded) { - console.warn("getUserMetadata: Failed to decode identifier:", cleanId); - npubCache.set(cleanId, fallback); - return fallback; - } - - // Handle different identifier types - let pubkey: string; - if (decoded.type === "npub") { - pubkey = decoded.data; - } else if (decoded.type === "nprofile") { - pubkey = decoded.data.pubkey; - } else { - console.warn("getUserMetadata: Unsupported identifier type:", decoded.type); - npubCache.set(cleanId, fallback); - return fallback; - } - - console.log("getUserMetadata: Fetching profile for pubkey:", pubkey); - - const profileEvent = await fetchEventWithFallback(ndk, { - kinds: [0], - authors: [pubkey], - }); - - console.log("getUserMetadata: Profile event found:", profileEvent); - - const profile = - profileEvent && profileEvent.content - ? 
JSON.parse(profileEvent.content) - : null; - - console.log("getUserMetadata: Parsed profile:", profile); - - const metadata: NostrProfile = { - name: profile?.name || fallback.name, - displayName: profile?.displayName || profile?.display_name, - nip05: profile?.nip05, - picture: profile?.picture || profile?.image, - about: profile?.about, - banner: profile?.banner, - website: profile?.website, - lud16: profile?.lud16, - }; - - console.log("getUserMetadata: Final metadata:", metadata); - npubCache.set(cleanId, metadata); - return metadata; - } catch (e) { - console.error("getUserMetadata: Error fetching profile:", e); - npubCache.set(cleanId, fallback); - return fallback; - } + // Use the unified profile cache which handles all relay searching and caching + return unifiedProfileCache.getProfile(identifier, ndk, force); } /** @@ -157,8 +97,8 @@ export function createProfileLink( export async function createProfileLinkWithVerification( identifier: string, displayText: string | undefined, + ndk?: NDK, ): Promise { - const ndk = get(ndkInstance) as NDK; if (!ndk) { return createProfileLink(identifier, displayText); } @@ -192,6 +132,7 @@ export async function createProfileLinkWithVerification( }; const allRelays = [ + ...searchRelays, // Include search relays for profile searches ...communityRelays, ...userRelays, ...secondaryRelays, @@ -215,8 +156,7 @@ export async function createProfileLinkWithVerification( const defaultText = `${cleanId.slice(0, 8)}...${cleanId.slice(-4)}`; const escapedText = escapeHtml(displayText || defaultText); - const displayIdentifier = - profile?.displayName ?? + const displayIdentifier = profile?.displayName ?? profile?.display_name ?? profile?.name ?? 
escapedText; @@ -253,6 +193,7 @@ function createNoteLink(identifier: string): string { */ export async function processNostrIdentifiers( content: string, + ndk: NDK, ): Promise { let processedContent = content; @@ -275,10 +216,14 @@ export async function processNostrIdentifiers( if (!identifier.startsWith("nostr:")) { identifier = "nostr:" + identifier; } - const metadata = await getUserMetadata(identifier); + const metadata = await getUserMetadata(identifier, ndk); const displayText = metadata.displayName || metadata.name; const link = createProfileLink(identifier, displayText); - processedContent = processedContent.replace(fullMatch, link); + // Replace all occurrences of this exact match + processedContent = processedContent.replace( + new RegExp(escapeRegExp(fullMatch), "g"), + link, + ); } // Process notes (nevent, note, naddr) @@ -294,7 +239,11 @@ export async function processNostrIdentifiers( identifier = "nostr:" + identifier; } const link = createNoteLink(identifier); - processedContent = processedContent.replace(fullMatch, link); + // Replace all occurrences of this exact match + processedContent = processedContent.replace( + new RegExp(escapeRegExp(fullMatch), "g"), + link, + ); } return processedContent; @@ -399,7 +348,7 @@ export function withTimeout( return Promise.race([ promise, new Promise((_, reject) => - setTimeout(() => reject(new Error("Timeout")), timeoutMs), + setTimeout(() => reject(new Error("Timeout")), timeoutMs) ), ]); } @@ -410,7 +359,7 @@ export function withTimeout( return Promise.race([ promise, new Promise((_, reject) => - setTimeout(() => reject(new Error("Timeout")), timeoutMs), + setTimeout(() => reject(new Error("Timeout")), timeoutMs) ), ]); } @@ -441,34 +390,66 @@ Promise.prototype.withTimeout = function ( export async function fetchEventWithFallback( ndk: NDK, filterOrId: string | Filter, - timeoutMs: number = 3000, + timeoutMs: number = 10000, ): Promise { - // Use both inbox and outbox relays for better event discovery + // 
AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive event discovery + // This ensures we don't miss events that might be on any available relay + + // Get all relays from NDK pool first (most comprehensive) + const poolRelays = Array.from(ndk.pool.relays.values()).map((r: any) => + r.url + ); const inboxRelays = get(activeInboxRelays); const outboxRelays = get(activeOutboxRelays); - const allRelays = [...inboxRelays, ...outboxRelays]; - + + // Combine all available relays, prioritizing pool relays + let allRelays = [ + ...new Set([...poolRelays, ...inboxRelays, ...outboxRelays]), + ]; + + console.log("fetchEventWithFallback: Using pool relays:", poolRelays); console.log("fetchEventWithFallback: Using inbox relays:", inboxRelays); console.log("fetchEventWithFallback: Using outbox relays:", outboxRelays); - + console.log("fetchEventWithFallback: Total unique relays:", allRelays.length); + // Check if we have any relays available if (allRelays.length === 0) { - console.warn("fetchEventWithFallback: No relays available for event fetch"); - return null; + console.warn( + "fetchEventWithFallback: No relays available for event fetch, using fallback relays", + ); + // Use fallback relays when no relays are available + // AI-NOTE: 2025-01-24 - Include ALL available relays for comprehensive event discovery + // This ensures we don't miss events that might be on any available relay + allRelays = [ + ...secondaryRelays, + ...searchRelays, + ...anonymousRelays, + ...inboxRelays, // Include user's inbox relays + ...outboxRelays, // Include user's outbox relays + ]; + console.log("fetchEventWithFallback: Using fallback relays:", allRelays); } - + // Create relay set from all available relays const relaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk); try { if (relaySet.relays.size === 0) { - console.warn("fetchEventWithFallback: No relays in relay set for event fetch"); + console.warn( + "fetchEventWithFallback: No relays in relay set for event fetch", + ); 
return null; } - console.log("fetchEventWithFallback: Relay set size:", relaySet.relays.size); + console.log( + "fetchEventWithFallback: Relay set size:", + relaySet.relays.size, + ); console.log("fetchEventWithFallback: Filter:", filterOrId); - console.log("fetchEventWithFallback: Relay URLs:", Array.from(relaySet.relays).map((r) => r.url)); + console.log( + "fetchEventWithFallback: Relay URLs:", + Array.from(relaySet.relays).map((r) => r.url), + ); let found: NDKEvent | null = null; @@ -480,8 +461,9 @@ export async function fetchEventWithFallback( .fetchEvent({ ids: [filterOrId] }, undefined, relaySet) .withTimeout(timeoutMs); } else { - const filter = - typeof filterOrId === "string" ? { ids: [filterOrId] } : filterOrId; + const filter = typeof filterOrId === "string" + ? { ids: [filterOrId] } + : filterOrId; const results = await ndk .fetchEvents(filter, undefined, relaySet) .withTimeout(timeoutMs); @@ -492,7 +474,9 @@ export async function fetchEventWithFallback( if (!found) { const timeoutSeconds = timeoutMs / 1000; - const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(", "); + const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join( + ", ", + ); console.warn( `fetchEventWithFallback: Event not found after ${timeoutSeconds}s timeout. Tried inbox relays: ${relayUrls}. Some relays may be offline or slow.`, ); @@ -503,29 +487,49 @@ export async function fetchEventWithFallback( // Always wrap as NDKEvent return found instanceof NDKEvent ? found : new NDKEvent(ndk, found); } catch (err) { - if (err instanceof Error && err.message === 'Timeout') { + if (err instanceof Error && err.message === "Timeout") { const timeoutSeconds = timeoutMs / 1000; - const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(", "); + const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join( + ", ", + ); console.warn( `fetchEventWithFallback: Event fetch timed out after ${timeoutSeconds}s. Tried inbox relays: ${relayUrls}. 
Some relays may be offline or slow.`, ); } else { - console.error("fetchEventWithFallback: Error in fetchEventWithFallback:", err); + console.error( + "fetchEventWithFallback: Error in fetchEventWithFallback:", + err, + ); } return null; } } /** - * Converts a hex pubkey to npub, or returns npub if already encoded. + * Converts various Nostr identifiers to npub format. + * Handles hex pubkeys, npub strings, and nprofile strings. */ export function toNpub(pubkey: string | undefined): string | null { if (!pubkey) return null; try { + // If it's already an npub, return it + if (pubkey.startsWith("npub")) return pubkey; + + // If it's a hex pubkey, convert to npub if (new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, "i").test(pubkey)) { return nip19.npubEncode(pubkey); } - if (pubkey.startsWith("npub1")) return pubkey; + + // If it's an nprofile, decode and extract npub + if (pubkey.startsWith("nprofile")) { + const decoded = nip19.decode(pubkey); + if (decoded.type === "nprofile") { + return decoded.data.pubkey + ? 
nip19.npubEncode(decoded.data.pubkey) + : null; + } + } + return null; } catch { return null; @@ -540,7 +544,10 @@ export function createRelaySetFromUrls(relayUrls: string[], ndk: NDK) { return NDKRelaySetFromNDK.fromRelayUrls(relayUrls, ndk); } -export function createNDKEvent(ndk: NDK, rawEvent: NDKEvent | NostrEvent | undefined) { +export function createNDKEvent( + ndk: NDK, + rawEvent: NDKEvent | NostrEvent | undefined, +) { return new NDKEvent(ndk, rawEvent); } diff --git a/src/lib/utils/nostr_identifiers.ts b/src/lib/utils/nostr_identifiers.ts index 8e789d7..78d1a3d 100644 --- a/src/lib/utils/nostr_identifiers.ts +++ b/src/lib/utils/nostr_identifiers.ts @@ -1,4 +1,4 @@ -import { VALIDATION } from './search_constants'; +import { VALIDATION } from "./search_constants"; /** * Nostr identifier types @@ -22,7 +22,7 @@ export interface ParsedCoordinate { * @returns True if it's a valid hex event ID */ export function isEventId(id: string): id is NostrEventId { - return new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, 'i').test(id); + return new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, "i").test(id); } /** @@ -30,22 +30,24 @@ export function isEventId(id: string): id is NostrEventId { * @param coordinate The string to check * @returns True if it's a valid coordinate */ -export function isCoordinate(coordinate: string): coordinate is NostrCoordinate { - const parts = coordinate.split(':'); +export function isCoordinate( + coordinate: string, +): coordinate is NostrCoordinate { + const parts = coordinate.split(":"); if (parts.length < 3) return false; - + const [kindStr, pubkey, ...dTagParts] = parts; - + // Check if kind is a valid number const kind = parseInt(kindStr, 10); if (isNaN(kind) || kind < 0) return false; - + // Check if pubkey is a valid hex string if (!isEventId(pubkey)) return false; - + // Check if d-tag exists (can contain colons) if (dTagParts.length === 0) return false; - + return true; } @@ -56,14 +58,14 @@ export function 
isCoordinate(coordinate: string): coordinate is NostrCoordinate */ export function parseCoordinate(coordinate: string): ParsedCoordinate | null { if (!isCoordinate(coordinate)) return null; - - const parts = coordinate.split(':'); + + const parts = coordinate.split(":"); const [kindStr, pubkey, ...dTagParts] = parts; - + return { kind: parseInt(kindStr, 10), pubkey, - dTag: dTagParts.join(':') // Rejoin in case d-tag contains colons + dTag: dTagParts.join(":"), // Rejoin in case d-tag contains colons }; } @@ -74,7 +76,11 @@ export function parseCoordinate(coordinate: string): ParsedCoordinate | null { * @param dTag The d-tag value * @returns The coordinate string */ -export function createCoordinate(kind: number, pubkey: string, dTag: string): NostrCoordinate { +export function createCoordinate( + kind: number, + pubkey: string, + dTag: string, +): NostrCoordinate { return `${kind}:${pubkey}:${dTag}`; } @@ -83,6 +89,8 @@ export function createCoordinate(kind: number, pubkey: string, dTag: string): No * @param identifier The string to check * @returns True if it's a valid Nostr identifier */ -export function isNostrIdentifier(identifier: string): identifier is NostrIdentifier { +export function isNostrIdentifier( + identifier: string, +): identifier is NostrIdentifier { return isEventId(identifier) || isCoordinate(identifier); -} \ No newline at end of file +} diff --git a/src/lib/utils/npubCache.ts b/src/lib/utils/npubCache.ts index 4fc4405..bc50d7b 100644 --- a/src/lib/utils/npubCache.ts +++ b/src/lib/utils/npubCache.ts @@ -1,51 +1,398 @@ -import type { NostrProfile } from "./nostrUtils"; +import type { NostrProfile } from "./search_types"; +import NDK, { NDKEvent } from "@nostr-dev-kit/ndk"; +import { fetchEventWithFallback } from "./nostrUtils"; +import { nip19 } from "nostr-tools"; export type NpubMetadata = NostrProfile; -class NpubCache { - private cache: Record = {}; +interface CacheEntry { + profile: NpubMetadata; + timestamp: number; + pubkey: string; + 
relaySource?: string; +} + +class UnifiedProfileCache { + private cache: Map = new Map(); + private readonly storageKey = "alexandria_unified_profile_cache"; + private readonly maxAge = 2 * 60 * 60 * 1000; // 2 hours in milliseconds - shorter for fresher data - get(key: string): NpubMetadata | undefined { - return this.cache[key]; + constructor() { + this.loadFromStorage(); } - set(key: string, value: NpubMetadata): void { - this.cache[key] = value; + private loadFromStorage(): void { + try { + if (typeof window !== "undefined") { + const stored = localStorage.getItem(this.storageKey); + if (stored) { + const data = JSON.parse(stored) as Record; + const now = Date.now(); + + // Filter out expired entries + for (const [key, entry] of Object.entries(data)) { + if (entry.timestamp && (now - entry.timestamp) < this.maxAge) { + this.cache.set(key, entry); + } + } + } + } + } catch (error) { + console.warn("Failed to load unified profile cache from storage:", error); + } } - has(key: string): boolean { - return key in this.cache; + private saveToStorage(): void { + try { + if (typeof window !== "undefined") { + const data: Record = {}; + for (const [key, entry] of this.cache.entries()) { + data[key] = entry; + } + localStorage.setItem(this.storageKey, JSON.stringify(data)); + } + } catch (error) { + console.warn("Failed to save unified profile cache to storage:", error); + } } - delete(key: string): boolean { - if (key in this.cache) { - delete this.cache[key]; - return true; + /** + * Get profile data, fetching fresh data if needed + */ + async getProfile(identifier: string, ndk?: NDK, force = false): Promise { + const cleanId = identifier.replace(/^nostr:/, ""); + + // Check cache first (unless forced) + if (!force && this.cache.has(cleanId)) { + const entry = this.cache.get(cleanId)!; + const now = Date.now(); + + // Return cached data if not expired + if ((now - entry.timestamp) < this.maxAge) { + console.log("UnifiedProfileCache: Returning cached profile:", 
cleanId); + return entry.profile; + } } + + // Fetch fresh data + return this.fetchAndCacheProfile(cleanId, ndk); + } + + /** + * Fetch profile from all available relays and cache it + */ + private async fetchAndCacheProfile(identifier: string, ndk?: NDK): Promise { + const fallback = { name: `${identifier.slice(0, 8)}...${identifier.slice(-4)}` }; + + try { + if (!ndk) { + console.warn("UnifiedProfileCache: No NDK instance available"); + return fallback; + } + + const decoded = nip19.decode(identifier); + if (!decoded) { + console.warn("UnifiedProfileCache: Failed to decode identifier:", identifier); + return fallback; + } + + // Handle different identifier types + let pubkey: string; + if (decoded.type === "npub") { + pubkey = decoded.data; + } else if (decoded.type === "nprofile") { + pubkey = decoded.data.pubkey; + } else { + console.warn("UnifiedProfileCache: Unsupported identifier type:", decoded.type); + return fallback; + } + + console.log("UnifiedProfileCache: Fetching fresh profile for pubkey:", pubkey); + + // Use fetchEventWithFallback to search ALL available relays + const profileEvent = await fetchEventWithFallback(ndk, { + kinds: [0], + authors: [pubkey], + }); + + if (!profileEvent || !profileEvent.content) { + console.warn("UnifiedProfileCache: No profile event found for:", pubkey); + return fallback; + } + + const profile = JSON.parse(profileEvent.content); + const metadata: NostrProfile = { + name: profile?.name || fallback.name, + displayName: profile?.displayName || profile?.display_name, + nip05: profile?.nip05, + picture: profile?.picture || profile?.image, + about: profile?.about, + banner: profile?.banner, + website: profile?.website, + lud16: profile?.lud16, + }; + + // Cache the fresh data + const entry: CacheEntry = { + profile: metadata, + timestamp: Date.now(), + pubkey: pubkey, + relaySource: profileEvent.relay?.url, + }; + + this.cache.set(identifier, entry); + this.cache.set(pubkey, entry); // Also cache by pubkey for convenience + 
this.saveToStorage(); + + console.log("UnifiedProfileCache: Cached fresh profile:", metadata); + return metadata; + + } catch (e) { + console.error("UnifiedProfileCache: Error fetching profile:", e); + return fallback; + } + } + + /** + * Get cached profile without fetching (synchronous) + */ + getCached(identifier: string): NpubMetadata | undefined { + const cleanId = identifier.replace(/^nostr:/, ""); + const entry = this.cache.get(cleanId); + + if (entry) { + const now = Date.now(); + if ((now - entry.timestamp) < this.maxAge) { + return entry.profile; + } else { + // Remove expired entry + this.cache.delete(cleanId); + } + } + + return undefined; + } + + /** + * Set profile data in cache + */ + set(identifier: string, profile: NpubMetadata, pubkey?: string, relaySource?: string): void { + const cleanId = identifier.replace(/^nostr:/, ""); + const entry: CacheEntry = { + profile, + timestamp: Date.now(), + pubkey: pubkey || cleanId, + relaySource, + }; + + this.cache.set(cleanId, entry); + if (pubkey && pubkey !== cleanId) { + this.cache.set(pubkey, entry); + } + this.saveToStorage(); + } + + /** + * Check if profile is cached and valid + */ + has(identifier: string): boolean { + const cleanId = identifier.replace(/^nostr:/, ""); + const entry = this.cache.get(cleanId); + + if (entry) { + const now = Date.now(); + if ((now - entry.timestamp) < this.maxAge) { + return true; + } else { + // Remove expired entry + this.cache.delete(cleanId); + } + } + return false; } - deleteMany(keys: string[]): number { - let deleted = 0; - for (const key of keys) { - if (this.delete(key)) { - deleted++; + /** + * Remove profile from cache + */ + delete(identifier: string): boolean { + const cleanId = identifier.replace(/^nostr:/, ""); + const entry = this.cache.get(cleanId); + + if (entry) { + this.cache.delete(cleanId); + if (entry.pubkey && entry.pubkey !== cleanId) { + this.cache.delete(entry.pubkey); } + this.saveToStorage(); + return true; } - return deleted; + + return 
false; } + /** + * Clear all cached profiles + */ clear(): void { - this.cache = {}; + this.cache.clear(); + this.saveToStorage(); } + /** + * Get cache size + */ size(): number { - return Object.keys(this.cache).length; + return this.cache.size; } + /** + * Get all cached profiles + */ getAll(): Record { - return { ...this.cache }; + const result: Record = {}; + for (const [key, entry] of this.cache.entries()) { + result[key] = entry.profile; + } + return result; + } + + /** + * Clean up expired entries + */ + cleanup(): void { + const now = Date.now(); + const expiredKeys: string[] = []; + + for (const [key, entry] of this.cache.entries()) { + if ((now - entry.timestamp) >= this.maxAge) { + expiredKeys.push(key); + } + } + + expiredKeys.forEach(key => this.cache.delete(key)); + + if (expiredKeys.length > 0) { + this.saveToStorage(); + console.log(`UnifiedProfileCache: Cleaned up ${expiredKeys.length} expired entries`); + } } } -export const npubCache = new NpubCache(); +// Export the unified cache instance +export const unifiedProfileCache = new UnifiedProfileCache(); + +// Clean up expired entries every 30 minutes +if (typeof window !== "undefined") { + setInterval(() => { + unifiedProfileCache.cleanup(); + }, 30 * 60 * 1000); +} + +// Legacy compatibility - keep the old npubCache for backward compatibility +// but make it use the unified cache internally +export const npubCache = { + get: (key: string) => unifiedProfileCache.getCached(key), + set: (key: string, value: NpubMetadata) => unifiedProfileCache.set(key, value), + has: (key: string) => unifiedProfileCache.has(key), + delete: (key: string) => unifiedProfileCache.delete(key), + clear: () => unifiedProfileCache.clear(), + size: () => unifiedProfileCache.size(), + getAll: () => unifiedProfileCache.getAll(), +}; + +// Legacy compatibility for old profileCache functions +export async function getDisplayName(pubkey: string, ndk: NDK): Promise { + const profile = await unifiedProfileCache.getProfile(pubkey, 
ndk); + return profile.displayName || profile.name || `${pubkey.slice(0, 8)}...${pubkey.slice(-4)}`; +} + +export function getDisplayNameSync(pubkey: string): string { + const profile = unifiedProfileCache.getCached(pubkey); + return profile?.displayName || profile?.name || `${pubkey.slice(0, 8)}...${pubkey.slice(-4)}`; +} + +export async function batchFetchProfiles( + pubkeys: string[], + ndk: NDK, + onProgress?: (fetched: number, total: number) => void, +): Promise { + const allProfileEvents: NDKEvent[] = []; + + if (onProgress) onProgress(0, pubkeys.length); + + // Fetch profiles in parallel using the unified cache + const fetchPromises = pubkeys.map(async (pubkey, index) => { + try { + const profile = await unifiedProfileCache.getProfile(pubkey, ndk); + if (onProgress) onProgress(index + 1, pubkeys.length); + + // Create a mock NDKEvent for compatibility + const event = new NDKEvent(ndk); + event.content = JSON.stringify(profile); + event.pubkey = pubkey; + return event; + } catch (e) { + console.error(`Failed to fetch profile for ${pubkey}:`, e); + return null; + } + }); + + const results = await Promise.allSettled(fetchPromises); + results.forEach(result => { + if (result.status === 'fulfilled' && result.value) { + allProfileEvents.push(result.value); + } + }); + + return allProfileEvents; +} + +export function extractPubkeysFromEvents(events: NDKEvent[]): Set { + const pubkeys = new Set(); + + events.forEach((event) => { + // Add author pubkey + if (event.pubkey) { + pubkeys.add(event.pubkey); + } + + // Add pubkeys from p tags + const pTags = event.getMatchingTags("p"); + pTags.forEach((tag) => { + if (tag[1]) { + pubkeys.add(tag[1]); + } + }); + + // Extract pubkeys from content (nostr:npub1... 
format) + const npubPattern = /nostr:npub1[a-z0-9]{58}/g; + const matches = event.content?.match(npubPattern) || []; + matches.forEach((match) => { + try { + const npub = match.replace("nostr:", ""); + const decoded = nip19.decode(npub); + if (decoded.type === "npub") { + pubkeys.add(decoded.data as string); + } + } catch (e) { + // Invalid npub, ignore + } + }); + }); + + return pubkeys; +} + +export function clearProfileCache(): void { + unifiedProfileCache.clear(); +} + +export function replacePubkeysWithDisplayNames(text: string): string { + // Match hex pubkeys (64 characters) + const pubkeyRegex = /\b[0-9a-fA-F]{64}\b/g; + + return text.replace(pubkeyRegex, (match) => { + return getDisplayNameSync(match); + }); +} diff --git a/src/lib/utils/profileCache.ts b/src/lib/utils/profileCache.ts deleted file mode 100644 index 2a93a45..0000000 --- a/src/lib/utils/profileCache.ts +++ /dev/null @@ -1,252 +0,0 @@ -import type { NDKEvent } from "@nostr-dev-kit/ndk"; -import { ndkInstance } from "$lib/ndk"; -import { get } from "svelte/store"; -import { nip19 } from "nostr-tools"; - -interface ProfileData { - display_name?: string; - name?: string; - picture?: string; - about?: string; -} - -// Cache for user profiles -const profileCache = new Map(); - -/** - * Fetches profile data for a pubkey - * @param pubkey - The public key to fetch profile for - * @returns Profile data or null if not found - */ -async function fetchProfile(pubkey: string): Promise { - try { - const ndk = get(ndkInstance); - const profileEvents = await ndk.fetchEvents({ - kinds: [0], - authors: [pubkey], - limit: 1 - }); - - if (profileEvents.size === 0) { - return null; - } - - // Get the most recent profile event - const profileEvent = Array.from(profileEvents)[0]; - - try { - const content = JSON.parse(profileEvent.content); - return content as ProfileData; - } catch (e) { - console.error("Failed to parse profile content:", e); - return null; - } - } catch (e) { - console.error("Failed to fetch 
profile:", e); - return null; - } -} - -/** - * Gets the display name for a pubkey, using cache - * @param pubkey - The public key to get display name for - * @returns Display name, name, or shortened pubkey - */ -export async function getDisplayName(pubkey: string): Promise { - // Check cache first - if (profileCache.has(pubkey)) { - const profile = profileCache.get(pubkey)!; - return profile.display_name || profile.name || shortenPubkey(pubkey); - } - - // Fetch profile - const profile = await fetchProfile(pubkey); - if (profile) { - profileCache.set(pubkey, profile); - return profile.display_name || profile.name || shortenPubkey(pubkey); - } - - // Fallback to shortened pubkey - return shortenPubkey(pubkey); -} - -/** - * Batch fetches profiles for multiple pubkeys - * @param pubkeys - Array of public keys to fetch profiles for - * @param onProgress - Optional callback for progress updates - * @returns Array of profile events - */ -export async function batchFetchProfiles( - pubkeys: string[], - onProgress?: (fetched: number, total: number) => void -): Promise { - const allProfileEvents: NDKEvent[] = []; - - // Filter out already cached pubkeys - const uncachedPubkeys = pubkeys.filter(pk => !profileCache.has(pk)); - - if (uncachedPubkeys.length === 0) { - if (onProgress) onProgress(pubkeys.length, pubkeys.length); - return allProfileEvents; - } - - try { - const ndk = get(ndkInstance); - - // Report initial progress - const cachedCount = pubkeys.length - uncachedPubkeys.length; - if (onProgress) onProgress(cachedCount, pubkeys.length); - - // Batch fetch in chunks to avoid overwhelming relays - const CHUNK_SIZE = 50; - let fetchedCount = cachedCount; - - for (let i = 0; i < uncachedPubkeys.length; i += CHUNK_SIZE) { - const chunk = uncachedPubkeys.slice(i, Math.min(i + CHUNK_SIZE, uncachedPubkeys.length)); - - const profileEvents = await ndk.fetchEvents({ - kinds: [0], - authors: chunk - }); - - // Process each profile event - profileEvents.forEach((event: 
NDKEvent) => { - try { - const content = JSON.parse(event.content); - profileCache.set(event.pubkey, content as ProfileData); - allProfileEvents.push(event); - fetchedCount++; - } catch (e) { - console.error("Failed to parse profile content:", e); - } - }); - - // Update progress - if (onProgress) { - onProgress(fetchedCount, pubkeys.length); - } - } - - // Final progress update - if (onProgress) onProgress(pubkeys.length, pubkeys.length); - } catch (e) { - console.error("Failed to batch fetch profiles:", e); - } - - return allProfileEvents; -} - -/** - * Gets display name synchronously from cache - * @param pubkey - The public key to get display name for - * @returns Display name, name, or shortened pubkey - */ -export function getDisplayNameSync(pubkey: string): string { - if (profileCache.has(pubkey)) { - const profile = profileCache.get(pubkey)!; - return profile.display_name || profile.name || shortenPubkey(pubkey); - } - return shortenPubkey(pubkey); -} - -/** - * Shortens a pubkey for display - * @param pubkey - The public key to shorten - * @returns Shortened pubkey (first 8 chars...last 4 chars) - */ -function shortenPubkey(pubkey: string): string { - if (pubkey.length <= 12) return pubkey; - return `${pubkey.slice(0, 8)}...${pubkey.slice(-4)}`; -} - -/** - * Clears the profile cache - */ -export function clearProfileCache(): void { - profileCache.clear(); -} - -/** - * Extracts all pubkeys from events (authors and p tags) - * @param events - Array of events to extract pubkeys from - * @returns Set of unique pubkeys - */ -export function extractPubkeysFromEvents(events: NDKEvent[]): Set { - const pubkeys = new Set(); - - events.forEach(event => { - // Add author pubkey - if (event.pubkey) { - pubkeys.add(event.pubkey); - } - - // Add pubkeys from p tags - const pTags = event.getMatchingTags("p"); - pTags.forEach(tag => { - if (tag[1]) { - pubkeys.add(tag[1]); - } - }); - - // Extract pubkeys from content (nostr:npub1... 
format) - const npubPattern = /nostr:npub1[a-z0-9]{58}/g; - const matches = event.content?.match(npubPattern) || []; - matches.forEach(match => { - try { - const npub = match.replace('nostr:', ''); - const decoded = nip19.decode(npub); - if (decoded.type === 'npub') { - pubkeys.add(decoded.data as string); - } - } catch (e) { - // Invalid npub, ignore - } - }); - }); - - return pubkeys; -} - -/** - * Replaces pubkeys in content with display names - * @param content - The content to process - * @returns Content with pubkeys replaced by display names - */ -export function replaceContentPubkeys(content: string): string { - if (!content) return content; - - // Replace nostr:npub1... references - const npubPattern = /nostr:npub[a-z0-9]{58}/g; - let result = content; - - const matches = content.match(npubPattern) || []; - matches.forEach(match => { - try { - const npub = match.replace('nostr:', ''); - const decoded = nip19.decode(npub); - if (decoded.type === 'npub') { - const pubkey = decoded.data as string; - const displayName = getDisplayNameSync(pubkey); - result = result.replace(match, `@${displayName}`); - } - } catch (e) { - // Invalid npub, leave as is - } - }); - - return result; -} - -/** - * Replaces pubkey references in text with display names - * @param text - Text that may contain pubkey references - * @returns Text with pubkeys replaced by display names - */ -export function replacePubkeysWithDisplayNames(text: string): string { - // Match hex pubkeys (64 characters) - const pubkeyRegex = /\b[0-9a-fA-F]{64}\b/g; - - return text.replace(pubkeyRegex, (match) => { - return getDisplayNameSync(match); - }); -} \ No newline at end of file diff --git a/src/lib/utils/profile_search.ts b/src/lib/utils/profile_search.ts index eeac332..550f87d 100644 --- a/src/lib/utils/profile_search.ts +++ b/src/lib/utils/profile_search.ts @@ -1,22 +1,24 @@ -import { ndkInstance } from "../ndk.ts"; -import { getUserMetadata, getNpubFromNip05 } from "./nostrUtils.ts"; -import NDK, { 
NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; +import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts"; +import { getNpubFromNip05, getUserMetadata, fetchEventWithFallback } from "./nostrUtils.ts"; +import NDK, { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk"; import { searchCache } from "./searchCache.ts"; -import { communityRelays, secondaryRelays } from "../consts.ts"; +import { communityRelays, searchRelays, secondaryRelays, anonymousRelays } from "../consts.ts"; import { get } from "svelte/store"; import type { NostrProfile, ProfileSearchResult } from "./search_types.ts"; import { + createProfileFromEvent, fieldMatches, nip05Matches, normalizeSearchTerm, - createProfileFromEvent, } from "./search_utils.ts"; +import { nip19 } from "nostr-tools"; /** * Search for profiles by various criteria (display name, name, NIP-05, npub) */ export async function searchProfiles( searchTerm: string, + ndk: NDK, ): Promise { const normalizedSearchTerm = normalizeSearchTerm(searchTerm); @@ -46,7 +48,6 @@ export async function searchProfiles( return { profiles, Status: {} }; } - const ndk = get(ndkInstance); if (!ndk) { console.error("NDK not initialized"); throw new Error("NDK not initialized"); @@ -63,7 +64,7 @@ export async function searchProfiles( normalizedSearchTerm.startsWith("nprofile") ) { try { - const metadata = await getUserMetadata(normalizedSearchTerm); + const metadata = await getUserMetadata(normalizedSearchTerm, ndk); if (metadata) { foundProfiles = [metadata]; } @@ -76,10 +77,30 @@ export async function searchProfiles( try { const npub = await getNpubFromNip05(normalizedNip05); if (npub) { - const metadata = await getUserMetadata(npub); - const profile: NostrProfile = { + const metadata = await getUserMetadata(npub, ndk); + + // AI-NOTE: 2025-01-24 - Fetch the original event timestamp to preserve created_at + let created_at: number | undefined = undefined; + try { + const decoded = nip19.decode(npub); + if (decoded.type === "npub") { + const 
pubkey = decoded.data as string; + const originalEvent = await fetchEventWithFallback(ndk, { + kinds: [0], + authors: [pubkey], + }); + if (originalEvent && originalEvent.created_at) { + created_at = originalEvent.created_at; + } + } + } catch (e) { + console.warn("profile_search: Failed to fetch original event timestamp:", e); + } + + const profile: NostrProfile & { created_at?: number } = { ...metadata, pubkey: npub, + created_at: created_at, }; foundProfiles = [profile]; } @@ -89,7 +110,7 @@ export async function searchProfiles( } else { // Try NIP-05 search first (faster than relay search) console.log("Starting NIP-05 search for:", normalizedSearchTerm); - foundProfiles = await searchNip05Domains(normalizedSearchTerm); + foundProfiles = await searchNip05Domains(normalizedSearchTerm, ndk); console.log( "NIP-05 search completed, found:", foundProfiles.length, @@ -142,6 +163,7 @@ export async function searchProfiles( */ async function searchNip05Domains( searchTerm: string, + ndk: NDK, ): Promise { const foundProfiles: NostrProfile[] = []; @@ -184,10 +206,30 @@ async function searchNip05Domains( "NIP-05 search: SUCCESS! 
found npub for gitcitadel.com:", npub, ); - const metadata = await getUserMetadata(npub); - const profile: NostrProfile = { + const metadata = await getUserMetadata(npub, ndk); + + // AI-NOTE: 2025-01-24 - Fetch the original event timestamp to preserve created_at + let created_at: number | undefined = undefined; + try { + const decoded = nip19.decode(npub); + if (decoded.type === "npub") { + const pubkey = decoded.data as string; + const originalEvent = await fetchEventWithFallback(ndk, { + kinds: [0], + authors: [pubkey], + }); + if (originalEvent && originalEvent.created_at) { + created_at = originalEvent.created_at; + } + } + } catch (e) { + console.warn("profile_search: Failed to fetch original event timestamp:", e); + } + + const profile: NostrProfile & { created_at?: number } = { ...metadata, pubkey: npub, + created_at: created_at, }; console.log( "NIP-05 search: created profile for gitcitadel.com:", @@ -216,10 +258,30 @@ async function searchNip05Domains( const npub = await getNpubFromNip05(nip05Address); if (npub) { console.log("NIP-05 search: found npub for", nip05Address, ":", npub); - const metadata = await getUserMetadata(npub); - const profile: NostrProfile = { + const metadata = await getUserMetadata(npub, ndk); + + // AI-NOTE: 2025-01-24 - Fetch the original event timestamp to preserve created_at + let created_at: number | undefined = undefined; + try { + const decoded = nip19.decode(npub); + if (decoded.type === "npub") { + const pubkey = decoded.data as string; + const originalEvent = await fetchEventWithFallback(ndk, { + kinds: [0], + authors: [pubkey], + }); + if (originalEvent && originalEvent.created_at) { + created_at = originalEvent.created_at; + } + } + } catch (e) { + console.warn("profile_search: Failed to fetch original event timestamp:", e); + } + + const profile: NostrProfile & { created_at?: number } = { ...metadata, pubkey: npub, + created_at: created_at, }; console.log( "NIP-05 search: created profile for", @@ -252,7 +314,7 @@ async 
function searchNip05Domains( } /** - * Quick relay search with short timeout + * Search for profiles across all available relays */ async function quickRelaySearch( searchTerm: string, @@ -264,12 +326,32 @@ async function quickRelaySearch( const normalizedSearchTerm = normalizeSearchTerm(searchTerm); console.log("Normalized search term for relay search:", normalizedSearchTerm); - // Use all profile relays for better coverage - const quickRelayUrls = [...communityRelays, ...secondaryRelays]; // Use all available relays - console.log("Using all relays for search:", quickRelayUrls); + // AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive profile discovery + // This ensures we don't miss profiles due to stale cache or limited relay coverage + + // Get all available relays from NDK pool (most comprehensive) + const poolRelays = Array.from(ndk.pool.relays.values()).map((r: any) => r.url) as string[]; + const userInboxRelays = get(activeInboxRelays); + const userOutboxRelays = get(activeOutboxRelays); + + // Combine ALL available relays for maximum coverage + const allRelayUrls = [ + ...poolRelays, // All NDK pool relays + ...userInboxRelays, // User's personal inbox relays + ...userOutboxRelays, // User's personal outbox relays + ...searchRelays, // Dedicated profile search relays + ...communityRelays, // Community relays + ...secondaryRelays, // Secondary relays as fallback + ...anonymousRelays, // Anonymous relays as additional fallback + ]; + + // Deduplicate relay URLs + const uniqueRelayUrls = [...new Set(allRelayUrls)]; + console.log("Using ALL available relays for profile search:", uniqueRelayUrls); + console.log("Total relays for profile search:", uniqueRelayUrls.length); // Create relay sets for parallel search - const relaySets = quickRelayUrls + const relaySets = uniqueRelayUrls .map((url) => { try { return NDKRelaySet.fromRelayUrls([url], ndk); @@ -280,6 +362,8 @@ async function quickRelaySearch( }) .filter(Boolean); + console.log("Created relay 
sets for profile search:", relaySets.length); + // Search all relays in parallel with short timeout const searchPromises = relaySets.map((relaySet, index) => { if (!relaySet) return []; @@ -289,7 +373,7 @@ async function quickRelaySearch( let eventCount = 0; console.log( - `Starting search on relay ${index + 1}: ${quickRelayUrls[index]}`, + `Starting search on relay ${index + 1}: ${uniqueRelayUrls[index]}`, ); const sub = ndk.subscribe( @@ -303,8 +387,8 @@ async function quickRelaySearch( try { if (!event.content) return; const profileData = JSON.parse(event.content); - const displayName = - profileData.displayName || profileData.display_name || ""; + const displayName = profileData.displayName || + profileData.display_name || ""; const display_name = profileData.display_name || ""; const name = profileData.name || ""; const nip05 = profileData.nip05 || ""; @@ -336,6 +420,7 @@ async function quickRelaySearch( nip05: profileData.nip05, pubkey: event.pubkey, searchTerm: normalizedSearchTerm, + relay: uniqueRelayUrls[index], }); const profile = createProfileFromEvent(event, profileData); @@ -354,7 +439,9 @@ async function quickRelaySearch( sub.on("eose", () => { console.log( - `Relay ${index + 1} (${quickRelayUrls[index]}) search completed, processed ${eventCount} events, found ${foundInRelay.length} matches`, + `Relay ${index + 1} (${ + uniqueRelayUrls[index] + }) search completed, processed ${eventCount} events, found ${foundInRelay.length} matches`, ); resolve(foundInRelay); }); @@ -362,7 +449,9 @@ async function quickRelaySearch( // Short timeout for quick search setTimeout(() => { console.log( - `Relay ${index + 1} (${quickRelayUrls[index]}) search timed out after 1.5s, processed ${eventCount} events, found ${foundInRelay.length} matches`, + `Relay ${index + 1} (${ + uniqueRelayUrls[index] + }) search timed out after 1.5s, processed ${eventCount} events, found ${foundInRelay.length} matches`, ); sub.stop(); resolve(foundInRelay); diff --git 
a/src/lib/utils/relayDiagnostics.ts b/src/lib/utils/relayDiagnostics.ts index 6be37c4..2e650e3 100644 --- a/src/lib/utils/relayDiagnostics.ts +++ b/src/lib/utils/relayDiagnostics.ts @@ -42,9 +42,8 @@ export async function testRelay(url: string): Promise { responseTime: Date.now() - startTime, }); } - } + }; }); - } /** @@ -93,7 +92,9 @@ export function logRelayDiagnostics(diagnostics: RelayDiagnostic[]): void { console.log(`✅ Working relays (${working.length}):`); working.forEach((d) => { console.log( - ` - ${d.url}${d.requiresAuth ? " (requires auth)" : ""}${d.responseTime ? ` (${d.responseTime}ms)` : ""}`, + ` - ${d.url}${d.requiresAuth ? " (requires auth)" : ""}${ + d.responseTime ? ` (${d.responseTime}ms)` : "" + }`, ); }); diff --git a/src/lib/utils/relay_info_service.ts b/src/lib/utils/relay_info_service.ts new file mode 100644 index 0000000..2b83c71 --- /dev/null +++ b/src/lib/utils/relay_info_service.ts @@ -0,0 +1,184 @@ +/** + * Simplifies a URL by removing protocol and common prefixes + * @param url The URL to simplify + * @returns Simplified URL string + */ +function simplifyUrl(url: string): string { + try { + const urlObj = new URL(url); + return urlObj.hostname + (urlObj.port ? 
`:${urlObj.port}` : ""); + } catch { + // If URL parsing fails, return the original string + return url; + } +} + +export interface RelayInfo { + name?: string; + description?: string; + icon?: string; + pubkey?: string; + contact?: string; + supported_nips?: number[]; + software?: string; + version?: string; + tags?: string[]; + payments_url?: string; + limitation?: { + auth_required?: boolean; + payment_required?: boolean; + }; +} + +export interface RelayInfoWithMetadata extends RelayInfo { + url: string; + shortUrl: string; + hasNip11: boolean; + triedNip11: boolean; +} + +/** + * Fetches relay information using NIP-11 + * @param url The relay URL to fetch info for + * @returns Promise resolving to relay info or undefined if failed + */ +export async function fetchRelayInfo( + url: string, +): Promise { + try { + // Convert WebSocket URL to HTTP URL for NIP-11 + const httpUrl = url.replace("ws://", "http://").replace( + "wss://", + "https://", + ); + + const response = await fetch(httpUrl, { + headers: { + "Accept": "application/nostr+json", + "User-Agent": "Alexandria/1.0", + }, + // Add timeout to prevent hanging + signal: AbortSignal.timeout(5000), + }); + + if (!response.ok) { + console.warn(`[RelayInfo] HTTP ${response.status} for ${url}`); + return { + url, + shortUrl: simplifyUrl(url), + hasNip11: false, + triedNip11: true, + }; + } + + const relayInfo = await response.json() as RelayInfo; + + return { + ...relayInfo, + url, + shortUrl: simplifyUrl(url), + hasNip11: Object.keys(relayInfo).length > 0, + triedNip11: true, + }; + } catch (error) { + console.warn(`[RelayInfo] Failed to fetch info for ${url}:`, error); + return { + url, + shortUrl: simplifyUrl(url), + hasNip11: false, + triedNip11: true, + }; + } +} + +/** + * Fetches relay information for multiple relays in parallel + * @param urls Array of relay URLs to fetch info for + * @returns Promise resolving to array of relay info objects + */ +export async function fetchRelayInfos( + urls: string[], 
+): Promise { + if (urls.length === 0) { + return []; + } + + const promises = urls.map((url) => fetchRelayInfo(url)); + const results = await Promise.allSettled(promises); + + return results + .map((result) => result.status === "fulfilled" ? result.value : undefined) + .filter((info): info is RelayInfoWithMetadata => info !== undefined); +} + +/** + * Gets relay type label based on relay URL and info + * @param relayUrl The relay URL + * @param relayInfo Optional relay info + * @returns String describing the relay type + */ +export function getRelayTypeLabel( + relayUrl: string, + relayInfo?: RelayInfoWithMetadata, +): string { + // Check if it's a local relay + if (relayUrl.includes("localhost") || relayUrl.includes("127.0.0.1")) { + return "Local"; + } + + // Check if it's a community relay + if ( + relayUrl.includes("nostr.band") || relayUrl.includes("noswhere.com") || + relayUrl.includes("damus.io") || relayUrl.includes("nostr.wine") + ) { + return "Community"; + } + + // Check if it's a user's relay (likely inbox/outbox) + if ( + relayUrl.includes("relay.nsec.app") || + relayUrl.includes("relay.snort.social") + ) { + return "User"; + } + + // Use relay name if available + if (relayInfo?.name) { + return relayInfo.name; + } + + // Fallback to domain + try { + const domain = new URL(relayUrl).hostname; + return domain.replace("www.", ""); + } catch { + return "Unknown"; + } +} + +/** + * Gets relay icon URL or fallback + * @param relayInfo Relay info object + * @param relayUrl Relay URL as fallback + * @returns Icon URL or undefined + */ +export function getRelayIcon( + relayInfo?: RelayInfoWithMetadata, + relayUrl?: string, +): string | undefined { + if (relayInfo?.icon) { + return relayInfo.icon; + } + + // Generate favicon URL from relay URL + if (relayUrl) { + try { + const url = new URL(relayUrl); + return `${url.protocol}//${url.hostname}/favicon.ico`; + } catch { + // Invalid URL, return undefined + } + } + + return undefined; +} diff --git 
a/src/lib/utils/relay_management.ts b/src/lib/utils/relay_management.ts index a4f41fa..32c21b8 100644 --- a/src/lib/utils/relay_management.ts +++ b/src/lib/utils/relay_management.ts @@ -1,5 +1,11 @@ import NDK, { NDKKind, NDKRelay, NDKUser } from "@nostr-dev-kit/ndk"; -import { searchRelays, secondaryRelays, anonymousRelays, lowbandwidthRelays, localRelays } from "../consts.ts"; +import { + anonymousRelays, + localRelays, + lowbandwidthRelays, + searchRelays, + secondaryRelays, +} from "../consts.ts"; import { getRelaySetForNetworkCondition } from "./network_detection.ts"; import { networkCondition } from "../stores/networkStore.ts"; import { get } from "svelte/store"; @@ -11,15 +17,15 @@ import { get } from "svelte/store"; */ export function normalizeRelayUrl(url: string): string { let normalized = url.toLowerCase().trim(); - + // Ensure protocol is present - if (!normalized.startsWith('ws://') && !normalized.startsWith('wss://')) { - normalized = 'wss://' + normalized; + if (!normalized.startsWith("ws://") && !normalized.startsWith("wss://")) { + normalized = "wss://" + normalized; } - + // Remove trailing slash - normalized = normalized.replace(/\/$/, ''); - + normalized = normalized.replace(/\/$/, ""); + return normalized; } @@ -43,12 +49,12 @@ export function deduplicateRelayUrls(urls: string[]): string[] { } /** - * Tests connection to a relay and returns connection status - * @param relayUrl The relay URL to test + * Tests connection to a local relay (ws:// protocol) + * @param relayUrl The local relay URL to test (should be ws://) * @param ndk The NDK instance * @returns Promise that resolves to connection status */ -export function testRelayConnection( +export function testLocalRelayConnection( relayUrl: string, ndk: NDK, ): Promise<{ @@ -57,9 +63,158 @@ export function testRelayConnection( error?: string; actualUrl?: string; }> { + // Only test connections on client-side + if (typeof window === "undefined") { + return Promise.resolve({ + connected: false, 
+ requiresAuth: false, + error: "Server-side rendering - connection test skipped", + actualUrl: relayUrl, + }); + } + return new Promise((resolve) => { - // Ensure the URL is using wss:// protocol - const secureUrl = ensureSecureWebSocket(relayUrl); + try { + // Ensure the URL is using ws:// protocol for local relays + const localUrl = relayUrl.replace(/^wss:\/\//, "ws://"); + + // Use the existing NDK instance instead of creating a new one + const relay = new NDKRelay(localUrl, undefined, ndk); + let authRequired = false; + let connected = false; + let error: string | undefined; + let actualUrl: string | undefined; + + const timeout = setTimeout(() => { + try { + relay.disconnect(); + } catch { + // Silently ignore disconnect errors + } + resolve({ + connected: false, + requiresAuth: authRequired, + error: "Connection timeout", + actualUrl, + }); + }, 3000); + + // Wrap all event handlers in try-catch to prevent errors from bubbling up + relay.on("connect", () => { + try { + connected = true; + actualUrl = localUrl; + clearTimeout(timeout); + relay.disconnect(); + resolve({ + connected: true, + requiresAuth: authRequired, + error, + actualUrl, + }); + } catch { + // Silently handle any errors in connect handler + clearTimeout(timeout); + resolve({ + connected: false, + requiresAuth: false, + error: "Connection handler error", + actualUrl: localUrl, + }); + } + }); + + relay.on("notice", (message: string) => { + try { + if (message.includes("auth-required")) { + authRequired = true; + } + } catch { + // Silently ignore notice handler errors + } + }); + + relay.on("disconnect", () => { + try { + if (!connected) { + error = "Connection failed"; + clearTimeout(timeout); + resolve({ + connected: false, + requiresAuth: authRequired, + error, + actualUrl, + }); + } + } catch { + // Silently handle any errors in disconnect handler + clearTimeout(timeout); + resolve({ + connected: false, + requiresAuth: false, + error: "Disconnect handler error", + actualUrl: localUrl, + 
}); + } + }); + + // Wrap the connect call in try-catch + try { + relay.connect(); + } catch (connectError) { + // Silently handle connection errors + clearTimeout(timeout); + resolve({ + connected: false, + requiresAuth: false, + error: "Connection failed", + actualUrl: localUrl, + }); + } + } catch (outerError) { + // Catch any other errors that might occur during setup + resolve({ + connected: false, + requiresAuth: false, + error: "Setup failed", + actualUrl: relayUrl, + }); + } + }); +} + +/** + * Tests connection to a remote relay (wss:// protocol) + * @param relayUrl The remote relay URL to test + * @param ndk The NDK instance + * @returns Promise that resolves to connection status + */ +export function testRemoteRelayConnection( + relayUrl: string, + ndk: NDK, +): Promise<{ + connected: boolean; + requiresAuth: boolean; + error?: string; + actualUrl?: string; +}> { + // Only test connections on client-side + if (typeof window === "undefined") { + return Promise.resolve({ + connected: false, + requiresAuth: false, + error: "Server-side rendering - connection test skipped", + actualUrl: relayUrl, + }); + } + + return new Promise((resolve) => { + // Ensure the URL is using wss:// protocol for remote relays + const secureUrl = relayUrl.replace(/^ws:\/\//, "wss://"); + + console.debug( + `[relay_management.ts] Testing remote relay connection: ${secureUrl}`, + ); // Use the existing NDK instance instead of creating a new one const relay = new NDKRelay(secureUrl, undefined, ndk); @@ -69,6 +224,9 @@ export function testRelayConnection( let actualUrl: string | undefined; const timeout = setTimeout(() => { + console.debug( + `[relay_management.ts] Relay ${secureUrl} connection timeout`, + ); relay.disconnect(); resolve({ connected: false, @@ -76,9 +234,12 @@ export function testRelayConnection( error: "Connection timeout", actualUrl, }); - }, 3000); // Increased timeout to 3 seconds to give relays more time + }, 3000); relay.on("connect", () => { + console.debug( + 
`[relay_management.ts] Relay ${secureUrl} connected successfully`, + ); connected = true; actualUrl = secureUrl; clearTimeout(timeout); @@ -99,6 +260,9 @@ export function testRelayConnection( relay.on("disconnect", () => { if (!connected) { + console.debug( + `[relay_management.ts] Relay ${secureUrl} disconnected without connecting`, + ); error = "Connection failed"; clearTimeout(timeout); resolve({ @@ -113,29 +277,28 @@ export function testRelayConnection( relay.connect(); }); } - + /** - * Ensures a relay URL uses secure WebSocket protocol for remote relays - * @param url The relay URL to secure - * @returns The URL with wss:// protocol (except for localhost) + * Tests connection to a relay and returns connection status + * @param relayUrl The relay URL to test + * @param ndk The NDK instance + * @returns Promise that resolves to connection status */ -function ensureSecureWebSocket(url: string): string { - // For localhost, always use ws:// (never wss://) - if (url.includes('localhost') || url.includes('127.0.0.1')) { - // Convert any wss://localhost to ws://localhost - return url.replace(/^wss:\/\//, "ws://"); - } - - // Replace ws:// with wss:// for remote relays - const secureUrl = url.replace(/^ws:\/\//, "wss://"); - - if (secureUrl !== url) { - console.warn( - `[relay_management.ts] Protocol upgrade for rem ote relay: ${url} -> ${secureUrl}`, - ); +export function testRelayConnection( + relayUrl: string, + ndk: NDK, +): Promise<{ + connected: boolean; + requiresAuth: boolean; + error?: string; + actualUrl?: string; +}> { + // Determine if this is a local or remote relay + if (relayUrl.includes("localhost") || relayUrl.includes("127.0.0.1")) { + return testLocalRelayConnection(relayUrl, ndk); + } else { + return testRemoteRelayConnection(relayUrl, ndk); } - - return secureUrl; } /** @@ -144,34 +307,46 @@ function ensureSecureWebSocket(url: string): string { * @param ndk NDK instance * @returns Promise that resolves to array of working local relay URLs */ 
-async function testLocalRelays(localRelayUrls: string[], ndk: NDK): Promise { - const workingRelays: string[] = []; - - if (localRelayUrls.length === 0) { +async function testLocalRelays( + localRelayUrls: string[], + ndk: NDK, +): Promise { + try { + const workingRelays: string[] = []; + + if (localRelayUrls.length === 0) { + return workingRelays; + } + + // Test local relays quietly, without logging failures + await Promise.all( + localRelayUrls.map(async (url) => { + try { + const result = await testLocalRelayConnection(url, ndk); + if (result.connected) { + workingRelays.push(url); + console.debug( + `[relay_management.ts] Local relay connected: ${url}`, + ); + } + // Don't log failures - local relays are optional + } catch { + // Silently ignore local relay failures - they're optional + } + }), + ); + + if (workingRelays.length > 0) { + console.info( + `[relay_management.ts] Found ${workingRelays.length} working local relays`, + ); + } return workingRelays; + } catch { + // If anything goes wrong with the entire local relay testing process, + // just return an empty array silently + return []; } - - console.debug(`[relay_management.ts] Testing ${localRelayUrls.length} local relays...`); - - await Promise.all( - localRelayUrls.map(async (url) => { - try { - const result = await testRelayConnection(url, ndk); - if (result.connected) { - workingRelays.push(url); - console.debug(`[relay_management.ts] Local relay connected: ${url}`); - } else { - console.debug(`[relay_management.ts] Local relay failed: ${url} - ${result.error}`); - } - } catch { - // Silently ignore local relay failures - they're optional - console.debug(`[relay_management.ts] Local relay error (ignored): ${url}`); - } - }) - ); - - console.debug(`[relay_management.ts] Found ${workingRelays.length} working local relays`); - return workingRelays; } /** @@ -183,17 +358,17 @@ export async function discoverLocalRelays(ndk: NDK): Promise { try { // If no local relays are configured, return empty array 
if (localRelays.length === 0) { - console.debug('[relay_management.ts] No local relays configured'); + console.debug("[relay_management.ts] No local relays configured"); return []; } - + // Convert wss:// URLs from consts to ws:// for local testing - const localRelayUrls = localRelays.map((url: string) => - url.replace(/^wss:\/\//, 'ws://') + const localRelayUrls = localRelays.map((url: string) => + url.replace(/^wss:\/\//, "ws://") ); - + const workingRelays = await testLocalRelays(localRelayUrls, ndk); - + // If no local relays are working, return empty array // The network detection logic will provide fallback relays return workingRelays; @@ -209,7 +384,10 @@ export async function discoverLocalRelays(ndk: NDK): Promise { * @param user User to fetch local relays for * @returns Promise that resolves to array of local relay URLs */ -export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise { +export async function getUserLocalRelays( + ndk: NDK, + user: NDKUser, +): Promise { try { const localRelayEvent = await ndk.fetchEvent( { @@ -220,7 +398,7 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise { - if (tag[0] === 'r' && tag[1]) { + if (tag[0] === "r" && tag[1]) { localRelays.push(tag[1]); } }); return localRelays; } catch (error) { - console.info('[relay_management.ts] Error fetching user local relays:', error); + console.info( + "[relay_management.ts] Error fetching user local relays:", + error, + ); return []; } } @@ -247,7 +428,10 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise { +export async function getUserBlockedRelays( + ndk: NDK, + user: NDKUser, +): Promise { try { const blockedRelayEvent = await ndk.fetchEvent( { @@ -258,7 +442,7 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise { - if (tag[0] === 'r' && tag[1]) { + if (tag[0] === "r" && tag[1]) { blockedRelays.push(tag[1]); } }); return blockedRelays; } catch (error) { - 
console.info('[relay_management.ts] Error fetching user blocked relays:', error); + console.info( + "[relay_management.ts] Error fetching user blocked relays:", + error, + ); return []; } } @@ -285,9 +472,15 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise { +export async function getUserOutboxRelays( + ndk: NDK, + user: NDKUser, +): Promise { try { - console.debug('[relay_management.ts] Fetching outbox relays for user:', user.pubkey); + console.debug( + "[relay_management.ts] Fetching outbox relays for user:", + user.pubkey, + ); const relayList = await ndk.fetchEvent( { kinds: [10002], @@ -297,36 +490,47 @@ export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise { - console.debug('[relay_management.ts] Processing tag:', tag); - if (tag[0] === 'w' && tag[1]) { + console.debug("[relay_management.ts] Processing tag:", tag); + if (tag[0] === "w" && tag[1]) { outboxRelays.push(tag[1]); - console.debug('[relay_management.ts] Added outbox relay:', tag[1]); - } else if (tag[0] === 'r' && tag[1]) { + console.debug("[relay_management.ts] Added outbox relay:", tag[1]); + } else if (tag[0] === "r" && tag[1]) { // Some relay lists use 'r' for both inbox and outbox outboxRelays.push(tag[1]); - console.debug('[relay_management.ts] Added relay (r tag):', tag[1]); + console.debug("[relay_management.ts] Added relay (r tag):", tag[1]); } else { - console.debug('[relay_management.ts] Skipping tag:', tag[0], 'value:', tag[1]); + console.debug( + "[relay_management.ts] Skipping tag:", + tag[0], + "value:", + tag[1], + ); } }); - console.debug('[relay_management.ts] Final outbox relays:', outboxRelays); + console.debug("[relay_management.ts] Final outbox relays:", outboxRelays); return outboxRelays; } catch (error) { - console.info('[relay_management.ts] Error fetching user outbox relays:', error); + console.info( + "[relay_management.ts] Error fetching user outbox relays:", + error, + ); return []; } } @@ -338,45 +542,65 @@ export 
async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise { try { // Check if we're in a browser environment with extension support - if (typeof window === 'undefined' || !globalThis.nostr) { - console.debug('[relay_management.ts] No globalThis.nostr available'); + if (typeof window === "undefined" || !globalThis.nostr) { + console.debug("[relay_management.ts] No globalThis.nostr available"); return []; } - console.debug('[relay_management.ts] Extension available, checking for getRelays()'); + console.debug( + "[relay_management.ts] Extension available, checking for getRelays()", + ); const extensionRelays: string[] = []; - + // Try to get relays from the extension's API // Different extensions may expose their relay config differently if (globalThis.nostr.getRelays) { - console.debug('[relay_management.ts] getRelays() method found, calling it...'); + console.debug( + "[relay_management.ts] getRelays() method found, calling it...", + ); try { const relays = await globalThis.nostr.getRelays(); - console.debug('[relay_management.ts] getRelays() returned:', relays); - if (relays && typeof relays === 'object') { + console.debug("[relay_management.ts] getRelays() returned:", relays); + if (relays && typeof relays === "object") { // Convert relay object to array of URLs const relayUrls = Object.keys(relays); extensionRelays.push(...relayUrls); - console.debug('[relay_management.ts] Got relays from extension:', relayUrls); + console.debug( + "[relay_management.ts] Got relays from extension:", + relayUrls, + ); } } catch (error) { - console.debug('[relay_management.ts] Extension getRelays() failed:', error); + console.debug( + "[relay_management.ts] Extension getRelays() failed:", + error, + ); } } else { - console.debug('[relay_management.ts] getRelays() method not found on globalThis.nostr'); + console.debug( + "[relay_management.ts] getRelays() method not found on globalThis.nostr", + ); } // If getRelays() didn't work, try alternative methods if 
(extensionRelays.length === 0) { // Some extensions might expose relays through other methods // This is a fallback for extensions that don't expose getRelays() - console.debug('[relay_management.ts] Extension does not expose relay configuration'); + console.debug( + "[relay_management.ts] Extension does not expose relay configuration", + ); } - console.debug('[relay_management.ts] Final extension relays:', extensionRelays); + console.debug( + "[relay_management.ts] Final extension relays:", + extensionRelays, + ); return extensionRelays; } catch (error) { - console.debug('[relay_management.ts] Error getting extension relays:', error); + console.debug( + "[relay_management.ts] Error getting extension relays:", + error, + ); return []; } } @@ -391,27 +615,59 @@ async function testRelaySet(relayUrls: string[], ndk: NDK): Promise { const workingRelays: string[] = []; const maxConcurrent = 2; // Reduce to 2 relays at a time to avoid overwhelming them + console.debug( + `[relay_management.ts] Testing ${relayUrls.length} relays in batches of ${maxConcurrent}`, + ); + console.debug(`[relay_management.ts] Relay URLs to test:`, relayUrls); + for (let i = 0; i < relayUrls.length; i += maxConcurrent) { const batch = relayUrls.slice(i, i + maxConcurrent); - + console.debug( + `[relay_management.ts] Testing batch ${ + Math.floor(i / maxConcurrent) + 1 + }:`, + batch, + ); + const batchPromises = batch.map(async (url) => { try { + console.debug(`[relay_management.ts] Testing relay: ${url}`); const result = await testRelayConnection(url, ndk); + console.debug( + `[relay_management.ts] Relay ${url} test result:`, + result, + ); return result.connected ? 
url : null; } catch (error) { - console.debug(`[relay_management.ts] Failed to test relay ${url}:`, error); + console.debug( + `[relay_management.ts] Failed to test relay ${url}:`, + error, + ); return null; } }); const batchResults = await Promise.allSettled(batchPromises); const batchWorkingRelays = batchResults - .filter((result): result is PromiseFulfilledResult => result.status === 'fulfilled') - .map(result => result.value) + .filter((result): result is PromiseFulfilledResult => + result.status === "fulfilled" + ) + .map((result) => result.value) .filter((url): url is string => url !== null); + + console.debug( + `[relay_management.ts] Batch ${ + Math.floor(i / maxConcurrent) + 1 + } working relays:`, + batchWorkingRelays, + ); workingRelays.push(...batchWorkingRelays); } + console.debug( + `[relay_management.ts] Total working relays after testing:`, + workingRelays, + ); return workingRelays; } @@ -423,13 +679,19 @@ async function testRelaySet(relayUrls: string[], ndk: NDK): Promise { */ export async function buildCompleteRelaySet( ndk: NDK, - user: NDKUser | null + user: NDKUser | null, ): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> { - console.debug('[relay_management.ts] buildCompleteRelaySet: Starting with user:', user?.pubkey || 'null'); - + console.debug( + "[relay_management.ts] buildCompleteRelaySet: Starting with user:", + user?.pubkey || "null", + ); + // Discover local relays first const discoveredLocalRelays = await discoverLocalRelays(ndk); - console.debug('[relay_management.ts] buildCompleteRelaySet: Discovered local relays:', discoveredLocalRelays); + console.debug( + "[relay_management.ts] buildCompleteRelaySet: Discovered local relays:", + discoveredLocalRelays, + ); // Get user-specific relays if available let userOutboxRelays: string[] = []; @@ -438,42 +700,75 @@ export async function buildCompleteRelaySet( let extensionRelays: string[] = []; if (user) { - console.debug('[relay_management.ts] buildCompleteRelaySet: Fetching 
user-specific relays for:', user.pubkey); - + console.debug( + "[relay_management.ts] buildCompleteRelaySet: Fetching user-specific relays for:", + user.pubkey, + ); + try { userOutboxRelays = await getUserOutboxRelays(ndk, user); - console.debug('[relay_management.ts] buildCompleteRelaySet: User outbox relays:', userOutboxRelays); + console.debug( + "[relay_management.ts] buildCompleteRelaySet: User outbox relays:", + userOutboxRelays, + ); } catch (error) { - console.debug('[relay_management.ts] Error fetching user outbox relays:', error); + console.debug( + "[relay_management.ts] Error fetching user outbox relays:", + error, + ); } try { userLocalRelays = await getUserLocalRelays(ndk, user); - console.debug('[relay_management.ts] buildCompleteRelaySet: User local relays:', userLocalRelays); + console.debug( + "[relay_management.ts] buildCompleteRelaySet: User local relays:", + userLocalRelays, + ); } catch (error) { - console.debug('[relay_management.ts] Error fetching user local relays:', error); + console.debug( + "[relay_management.ts] Error fetching user local relays:", + error, + ); } try { blockedRelays = await getUserBlockedRelays(ndk, user); - console.debug('[relay_management.ts] buildCompleteRelaySet: User blocked relays:', blockedRelays); + console.debug( + "[relay_management.ts] buildCompleteRelaySet: User blocked relays:", + blockedRelays, + ); } catch { // Silently ignore blocked relay fetch errors } try { extensionRelays = await getExtensionRelays(); - console.debug('[relay_management.ts] Extension relays gathered:', extensionRelays); + console.debug( + "[relay_management.ts] Extension relays gathered:", + extensionRelays, + ); } catch (error) { - console.debug('[relay_management.ts] Error fetching extension relays:', error); + console.debug( + "[relay_management.ts] Error fetching extension relays:", + error, + ); } } else { - console.debug('[relay_management.ts] buildCompleteRelaySet: No user provided, skipping user-specific relays'); + 
console.debug( + "[relay_management.ts] buildCompleteRelaySet: No user provided, skipping user-specific relays", + ); } // Build initial relay sets and deduplicate - const finalInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userLocalRelays]); - const finalOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userOutboxRelays, ...extensionRelays]); + const finalInboxRelays = deduplicateRelayUrls([ + ...discoveredLocalRelays, + ...userLocalRelays, + ]); + const finalOutboxRelays = deduplicateRelayUrls([ + ...discoveredLocalRelays, + ...userOutboxRelays, + ...extensionRelays, + ]); // Test relays and filter out non-working ones let testedInboxRelays: string[] = []; @@ -489,16 +784,29 @@ export async function buildCompleteRelaySet( // If no relays passed testing, use remote relays without testing if (testedInboxRelays.length === 0 && testedOutboxRelays.length === 0) { - const remoteRelays = deduplicateRelayUrls([...secondaryRelays, ...searchRelays]); + const remoteRelays = deduplicateRelayUrls([ + ...secondaryRelays, + ...searchRelays, + ]); return { inboxRelays: remoteRelays, - outboxRelays: remoteRelays + outboxRelays: remoteRelays, }; } - // Use tested relays and deduplicate - const inboxRelays = testedInboxRelays.length > 0 ? deduplicateRelayUrls(testedInboxRelays) : deduplicateRelayUrls(secondaryRelays); - const outboxRelays = testedOutboxRelays.length > 0 ? deduplicateRelayUrls(testedOutboxRelays) : deduplicateRelayUrls(secondaryRelays); + // Always include some remote relays as fallback, even when local relays are working + const fallbackRelays = deduplicateRelayUrls([ + ...anonymousRelays, + ...secondaryRelays, + ]); + + // Use tested relays and add fallback relays + const inboxRelays = testedInboxRelays.length > 0 + ? deduplicateRelayUrls([...testedInboxRelays, ...fallbackRelays]) + : deduplicateRelayUrls(fallbackRelays); + const outboxRelays = testedOutboxRelays.length > 0 + ? 
deduplicateRelayUrls([...testedOutboxRelays, ...fallbackRelays]) + : deduplicateRelayUrls(fallbackRelays); // Apply network condition optimization const currentNetworkCondition = get(networkCondition); @@ -506,26 +814,51 @@ export async function buildCompleteRelaySet( currentNetworkCondition, discoveredLocalRelays, lowbandwidthRelays, - { inboxRelays, outboxRelays } + { inboxRelays, outboxRelays }, ); // Filter out blocked relays and deduplicate final sets const finalRelaySet = { - inboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.inboxRelays.filter((r: string) => !blockedRelays.includes(r))), - outboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.outboxRelays.filter((r: string) => !blockedRelays.includes(r))) + inboxRelays: deduplicateRelayUrls( + networkOptimizedRelaySet.inboxRelays.filter((r: string) => + !blockedRelays.includes(r) + ), + ), + outboxRelays: deduplicateRelayUrls( + networkOptimizedRelaySet.outboxRelays.filter((r: string) => + !blockedRelays.includes(r) + ), + ), }; - // If no relays are working, use anonymous relays as fallback - if (finalRelaySet.inboxRelays.length === 0 && finalRelaySet.outboxRelays.length === 0) { + // Ensure we always have at least some relays + if ( + finalRelaySet.inboxRelays.length === 0 && + finalRelaySet.outboxRelays.length === 0 + ) { + console.warn( + "[relay_management.ts] No relays available, using anonymous relays as final fallback", + ); return { inboxRelays: deduplicateRelayUrls(anonymousRelays), - outboxRelays: deduplicateRelayUrls(anonymousRelays) + outboxRelays: deduplicateRelayUrls(anonymousRelays), }; } - console.debug('[relay_management.ts] buildCompleteRelaySet: Final relay sets - inbox:', finalRelaySet.inboxRelays.length, 'outbox:', finalRelaySet.outboxRelays.length); - console.debug('[relay_management.ts] buildCompleteRelaySet: Final inbox relays:', finalRelaySet.inboxRelays); - console.debug('[relay_management.ts] buildCompleteRelaySet: Final outbox relays:', finalRelaySet.outboxRelays); 
- + console.debug( + "[relay_management.ts] buildCompleteRelaySet: Final relay sets - inbox:", + finalRelaySet.inboxRelays.length, + "outbox:", + finalRelaySet.outboxRelays.length, + ); + console.debug( + "[relay_management.ts] buildCompleteRelaySet: Final inbox relays:", + finalRelaySet.inboxRelays, + ); + console.debug( + "[relay_management.ts] buildCompleteRelaySet: Final outbox relays:", + finalRelaySet.outboxRelays, + ); + return finalRelaySet; -} \ No newline at end of file +} diff --git a/src/lib/utils/search_constants.ts b/src/lib/utils/search_constants.ts index 663e985..5226782 100644 --- a/src/lib/utils/search_constants.ts +++ b/src/lib/utils/search_constants.ts @@ -17,7 +17,7 @@ export const TIMEOUTS = { SUBSCRIPTION_SEARCH: 10000, /** Timeout for second-order search operations */ - SECOND_ORDER_SEARCH: 5000, + SECOND_ORDER_SEARCH: 3000, // AI-NOTE: 2025-01-24 - Reduced timeout since we limit scope /** Timeout for relay diagnostics */ RELAY_DIAGNOSTICS: 5000, @@ -27,6 +27,9 @@ export const TIMEOUTS = { /** Cache cleanup interval */ CACHE_CLEANUP: 60000, + + /** Timeout for relay search operations */ + RELAY_TIMEOUT: 1500, // 1.5 seconds for quick relay searches } as const; // Cache duration constants (in milliseconds) @@ -44,13 +47,22 @@ export const SEARCH_LIMITS = { SPECIFIC_PROFILE: 10, /** Limit for general profile searches */ - GENERAL_PROFILE: 500, + GENERAL_PROFILE: 100, // AI-NOTE: 2025-01-24 - Reduced from 500 to prevent wild searches + + /** Limit for general content searches (t-tag, d-tag, etc.) 
*/ + GENERAL_CONTENT: 100, // AI-NOTE: 2025-01-24 - Added limit for all content searches /** Limit for community relay checks */ COMMUNITY_CHECK: 1, /** Limit for second-order search results */ SECOND_ORDER_RESULTS: 100, + + /** Maximum results for profile searches */ + MAX_PROFILE_RESULTS: 20, + + /** Batch size for profile fetching operations */ + BATCH_SIZE: 50, } as const; // Nostr event kind ranges diff --git a/src/lib/utils/search_result_formatter.ts b/src/lib/utils/search_result_formatter.ts new file mode 100644 index 0000000..2e946d7 --- /dev/null +++ b/src/lib/utils/search_result_formatter.ts @@ -0,0 +1,28 @@ +/** + * Utility class for formatting search result messages + * AI-NOTE: 2025-01-24 - Extracted from EventSearch component for better separation of concerns + */ +export class SearchResultFormatter { + /** + * Formats a result message based on search count and type + */ + formatResultMessage( + searchResultCount: number | null, + searchResultType: string | null, + ): string { + if (searchResultCount === 0) { + return "Search completed. No results found."; + } + + const typeLabel = searchResultType === "n" + ? "profile" + : searchResultType === "nip05" + ? "NIP-05 address" + : "event"; + const countLabel = searchResultType === "n" ? "profiles" : "events"; + + return searchResultCount === 1 + ? `Search completed. Found 1 ${typeLabel}.` + : `Search completed. 
Found ${searchResultCount} ${countLabel}.`; + } +} diff --git a/src/lib/utils/search_types.ts b/src/lib/utils/search_types.ts index c187c4e..46da61e 100644 --- a/src/lib/utils/search_types.ts +++ b/src/lib/utils/search_types.ts @@ -27,6 +27,9 @@ export interface NostrProfile { website?: string; lud16?: string; pubkey?: string; + isInUserLists?: boolean; + listKinds?: number[]; + created_at?: number; // AI-NOTE: 2025-01-24 - Timestamp for proper date display } /** @@ -61,6 +64,8 @@ export type SearchSubscriptionType = "d" | "t" | "n"; export interface SearchFilter { filter: Filter; subscriptionType: string; + searchTerm?: string; // AI-NOTE: 2025-01-24 - Optional search term for client-side filtering + preloadedEvents?: NDKEvent[]; // AI-NOTE: 2025-01-24 - Preloaded events for profile searches } /** diff --git a/src/lib/utils/search_utility.ts b/src/lib/utils/search_utility.ts index e91da1f..45d8a85 100644 --- a/src/lib/utils/search_utility.ts +++ b/src/lib/utils/search_utility.ts @@ -13,13 +13,13 @@ export { searchBySubscription } from "./subscription_search"; export { searchEvent, searchNip05 } from "./event_search"; export { checkCommunity } from "./community_checker"; export { - wellKnownUrl, - lnurlpWellKnownUrl, - isValidNip05Address, - normalizeSearchTerm, - fieldMatches, - nip05Matches, COMMON_DOMAINS, - isEmojiReaction, createProfileFromEvent, + fieldMatches, + isEmojiReaction, + isValidNip05Address, + lnurlpWellKnownUrl, + nip05Matches, + normalizeSearchTerm, + wellKnownUrl, } from "./search_utils"; diff --git a/src/lib/utils/search_utils.ts b/src/lib/utils/search_utils.ts index 055004e..8ad6243 100644 --- a/src/lib/utils/search_utils.ts +++ b/src/lib/utils/search_utils.ts @@ -106,5 +106,8 @@ export function createProfileFromEvent(event: NDKEvent, profileData: any): any { website: profileData.website, lud16: profileData.lud16, pubkey: event.pubkey, + created_at: event.created_at, // AI-NOTE: 2025-01-24 - Preserve timestamp for proper date display + 
isInUserLists: profileData.isInUserLists, // AI-NOTE: 2025-01-24 - Preserve user list information + listKinds: profileData.listKinds, // AI-NOTE: 2025-01-24 - Preserve list kinds information }; } diff --git a/src/lib/utils/subscription_search.ts b/src/lib/utils/subscription_search.ts index 17fa093..bb6d692 100644 --- a/src/lib/utils/subscription_search.ts +++ b/src/lib/utils/subscription_search.ts @@ -1,40 +1,52 @@ // deno-lint-ignore-file no-explicit-any -import { ndkInstance } from "../ndk.ts"; import { getMatchingTags, getNpubFromNip05 } from "./nostrUtils.ts"; import { nip19 } from "./nostrUtils.ts"; -import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; +import NDK, { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk"; import { searchCache } from "./searchCache.ts"; -import { communityRelays, searchRelays } from "../consts.ts"; +import { searchRelays } from "../consts.ts"; import { get } from "svelte/store"; import type { + SearchCallbacks, + SearchFilter, SearchResult, SearchSubscriptionType, - SearchFilter, - SearchCallbacks, } from "./search_types.ts"; import { - fieldMatches, - nip05Matches, COMMON_DOMAINS, + fieldMatches, isEmojiReaction, + nip05Matches, } from "./search_utils.ts"; -import { TIMEOUTS, SEARCH_LIMITS } from "./search_constants.ts"; +import { SEARCH_LIMITS, TIMEOUTS } from "./search_constants.ts"; import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts"; // Helper function to normalize URLs for comparison const normalizeUrl = (url: string): string => { - return url.replace(/\/$/, ''); // Remove trailing slash + return url.replace(/\/$/, ""); // Remove trailing slash }; +/** + * Filter out unwanted events from search results + * @param events Array of NDKEvent to filter + * @returns Filtered array of NDKEvent + */ +function filterUnwantedEvents(events: NDKEvent[]): NDKEvent[] { + return events.filter( + (event) => !isEmojiReaction(event) && event.kind !== 3 && event.kind !== 5, + ); +} + /** * Search for events by subscription 
type (d, t, n) */ export async function searchBySubscription( searchType: SearchSubscriptionType, searchTerm: string, + ndk: NDK, callbacks?: SearchCallbacks, abortSignal?: AbortSignal, ): Promise { + const startTime = Date.now(); // AI-NOTE: 2025-01-08 - Track search performance const normalizedSearchTerm = searchTerm.toLowerCase().trim(); console.log("subscription_search: Starting search:", { @@ -47,10 +59,52 @@ export async function searchBySubscription( const cachedResult = searchCache.get(searchType, normalizedSearchTerm); if (cachedResult) { console.log("subscription_search: Found cached result:", cachedResult); - return cachedResult; + + // AI-NOTE: 2025-01-24 - Ensure cached events have created_at property preserved + // This fixes the "Unknown date" issue when events are retrieved from cache + const eventsWithCreatedAt = cachedResult.events.map(event => { + if (event && typeof event === 'object' && !event.created_at) { + console.warn("subscription_search: Event missing created_at, setting to 0:", event.id); + (event as any).created_at = 0; + } + return event; + }); + + const secondOrderWithCreatedAt = cachedResult.secondOrder.map(event => { + if (event && typeof event === 'object' && !event.created_at) { + console.warn("subscription_search: Second order event missing created_at, setting to 0:", event.id); + (event as any).created_at = 0; + } + return event; + }); + + const tTagEventsWithCreatedAt = cachedResult.tTagEvents.map(event => { + if (event && typeof event === 'object' && !event.created_at) { + console.warn("subscription_search: T-tag event missing created_at, setting to 0:", event.id); + (event as any).created_at = 0; + } + return event; + }); + + const resultWithCreatedAt = { + ...cachedResult, + events: eventsWithCreatedAt, + secondOrder: secondOrderWithCreatedAt, + tTagEvents: tTagEventsWithCreatedAt + }; + + // AI-NOTE: 2025-01-24 - For profile searches, return cached results immediately + // The EventSearch component now handles cache 
checking before calling this function + if (searchType === "n") { + console.log( + "subscription_search: Returning cached profile result immediately", + ); + return resultWithCreatedAt; + } else { + return resultWithCreatedAt; + } } - const ndk = get(ndkInstance); if (!ndk) { console.error("subscription_search: NDK not initialized"); throw new Error("NDK not initialized"); @@ -64,7 +118,7 @@ export async function searchBySubscription( searchState.timeoutId = setTimeout(() => { console.log("subscription_search: Search timeout reached"); cleanup(); - }, TIMEOUTS.SUBSCRIPTION_SEARCH); + }, TIMEOUTS.SUBSCRIPTION_SEARCH); // AI-NOTE: 2025-01-24 - Use standard timeout since cache is checked first // Check for abort signal if (abortSignal?.aborted) { @@ -76,6 +130,7 @@ export async function searchBySubscription( const searchFilter = await createSearchFilter( searchType, normalizedSearchTerm, + ndk, ); console.log("subscription_search: Created search filter:", searchFilter); const primaryRelaySet = createPrimaryRelaySet(searchType, ndk); @@ -85,6 +140,31 @@ export async function searchBySubscription( "relays", ); + // AI-NOTE: 2025-01-24 - Check for preloaded events first (for profile searches) + if (searchFilter.preloadedEvents && searchFilter.preloadedEvents.length > 0) { + console.log("subscription_search: Using preloaded events:", searchFilter.preloadedEvents.length); + processPrimaryRelayResults( + new Set(searchFilter.preloadedEvents), + searchType, + searchFilter.subscriptionType, + normalizedSearchTerm, + searchState, + abortSignal, + cleanup, + ); + + if (hasResults(searchState, searchType)) { + console.log("subscription_search: Found results from preloaded events, returning immediately"); + const immediateResult = createSearchResult( + searchState, + searchType, + normalizedSearchTerm, + ); + searchCache.set(searchType, normalizedSearchTerm, immediateResult); + return immediateResult; + } + } + // Phase 1: Search primary relay if (primaryRelaySet.relays.size > 0) 
{ try { @@ -125,11 +205,36 @@ export async function searchBySubscription( ); searchCache.set(searchType, normalizedSearchTerm, immediateResult); - // Start Phase 2 in background for additional results + // AI-NOTE: 2025-01-08 - For profile searches, return immediately when found + // but still start background search for second-order results + if (searchType === "n") { + console.log( + "subscription_search: Profile found, returning immediately but starting background second-order search", + ); + + // Start Phase 2 in background for second-order results + searchOtherRelaysInBackground( + searchType, + searchFilter, + searchState, + ndk, + callbacks, + cleanup, + ); + + const elapsed = Date.now() - startTime; + console.log( + `subscription_search: Profile search completed in ${elapsed}ms`, + ); + return immediateResult; + } + + // Start Phase 2 in background for additional results (only for non-profile searches) searchOtherRelaysInBackground( searchType, searchFilter, searchState, + ndk, callbacks, cleanup, ); @@ -137,8 +242,85 @@ export async function searchBySubscription( return immediateResult; } else { console.log( - "subscription_search: No results from primary relay, continuing to Phase 2", + "subscription_search: No results from primary relay", ); + + // AI-NOTE: 2025-01-08 - For profile searches, if no results found in search relays, + // try all relays as fallback + if (searchType === "n") { + console.log( + "subscription_search: No profile found in search relays, trying all relays", + ); + // Try with all relays as fallback + const allRelaySet = new NDKRelaySet( + new Set(Array.from(ndk.pool.relays.values())) as any, + ndk, + ); + try { + const fallbackEvents = await ndk.fetchEvents( + searchFilter.filter, + { closeOnEose: true }, + allRelaySet, + ); + + console.log( + "subscription_search: Fallback search returned", + fallbackEvents.size, + "events", + ); + + processPrimaryRelayResults( + fallbackEvents, + searchType, + searchFilter.subscriptionType, + 
normalizedSearchTerm, + searchState, + abortSignal, + cleanup, + ); + + if (hasResults(searchState, searchType)) { + console.log( + "subscription_search: Found profile in fallback search, returning immediately", + ); + const fallbackResult = createSearchResult( + searchState, + searchType, + normalizedSearchTerm, + ); + searchCache.set(searchType, normalizedSearchTerm, fallbackResult); + const elapsed = Date.now() - startTime; + console.log( + `subscription_search: Profile search completed in ${elapsed}ms (fallback)`, + ); + return fallbackResult; + } + } catch (fallbackError) { + console.error( + "subscription_search: Fallback search failed:", + fallbackError, + ); + } + + console.log( + "subscription_search: Profile not found in any relays, returning empty result", + ); + const emptyResult = createEmptySearchResult( + searchType, + normalizedSearchTerm, + ); + // AI-NOTE: 2025-01-08 - Don't cache empty profile results as they may be due to search issues + // rather than the profile not existing + const elapsed = Date.now() - startTime; + console.log( + `subscription_search: Profile search completed in ${elapsed}ms (not found)`, + ); + return emptyResult; + } else { + console.log( + "subscription_search: No results from primary relay, continuing to Phase 2", + ); + } } } catch (error) { console.error( @@ -153,13 +335,24 @@ export async function searchBySubscription( } // Always do Phase 2: Search all other relays in parallel - return searchOtherRelaysInBackground( + const result = await searchOtherRelaysInBackground( searchType, searchFilter, searchState, + ndk, callbacks, cleanup, ); + + // AI-NOTE: 2025-01-08 - Log performance for non-profile searches + if (searchType !== "n") { + const elapsed = Date.now() - startTime; + console.log( + `subscription_search: ${searchType} search completed in ${elapsed}ms`, + ); + } + + return result; } /** @@ -205,6 +398,7 @@ function createCleanupFunction(searchState: any) { async function createSearchFilter( searchType: 
SearchSubscriptionType, normalizedSearchTerm: string, + ndk: NDK, ): Promise { console.log("subscription_search: Creating search filter for:", { searchType, @@ -214,7 +408,10 @@ async function createSearchFilter( switch (searchType) { case "d": { const dFilter = { - filter: { "#d": [normalizedSearchTerm] }, + filter: { + "#d": [normalizedSearchTerm], + limit: SEARCH_LIMITS.GENERAL_CONTENT, + }, subscriptionType: "d-tag", }; console.log("subscription_search: Created d-tag filter:", dFilter); @@ -222,15 +419,64 @@ async function createSearchFilter( } case "t": { const tFilter = { - filter: { "#t": [normalizedSearchTerm] }, + filter: { + "#t": [normalizedSearchTerm], + limit: SEARCH_LIMITS.GENERAL_CONTENT, + }, subscriptionType: "t-tag", }; console.log("subscription_search: Created t-tag filter:", tFilter); return tFilter; } case "n": { - const nFilter = await createProfileSearchFilter(normalizedSearchTerm); - console.log("subscription_search: Created profile filter:", nFilter); + // AI-NOTE: 2025-01-24 - Use the existing profile search functionality + // This properly handles NIP-05 lookups and name searches + const { searchProfiles } = await import("./profile_search.ts"); + const profileResult = await searchProfiles(normalizedSearchTerm, ndk); + + // Convert profile results to events for compatibility + const events = profileResult.profiles.map((profile) => { + const event = new NDKEvent(ndk); + event.content = JSON.stringify(profile); + + // AI-NOTE: 2025-01-24 - Convert npub to hex public key for compatibility with nprofileEncode + // The profile.pubkey is an npub (bech32-encoded), but nprofileEncode expects hex-encoded public key + let hexPubkey = profile.pubkey || ""; + if (profile.pubkey && profile.pubkey.startsWith("npub")) { + try { + const decoded = nip19.decode(profile.pubkey); + if (decoded.type === "npub") { + hexPubkey = decoded.data as string; + } + } catch (e) { + console.warn("subscription_search: Failed to decode npub:", profile.pubkey, e); + } + } + 
event.pubkey = hexPubkey; + event.kind = 0; + + // AI-NOTE: 2025-01-24 - Use the preserved created_at timestamp from the profile + // This ensures the profile cards show the actual creation date instead of "Unknown date" + if ((profile as any).created_at) { + event.created_at = (profile as any).created_at; + console.log("subscription_search: Using preserved timestamp:", event.created_at); + } else { + // Fallback to current timestamp if no preserved timestamp + event.created_at = Math.floor(Date.now() / 1000); + console.log("subscription_search: Using fallback timestamp:", event.created_at); + } + + return event; + }); + + // Return a mock filter since we're using the profile search directly + const nFilter = { + filter: { kinds: [0], limit: 1 }, // Dummy filter + subscriptionType: "profile-search", + searchTerm: normalizedSearchTerm, + preloadedEvents: events, // AI-NOTE: 2025-01-24 - Pass preloaded events + }; + console.log("subscription_search: Created profile filter with preloaded events:", nFilter); return nFilter; } default: { @@ -239,100 +485,73 @@ async function createSearchFilter( } } -/** - * Create profile search filter - */ -async function createProfileSearchFilter( - normalizedSearchTerm: string, -): Promise { - // For npub searches, try to decode the search term first - try { - const decoded = nip19.decode(normalizedSearchTerm); - if (decoded && decoded.type === "npub") { - return { - filter: { - kinds: [0], - authors: [decoded.data], - limit: SEARCH_LIMITS.SPECIFIC_PROFILE, - }, - subscriptionType: "npub-specific", - }; - } - } catch { - // Not a valid npub, continue with other strategies - } - - // Try NIP-05 lookup first - try { - for (const domain of COMMON_DOMAINS) { - const nip05Address = `${normalizedSearchTerm}@${domain}`; - try { - const npub = await getNpubFromNip05(nip05Address); - if (npub) { - return { - filter: { - kinds: [0], - authors: [npub], - limit: SEARCH_LIMITS.SPECIFIC_PROFILE, - }, - subscriptionType: "nip05-found", - }; - } - } 
catch { - // Continue to next domain - } - } - } catch { - // Fallback to reasonable profile search - } - return { - filter: { kinds: [0], limit: SEARCH_LIMITS.GENERAL_PROFILE }, - subscriptionType: "profile", - }; -} /** - * Create primary relay set based on search type + * Create primary relay set for search operations + * AI-NOTE: 2025-01-24 - Updated to use all available relays to prevent search failures */ function createPrimaryRelaySet( searchType: SearchSubscriptionType, - ndk: any, + ndk: NDK, ): NDKRelaySet { - // Use the new relay management system - const searchRelays = [...get(activeInboxRelays), ...get(activeOutboxRelays)]; - console.debug('subscription_search: Active relay stores:', { - inboxRelays: get(activeInboxRelays), - outboxRelays: get(activeOutboxRelays), - searchRelays - }); - // Debug: Log all relays in NDK pool const poolRelays = Array.from(ndk.pool.relays.values()); - console.debug('subscription_search: NDK pool relays:', poolRelays.map((r: any) => r.url)); - + console.debug( + "subscription_search: NDK pool relays:", + poolRelays.map((r: any) => r.url), + ); + + // AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive search coverage + // This ensures searches don't fail due to missing relays and provides maximum event discovery + if (searchType === "n") { - // For profile searches, use search relays first - const profileRelaySet = poolRelays.filter( + // For profile searches, prioritize search relays for speed but include all relays + const searchRelaySet = poolRelays.filter( (relay: any) => searchRelays.some( (searchRelay: string) => normalizeUrl(relay.url) === normalizeUrl(searchRelay), ), ); - console.debug('subscription_search: Profile relay set:', profileRelaySet.map((r: any) => r.url)); - return new NDKRelaySet(new Set(profileRelaySet) as any, ndk); + + if (searchRelaySet.length > 0) { + console.debug( + "subscription_search: Profile search - using search relays for speed:", + searchRelaySet.map((r: any) => r.url), + ); 
+ // Still include all relays for comprehensive coverage + console.debug( + "subscription_search: Profile search - also including all relays for comprehensive coverage", + ); + return new NDKRelaySet(new Set(poolRelays) as any, ndk); + } else { + // Use all relays if search relays not available + console.debug( + "subscription_search: Profile search - using all relays:", + poolRelays.map((r: any) => r.url), + ); + return new NDKRelaySet(new Set(poolRelays) as any, ndk); + } } else { - // For other searches, use active relays first - const activeRelaySet = poolRelays.filter( - (relay: any) => - searchRelays.some( - (searchRelay: string) => - normalizeUrl(relay.url) === normalizeUrl(searchRelay), - ), + // For all other searches, use ALL available relays for maximum coverage + const activeRelays = [ + ...get(activeInboxRelays), + ...get(activeOutboxRelays), + ]; + console.debug("subscription_search: Active relay stores:", { + inboxRelays: get(activeInboxRelays), + outboxRelays: get(activeOutboxRelays), + activeRelays, + }); + + // AI-NOTE: 2025-01-24 - Use all pool relays instead of filtering to active relays only + // This ensures we don't miss events that might be on other relays + console.debug( + "subscription_search: Using ALL pool relays for comprehensive search coverage:", + poolRelays.map((r: any) => r.url), ); - console.debug('subscription_search: Active relay set:', activeRelaySet.map((r: any) => r.url)); - return new NDKRelaySet(new Set(activeRelaySet) as any, ndk); + return new NDKRelaySet(new Set(poolRelays) as any, ndk); } } @@ -399,10 +618,11 @@ function processProfileEvent( ) { if (!event.content) return; - // If this is a specific npub search or NIP-05 found search, include all matching events + // If this is a specific npub search, NIP-05 found search, or profile-search, include all matching events if ( subscriptionType === "npub-specific" || - subscriptionType === "nip05-found" + subscriptionType === "nip05-found" || + subscriptionType === 
"profile-search" ) { searchState.foundProfiles.push(event); return; @@ -503,12 +723,11 @@ function createSearchResult( normalizedSearchTerm: string, ): SearchResult { return { - events: - searchType === "n" - ? searchState.foundProfiles - : searchType === "t" - ? searchState.tTagEvents - : searchState.firstOrderEvents, + events: searchType === "n" + ? searchState.foundProfiles + : searchType === "t" + ? searchState.tTagEvents + : searchState.firstOrderEvents, secondOrder: [], tTagEvents: [], eventIds: searchState.eventIds, @@ -525,32 +744,22 @@ function searchOtherRelaysInBackground( searchType: SearchSubscriptionType, searchFilter: SearchFilter, searchState: any, + ndk: NDK, callbacks?: SearchCallbacks, cleanup?: () => void, ): Promise { - const ndk = get(ndkInstance); - + // AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive search coverage + // This ensures we don't miss events that might be on any available relay const otherRelays = new NDKRelaySet( - new Set( - Array.from(ndk.pool.relays.values()).filter((relay: any) => { - if (searchType === "n") { - // For profile searches, exclude search relays from fallback search - return !searchRelays.some( - (searchRelay: string) => - normalizeUrl(relay.url) === normalizeUrl(searchRelay), - ); - } else { - // For other searches, exclude community relays from fallback search - return !communityRelays.some( - (communityRelay: string) => - normalizeUrl(relay.url) === normalizeUrl(communityRelay), - ); - } - }), - ), + new Set(Array.from(ndk.pool.relays.values())), ndk, ); + console.debug( + "subscription_search: Background search using ALL relays:", + Array.from(ndk.pool.relays.values()).map((r: any) => r.url), + ); + // Subscribe to events from other relays const sub = ndk.subscribe( searchFilter.filter, @@ -589,6 +798,7 @@ function searchOtherRelaysInBackground( searchType, searchState, searchFilter, + ndk, callbacks, ); searchCache.set(searchType, searchState.normalizedSearchTerm, result); @@ -605,12 
+815,13 @@ function processEoseResults( searchType: SearchSubscriptionType, searchState: any, searchFilter: SearchFilter, + ndk: NDK, callbacks?: SearchCallbacks, ): SearchResult { if (searchType === "n") { - return processProfileEoseResults(searchState, searchFilter, callbacks); + return processProfileEoseResults(searchState, searchFilter, ndk, callbacks); } else if (searchType === "d") { - return processContentEoseResults(searchState, searchType); + return processContentEoseResults(searchState, searchType, ndk); } else if (searchType === "t") { return processTTagEoseResults(searchState); } @@ -624,6 +835,7 @@ function processEoseResults( function processProfileEoseResults( searchState: any, searchFilter: SearchFilter, + ndk: NDK, callbacks?: SearchCallbacks, ): SearchResult { if (searchState.foundProfiles.length === 0) { @@ -652,29 +864,48 @@ function processProfileEoseResults( ) { const targetPubkey = dedupedProfiles[0]?.pubkey; if (targetPubkey) { + console.log( + "subscription_search: Triggering second-order search for npub-specific profile:", + targetPubkey, + ); performSecondOrderSearchInBackground( "n", dedupedProfiles, new Set(), new Set(), + ndk, targetPubkey, callbacks, ); + } else { + console.log( + "subscription_search: No targetPubkey found for second-order search", + ); } } else if (searchFilter.subscriptionType === "profile") { // For general profile searches, perform second-order search for each found profile for (const profile of dedupedProfiles) { if (profile.pubkey) { + console.log( + "subscription_search: Triggering second-order search for general profile:", + profile.pubkey, + ); performSecondOrderSearchInBackground( "n", dedupedProfiles, new Set(), new Set(), + ndk, profile.pubkey, callbacks, ); } } + } else { + console.log( + "subscription_search: No second-order search triggered for subscription type:", + searchFilter.subscriptionType, + ); } return { @@ -694,6 +925,7 @@ function processProfileEoseResults( function 
processContentEoseResults( searchState: any, searchType: SearchSubscriptionType, + ndk: NDK, ): SearchResult { if (searchState.firstOrderEvents.length === 0) { return createEmptySearchResult( @@ -721,6 +953,7 @@ function processContentEoseResults( dedupedEvents, searchState.eventIds, searchState.eventAddresses, + ndk, ); } @@ -780,11 +1013,17 @@ async function performSecondOrderSearchInBackground( firstOrderEvents: NDKEvent[], eventIds: Set = new Set(), addresses: Set = new Set(), + ndk: NDK, targetPubkey?: string, callbacks?: SearchCallbacks, ) { try { - const ndk = get(ndkInstance); + console.log( + "subscription_search: Starting second-order search for", + searchType, + "with targetPubkey:", + targetPubkey, + ); let allSecondOrderEvents: NDKEvent[] = []; // Set a timeout for second-order search @@ -797,18 +1036,74 @@ async function performSecondOrderSearchInBackground( const searchPromise = (async () => { if (searchType === "n" && targetPubkey) { + console.log( + "subscription_search: Searching for events mentioning pubkey:", + targetPubkey, + ); + + // AI-NOTE: 2025-01-24 - Use only active relays for second-order profile search to prevent hanging + const activeRelays = [ + ...get(activeInboxRelays), + ...get(activeOutboxRelays), + ]; + const availableRelays = activeRelays + .map((url) => ndk.pool.relays.get(url)) + .filter((relay): relay is any => relay !== undefined); + const relaySet = new NDKRelaySet( + new Set(availableRelays), + ndk, + ); + + console.log( + "subscription_search: Using", + activeRelays.length, + "active relays for second-order search", + ); + // Search for events that mention this pubkey via p-tags - const pTagFilter = { "#p": [targetPubkey] }; + const pTagFilter = { "#p": [targetPubkey], limit: 50 }; // AI-NOTE: 2025-01-24 - Limit results to prevent hanging const pTagEvents = await ndk.fetchEvents( pTagFilter, { closeOnEose: true }, - new NDKRelaySet(new Set(Array.from(ndk.pool.relays.values())), ndk), + relaySet, ); - // Filter out emoji 
reactions - const filteredEvents = Array.from(pTagEvents).filter( - (event) => !isEmojiReaction(event), + console.log( + "subscription_search: Found", + pTagEvents.size, + "events with p-tag for", + targetPubkey, ); - allSecondOrderEvents = [...allSecondOrderEvents, ...filteredEvents]; + + // AI-NOTE: 2025-01-24 - Also search for events written by this pubkey with limit + const authorFilter = { authors: [targetPubkey], limit: 50 }; // AI-NOTE: 2025-01-24 - Limit results to prevent hanging + const authorEvents = await ndk.fetchEvents( + authorFilter, + { closeOnEose: true }, + relaySet, + ); + console.log( + "subscription_search: Found", + authorEvents.size, + "events written by", + targetPubkey, + ); + + // Filter out unwanted events from both sets + const filteredPTagEvents = filterUnwantedEvents(Array.from(pTagEvents)); + const filteredAuthorEvents = filterUnwantedEvents( + Array.from(authorEvents), + ); + + console.log( + "subscription_search: After filtering unwanted events:", + filteredPTagEvents.length, + "p-tag events,", + filteredAuthorEvents.length, + "author events", + ); + + // Combine both sets of events + allSecondOrderEvents = [...filteredPTagEvents, ...filteredAuthorEvents]; } else if (searchType === "d") { // Parallel fetch for #e and #a tag events const relaySet = new NDKRelaySet( @@ -818,26 +1113,28 @@ async function performSecondOrderSearchInBackground( const [eTagEvents, aTagEvents] = await Promise.all([ eventIds.size > 0 ? ndk.fetchEvents( - { "#e": Array.from(eventIds) }, - { closeOnEose: true }, - relaySet, - ) + { + "#e": Array.from(eventIds), + limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS, + }, + { closeOnEose: true }, + relaySet, + ) : Promise.resolve([]), addresses.size > 0 ? 
ndk.fetchEvents( - { "#a": Array.from(addresses) }, - { closeOnEose: true }, - relaySet, - ) + { + "#a": Array.from(addresses), + limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS, + }, + { closeOnEose: true }, + relaySet, + ) : Promise.resolve([]), ]); - // Filter out emoji reactions - const filteredETagEvents = Array.from(eTagEvents).filter( - (event) => !isEmojiReaction(event), - ); - const filteredATagEvents = Array.from(aTagEvents).filter( - (event) => !isEmojiReaction(event), - ); + // Filter out unwanted events + const filteredETagEvents = filterUnwantedEvents(Array.from(eTagEvents)); + const filteredATagEvents = filterUnwantedEvents(Array.from(aTagEvents)); allSecondOrderEvents = [ ...allSecondOrderEvents, ...filteredETagEvents, @@ -866,15 +1163,20 @@ async function performSecondOrderSearchInBackground( .sort((a, b) => (b.created_at || 0) - (a.created_at || 0)) .slice(0, SEARCH_LIMITS.SECOND_ORDER_RESULTS); + console.log( + "subscription_search: Second-order search completed with", + sortedSecondOrder.length, + "results", + ); + // Update the search results with second-order events const result: SearchResult = { events: firstOrderEvents, secondOrder: sortedSecondOrder, tTagEvents: [], - eventIds: - searchType === "n" - ? new Set(firstOrderEvents.map((p) => p.id)) - : eventIds, + eventIds: searchType === "n" + ? new Set(firstOrderEvents.map((p) => p.id)) + : eventIds, addresses: searchType === "n" ? 
new Set() : addresses, searchType: searchType, searchTerm: "", // This will be set by the caller @@ -882,7 +1184,16 @@ async function performSecondOrderSearchInBackground( // Notify UI of updated results if (callbacks?.onSecondOrderUpdate) { + console.log( + "subscription_search: Calling onSecondOrderUpdate callback with", + sortedSecondOrder.length, + "second-order events", + ); callbacks.onSecondOrderUpdate(result); + } else { + console.log( + "subscription_search: No onSecondOrderUpdate callback available", + ); } })(); diff --git a/src/lib/utils/tag_event_fetch.ts b/src/lib/utils/tag_event_fetch.ts index 077a93e..651ba80 100644 --- a/src/lib/utils/tag_event_fetch.ts +++ b/src/lib/utils/tag_event_fetch.ts @@ -1,7 +1,5 @@ -import type { NDKEvent } from "@nostr-dev-kit/ndk"; -import { ndkInstance } from "../ndk"; -import { get } from "svelte/store"; -import { extractPubkeysFromEvents, batchFetchProfiles } from "./profileCache"; +import NDK, { type NDKEvent } from "@nostr-dev-kit/ndk"; +import { batchFetchProfiles, extractPubkeysFromEvents } from "./npubCache.ts"; // Constants for publication event kinds const INDEX_EVENT_KIND = 30040; @@ -17,12 +15,12 @@ export interface TagExpansionResult { /** * Fetches publications and their content events from relays based on tags - * + * * This function handles the relay-based fetching portion of tag expansion: * 1. Fetches publication index events that have any of the specified tags * 2. Extracts content event references from those publications * 3. 
Fetches the referenced content events - * + * * @param tags Array of tags to search for in publications * @param existingEventIds Set of existing event IDs to avoid duplicates * @param baseEvents Array of base events to check for existing content @@ -33,44 +31,46 @@ export async function fetchTaggedEventsFromRelays( tags: string[], existingEventIds: Set, baseEvents: NDKEvent[], - debug?: (...args: any[]) => void + ndk: NDK, + debug?: (...args: any[]) => void, ): Promise { const log = debug || console.debug; - + log("Fetching from relays for tags:", tags); - + // Fetch publications that have any of the specified tags - const ndk = get(ndkInstance); const taggedPublications = await ndk.fetchEvents({ kinds: [INDEX_EVENT_KIND], "#t": tags, // Match any of these tags - limit: 30 // Reasonable default limit + limit: 30, // Reasonable default limit }); - + log("Found tagged publications from relays:", taggedPublications.size); - + // Filter to avoid duplicates const newPublications = Array.from(taggedPublications).filter( - (event: NDKEvent) => !existingEventIds.has(event.id) + (event: NDKEvent) => !existingEventIds.has(event.id), ); - + // Extract content event d-tags from new publications const contentEventDTags = new Set(); const existingContentDTags = new Set( baseEvents - .filter(e => e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind)) - .map(e => e.tagValue("d")) - .filter(d => d !== undefined) + .filter((e) => + e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind) + ) + .map((e) => e.tagValue("d")) + .filter((d) => d !== undefined), ); - + newPublications.forEach((event: NDKEvent) => { const aTags = event.getMatchingTags("a"); aTags.forEach((tag: string[]) => { // Parse the 'a' tag identifier: kind:pubkey:d-tag if (tag[1]) { - const parts = tag[1].split(':'); + const parts = tag[1].split(":"); if (parts.length >= 3) { - const dTag = parts.slice(2).join(':'); // Handle d-tags with colons + const dTag = parts.slice(2).join(":"); // Handle d-tags 
with colons if (!existingContentDTags.has(dTag)) { contentEventDTags.add(dTag); } @@ -78,7 +78,7 @@ export async function fetchTaggedEventsFromRelays( } }); }); - + // Fetch the content events let newContentEvents: NDKEvent[] = []; if (contentEventDTags.size > 0) { @@ -88,21 +88,21 @@ export async function fetchTaggedEventsFromRelays( }); newContentEvents = Array.from(contentEventsSet); } - + return { publications: newPublications, - contentEvents: newContentEvents + contentEvents: newContentEvents, }; } /** * Searches through already fetched events for publications with specified tags - * + * * This function handles the local search portion of tag expansion: * 1. Searches through existing events for publications with matching tags * 2. Extracts content event references from those publications * 3. Finds the referenced content events in existing events - * + * * @param allEvents Array of all fetched events to search through * @param tags Array of tags to search for in publications * @param existingEventIds Set of existing event IDs to avoid duplicates @@ -115,42 +115,44 @@ export function findTaggedEventsInFetched( tags: string[], existingEventIds: Set, baseEvents: NDKEvent[], - debug?: (...args: any[]) => void + debug?: (...args: any[]) => void, ): TagExpansionResult { const log = debug || console.debug; - + log("Searching through already fetched events for tags:", tags); - + // Find publications in allEvents that have the specified tags - const taggedPublications = allEvents.filter(event => { + const taggedPublications = allEvents.filter((event) => { if (event.kind !== INDEX_EVENT_KIND) return false; if (existingEventIds.has(event.id)) return false; // Skip base events - + // Check if event has any of the specified tags - const eventTags = event.getMatchingTags("t").map(tag => tag[1]); - return tags.some(tag => eventTags.includes(tag)); + const eventTags = event.getMatchingTags("t").map((tag) => tag[1]); + return tags.some((tag) => eventTags.includes(tag)); }); - 
+ const newPublications = taggedPublications; log("Found", newPublications.length, "publications in fetched events"); - + // For content events, also search in allEvents const existingContentDTags = new Set( baseEvents - .filter(e => e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind)) - .map(e => e.tagValue("d")) - .filter(d => d !== undefined) + .filter((e) => + e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind) + ) + .map((e) => e.tagValue("d")) + .filter((d) => d !== undefined), ); - + const contentEventDTags = new Set(); newPublications.forEach((event: NDKEvent) => { const aTags = event.getMatchingTags("a"); aTags.forEach((tag: string[]) => { // Parse the 'a' tag identifier: kind:pubkey:d-tag if (tag[1]) { - const parts = tag[1].split(':'); + const parts = tag[1].split(":"); if (parts.length >= 3) { - const dTag = parts.slice(2).join(':'); // Handle d-tags with colons + const dTag = parts.slice(2).join(":"); // Handle d-tags with colons if (!existingContentDTags.has(dTag)) { contentEventDTags.add(dTag); } @@ -158,23 +160,23 @@ export function findTaggedEventsInFetched( } }); }); - + // Find content events in allEvents - const newContentEvents = allEvents.filter(event => { + const newContentEvents = allEvents.filter((event) => { if (!CONTENT_EVENT_KINDS.includes(event.kind || 0)) return false; const dTag = event.tagValue("d"); return dTag !== undefined && contentEventDTags.has(dTag); }); - + return { publications: newPublications, - contentEvents: newContentEvents + contentEvents: newContentEvents, }; } /** * Fetches profiles for new events and updates progress - * + * * @param newPublications Array of new publication events * @param newContentEvents Array of new content events * @param onProgressUpdate Callback to update progress state @@ -184,23 +186,33 @@ export function findTaggedEventsInFetched( export async function fetchProfilesForNewEvents( newPublications: NDKEvent[], newContentEvents: NDKEvent[], - onProgressUpdate: (progress: { 
current: number; total: number } | null) => void, - debug?: (...args: any[]) => void + ndk: NDK, + onProgressUpdate: ( + progress: { current: number; total: number } | null, + ) => void, + debug?: (...args: any[]) => void, ): Promise { const log = debug || console.debug; - + // Extract pubkeys from new events - const newPubkeys = extractPubkeysFromEvents([...newPublications, ...newContentEvents]); - + const newPubkeys = extractPubkeysFromEvents([ + ...newPublications, + ...newContentEvents, + ]); + if (newPubkeys.size > 0) { - log("Fetching profiles for", newPubkeys.size, "new pubkeys from tag expansion"); - + log( + "Fetching profiles for", + newPubkeys.size, + "new pubkeys from tag expansion", + ); + onProgressUpdate({ current: 0, total: newPubkeys.size }); - - await batchFetchProfiles(Array.from(newPubkeys), (fetched, total) => { + + await batchFetchProfiles(Array.from(newPubkeys), ndk, (fetched, total) => { onProgressUpdate({ current: fetched, total }); }); - + onProgressUpdate(null); } -} \ No newline at end of file +} diff --git a/src/lib/utils/user_lists.ts b/src/lib/utils/user_lists.ts new file mode 100644 index 0000000..dc71be9 --- /dev/null +++ b/src/lib/utils/user_lists.ts @@ -0,0 +1,257 @@ +import { getNdkContext, activeInboxRelays } from "../ndk.ts"; +import { get } from "svelte/store"; +import type { NDKEvent } from "@nostr-dev-kit/ndk"; +import type NDK from "@nostr-dev-kit/ndk"; +import { userStore } from "../stores/userStore.ts"; +import { nip19 } from "nostr-tools"; +import { npubCache } from "./npubCache.ts"; + +/** + * NIP-51 List kinds for user lists + * @see https://github.com/nostr-protocol/nips/blob/master/51.md + */ +export const NIP51_LIST_KINDS = { + FOLLOWS: 3, // Follow list + MUTED: 10000, // Mute list + PINNED: 10001, // Pin list + RELAYS: 10002, // Relay list + PEOPLE: 30000, // Categorized people list + BOOKMARKS: 30001, // Categorized bookmark list + COMMUNITIES: 34550, // Community definition + STARTER_PACKS: 39089, // Starter 
packs + MEDIA_STARTER_PACKS: 39092, // Media starter packs +} as const; + +/** + * Get all list kinds that contain people (npubs) + */ +export const PEOPLE_LIST_KINDS = [ + NIP51_LIST_KINDS.FOLLOWS, + NIP51_LIST_KINDS.PEOPLE, + NIP51_LIST_KINDS.STARTER_PACKS, + NIP51_LIST_KINDS.MEDIA_STARTER_PACKS, +] as const; + +/** + * Interface for a user list event + */ +export interface UserListEvent { + event: NDKEvent; + kind: number; + pubkeys: string[]; + listName?: string; + listDescription?: string; +} + +/** + * Fetch all user lists for a given pubkey + * @param pubkey - The pubkey to fetch lists for + * @param listKinds - Array of list kinds to fetch (defaults to all people list kinds) + * @returns Promise that resolves to an array of UserListEvent objects + */ +export async function fetchUserLists( + pubkey: string, + listKinds: number[] = [...PEOPLE_LIST_KINDS], + ndk?: NDK +): Promise { + const ndkInstance = ndk || getNdkContext(); + if (!ndkInstance) { + console.warn("fetchUserLists: No NDK instance available"); + return []; + } + + console.log(`fetchUserLists: Fetching lists for ${pubkey}, kinds:`, listKinds); + + try { + const events = await ndkInstance.fetchEvents({ + kinds: listKinds, + authors: [pubkey], + }); + + const userLists: UserListEvent[] = []; + + for (const event of events) { + const pubkeys: string[] = []; + + // Extract pubkeys from p-tags + event.tags.forEach(tag => { + if (tag[0] === 'p' && tag[1]) { + pubkeys.push(tag[1]); + } + }); + + // Extract list metadata from content if available + let listName: string | undefined; + let listDescription: string | undefined; + + if (event.content) { + try { + const content = JSON.parse(event.content); + listName = content.name || content.title; + listDescription = content.description; + } catch { + // Content is not JSON, ignore + } + } + + // Get list name from d-tag if available (for addressable lists) + if (!listName && event.kind >= 30000 && event.kind < 40000) { + const dTag = 
event.getMatchingTags('d')[0]?.[1]; + if (dTag) { + listName = dTag; + } + } + + userLists.push({ + event, + kind: event.kind, + pubkeys, + listName, + listDescription, + }); + } + + console.log(`fetchUserLists: Found ${userLists.length} lists with ${userLists.reduce((sum, list) => sum + list.pubkeys.length, 0)} total pubkeys`); + return userLists; + } catch (error) { + console.error("fetchUserLists: Error fetching user lists:", error); + return []; + } +} + +/** + * Fetch the current user's lists + * @param listKinds - Array of list kinds to fetch (defaults to all people list kinds) + * @param ndk - Optional NDK instance (if not provided, will use getNdkContext) + * @returns Promise that resolves to an array of UserListEvent objects + */ +export async function fetchCurrentUserLists( + listKinds: number[] = [...PEOPLE_LIST_KINDS], + ndk?: NDK +): Promise { + const userState = get(userStore); + + if (!userState.signedIn || !userState.pubkey) { + console.warn("fetchCurrentUserLists: No active user found in userStore"); + return []; + } + + console.log("fetchCurrentUserLists: Found user pubkey:", userState.pubkey); + return fetchUserLists(userState.pubkey, listKinds, ndk); +} + +/** + * Get all pubkeys from user lists + * @param userLists - Array of UserListEvent objects + * @returns Set of unique pubkeys + */ +export function getPubkeysFromUserLists(userLists: UserListEvent[]): Set { + const pubkeys = new Set(); + + userLists.forEach(list => { + list.pubkeys.forEach(pubkey => { + pubkeys.add(pubkey); + }); + }); + + return pubkeys; +} + +/** + * Get pubkeys from a specific list kind + * @param userLists - Array of UserListEvent objects + * @param kind - The list kind to filter by + * @returns Set of unique pubkeys from the specified list kind + */ +export function getPubkeysFromListKind(userLists: UserListEvent[], kind: number): Set { + const pubkeys = new Set(); + + userLists.forEach(list => { + if (list.kind === kind) { + list.pubkeys.forEach(pubkey => { + 
pubkeys.add(pubkey); + }); + } + }); + + return pubkeys; +} + +/** + * Check if a pubkey is in any of the user's lists + * @param pubkey - The pubkey to check + * @param userLists - Array of UserListEvent objects + * @returns True if the pubkey is in any list + */ +export function isPubkeyInUserLists(pubkey: string, userLists: UserListEvent[]): boolean { + const result = userLists.some(list => list.pubkeys.includes(pubkey)); + console.log(`isPubkeyInUserLists: Checking ${pubkey} against ${userLists.length} lists, result: ${result}`); + if (result) { + console.log(`isPubkeyInUserLists: Found ${pubkey} in lists:`, userLists.filter(list => list.pubkeys.includes(pubkey)).map(list => ({ kind: list.kind, name: list.listName }))); + } + return result; +} + +/** + * Get the list kinds that contain a specific pubkey + * @param pubkey - The pubkey to check + * @param userLists - Array of UserListEvent objects + * @returns Array of list kinds that contain the pubkey + */ +export function getListKindsForPubkey(pubkey: string, userLists: UserListEvent[]): number[] { + return userLists + .filter(list => list.pubkeys.includes(pubkey)) + .map(list => list.kind); +} + +/** + * Update profile cache when new follows are discovered + * This ensures follows are always cached and prioritized + * @param pubkeys - Array of pubkeys to cache profiles for + */ +export async function updateProfileCacheForPubkeys(pubkeys: string[], ndk?: NDK): Promise { + if (pubkeys.length === 0) return; + + try { + console.log(`Updating profile cache for ${pubkeys.length} pubkeys`); + + const ndkInstance = ndk || getNdkContext(); + if (!ndkInstance) { + console.warn("updateProfileCacheForPubkeys: No NDK instance available"); + return; + } + + // Fetch profiles for all pubkeys in batches + const batchSize = 20; + for (let i = 0; i < pubkeys.length; i += batchSize) { + const batch = pubkeys.slice(i, i + batchSize); + + try { + const events = await ndkInstance.fetchEvents({ + kinds: [0], + authors: batch, + }); 
+ + // Cache each profile + for (const event of events) { + if (event.content) { + try { + const profileData = JSON.parse(event.content); + const npub = nip19.npubEncode(event.pubkey); + npubCache.set(npub, profileData); + console.log(`Cached profile for: ${npub}`); + } catch (e) { + console.warn("Failed to parse profile data:", e); + } + } + } + } catch (error) { + console.warn("Failed to fetch batch of profiles:", error); + } + } + + console.log("Profile cache update completed"); + } catch (error) { + console.warn("Failed to update profile cache:", error); + } +} diff --git a/src/lib/utils/websocket_utils.ts b/src/lib/utils/websocket_utils.ts index 834bca3..5113d1c 100644 --- a/src/lib/utils/websocket_utils.ts +++ b/src/lib/utils/websocket_utils.ts @@ -18,7 +18,7 @@ export interface NostrFilter { ids?: string[]; authors?: string[]; kinds?: number[]; - [tag: `#${string}`]: string[] | undefined; + [tag: `#${string}`]: string[] | undefined; since?: number; until?: number; limit?: number; @@ -28,14 +28,16 @@ type ResolveCallback = (value: T | PromiseLike) => void; type RejectCallback = (reason?: any) => void; type EventHandler = (ev: Event) => void; type MessageEventHandler = (ev: MessageEvent) => void; -type EventHandlerReject = (reject: RejectCallback) => EventHandler; -type EventHandlerResolve = (resolve: ResolveCallback) => (reject: RejectCallback) => MessageEventHandler; +type EventHandlerReject = (reject: RejectCallback) => EventHandler; +type EventHandlerResolve = ( + resolve: ResolveCallback, +) => (reject: RejectCallback) => MessageEventHandler; function handleMessage( ev: MessageEvent, subId: string, resolve: (event: NostrEvent) => void, - reject: (reason: any) => void + reject: (reason: any) => void, ) { const data = JSON.parse(ev.data); @@ -64,37 +66,50 @@ function handleMessage( function handleError( ev: Event, - reject: (reason: any) => void + reject: (reason: any) => void, ) { reject(ev); } -export async function fetchNostrEvent(filter: NostrFilter): 
Promise { +export async function fetchNostrEvent( + filter: NostrFilter, +): Promise { // AI-NOTE: Updated to use active relay stores instead of hardcoded relay URL // This ensures the function uses the user's configured relays and can find events // across multiple relays rather than being limited to a single hardcoded relay. - + // Get available relays from the active relay stores const inboxRelays = get(activeInboxRelays); const outboxRelays = get(activeOutboxRelays); - + // Combine all available relays, prioritizing inbox relays let availableRelays = [...inboxRelays, ...outboxRelays]; - + // AI-NOTE: Use fallback relays when stores are empty (e.g., during SSR) // This ensures publications can still load even when relay stores haven't been populated if (availableRelays.length === 0) { // Import fallback relays from constants const { searchRelays, secondaryRelays } = await import("../consts.ts"); availableRelays = [...searchRelays, ...secondaryRelays]; - + if (availableRelays.length === 0) { availableRelays = ["wss://thecitadel.nostr1.com"]; } } - + + // AI-NOTE: 2025-01-24 - Enhanced relay strategy for better event discovery + // Always include search relays in the relay set for comprehensive event discovery + const { searchRelays, secondaryRelays } = await import("../consts.ts"); + const allRelays = [...availableRelays, ...searchRelays, ...secondaryRelays]; + const uniqueRelays = [...new Set(allRelays)]; // Remove duplicates + + console.debug( + `[fetchNostrEvent] Trying ${uniqueRelays.length} relays for event discovery:`, + uniqueRelays, + ); + // Try all available relays in parallel and return the first result - const relayPromises = availableRelays.map(async (relay) => { + const relayPromises = uniqueRelays.map(async (relay) => { try { const ws = await WebSocketPool.instance.acquire(relay); const subId = crypto.randomUUID(); @@ -102,16 +117,15 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise (resolve: ResolveCallback) => (reject: 
RejectCallback) => MessageEventHandler = - (subId) => - (resolve) => - (reject) => - (ev: MessageEvent) => - handleMessage(ev, subId, resolve, reject); - const curriedErrorHandler: EventHandlerReject = - (reject) => - (ev: Event) => - handleError(ev, reject); + const curriedMessageHandler: ( + subId: string, + ) => ( + resolve: ResolveCallback, + ) => (reject: RejectCallback) => MessageEventHandler = + (subId) => (resolve) => (reject) => (ev: MessageEvent) => + handleMessage(ev, subId, resolve, reject); + const curriedErrorHandler: EventHandlerReject = (reject) => (ev: Event) => + handleError(ev, reject); // AI-NOTE: These variables store references to partially-applied handlers so that the `finally` // block receives the correct references to clean up the listeners. @@ -125,20 +139,20 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise { - ws.removeEventListener("message", messageHandler); - ws.removeEventListener("error", errorHandler); - WebSocketPool.instance.release(ws); - }); + .withTimeout(2000) + .finally(() => { + ws.removeEventListener("message", messageHandler); + ws.removeEventListener("error", errorHandler); + WebSocketPool.instance.release(ws); + }); ws.send(JSON.stringify(["REQ", subId, filter])); - + const result = await res; if (result) { return result; } - + return null; } catch (err) { return null; @@ -147,14 +161,14 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise { try { const event = await fetchNostrEvent({ "#d": [dTag], limit: 1 }); if (!event) { - error(404, `Event not found for d-tag: ${dTag}. href="/events?d=${dTag}"`); + error( + 404, + `Event not found for d-tag: ${dTag}. href="/events?d=${dTag}"`, + ); } return event; } catch (err) { @@ -207,7 +224,10 @@ export async function fetchEventByNaddr(naddr: string): Promise { }; const event = await fetchNostrEvent(filter); if (!event) { - error(404, `Event not found for naddr: ${naddr}. 
href="/events?id=${naddr}"`); + error( + 404, + `Event not found for naddr: ${naddr}. href="/events?id=${naddr}"`, + ); } return event; } catch (err) { @@ -226,7 +246,10 @@ export async function fetchEventByNevent(nevent: string): Promise { const decoded = neventDecode(nevent); const event = await fetchNostrEvent({ ids: [decoded.id], limit: 1 }); if (!event) { - error(404, `Event not found for nevent: ${nevent}. href="/events?id=${nevent}"`); + error( + 404, + `Event not found for nevent: ${nevent}. href="/events?id=${nevent}"`, + ); } return event; } catch (err) { diff --git a/src/routes/+layout.svelte b/src/routes/+layout.svelte index 2fff8a9..d4ff137 100644 --- a/src/routes/+layout.svelte +++ b/src/routes/+layout.svelte @@ -1,11 +1,17 @@ - @@ -47,5 +183,5 @@
- + {@render children()}
diff --git a/src/routes/+layout.ts b/src/routes/+layout.ts index ac50221..b120f84 100644 --- a/src/routes/+layout.ts +++ b/src/routes/+layout.ts @@ -1,136 +1,8 @@ -import { getPersistedLogin, initNdk, ndkInstance } from "../lib/ndk.ts"; -import { - loginWithExtension, - loginWithAmber, - loginWithNpub, -} from "../lib/stores/userStore.ts"; -import { loginMethodStorageKey } from "../lib/stores/userStore.ts"; -import Pharos, { pharosInstance } from "../lib/parser.ts"; import type { LayoutLoad } from "./$types"; -import { get } from "svelte/store"; -import { browser } from "$app/environment"; - -// AI-NOTE: Leave SSR off until event fetches are implemented server-side. -export const ssr = false; - -/** - * Attempts to restore the user's authentication session from localStorage. - * Handles extension, Amber (NIP-46), and npub login methods. - */ -function restoreAuthSession() { - try { - const pubkey = getPersistedLogin(); - const loginMethod = localStorage.getItem(loginMethodStorageKey); - const logoutFlag = localStorage.getItem("alexandria/logout/flag"); - console.log("Layout load - persisted pubkey:", pubkey); - console.log("Layout load - persisted login method:", loginMethod); - console.log("Layout load - logout flag:", logoutFlag); - console.log("All localStorage keys:", Object.keys(localStorage)); - - if (pubkey && loginMethod && !logoutFlag) { - if (loginMethod === "extension") { - console.log("Restoring extension login..."); - loginWithExtension(); - } else if (loginMethod === "amber") { - // Attempt to restore Amber (NIP-46) session from localStorage - const relay = "wss://relay.nsec.app"; - const localNsec = localStorage.getItem("amber/nsec"); - if (localNsec) { - import("@nostr-dev-kit/ndk").then( - async ({ NDKNip46Signer }) => { - const ndk = get(ndkInstance); - try { - // deno-lint-ignore no-explicit-any - const amberSigner = (NDKNip46Signer as any).nostrconnect( - ndk, - relay, - localNsec, - { - name: "Alexandria", - perms: "sign_event:1;sign_event:4", 
- }, - ); - // Try to reconnect (blockUntilReady will resolve if Amber is running and session is valid) - await amberSigner.blockUntilReady(); - const user = await amberSigner.user(); - await loginWithAmber(amberSigner, user); - console.log("Amber session restored."); - } catch { - // If reconnection fails, automatically fallback to npub-only mode - console.warn( - "Amber session could not be restored. Falling back to npub-only mode.", - ); - try { - // Set the flag first, before login - localStorage.setItem("alexandria/amber/fallback", "1"); - console.log("Set fallback flag in localStorage"); - - // Small delay to ensure flag is set - await new Promise((resolve) => setTimeout(resolve, 100)); - - await loginWithNpub(pubkey); - console.log("Successfully fell back to npub-only mode."); - } catch (fallbackErr) { - console.error( - "Failed to fallback to npub-only mode:", - fallbackErr, - ); - } - } - }, - ); - } else { - // No session data, automatically fallback to npub-only mode - console.log( - "No Amber session data found. 
Falling back to npub-only mode.", - ); - - // Set the flag first, before login - localStorage.setItem("alexandria/amber/fallback", "1"); - console.log("Set fallback flag in localStorage"); - - // Small delay to ensure flag is set - setTimeout(async () => { - try { - await loginWithNpub(pubkey); - console.log("Successfully fell back to npub-only mode."); - } catch (fallbackErr) { - console.error( - "Failed to fallback to npub-only mode:", - fallbackErr, - ); - } - }, 100); - } - } else if (loginMethod === "npub") { - console.log("Restoring npub login..."); - loginWithNpub(pubkey); - } - } else if (logoutFlag) { - console.log("Skipping auto-login due to logout flag"); - localStorage.removeItem("alexandria/logout/flag"); - } - } catch (e) { - console.warn( - `Failed to restore login: ${e}\n\nContinuing with anonymous session.`, - ); - } -} +import { initNdk } from "$lib/ndk"; export const load: LayoutLoad = () => { - // Initialize NDK with new relay management system - const ndk = initNdk(); - ndkInstance.set(ndk); - - if (browser) { - restoreAuthSession(); - } - - const parser = new Pharos(ndk); - pharosInstance.set(parser); - return { - ndk, - parser, + ndk: initNdk(), }; -}; +} diff --git a/src/routes/+page.svelte b/src/routes/+page.svelte index 1439a42..54bd05a 100644 --- a/src/routes/+page.svelte +++ b/src/routes/+page.svelte @@ -1,13 +1,54 @@
@@ -22,13 +63,51 @@
{#if eventCount.total > 0} -
- Showing {eventCount.displayed} of {eventCount.total} events. +
+ Showing {eventCount.displayed} of {eventCount.total} events. + + + {#if $userStore.signedIn} +
+ + +
+ {/if}
{/if} + + + +
+

+ Clear Search? +

+

+ Switching to "Show only my publications" will clear your current search. + Are you sure you want to continue? +

+
+ + +
+
+
diff --git a/src/routes/[...catchall]/+page.svelte b/src/routes/[...catchall]/+page.svelte index 0224b3d..1e3a0b1 100644 --- a/src/routes/[...catchall]/+page.svelte +++ b/src/routes/[...catchall]/+page.svelte @@ -11,13 +11,13 @@ >The page you are looking for does not exist or has been moved.

- window.history.back()}>Go Back
diff --git a/src/routes/contact/+page.svelte b/src/routes/contact/+page.svelte index 4137220..0b11b4d 100644 --- a/src/routes/contact/+page.svelte +++ b/src/routes/contact/+page.svelte @@ -9,9 +9,9 @@ Input, Modal, } from "flowbite-svelte"; - import { ndkInstance, ndkSignedIn, activeInboxRelays, activeOutboxRelays } from "$lib/ndk"; + import { activeInboxRelays, activeOutboxRelays, getNdkContext } from "$lib/ndk"; import { userStore } from "$lib/stores/userStore"; - import { communityRelays } from "$lib/consts"; + import { anonymousRelays } from "$lib/consts"; import type NDK from "@nostr-dev-kit/ndk"; import { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk"; // @ts-ignore - Workaround for Svelte component import issue @@ -21,6 +21,8 @@ import { getMimeTags } from "$lib/utils/mime"; import { userBadge } from "$lib/snippets/UserSnippets.svelte"; + const ndk = getNdkContext(); + // Function to close the success message function closeSuccessMessage() { submissionSuccess = false; @@ -62,13 +64,11 @@ const repoAddress = "naddr1qvzqqqrhnypzplfq3m5v3u5r0q9f255fdeyz8nyac6lagssx8zy4wugxjs8ajf7pqy88wumn8ghj7mn0wvhxcmmv9uqq5stvv4uxzmnywf5kz2elajr"; - // Use the new relay management system instead of hardcoded relays + // Use the new relay management system with anonymous relays as fallbacks const allRelays = [ - "wss://relay.damus.io", - "wss://relay.nostr.band", - "wss://nos.lol", ...$activeInboxRelays, ...$activeOutboxRelays, + ...anonymousRelays, ]; // Hard-coded repository owner pubkey and ID from the task @@ -195,7 +195,6 @@ try { // Get NDK instance - const ndk = $ndkInstance; if (!ndk) { throw new Error("NDK instance not available"); } @@ -213,7 +212,7 @@ ...(ndk.pool ? 
Array.from(ndk.pool.relays.values()) .filter( - (relay) => relay.url && !relay.url.includes("wss://nos.lol"), + (relay) => relay.url, ) .map((relay) => normalizeRelayUrl(relay.url)) : []), diff --git a/src/routes/events/+page.svelte b/src/routes/events/+page.svelte index 15c469c..cdc1428 100644 --- a/src/routes/events/+page.svelte +++ b/src/routes/events/+page.svelte @@ -1,6 +1,5 @@
-
+
-
+
Events - {#if showSidePanel} - - {/if} +
+ {#if showSidePanel && (searchResults.length > 0 || secondOrderResults.length > 0 || tTagResults.length > 0)} + + {/if} + {#if showSidePanel} + + {/if} +

- Use this page to view any event (npub, nprofile, nevent, naddr, note, - pubkey, or eventID). You can also search for events by d-tag using the - format "d:tag-name". + Search and explore Nostr events across the network. Find events by: +

+
    +
  • + Event identifiers: nevent, note, naddr, npub, nprofile, + pubkey, or event ID +
  • +
  • NIP-05 addresses: username@domain.com
  • +
  • + Profile names: Search by display name or username (use + "n:" prefix for exact matches) +
  • +
  • + D-tags: Find events with specific d-tags using "d:tag-name" +
  • +
  • + T-tags: Find events tagged with specific topics using + "t:topic" +
  • +
+

+ The page shows primary search results, second-order references + (replies, quotes, mentions), and related tagged events. Click any + event to view details, comments, and relay information.

0}
- - {#if searchType === "n"} - Search Results for name: "{searchTerm}" ({searchResults.length} profiles) - {:else if searchType === "t"} - Search Results for t-tag: "{searchTerm}" ({searchResults.length} - events) - {:else} - Search Results for d-tag: "{searchTerm || - dTagValue?.toLowerCase()}" ({searchResults.length} events) - {/if} - -
- {#each searchResults as result, index} -
- {#if getSummary(result)} -
- {getSummary(result)} -
- {/if} - {#if getDeferralNaddr(result)} -
- Read - { - e.stopPropagation(); - navigateToPublication( - getDeferralNaddr(result) || "", - ); - }} - onkeydown={(e) => { - if (e.key === "Enter" || e.key === " ") { - e.preventDefault(); - e.stopPropagation(); - navigateToPublication( - getDeferralNaddr(result) || "", - ); - } - }} - tabindex="0" - role="button" - > - {getDeferralNaddr(result)} - -
- {/if} - {#if isAddressableEvent(result)} -
- -
- {/if} - {#if result.content} -
- {result.content.slice(0, 200)}{result.content.length > - 200 - ? "..." - : ""} -
- {/if} -
- - {/each} + + {/each} +
{/if} {#if secondOrderResults.length > 0}
- - Second-Order Events (References, Replies, Quotes) ({secondOrderResults.length} - events) - - {#if (searchType === "n" || searchType === "d") && secondOrderResults.length === 100} +
+ + Second-Order Events (References, Replies, Quotes) ({secondOrderResults.length} + events) + + {#if (searchType === "n" || searchType === "d") && secondOrderResults.length === 100} +

+ Showing the 100 newest events. More results may be available. +

+ {/if}

- Showing the 100 newest events. More results may be available. + Events that reference, reply to, highlight, or quote the + original events.

- {/if} -

- Events that reference, reply to, highlight, or quote the original - events. -

-
- {#each secondOrderResults as result, index} -
- - {/each} + {#if result.kind === 0 && profileData} +
+ {#if profileData.picture} + Profile { + (e.target as HTMLImageElement).style.display = + "none"; + }} + /> + {:else} +
+ + {( + profileData.display_name || + profileData.name || + result.pubkey.slice(0, 1) + ).toUpperCase()} + +
+ {/if} +
+ {#if profileData.display_name || profileData.name} + + {profileData.display_name || profileData.name} + + {/if} + {#if profileData.about} + + {profileData.about} + + {/if} +
+
+ {:else} + {#if getSummary(result)} +
+ {getSummary(result)} +
+ {/if} + {#if getDeferralNaddr(result)} +
+ Read + { + e.stopPropagation(); + navigateToPublication( + getDeferralNaddr(result) || "", + ); + }} + onkeydown={(e) => { + if (e.key === "Enter" || e.key === " ") { + e.preventDefault(); + e.stopPropagation(); + navigateToPublication( + getDeferralNaddr(result) || "", + ); + } + }} + tabindex="0" + role="button" + > + {getDeferralNaddr(result)} + +
+ {/if} + {#if isAddressableEvent(result)} +
+ +
+ {/if} + {#if result.content} +
+ +
+ {/if} + {/if} +
+ + {/each} +
{/if} {#if tTagResults.length > 0}
- - Search Results for t-tag: "{searchTerm || - dTagValue?.toLowerCase()}" ({tTagResults.length} events) - -

- Events that are tagged with the t-tag. -

-
- {#each tTagResults as result, index} -
- {#if getSummary(result)} -
- {getSummary(result)} -
- {/if} - {#if getDeferralNaddr(result)} -
- Read - { - e.stopPropagation(); - navigateToPublication( - getDeferralNaddr(result) || "", - ); - }} - onkeydown={(e) => { - if (e.key === "Enter" || e.key === " ") { - e.preventDefault(); - e.stopPropagation(); - navigateToPublication( - getDeferralNaddr(result) || "", - ); - } - }} - tabindex="0" - role="button" - > - {getDeferralNaddr(result)} - -
- {/if} - {#if isAddressableEvent(result)} -
- -
- {/if} - {#if result.content} -
- {result.content.slice(0, 200)}{result.content.length > - 200 - ? "..." - : ""} -
- {/if} -
- - {/each} + + {/each} +
{/if} - {#if !event && searchResults.length === 0 && secondOrderResults.length === 0 && tTagResults.length === 0 && !searchValue && !dTagValue && !searchInProgress} + {#if !event && searchResults.length === 0 && secondOrderResults.length === 0 && tTagResults.length === 0 && !searchValue && !searchInProgress}
+ Publish Nostr Event +

+ Create and publish new Nostr events to the network. This form + supports various event kinds including: +

+
    +
  • + Kind 30040: Publication indexes that organize AsciiDoc + content into structured publications +
  • +
  • + Kind 30041: Individual section content for publications +
  • +
  • + Other kinds: Standard Nostr events with custom tags + and content +
  • +
{/if} @@ -775,11 +1158,15 @@ {#if showSidePanel && event} -
-
- Event Details +
+
+ Event Details
{#if event.kind !== 0} -
+
{/if} - - +
+ +
+
+ +
- {#if isLoggedIn && userPubkey} -
- Add Comment +
+ +
+ + {#if user?.signedIn} +
+ Add Comment
{:else} -
+

Please sign in to add comments.

{/if} diff --git a/src/routes/my-notes/+page.svelte b/src/routes/my-notes/+page.svelte index 1e02ef2..9e7dfa4 100644 --- a/src/routes/my-notes/+page.svelte +++ b/src/routes/my-notes/+page.svelte @@ -1,34 +1,37 @@
toggleTagType(type)} + onclick={() => toggleTagType(type)} > {#if type.length === 1} {type} @@ -200,7 +247,7 @@ {#if tagsToShow.length > 0} @@ -226,7 +273,9 @@

My Notes

- {#if loading} + {#if checkingAuth} +
Checking authentication...
+ {:else if loading}
Loading…
{:else if error}
{error}
@@ -240,7 +289,7 @@
{getTitle(event)}
{/each} diff --git a/src/routes/new/edit/+page.svelte b/src/routes/new/edit/+page.svelte index 1ef59ae..1c43d6d 100644 --- a/src/routes/new/edit/+page.svelte +++ b/src/routes/new/edit/+page.svelte @@ -4,7 +4,6 @@ Textarea, Toolbar, ToolbarButton, - Tooltip, } from "flowbite-svelte"; import { CodeOutline, @@ -13,8 +12,11 @@ } from "flowbite-svelte-icons"; import Preview from "$lib/components/Preview.svelte"; import Pharos, { pharosInstance } from "$lib/parser"; - import { ndkInstance } from "$lib/ndk"; import { goto } from "$app/navigation"; + import { getNdkContext } from "$lib/ndk"; + + const ndk = getNdkContext(); + let someIndexValue = 0; // TODO: Prompt user to sign in before editing. @@ -26,7 +28,7 @@ const showPreview = () => { try { - $pharosInstance ??= new Pharos($ndkInstance); + $pharosInstance ??= new Pharos(ndk); $pharosInstance.reset(); $pharosInstance.parse(editorText); } catch (e) { @@ -53,7 +55,7 @@ return; } - $pharosInstance.generate($ndkInstance.activeUser?.pubkey!); + $pharosInstance.generate(ndk.activeUser?.pubkey!); goto("/new/compose"); }; @@ -71,10 +73,10 @@ bind:value={editorText} > - + - + @@ -87,10 +89,10 @@ - + - + diff --git a/src/routes/proxy+layout.ts b/src/routes/proxy+layout.ts deleted file mode 100644 index 8a97a72..0000000 --- a/src/routes/proxy+layout.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { LayoutLoad } from "./$types"; - -export const load: LayoutLoad = async () => { - return {}; -}; \ No newline at end of file diff --git a/src/routes/publication/+page.server.ts b/src/routes/publication/+page.server.ts index fa30a0d..0be4172 100644 --- a/src/routes/publication/+page.server.ts +++ b/src/routes/publication/+page.server.ts @@ -5,7 +5,7 @@ import type { PageServerLoad } from "./$types"; const ROUTES = { PUBLICATION_BASE: "/publication", NADDR: "/publication/naddr", - NEVENT: "/publication/nevent", + NEVENT: "/publication/nevent", ID: "/publication/id", D_TAG: "/publication/d", START: "/start", @@ -17,7 +17,7 @@ const 
IDENTIFIER_PREFIXES = { NEVENT: "nevent", } as const; -export const load: PageServerLoad = ({ url }) => { +export const load: PageServerLoad = ({ url }: { url: URL }) => { const id = url.searchParams.get("id"); const dTag = url.searchParams.get("d"); @@ -38,4 +38,4 @@ export const load: PageServerLoad = ({ url }) => { // If no query parameters, redirect to the start page redirect(301, ROUTES.START); -}; \ No newline at end of file +}; diff --git a/src/routes/publication/[type]/[identifier]/+layout.server.ts b/src/routes/publication/[type]/[identifier]/+layout.server.ts index 2a90624..4670248 100644 --- a/src/routes/publication/[type]/[identifier]/+layout.server.ts +++ b/src/routes/publication/[type]/[identifier]/+layout.server.ts @@ -1,34 +1,12 @@ -import { error } from "@sveltejs/kit"; import type { LayoutServerLoad } from "./$types"; -import type { NostrEvent } from "../../../../lib/utils/websocket_utils.ts"; -// AI-NOTE: Server-side event fetching for SEO metadata -async function fetchEventServerSide(type: string, identifier: string): Promise { - // For now, return null to indicate server-side fetch not implemented - // This will fall back to client-side fetching - return null; -} -export const load: LayoutServerLoad = async ({ params, url }) => { - const { type, identifier } = params; - - // Try to fetch event server-side for metadata - const indexEvent = await fetchEventServerSide(type, identifier); - - // Extract metadata for meta tags (use fallbacks if no event found) - const title = indexEvent?.tags.find((tag) => tag[0] === "title")?.[1] || "Alexandria Publication"; - const summary = indexEvent?.tags.find((tag) => tag[0] === "summary")?.[1] || - "Alexandria is a digital library, utilizing Nostr events for curated publications and wiki pages."; - const image = indexEvent?.tags.find((tag) => tag[0] === "image")?.[1] || "/screenshots/old_books.jpg"; +export const load: LayoutServerLoad = ({ url }: { url: URL }) => { const currentUrl = 
`${url.origin}${url.pathname}`; return { - indexEvent, // Will be null, triggering client-side fetch metadata: { - title, - summary, - image, currentUrl, }, }; -}; \ No newline at end of file +}; diff --git a/src/routes/publication/[type]/[identifier]/+layout.svelte b/src/routes/publication/[type]/[identifier]/+layout.svelte index c14d288..c40149c 100644 --- a/src/routes/publication/[type]/[identifier]/+layout.svelte +++ b/src/routes/publication/[type]/[identifier]/+layout.svelte @@ -6,29 +6,38 @@ // AI-NOTE: Use metadata from server-side load for SEO and social sharing const { metadata } = data; + + // Type assertion for optional metadata properties + const meta = metadata as typeof metadata & { + title?: string; + summary?: string; + image?: string; + }; - {metadata.title} - + {meta.title || "Alexandria - Nostr Publications"} + - - - + + + - + {#if meta.image} + + {/if} - - - + + + {#if meta.image} + + {/if} -{#if browser} - {@render children()} -{/if} +{@render children()} \ No newline at end of file diff --git a/src/routes/publication/[type]/[identifier]/+page.svelte b/src/routes/publication/[type]/[identifier]/+page.svelte index fb1cf56..2b888cd 100644 --- a/src/routes/publication/[type]/[identifier]/+page.svelte +++ b/src/routes/publication/[type]/[identifier]/+page.svelte @@ -9,35 +9,98 @@ import { page } from "$app/state"; import { goto } from "$app/navigation"; import { createNDKEvent } from "$lib/utils/nostrUtils"; + import { browser } from "$app/environment"; + import { + fetchEventByDTag, + fetchEventById, + fetchEventByNaddr, + fetchEventByNevent, + } from "$lib/utils/websocket_utils.ts"; + import type { NostrEvent } from "$lib/utils/websocket_utils.ts"; + import type { NDKEvent } from "@nostr-dev-kit/ndk"; let { data }: PageProps = $props(); - // data.indexEvent can be null from server-side rendering - // We need to handle this case properly - // AI-NOTE: Always create NDK event since we now ensure NDK is available - console.debug('[Publication] 
data.indexEvent:', data.indexEvent); - console.debug('[Publication] data.ndk:', data.ndk); - - const indexEvent = data.indexEvent && data.ndk - ? createNDKEvent(data.ndk, data.indexEvent) - : null; // No event if no NDK or no event data - - console.debug('[Publication] indexEvent created:', indexEvent); - - // Only create publication tree if we have a valid index event - const publicationTree = indexEvent ? new SveltePublicationTree(indexEvent, data.ndk) : null; - const toc = indexEvent ? new TableOfContents( - indexEvent.tagAddress(), - publicationTree!, - page.url.pathname ?? "", - ) : null; - - setContext("publicationTree", publicationTree); - setContext("toc", toc); - setContext("asciidoctor", Processor()); + // AI-NOTE: Handle client-side loading when event is not available during SSR + let indexEvent = $state(null); + let loading = $state(false); + let error = $state(null); + let publicationTree = $state(null); + let toc = $state(null); + let initialized = $state(false); + + // AI-NOTE: Initialize with server-side data if available + $effect(() => { + if (initialized) return; // Prevent re-initialization + + if (data.indexEvent && data.ndk) { + const serverEvent = createNDKEvent(data.ndk, data.indexEvent); + indexEvent = serverEvent; + initializePublicationComponents(serverEvent); + initialized = true; + } else if (browser && data.identifierInfo && !loading) { + // AI-NOTE: Client-side loading when server-side data is not available + loadEventClientSide(); + } + }); + + async function loadEventClientSide() { + if (!browser || !data.identifierInfo || loading) return; + + loading = true; + error = null; + + try { + const { type, identifier } = data.identifierInfo; + let fetchedEvent: NostrEvent | null = null; + + // Handle different identifier types + switch (type) { + case "id": + fetchedEvent = await fetchEventById(identifier); + break; + case "d": + fetchedEvent = await fetchEventByDTag(identifier); + break; + case "naddr": + fetchedEvent = await 
fetchEventByNaddr(identifier); + break; + case "nevent": + fetchedEvent = await fetchEventByNevent(identifier); + break; + default: + throw new Error(`Unsupported identifier type: ${type}`); + } - // Only set up bookmark handling if we have a valid publication tree - if (publicationTree && indexEvent) { + if (fetchedEvent && data.ndk) { + const clientEvent = createNDKEvent(data.ndk, fetchedEvent); + indexEvent = clientEvent; + initializePublicationComponents(clientEvent); + initialized = true; + } else { + throw new Error("Failed to fetch event from relays"); + } + } catch (err) { + console.error("[Publication] Client-side loading failed:", err); + error = err instanceof Error ? err.message : "Failed to load publication"; + } finally { + loading = false; + } + } + + function initializePublicationComponents(event: NDKEvent) { + if (!data.ndk) return; + + console.log("[Publication] Initializing publication components for event:", event.tagAddress()); + + publicationTree = new SveltePublicationTree(event, data.ndk); + toc = new TableOfContents( + event.tagAddress(), + publicationTree, + page.url.pathname ?? 
"", + ); + + // Set up bookmark handling publicationTree.onBookmarkMoved((address) => { goto(`#${address}`, { replaceState: true, @@ -55,12 +118,27 @@ db.onsuccess = () => { const transaction = db.result.transaction(["bookmarks"], "readwrite"); const store = transaction.objectStore("bookmarks"); - const bookmarkKey = `${indexEvent.tagAddress()}`; + const bookmarkKey = `${event.tagAddress()}`; store.put({ key: bookmarkKey, address }); }; }); } + // AI-NOTE: Set context values reactively to avoid capturing initial null values + $effect(() => { + if (publicationTree) { + setContext("publicationTree", publicationTree); + } + }); + + $effect(() => { + if (toc) { + setContext("toc", toc); + } + }); + + setContext("asciidoctor", Processor()); + onMount(() => { // Only handle bookmarks if we have valid components if (!publicationTree || !indexEvent) return; @@ -77,11 +155,11 @@ db.onsuccess = () => { const transaction = db.result.transaction(["bookmarks"], "readonly"); const store = transaction.objectStore("bookmarks"); - const bookmarkKey = `${indexEvent.tagAddress()}`; + const bookmarkKey = `${indexEvent!.tagAddress()}`; const request = store.get(bookmarkKey); request.onsuccess = () => { - if (request.result?.address) { + if (request.result?.address && publicationTree && indexEvent) { // Set the bookmark in the publication tree publicationTree.setBookmark(request.result.address); @@ -99,12 +177,12 @@ }); -{#if indexEvent && data.indexEvent} - {@const debugInfo = `indexEvent: ${!!indexEvent}, data.indexEvent: ${!!data.indexEvent}`} +{#if indexEvent && publicationTree && toc} + {@const debugInfo = `indexEvent: ${!!indexEvent}, publicationTree: ${!!publicationTree}, toc: ${!!toc}`} {@const debugElement = console.debug('[Publication] Rendering publication with:', debugInfo)} @@ -113,10 +191,33 @@ rootAddress={indexEvent.tagAddress()} publicationType={data.publicationType} indexEvent={indexEvent} + publicationTree={publicationTree} + toc={toc} /> +{:else if loading} +
+
+

Loading publication...

+
+
+{:else if error} +
+
+
+

Failed to load publication

+

{error}

+ +
+
+
{:else} - {@const debugInfo = `indexEvent: ${!!indexEvent}, data.indexEvent: ${!!data.indexEvent}`} + {@const debugInfo = `indexEvent: ${!!indexEvent}, publicationTree: ${!!publicationTree}, toc: ${!!toc}`} {@const debugElement = console.debug('[Publication] NOT rendering publication with:', debugInfo)}
diff --git a/src/routes/publication/[type]/[identifier]/+page.ts b/src/routes/publication/[type]/[identifier]/+page.ts index 8f3bbaf..5a4a288 100644 --- a/src/routes/publication/[type]/[identifier]/+page.ts +++ b/src/routes/publication/[type]/[identifier]/+page.ts @@ -1,80 +1,65 @@ import { error } from "@sveltejs/kit"; import type { PageLoad } from "./$types"; -import { fetchEventByDTag, fetchEventById, fetchEventByNaddr, fetchEventByNevent } from "../../../../lib/utils/websocket_utils.ts"; +import { + fetchEventByDTag, + fetchEventById, + fetchEventByNaddr, + fetchEventByNevent, +} from "../../../../lib/utils/websocket_utils.ts"; import type { NostrEvent } from "../../../../lib/utils/websocket_utils.ts"; +import { browser } from "$app/environment"; -export const load: PageLoad = async ({ params, parent }: { params: { type: string; identifier: string }; parent: any }) => { +export const load: PageLoad = async ( + { params }: { + params: { type: string; identifier: string }; + }, +) => { const { type, identifier } = params; - - // Get layout data (no server-side data since SSR is disabled) - const layoutData = await parent(); - // AI-NOTE: Always fetch client-side since server-side fetch returns null for now + // AI-NOTE: Only fetch client-side since server-side fetch fails due to missing relay connections + // This prevents 404 errors when refreshing publication pages during SSR let indexEvent: NostrEvent | null = null; - - try { - // Handle different identifier types - switch (type) { - case 'id': - indexEvent = await fetchEventById(identifier); - break; - case 'd': - indexEvent = await fetchEventByDTag(identifier); - break; - case 'naddr': - indexEvent = await fetchEventByNaddr(identifier); - break; - case 'nevent': - indexEvent = await fetchEventByNevent(identifier); - break; - default: - error(400, `Unsupported identifier type: ${type}`); - } - } catch (err) { - throw err; - } - - if (!indexEvent) { - // AI-NOTE: Handle case where no relays are available 
during preloading - // This prevents 404 errors when relay stores haven't been populated yet - - // Create appropriate search link based on type - let searchParam = ''; - switch (type) { - case 'id': - searchParam = `id=${identifier}`; - break; - case 'd': - searchParam = `d=${identifier}`; - break; - case 'naddr': - case 'nevent': - searchParam = `id=${identifier}`; - break; - default: - searchParam = `q=${identifier}`; + + // Only attempt to fetch if we're in a browser environment + if (browser) { + try { + // Handle different identifier types + switch (type) { + case "id": + indexEvent = await fetchEventById(identifier); + break; + case "d": + indexEvent = await fetchEventByDTag(identifier); + break; + case "naddr": + indexEvent = await fetchEventByNaddr(identifier); + break; + case "nevent": + indexEvent = await fetchEventByNevent(identifier); + break; + default: + error(400, `Unsupported identifier type: ${type}`); + } + } catch (err) { + // AI-NOTE: Don't throw error immediately - let the component handle it + // This allows for better error handling and retry logic + console.warn(`[Publication Load] Failed to fetch event:`, err); } - - error(404, `Event not found for ${type}: ${identifier}. href="/events?${searchParam}"`); } - const publicationType = indexEvent.tags.find((tag) => tag[0] === "type")?.[1] ?? ""; - - // AI-NOTE: Use proper NDK instance from layout or create one with relays - let ndk = layoutData?.ndk; - if (!ndk) { - // Import NDK dynamically to avoid SSR issues - const NDK = (await import("@nostr-dev-kit/ndk")).default; - // Import initNdk to get properly configured NDK with relays - const { initNdk } = await import("$lib/ndk"); - ndk = initNdk(); - } + // AI-NOTE: Return null for indexEvent during SSR or when fetch fails + // The component will handle client-side loading and error states + const publicationType = indexEvent?.tags.find((tag) => tag[0] === "type")?.[1] ?? 
""; const result = { publicationType, indexEvent, - ndk, // Use minimal NDK instance + // AI-NOTE: Pass the identifier info for client-side retry + identifierInfo: { + type, + identifier, + }, }; - + return result; }; diff --git a/src/routes/visualize/+page.svelte b/src/routes/visualize/+page.svelte index 91925ec..50dd4dd 100644 --- a/src/routes/visualize/+page.svelte +++ b/src/routes/visualize/+page.svelte @@ -8,7 +8,6 @@ import { onMount } from "svelte"; import { get } from "svelte/store"; import EventNetwork from "$lib/navigator/EventNetwork/index.svelte"; - import { ndkInstance } from "$lib/ndk"; import type { NDKEvent } from "@nostr-dev-kit/ndk"; import { filterValidIndexEvents } from "$lib/utils"; import { networkFetchLimit } from "$lib/state"; @@ -16,8 +15,9 @@ import { filterByDisplayLimits, detectMissingEvents, buildCoordinateMap } from "$lib/utils/displayLimits"; import type { PageData } from './$types'; import { getEventKindColor, getEventKindName } from "$lib/utils/eventColors"; - import { extractPubkeysFromEvents, batchFetchProfiles } from "$lib/utils/profileCache"; - import { activePubkey } from "$lib/ndk"; + import { extractPubkeysFromEvents, batchFetchProfiles } from "$lib/utils/npubCache"; + import { userStore } from "$lib/stores/userStore"; + import { getNdkContext } from "$lib/ndk"; // Import utility functions for tag-based event fetching // These functions handle the complex logic of finding publications by tags // and extracting their associated content events @@ -28,6 +28,8 @@ } from "$lib/utils/tag_event_fetch"; import { deduplicateAndCombineEvents } from "$lib/utils/eventDeduplication"; import type { EventCounts } from "$lib/types"; + + const ndk = getNdkContext(); // Configuration const DEBUG = true; // Set to true to enable debug logging @@ -122,7 +124,7 @@ } // Get the current user's pubkey - const currentUserPubkey = get(activePubkey); + const currentUserPubkey = get(userStore).pubkey; if (!currentUserPubkey) { console.warn("No logged-in 
user, cannot fetch user's follow list"); return []; @@ -130,7 +132,7 @@ // If limit is 1, only fetch the current user's follow list if (config.limit === 1) { - const userFollowList = await $ndkInstance.fetchEvents({ + const userFollowList = await ndk.fetchEvents({ kinds: [3], authors: [currentUserPubkey], limit: 1 @@ -148,7 +150,7 @@ debug(`Fetched user's follow list`); } else { // If limit > 1, fetch the user's follow list plus additional ones from people they follow - const userFollowList = await $ndkInstance.fetchEvents({ + const userFollowList = await ndk.fetchEvents({ kinds: [3], authors: [currentUserPubkey], limit: 1 @@ -180,7 +182,7 @@ debug(`Fetching ${pubkeysToFetch.length} additional follow lists (total limit: ${config.limit})`); - const additionalFollowLists = await $ndkInstance.fetchEvents({ + const additionalFollowLists = await ndk.fetchEvents({ kinds: [3], authors: pubkeysToFetch }); @@ -215,7 +217,7 @@ debug(`Fetching level ${level} follow lists for ${currentLevelPubkeys.length} pubkeys`); // Fetch follow lists for this level - const levelFollowLists = await $ndkInstance.fetchEvents({ + const levelFollowLists = await ndk.fetchEvents({ kinds: [3], authors: currentLevelPubkeys }); @@ -362,7 +364,7 @@ const followEvents = await fetchFollowLists(config); allFetchedEvents.push(...followEvents); } else { - const fetchedEvents = await $ndkInstance.fetchEvents( + const fetchedEvents = await ndk.fetchEvents( { kinds: [config.kind], limit: config.limit @@ -394,7 +396,7 @@ if (data.eventId) { // Fetch specific publication debug(`Fetching specific publication: ${data.eventId}`); - const event = await $ndkInstance.fetchEvent(data.eventId); + const event = await ndk.fetchEvent(data.eventId); if (!event) { throw new Error(`Publication not found: ${data.eventId}`); @@ -414,7 +416,7 @@ const indexConfig = publicationConfigs.find(ec => ec.kind === INDEX_EVENT_KIND); const indexLimit = indexConfig?.limit || 20; - const indexEvents = await $ndkInstance.fetchEvents( + 
const indexEvents = await ndk.fetchEvents( { kinds: [INDEX_EVENT_KIND], limit: indexLimit @@ -455,7 +457,7 @@ const contentEventPromises = Array.from(referencesByAuthor.entries()).map( async ([author, refs]) => { const dTags = [...new Set(refs.map(r => r.dTag))]; // Dedupe d-tags - return $ndkInstance.fetchEvents({ + return ndk.fetchEvents({ kinds: enabledContentKinds, // Only fetch enabled kinds authors: [author], "#d": dTags, @@ -577,7 +579,8 @@ profileEvents = await batchFetchProfiles( Array.from(allPubkeys), - (fetched, total) => { + ndk, + (fetched: number, total: number) => { profileLoadingProgress = { current: fetched, total }; } ); @@ -680,6 +683,7 @@ tags, existingEventIds, baseEvents, + ndk, debug ); newPublications = result.publications; @@ -697,6 +701,7 @@ await fetchProfilesForNewEvents( newPublications, newContentEvents, + ndk, (progress: { current: number; total: number } | null) => { profileLoadingProgress = progress; }, debug ); diff --git a/src/routes/visualize/+page.ts b/src/routes/visualize/+page.ts index 3a0c7d1..b63dcee 100644 --- a/src/routes/visualize/+page.ts +++ b/src/routes/visualize/+page.ts @@ -1,9 +1,9 @@ -import type { PageLoad } from './$types'; +import type { PageLoad } from "./$types"; export const load: PageLoad = async ({ url }) => { - const eventId = url.searchParams.get('event'); - + const eventId = url.searchParams.get("event"); + return { - eventId + eventId, }; -}; \ No newline at end of file +}; diff --git a/src/styles/events.css b/src/styles/events.css deleted file mode 100644 index 3c61536..0000000 --- a/src/styles/events.css +++ /dev/null @@ -1,5 +0,0 @@ -@layer components { - canvas.qr-code { - @apply block mx-auto my-4; - } -} diff --git a/src/styles/notifications.css b/src/styles/notifications.css new file mode 100644 index 0000000..c11a0ea --- /dev/null +++ b/src/styles/notifications.css @@ -0,0 +1,220 @@ +/* Notifications Component Styles */ + +/* Loading spinner animation */ +.notifications-loading-spinner { + 
animation: spin 1s linear infinite; +} + +@keyframes spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} + +/* Message highlighting for jump-to functionality */ +.message-highlight { + transition: all 0.2s ease-in-out; +} + +.message-highlight.ring-2 { + box-shadow: 0 0 0 2px rgb(59 130 246); +} + +/* Modal content styling */ +.modal-content { + max-height: 80vh; + overflow-y: auto; +} + +/* Recipient search results */ +.recipient-results { + max-height: 16rem; + overflow-y: auto; +} + +/* Message content area */ +.message-content { + min-width: 0; + word-wrap: break-word; +} + +/* Profile picture fallback */ +.profile-picture-fallback { + background: linear-gradient(135deg, #e5e7eb 0%, #d1d5db 100%); +} + +.dark .profile-picture-fallback { + background: linear-gradient(135deg, #4b5563 0%, #374151 100%); +} + +/* Filter button states */ +.filter-button-active { + background-color: rgb(107 114 128); + color: rgb(243 244 246); +} + +.dark .filter-button-active { + background-color: rgb(107 114 128); + color: rgb(243 244 246); +} + +/* Reply button hover states */ +.reply-button:hover { + background-color: rgb(37 99 235); +} + +.dark .reply-button:hover { + background-color: rgb(29 78 216); +} + +/* Community status indicator */ +.community-status-indicator { + background: linear-gradient(135deg, #fef3c7 0%, #fde68a 100%); +} + +.dark .community-status-indicator { + background: linear-gradient(135deg, #78350f 0%, #92400e 100%); +} + +/* Quoted content styling */ +.quoted-content { + border-left: 4px solid rgb(156 163 175); + background-color: rgb(249 250 251); +} + +.dark .quoted-content { + border-left-color: rgb(107 114 128); + background-color: rgb(31 41 55); +} + +/* Recipient selection styling */ +.recipient-selection { + background-color: rgb(243 244 246); + border: 1px solid rgb(229 231 235); +} + +.dark .recipient-selection { + background-color: rgb(55 65 81); + border-color: rgb(75 85 99); +} + +/* Message container hover 
effects */ +.message-container { + transition: all 0.2s ease-in-out; +} + +.message-container:hover { + box-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.1), 0 2px 4px -2px rgb(0 0 0 / 0.1); +} + +.dark .message-container:hover { + box-shadow: 0 4px 6px -1px rgb(0 0 0 / 0.3), 0 2px 4px -2px rgb(0 0 0 / 0.2); +} + +/* Filter indicator styling */ +.filter-indicator { + background: linear-gradient(135deg, #dbeafe 0%, #bfdbfe 100%); + border: 1px solid rgb(147 197 253); +} + +.dark .filter-indicator { + background: linear-gradient(135deg, #1e3a8a 0%, #1e40af 100%); + border-color: rgb(59 130 246); +} + +/* Textarea focus states */ +.message-textarea:focus { + outline: none; + border-color: transparent; + box-shadow: 0 0 0 2px rgb(59 130 246); +} + +/* Button disabled states */ +.button-disabled { + opacity: 0.5; + cursor: not-allowed; +} + +/* Search input focus states */ +.search-input:focus { + border-color: rgb(59 130 246); + box-shadow: 0 0 0 2px rgb(59 130 246 / 0.2); +} + +.dark .search-input:focus { + border-color: rgb(59 130 246); + box-shadow: 0 0 0 2px rgb(59 130 246 / 0.3); +} + +/* Transition utilities */ +.transition-colors { + transition: + color 0.15s ease-in-out, + background-color 0.15s ease-in-out, + border-color 0.15s ease-in-out, + text-decoration-color 0.15s ease-in-out, + fill 0.15s ease-in-out, + stroke 0.15s ease-in-out; +} + +.transition-all { + transition: all 0.15s ease-in-out; +} + +/* Mode toggle button states */ +.mode-toggle-button { + transition: all 0.15s ease-in-out; +} + +.mode-toggle-button.active { + background-color: rgb(255 255 255); + color: rgb(17 24 39); + box-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); +} + +.dark .mode-toggle-button.active { + background-color: rgb(31 41 55); + color: rgb(243 244 246); +} + +.mode-toggle-button.inactive { + color: rgb(55 65 81); +} + +.dark .mode-toggle-button.inactive { + color: rgb(156 163 175); +} + +.mode-toggle-button.inactive:hover { + color: rgb(17 24 39); +} + +.dark 
.mode-toggle-button.inactive:hover { + color: rgb(243 244 246); +} + +/* Filter button transitions */ +.filter-button { + transition: all 0.15s ease-in-out; +} + +/* Recipient selection button transitions */ +.recipient-selection-button { + transition: all 0.15s ease-in-out; +} + +.recipient-selection-button:hover { + background-color: rgb(249 250 251); +} + +.dark .recipient-selection-button:hover { + background-color: rgb(55 65 81); +} + +.recipient-selection-button:focus { + outline: none; + box-shadow: 0 0 0 2px rgb(59 130 246); +} diff --git a/src/styles/publications.css b/src/styles/publications.css index 71b70b6..9ac48b0 100644 --- a/src/styles/publications.css +++ b/src/styles/publications.css @@ -100,7 +100,8 @@ /* blockquote; prose and poetry quotes */ .publication-leather .quoteblock, .publication-leather .verseblock { - @apply p-4 my-4 border-s-4 rounded border-primary-300 bg-primary-50 dark:border-primary-500 dark:bg-primary-700; + @apply p-4 my-4 border-s-4 rounded border-primary-300 bg-primary-50 + dark:border-primary-500 dark:bg-primary-700; } .publication-leather .verseblock pre.content { @@ -154,7 +155,8 @@ } .publication-leather .admonitionblock.tip { - @apply rounded overflow-hidden border border-success-100 dark:border-success-800; + @apply rounded overflow-hidden border border-success-100 + dark:border-success-800; } .publication-leather .admonitionblock.tip .icon, @@ -172,7 +174,8 @@ } .publication-leather .admonitionblock.important { - @apply rounded overflow-hidden border border-primary-200 dark:border-primary-700; + @apply rounded overflow-hidden border border-primary-200 + dark:border-primary-700; } .publication-leather .admonitionblock.important .icon, @@ -181,7 +184,8 @@ } .publication-leather .admonitionblock.caution { - @apply rounded overflow-hidden border border-warning-200 dark:border-warning-700; + @apply rounded overflow-hidden border border-warning-200 + dark:border-warning-700; } .publication-leather .admonitionblock.caution 
.icon, @@ -190,7 +194,8 @@ } .publication-leather .admonitionblock.warning { - @apply rounded overflow-hidden border border-danger-200 dark:border-danger-800; + @apply rounded overflow-hidden border border-danger-200 + dark:border-danger-800; } .publication-leather .admonitionblock.warning .icon, @@ -201,7 +206,7 @@ /* listingblock, literalblock */ .publication-leather .listingblock, .publication-leather .literalblock { - @apply p-4 rounded bg-highlight dark:bg-primary-700; + @apply p-4 rounded bg-highlight dark:bg-primary-700; } .publication-leather .sidebarblock .title, @@ -254,7 +259,8 @@ @screen lg { @media (hover: hover) { .blog .discreet .card-leather:not(:hover) { - @apply bg-primary-50 dark:bg-primary-1000 opacity-75 transition duration-500 ease-in-out; + @apply bg-primary-50 dark:bg-primary-1000 opacity-75 transition + duration-500 ease-in-out; } .blog .discreet .group { @apply bg-transparent; diff --git a/src/styles/scrollbar.css b/src/styles/scrollbar.css index 4691a9b..c337549 100644 --- a/src/styles/scrollbar.css +++ b/src/styles/scrollbar.css @@ -1,7 +1,8 @@ @layer components { /* Global scrollbar styles */ * { - scrollbar-color: rgba(87, 66, 41, 0.8) transparent; /* Transparent track, default scrollbar thumb */ + scrollbar-color: rgba(87, 66, 41, 0.8) + transparent; /* Transparent track, default scrollbar thumb */ } /* Webkit Browsers (Chrome, Safari, Edge) */ @@ -14,7 +15,8 @@ } *::-webkit-scrollbar-thumb { - @apply bg-primary-500 dark:bg-primary-600 hover:bg-primary-600 dark:hover:bg-primary-800; + @apply bg-primary-500 dark:bg-primary-600 hover:bg-primary-600 + dark:hover:bg-primary-800; border-radius: 6px; /* Rounded scrollbar */ } } diff --git a/src/styles/visualize.css b/src/styles/visualize.css index d0631b5..ea8f9bd 100644 --- a/src/styles/visualize.css +++ b/src/styles/visualize.css @@ -30,7 +30,8 @@ } .legend-letter { - @apply absolute inset-0 flex items-center justify-center text-black text-xs font-bold; + @apply absolute inset-0 flex 
items-center justify-center text-black text-xs + font-bold; } .legend-text { @@ -39,7 +40,8 @@ /* Network visualization styles - specific to visualization */ .network-container { - @apply flex flex-col w-full h-[calc(100vh-138px)] min-h-[400px] max-h-[900px]; + @apply flex flex-col w-full h-[calc(100vh-138px)] min-h-[400px] + max-h-[900px]; } .network-svg-container { @@ -48,11 +50,15 @@ .network-svg { @apply w-full sm:h-[100%] border; - @apply border border-primary-200 has-[:hover]:border-primary-700 dark:bg-primary-1000 dark:border-primary-800 dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500 rounded; + @apply border border-primary-200 has-[:hover]:border-primary-700 + dark:bg-primary-1000 dark:border-primary-800 + dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500 + rounded; } .network-error { - @apply w-full p-4 bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200 rounded-lg mb-4; + @apply w-full p-4 bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200 + rounded-lg mb-4; } .network-error-title { @@ -78,8 +84,9 @@ /* Tooltip styles - specific to visualization tooltips */ .tooltip-close-btn { - @apply absolute top-2 right-2 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700 dark:hover:bg-gray-600 - rounded-full p-1 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200; + @apply absolute top-2 right-2 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700 + dark:hover:bg-gray-600 rounded-full p-1 text-gray-500 hover:text-gray-700 + dark:text-gray-400 dark:hover:text-gray-200; } .tooltip-content { @@ -91,7 +98,8 @@ } .tooltip-title-link { - @apply text-gray-800 hover:text-blue-600 dark:text-gray-200 dark:hover:text-blue-400; + @apply text-gray-800 hover:text-blue-600 dark:text-gray-200 + dark:hover:text-blue-400; } .tooltip-metadata { @@ -99,11 +107,13 @@ } .tooltip-summary { - @apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto max-h-40; + @apply mt-2 text-xs bg-gray-100 
dark:bg-gray-900 p-2 rounded overflow-auto + max-h-40; } .tooltip-content-preview { - @apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto max-h-40; + @apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto + max-h-40; } .tooltip-help-text { diff --git a/test_data/LaTeXtestfile.json b/test_data/LaTeXtestfile.json deleted file mode 100644 index 1dc63f1..0000000 --- a/test_data/LaTeXtestfile.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "created_at": 1752150799, - "content": "# This is a test file for writing mathematical formulas in #NostrMarkup\n\nThis document covers the rendering of formulas in TeX/LaTeX and AsciiMath notation, or some combination of those within the same page. It is meant to be rendered by clients utilizing MathJax.\n\nIf you want the entire document to be rendered as mathematics, place the entire thing in a back-tick code-block, but know that this makes the document slower to load, it is harder to format the prose, and the result is less legible. It also doesn't increase portability, as it's easy to export markup as LaTeX files, or as PDFs, with the formulas rendered.\n\nThe general idea, is that anything placed within `single back-ticks` is inline code, and inline-code will all be scanned for typical mathematics statements and rendered with best-effort. (For more precise rendering, use AsciiDoc.) We will not render text that is not marked as inline code, as mathematical formulas, as that is prose.\n\nIf you want the TeX to be blended into the surrounding text, wrap the text within single `$`. 
Otherwise, use double `$$` symbols, for display math, and it will appear on its own line.\n\n## TeX Examples\n\nInline equation: `$\\sqrt{x}$`\n\nSame equation, in the display mode: `$$\\sqrt{x}$$`\n\nSomething more complex, inline: `$\\mathbb{N} = \\{ a \\in \\mathbb{Z} : a > 0 \\}$`\n\nSomething complex, in display mode: `$$P \\left( A=2 \\, \\middle| \\, \\dfrac{A^2}{B}>4 \\right)$$`\n\nAnother example of `$$\\prod_{i=1}^{n} x_i - 1$$` inline formulas.\n\nFunction example: \n`$$\nf(x)=\n\\begin{cases}\n1/d_{ij} & \\quad \\text{when $d_{ij} \\leq 160$}\\\\ \n0 & \\quad \\text{otherwise}\n\\end{cases}\n$$`\n\nAnd a matrix:\n`$$\nM = \n\\begin{bmatrix}\n\\frac{5}{6} & \\frac{1}{6} & 0 \\\\[0.3em]\n\\frac{5}{6} & 0 & \\frac{1}{6} \\\\[0.3em]\n0 & \\frac{5}{6} & \\frac{1}{6}\n\\end{bmatrix}\n$$`\n\nLaTeX ypesetting won't be rendered. Use NostrMarkup delimeter tables for this sort of thing.\n\n`\\\\begin{tabular}{|c|c|c|l|r|}\n\\\\hline\n\\\\multicolumn{3}{|l|}{test} & A & B \\\\\\\\\n\\\\hline\n1 & 2 & 3 & 4 & 5 \\\\\\\\\n\\\\hline\n\\\\end{tabular}`\n\nWe also recognize common LaTeX statements:\n\n`\\[\n\\begin{array}{ccccc}\n1 & 2 & 3 & 4 & 5 \\\\\n\\end{array}\n\\]`\n\n`\\[ x^n + y^n = z^n \\]`\n\n`\\sqrt{x^2+1}`\n\nGreek letters are a snap: `$\\Psi$`, `$\\psi$`, `$\\Phi$`, `$\\phi$`. \n\nEquations within text are easy--- A well known Maxwell thermodynamic relation is `$\\left.{\\partial T \\over \\partial P}\\right|_{s} = \\left.{\\partial v \\over \\partial s}\\right|_{P}$`.\n\nYou can also set aside equations like so: `\\begin{eqnarray} du &=& T\\ ds -P\\ dv, \\qquad \\mbox{first law.}\\label{fl}\\\\ ds &\\ge& {\\delta q \\over T}.\\qquad \\qquad \\mbox{second law.} \\label{sl} \\end {eqnarray}`\n\n## And some good ole Asciimath\n\nAsciimath doesn't use `$` or `$$` delimiters, but we are using it to make mathy stuff easier to find. If you want it inline, include it inline. 
If you want it on a separate line, put a hard-return before and after.\n\nInline text example here `$E=mc^2$` and another `$1/(x+1)$`; very simple.\n\nDisplaying on a separate line:\n\n`$$sum_(k=1)^n k = 1+2+ cdots +n=(n(n+1))/2$$`\n\n`$$int_0^1 x^2 dx$$`\n\n`$$x = (-6 +- sqrt((-6)^2 - 4 (1)(4)))/(2 xx 1)$$`\n\n`$$|x|= {(x , if x ge 0 text(,)),(-x , if x <0.):}$$`\n\nDisplaying with wider spacing:\n\n`$a=3, \\ \\ \\ b=-3,\\ \\ $` and `$ \\ \\ c=2$`.\n\nThus `$(a+b)(c+b)=0$`.\n\nDisplaying with indentations:\n\nUsing the quadratic formula, the roots of `$x^2-6x+4=0$` are\n\n`$$x = (-6 +- sqrt((-6)^2 - 4 (1)(4)))/(2 xx 1)$$`\n\n`$$ \\ \\ = (-6 +- sqrt(36 - 16))/2$$`\n\n`$$ \\ \\ =(-6 +- sqrt(20))/2$$`\n\n`$$ \\ \\ = -0.8 or 2.2 \\ \\ \\ $$` to 1 decimal place.\n\nAdvanced alignment and matrices looks like this:\n\nA `$3xx3$` matrix, `$$((1,2,3),(4,5,6),(7,8,9))$$` and a `$2xx1$` matrix, or vector, `$$((1),(0))$$`.\n\nThe outer brackets determine the delimiters e.g. `$|(a,b),(c,d)|=ad-bc$`.\n\nA general `$m xx n$` matrix `$$((a_(11), cdots , a_(1n)),(vdots, ddots, vdots),(a_(m1), cdots , a_(mn)))$$`\n\n## Mixed Examples\n\nHere are some examples mixing LaTeX and AsciiMath:\n\n- LaTeX inline: `$\\frac{1}{2}$` vs AsciiMath inline: `$1/2$`\n- LaTeX display: `$$\\sum_{i=1}^n x_i$$` vs AsciiMath display: `$$sum_(i=1)^n x_i$$`\n- LaTeX matrix: `$$\\begin{pmatrix} a & b \\\\ c & d \\end{pmatrix}$$` vs AsciiMath matrix: `$$((a,b),(c,d))$$`\n\n## Edge Cases\n\n- Empty math: `$$`\n- Just delimiters: `$ $`\n- Dollar signs in text: The price is $10.50\n- Currency: `$19.99`\n- Shell command: `echo \"Price: $100\"`\n- JavaScript template: `const price = \\`$${amount}\\``\n- CSS with dollar signs: `color: $primary-color`\n\nThis document should demonstrate that:\n1. LaTeX is processed within inline code blocks with proper delimiters\n2. AsciiMath is processed within inline code blocks with proper delimiters\n3. Regular code blocks remain unchanged\n4. 
Mixed content is handled correctly\n5. Edge cases are handled gracefully", - "tags": [ - ["t", "test"], - ["t", "Asciimath"], - ["t", "TeX"], - ["t", "LaTeX"], - [ - "d", - "this-is-a-test-file-for-writing-mathematical-formulas-in-nostrmarkup" - ], - [ - "title", - "This is a test file for writing mathematical formulas in #NostrMarkup" - ] - ], - "kind": 30023, - "pubkey": "fd208ee8c8f283780a9552896e4823cc9dc6bfd442063889577106940fd927c1", - "id": "91be487e67cb68cfe3c7e965a654642b7bcedecb68340523a8c1b865b21fa5dc", - "sig": "59b7f87fe2c2d318152cf5b4796580f79a26936d515a816ddcb89b89ba337992eaa3d50896d3bde345d25be99c9caa3a237d476abeb8537589256cbcceeb2e75" -} diff --git a/test_data/LaTeXtestfile.md b/test_data/LaTeXtestfile.md deleted file mode 100644 index eec857c..0000000 --- a/test_data/LaTeXtestfile.md +++ /dev/null @@ -1,142 +0,0 @@ -# This is a testfile for writing mathematic formulas in NostrMarkup - -This document covers the rendering of formulas in TeX/LaTeX and AsciiMath notation, or some combination of those within the same page. It is meant to be rendered by clients utilizing MathJax. - -If you want the entire document to be rendered as mathematics, place the entire thing in a backtick-codeblock, but know that this makes the document slower to load, it is harder to format the prose, and the result is less legible. It also doesn't increase portability, as it's easy to export markup as LaTeX files, or as PDFs, with the formulas rendered. - -The general idea, is that anything placed within `single backticks` is inline code, and inline-code will all be scanned for typical mathematics statements and rendered with best-effort. (For more precise rendering, use Asciidoc.) We will not render text that is not marked as inline code, as mathematical formulas, as that is prose. - -If you want the TeX to be blended into the surrounding text, wrap the text within single `$`. Otherwise, use double `$$` symbols, for display math, and it will appear on its own line. 
- -## TeX Examples - -Inline equation: `$\sqrt{x}$` - -Same equation, in the display mode: `$$\sqrt{x}$$` - -Something more complex, inline: `$\mathbb{N} = \{ a \in \mathbb{Z} : a > 0 \}$` - -Something complex, in display mode: `$$P \left( A=2 \, \middle| \, \dfrac{A^2}{B}>4 \right)$$` - -Another example of `$$\prod_{i=1}^{n} x_i - 1$$` inline formulas. - -Function example: -`$$ -f(x)= -\begin{cases} -1/d_{ij} & \quad \text{when $d_{ij} \leq 160$}\\ -0 & \quad \text{otherwise} -\end{cases} - -$$ -` - -And a matrix: -` -$$ - -M = -\begin{bmatrix} -\frac{5}{6} & \frac{1}{6} & 0 \\[0.3em] -\frac{5}{6} & 0 & \frac{1}{6} \\[0.3em] -0 & \frac{5}{6} & \frac{1}{6} -\end{bmatrix} - -$$ -` - -LaTeX ypesetting won't be rendered. Use NostrMarkup delimeter tables for this sort of thing. - -`\\begin{tabular}{|c|c|c|l|r|} -\\hline -\\multicolumn{3}{|l|}{test} & A & B \\\\ -\\hline -1 & 2 & 3 & 4 & 5 \\\\ -\\hline -\\end{tabular}` - -We also recognize common LaTeX statements: - -`\[ -\begin{array}{ccccc} -1 & 2 & 3 & 4 & 5 \\ -\end{array} -\]` - -`\[ x^n + y^n = z^n \]` - -`\sqrt{x^2+1}` - -Greek letters are a snap: `$\Psi$`, `$\psi$`, `$\Phi$`, `$\phi$`. - -Equations within text are easy--- A well known Maxwell thermodynamic relation is `$\left.{\partial T \over \partial P}\right|_{s} = \left.{\partial v \over \partial s}\right|_{P}$`. - -You can also set aside equations like so: `\begin{eqnarray} du &=& T\ ds -P\ dv, \qquad \mbox{first law.}\label{fl}\\ ds &\ge& {\delta q \over T}.\qquad \qquad \mbox{second law.} \label{sl} \end {eqnarray}` - -## And some good ole Asciimath - -Asciimath doesn't use `$` or `$$` delimiters, but we are using it to make mathy stuff easier to find. If you want it inline, include it inline. If you want it on a separate line, put a hard-return before and after. - -Inline text example here `$E=mc^2$` and another `$1/(x+1)$`; very simple. 
- -Displaying on a separate line: - -`$$sum_(k=1)^n k = 1+2+ cdots +n=(n(n+1))/2$$` - -`$$int_0^1 x^2 dx$$` - -`$$x = (-6 +- sqrt((-6)^2 - 4 (1)(4)))/(2 xx 1)$$` - -`$$|x|= {(x , if x ge 0 text(,)),(-x , if x <0.):}$$` - -Displaying with wider spacing: - -`$a=3, \ \ \ b=-3,\ \ $` and `$ \ \ c=2$`. - -Thus `$(a+b)(c+b)=0$`. - -Displaying with indentations: - -Using the quadratic formula, the roots of `$x^2-6x+4=0$` are - -`$$x = (-6 +- sqrt((-6)^2 - 4 (1)(4)))/(2 xx 1)$$` - -`$$ \ \ = (-6 +- sqrt(36 - 16))/2$$` - -`$$ \ \ =(-6 +- sqrt(20))/2$$` - -`$$ \ \ = -0.8 or 2.2 \ \ \ $$` to 1 decimal place. - -Advanced alignment and matrices looks like this: - -A `$3xx3$` matrix, `$$((1,2,3),(4,5,6),(7,8,9))$$` and a `$2xx1$` matrix, or vector, `$$((1),(0))$$`. - -The outer brackets determine the delimiters e.g. `$|(a,b),(c,d)|=ad-bc$`. - -A general `$m xx n$` matrix `$$((a_(11), cdots , a_(1n)),(vdots, ddots, vdots),(a_(m1), cdots , a_(mn)))$$` - -## Mixed Examples - -Here are some examples mixing LaTeX and AsciiMath: - -- LaTeX inline: `$\frac{1}{2}$` vs AsciiMath inline: `$1/2$` -- LaTeX display: `$$\sum_{i=1}^n x_i$$` vs AsciiMath display: `$$sum_(i=1)^n x_i$$` -- LaTeX matrix: `$$\begin{pmatrix} a & b \\ c & d \end{pmatrix}$$` vs AsciiMath matrix: `$$((a,b),(c,d))$$` - -## Edge Cases - -- Empty math: `$$` -- Just delimiters: `$ $` -- Dollar signs in text: The price is $10.50 -- Currency: `$19.99` -- Shell command: `echo "Price: $100"` -- JavaScript template: `const price = \`$${amount}\`` -- CSS with dollar signs: `color: $primary-color` - -This document should demonstrate that: -1. LaTeX is processed within inline code blocks with proper delimiters -2. AsciiMath is processed within inline code blocks with proper delimiters -3. Regular code blocks remain unchanged -4. Mixed content is handled correctly -5. 
Edge cases are handled gracefully -$$ diff --git a/test_output.log b/test_output.log new file mode 100644 index 0000000..ce551de --- /dev/null +++ b/test_output.log @@ -0,0 +1,4178 @@ + +> alexandria@0.0.2 test +> vitest + + + DEV v3.2.4 /home/madmin/Projects/GitCitadel/gc-alexandria + + ✓ tests/unit/ZettelEditor.test.ts (24 tests) 20ms +stdout | tests/unit/relayDeduplication.test.ts > Relay Deduplication Behavior Tests > Addressable Event Deduplication > should keep only the most recent version of addressable events by coordinate +[eventDeduplication] Found 1 duplicate events out of 3 total events +[eventDeduplication] Reduced to 2 unique coordinates +[eventDeduplication] Duplicate details: [ + { + coordinate: '30041:pubkey1:chapter-1', + count: 2, + events: [ 'event1 (created_at: 1000)', 'event2 (created_at: 2000)' ] + } +] + +stdout | tests/unit/relayDeduplication.test.ts > Relay Deduplication Behavior Tests > Addressable Event Deduplication > should handle events with missing d-tags gracefully +[eventDeduplication] No duplicates found in 1 events + +stdout | tests/unit/relayDeduplication.test.ts > Relay Deduplication Behavior Tests > Addressable Event Deduplication > should handle events with missing timestamps +[eventDeduplication] Found 1 duplicate events out of 2 total events +[eventDeduplication] Reduced to 1 unique coordinates +[eventDeduplication] Duplicate details: [ + { + coordinate: '30041:pubkey1:chapter-3', + count: 2, + events: [ 'event7 (created_at: 0)', 'event8 (created_at: 1500)' ] + } +] + +stdout | tests/unit/relayDeduplication.test.ts > Relay Deduplication Behavior Tests > Mixed Event Type Deduplication > should only deduplicate addressable events (kinds 30000-39999) +[eventDeduplication] deduplicateAndCombineEvents: Found 1 duplicate coordinates out of 4 replaceable events +[eventDeduplication] deduplicateAndCombineEvents: Reduced from 5 to 4 events (1 removed) +[eventDeduplication] deduplicateAndCombineEvents: Duplicate details: [ + { + 
coordinate: '30041:pubkey1:chapter-1', + count: 2, + events: [ 'event1 (created_at: 1000)', 'event2 (created_at: 2000)' ] + } +] + +stdout | tests/unit/relayDeduplication.test.ts > Relay Deduplication Behavior Tests > Edge Cases > should handle events with null/undefined values +[eventDeduplication] No duplicates found in 1 events + +stdout | tests/unit/relayDeduplication.test.ts > Relay Deduplication Behavior Tests > Edge Cases > should handle events from different authors with same d-tag +[eventDeduplication] No duplicates found in 2 events + +stdout | tests/unit/relayDeduplication.test.ts > Relay Behavior Simulation > should simulate what happens when relays return duplicate events +[eventDeduplication] Found 2 duplicate events out of 3 total events +[eventDeduplication] Reduced to 1 unique coordinates +[eventDeduplication] Duplicate details: [ + { + coordinate: '30041:pubkey1:chapter-1', + count: 3, + events: [ + 'event1 (created_at: 1000)', + 'event2 (created_at: 2000)', + 'event3 (created_at: 1500)' + ] + } +] + +stdout | tests/unit/relayDeduplication.test.ts > Relay Behavior Simulation > should simulate multiple relays returning different versions +[eventDeduplication] Found 1 duplicate events out of 2 total events +[eventDeduplication] Reduced to 1 unique coordinates +[eventDeduplication] Duplicate details: [ + { + coordinate: '30041:pubkey1:chapter-1', + count: 2, + events: [ 'event1 (created_at: 1000)', 'event2 (created_at: 2000)' ] + } +] + +stdout | tests/unit/relayDeduplication.test.ts > Real Relay Deduplication Tests > should detect if relays are returning duplicate replaceable events +Note: This test would require actual relay queries to verify deduplication behavior +To run this test properly, we would need to: +1. Query real relays for replaceable events +2. Check if relays return duplicates +3. 
Verify our deduplication logic works on real data + +stdout | tests/unit/relayDeduplication.test.ts > Real Relay Deduplication Tests > should verify that our deduplication logic works on real relay data +Note: This test would require actual relay queries +To implement this test, we would need to: +1. Set up NDK with real relays +2. Fetch events for a known author with multiple versions +3. Apply deduplication and verify results + +stdout | tests/unit/relayDeduplication.test.ts > Practical Relay Behavior Analysis > should document what we know about relay deduplication behavior + +=== RELAY DEDUPLICATION BEHAVIOR ANALYSIS === + +Based on the code analysis and the comment from onedev: + +1. THEORETICAL BEHAVIOR: + - Relays SHOULD handle deduplication for replaceable events + - Only the most recent version of each coordinate should be stored + - Client-side deduplication should only be needed for cached/local events + +2. REALITY CHECK: + - Not all relays implement deduplication correctly + - Some relays may return multiple versions of the same event + - Network conditions and relay availability can cause inconsistencies + +3. ALEXANDRIA'S APPROACH: + - Implements client-side deduplication as a safety net + - Uses coordinate system (kind:pubkey:d-tag) for addressable events + - Keeps the most recent version based on created_at timestamp + - Only applies to replaceable events (kinds 30000-39999) + +4. WHY KEEP THE DEDUPLICATION: + - Defensive programming against imperfect relay implementations + - Handles multiple relay sources with different data + - Works with cached events that might be outdated + - Ensures consistent user experience regardless of relay behavior + +5. 
TESTING STRATEGY: + - Unit tests verify our deduplication logic works correctly + - Integration tests would verify relay behavior (when network allows) + - Monitoring can help determine if relays improve over time + +stdout | tests/unit/relayDeduplication.test.ts > Practical Relay Behavior Analysis > should provide recommendations for when to remove deduplication + +=== RECOMMENDATIONS FOR REMOVING DEDUPLICATION === + +The deduplication logic should be kept until: + +1. RELAY STANDARDS: + - NIP-33 (replaceable events) is widely implemented by relays + - Relays consistently return only the most recent version + - No major relay implementations return duplicates + +2. TESTING EVIDENCE: + - Real-world testing shows relays don't return duplicates + - Multiple relay operators confirm deduplication behavior + - No user reports of duplicate content issues + +3. MONITORING: + - Add logging to track when deduplication is actually used + - Monitor relay behavior over time + - Collect metrics on duplicate events found + +4. GRADUAL REMOVAL: + - Make deduplication configurable (on/off) + - Test with deduplication disabled in controlled environments + - Monitor for issues before removing completely + +5. 
FALLBACK STRATEGY: + - Keep deduplication as a fallback option + - Allow users to enable it if they experience issues + - Maintain the code for potential future use + + ✓ tests/unit/relayDeduplication.test.ts (22 tests) 22ms + ✓ tests/unit/nostr_identifiers.test.ts (12 tests) 9ms + ✓ tests/unit/tagExpansion.test.ts (12 tests) 23ms +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure with Preamble > should build 30040 event set with preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document with Preamble', + authors: [ 'John Doe', 'Section Author' ], + version: '1.0', + publicationDate: '2024-01-15, Alexandria Test', + summary: 'This is a test document with preamble', + tags: [ 'test', 'preamble', 'asciidoc' ] + }, + content: '= Test Document with Preamble\n' + + 'John Doe \n' + + '1.0, 2024-01-15, Alexandria Test\n' + + ':summary: This is a test document with preamble\n' + + ':keywords: test, preamble, asciidoc\n' + + '\n' + + 'This is the preamble content that should be included.\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document with Preamble', + indexDTag: 'test-document-with-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-with-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} 
+Creating section 1: { + title: 'Second Section', + dTag: 'test-document-with-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document with Preamble' ], + [ 'author', 'John Doe' ], + [ 'author', 'Section Author' ], + [ 'version', '1.0' ], + [ 'published_on', '2024-01-15, Alexandria Test' ], + [ 'summary', 'This is a test document with preamble' ], + [ 't', 'test' ], + [ 't', 'preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-with-preamble' ], + [ 'title', 'Test Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure without Preamble > should build 30040 event set without preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document without Preamble', + authors: [ 'Section Author' ], + version: 'Version', + summary: 'This is a test document without preamble', + tags: [ 'test', 'no-preamble', 'asciidoc' ] + }, + content: '= Test Document without Preamble\n' + + ':summary: This is a test document without preamble\n' + + ':keywords: test, no-preamble, asciidoc\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the 
second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document without Preamble', + indexDTag: 'test-document-without-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-without-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} +Creating section 1: { + title: 'Second Section', + dTag: 'test-document-without-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document without Preamble' ], + [ 'author', 'Section Author' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a test document without preamble' ], + [ 't', 'test' ], + [ 't', 'no-preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-without-preamble' ], + [ 'title', 'Test Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + + ❯ tests/unit/metadataExtraction.test.ts (16 tests | 2 failed) 231ms + × AsciiDoc Metadata Extraction > 
extractDocumentMetadata should extract document metadata correctly 124ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract section metadata correctly 8ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract standalone author names and remove them from content 5ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should handle multiple standalone author names 5ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should not extract non-author lines as authors 4ms + ✓ AsciiDoc Metadata Extraction > parseAsciiDocWithMetadata should parse complete document 23ms + ✓ AsciiDoc Metadata Extraction > metadataToTags should convert metadata to Nostr tags 2ms + ✓ AsciiDoc Metadata Extraction > should handle index card format correctly 4ms + ✓ AsciiDoc Metadata Extraction > should handle empty content gracefully 4ms + ✓ AsciiDoc Metadata Extraction > should handle keywords as tags 4ms + ✓ AsciiDoc Metadata Extraction > should handle both tags and keywords 5ms + ✓ AsciiDoc Metadata Extraction > should handle tags only 8ms + ✓ AsciiDoc Metadata Extraction > should handle both summary and description 15ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle section-only content correctly 8ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle minimal document header (just title) correctly 1ms + × AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly 7ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure with Preamble > should build 30040 event set with skeleton structure and preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document with Preamble', + version: 'Version', + summary: 
'This is a skeleton document with preamble', + tags: [ 'skeleton', 'preamble', 'empty' ] + }, + content: '= Skeleton Document with Preamble\n' + + ':summary: This is a skeleton document with preamble\n' + + ':keywords: skeleton, preamble, empty\n' + + '\n' + + 'This is the preamble content.\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document with Preamble', + indexDTag: 'skeleton-document-with-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-with-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-with-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-with-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a skeleton document with preamble' ], + [ 't', 'skeleton' ], + [ 't', 'preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-with-preamble' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', 
+ '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure without Preamble > should build 30040 event set with skeleton structure without preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document without Preamble', + version: 'Version', + summary: 'This is a skeleton document without preamble', + tags: [ 'skeleton', 'no-preamble', 'empty' ] + }, + content: '= Skeleton Document without Preamble\n' + + ':summary: This is a skeleton document without preamble\n' + + ':keywords: skeleton, no-preamble, empty\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document without Preamble', + indexDTag: 'skeleton-document-without-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-without-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-without-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-without-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + 
'30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton Document without Preamble' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a skeleton document without preamble' ], + [ 't', 'skeleton' ], + [ 't', 'no-preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-without-preamble' ], + [ 'title', 'Skeleton Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card format +Parsed AsciiDoc: { + metadata: { title: 'Test Index Card', version: 'Version' }, + content: '= Test Index Card\nindex card', + sections: [] +} +Creating index card format (no sections) + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card with metadata +Parsed AsciiDoc: { + metadata: { + title: 'Test Index Card with Metadata', + version: 'Version', + summary: 'This is an index card with metadata', + tags: [ 'index', 'card', 'metadata' ] + }, + content: '= Test Index Card with Metadata\n' + + ':summary: This is an index card with metadata\n' + + ':keywords: index, card, metadata\n' + + 'index card', + sections: [] +} +Index event: { + documentTitle: 'Test Index Card with Metadata', + indexDTag: 'test-index-card-with-metadata' +} +A tags: [] +Final index event: { + 
kind: 30040, + content: '', + tags: [ + [ 'type', 'index-card' ], + [ 'title', 'Test Index Card with Metadata' ], + [ 'version', 'Version' ], + [ 'summary', 'This is an index card with metadata' ], + [ 't', 'index' ], + [ 't', 'card' ], + [ 't', 'metadata' ], + [ 'd', 'test-index-card-with-metadata' ], + [ 'title', 'Test Index Card with Metadata' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Complex Metadata Structures > should handle complex metadata with all attribute types +Parsed AsciiDoc: { + metadata: { + title: 'Complex Metadata Document', + authors: [ + 'Jane Smith', + 'Override Author', + 'Third Author', + 'Section Author', + 'Section Co-Author' + ], + version: '2.0', + publicationDate: '2024-03-01', + summary: 'This is a complex document with all metadata types Alternative description field', + publishedBy: 'Alexandria Complex', + type: 'book', + coverImage: 'https://example.com/cover.jpg', + isbn: '978-0-123456-78-9', + source: 'https://github.com/alexandria/complex', + autoUpdate: 'yes', + tags: [ + 'additional', + 'tags', + 'here', + 'complex', + 'metadata', + 'all-types' + ] + }, + content: '= Complex Metadata Document\n' + + 'Jane Smith \n' + + '2.0, 2024-02-20, Alexandria Complex\n' + + ':summary: This is a complex document with all metadata types\n' + + ':description: Alternative description field\n' + + ':keywords: complex, metadata, all-types\n' + + ':tags: additional, tags, here\n' + + ':author: Override Author\n' + + ':author: Third Author\n' + + ':version: 3.0\n' + + ':published_on: 2024-03-01\n' + + ':published_by: Alexandria Complex\n' + + ':type: book\n' + + ':image: https://example.com/cover.jpg\n' + + ':isbn: 978-0-123456-78-9\n' + + ':source: https://github.com/alexandria/complex\n' + + ':auto-update: yes\n' + + '\n' + + 'This is the preamble content.\n' + + 
'\n' + + '== Section with Complex Metadata\n' + + ':author: Section Author\n' + + ':author: Section Co-Author\n' + + ':summary: This section has complex metadata\n' + + ':description: Alternative description for section\n' + + ':keywords: section, complex, metadata\n' + + ':tags: section, tags\n' + + ':type: chapter\n' + + ':image: https://example.com/section-image.jpg\n' + + '\n' + + 'This is the section content.', + sections: [ + { + metadata: [Object], + content: 'This is the section content.', + title: 'Section with Complex Metadata' + } + ] +} +Index event: { + documentTitle: 'Complex Metadata Document', + indexDTag: 'complex-metadata-document' +} +Creating section 0: { + title: 'Section with Complex Metadata', + dTag: 'complex-metadata-document-section-with-complex-metadata', + content: 'This is the section content.', + metadata: { + title: 'Section with Complex Metadata', + authors: [ 'Section Author', 'Section Co-Author' ], + summary: 'This section has complex metadata Alternative description for section', + type: 'chapter', + coverImage: 'https://example.com/section-image.jpg', + tags: [ 'section', 'tags', 'complex', 'metadata' ] + } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'complex' ], + [ 'title', 'Complex Metadata Document' ], + [ 'author', 'Jane Smith' ], + [ 'author', 'Override Author' ], + [ 'author', 'Third Author' ], + [ 'author', 'Section Author' ], + [ 'author', 'Section Co-Author' ], + [ 'version', '2.0' ], + [ 'published_on', '2024-03-01' ], + [ 'published_by', 'Alexandria Complex' ], + [ + 'summary', + 'This is a complex document with all metadata types Alternative description field' + ], + [ 'image', 'https://example.com/cover.jpg' ], + [ 'i', '978-0-123456-78-9' ], + [ 'source', 'https://github.com/alexandria/complex' ], + [ 'type', 'book' ], + [ 'auto-update', 'yes' ], + [ 't', 'additional' ], + [ 
't', 'tags' ], + [ 't', 'here' ], + [ 't', 'complex' ], + [ 't', 'metadata' ], + [ 't', 'all-types' ], + [ 'd', 'complex-metadata-document' ], + [ 'title', 'Complex Metadata Document' ], + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with only title and no sections +Parsed AsciiDoc: { + metadata: { + title: 'Document with No Sections', + version: 'Version', + summary: 'This document has no sections' + }, + content: '= Document with No Sections\n' + + ':summary: This document has no sections\n' + + '\n' + + 'This is just preamble content.', + sections: [] +} +Index event: { + documentTitle: 'Document with No Sections', + indexDTag: 'document-with-no-sections' +} +A tags: [] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with No Sections' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has no sections' ], + [ 'd', 'document-with-no-sections' ], + [ 'title', 'Document with No Sections' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with special characters in title +Parsed AsciiDoc: { + metadata: { + title: 'Document with Special Characters: Test & More!', + version: 'Version', + summary: 'This document has special characters in the title' + }, + content: '= Document with Special Characters: Test & More!\n' + + ':summary: This document has special characters in the title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 
'Section 1' + } + ] +} +Index event: { + documentTitle: 'Document with Special Characters: Test & More!', + indexDTag: 'document-with-special-characters-test-more' +} +Creating section 0: { + title: 'Section 1', + dTag: 'document-with-special-characters-test-more-section-1', + content: 'Content here.', + metadata: { title: 'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with Special Characters: Test & More!' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has special characters in the title' ], + [ 'd', 'document-with-special-characters-test-more' ], + [ 'title', 'Document with Special Characters: Test & More!' ], + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with very long title +Parsed AsciiDoc: { + metadata: { + title: 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality', + version: 'Version', + summary: 'This document has a very long title' + }, + content: '= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality\n' + + ':summary: This document has a very long title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 'Section 1' + } + ] +} +Index event: { + documentTitle: 'This is a very long document title that should be handled properly by the system and should not cause any issues with 
the d-tag generation or any other functionality', + indexDTag: 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' +} +Creating section 0: { + title: 'Section 1', + dTag: 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1', + content: 'Content here.', + metadata: { title: 'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ 'version', 'Version' ], + [ 'summary', 'This document has a very long title' ], + [ + 'd', + 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' + ], + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + + ✓ tests/unit/eventInput30040.test.ts (14 tests) 389ms +(node:1443840) Warning: To load an ES module, set "type": "module" in the package.json or use the .mjs 
extension. +(Use `node --trace-warnings ...` to show where the warning was created) + ✓ tests/unit/mathProcessing.test.ts (18 tests) 16ms + +⎯⎯⎯⎯⎯⎯⎯ Failed Tests 2 ⎯⎯⎯⎯⎯⎯⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > extractDocumentMetadata should extract document metadata correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ tests/unit/metadataExtraction.test.ts:44:30 + 42| + 43| expect(metadata.title).toBe("Test Document with Metadata"); + 44| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 45| expect(metadata.version).toBe("1.0"); + 46| expect(metadata.publicationDate).toBe("2024-01-15"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[1/2]⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ tests/unit/metadataExtraction.test.ts:318:32 + 316| // Should extract document-level metadata + 317| expect(metadata.title).toBe("Test Document"); + 318| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 319| expect(metadata.version).toBe("1.0"); + 320| expect(metadata.publishedBy).toBe("Alexandria Test"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[2/2]⎯ + + + Test Files 1 failed | 6 passed (7) + Tests 2 failed | 116 passed (118) + Start at 13:16:23 + Duration 2.53s (transform 1.97s, setup 0ms, collect 3.66s, tests 710ms, environment 2ms, prepare 1.10s) + + FAIL Tests failed. Watching for file changes... 
+ press h to show help, press q to quit +c RERUN src/lib/utils/asciidoc_metadata.ts + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure with Preamble > should build 30040 event set with preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document with Preamble', + authors: [ 'John Doe', 'Section Author' ], + version: '1.0', + publicationDate: '2024-01-15, Alexandria Test', + summary: 'This is a test document with preamble', + tags: [ 'test', 'preamble', 'asciidoc' ] + }, + content: '= Test Document with Preamble\n' + + 'John Doe \n' + + '1.0, 2024-01-15, Alexandria Test\n' + + ':summary: This is a test document with preamble\n' + + ':keywords: test, preamble, asciidoc\n' + + '\n' + + 'This is the preamble content that should be included.\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document with Preamble', + indexDTag: 'test-document-with-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-with-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} +Creating section 1: { + title: 'Second Section', + dTag: 'test-document-with-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A 
tags: [ + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document with Preamble' ], + [ 'author', 'John Doe' ], + [ 'author', 'Section Author' ], + [ 'version', '1.0' ], + [ 'published_on', '2024-01-15, Alexandria Test' ], + [ 'summary', 'This is a test document with preamble' ], + [ 't', 'test' ], + [ 't', 'preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-with-preamble' ], + [ 'title', 'Test Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure without Preamble > should build 30040 event set without preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document without Preamble', + authors: [ 'Section Author' ], + version: 'Version', + summary: 'This is a test document without preamble', + tags: [ 'test', 'no-preamble', 'asciidoc' ] + }, + content: '= Test Document without Preamble\n' + + ':summary: This is a test document without preamble\n' + + ':keywords: test, no-preamble, asciidoc\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is 
the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document without Preamble', + indexDTag: 'test-document-without-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-without-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} +Creating section 1: { + title: 'Second Section', + dTag: 'test-document-without-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document without Preamble' ], + [ 'author', 'Section Author' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a test document without preamble' ], + [ 't', 'test' ], + [ 't', 'no-preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-without-preamble' ], + [ 'title', 'Test Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure with Preamble > should build 30040 event set with skeleton structure and preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document with Preamble', + version: 'Version', + summary: 'This is a skeleton document with preamble', + tags: [ 'skeleton', 'preamble', 
'empty' ] + }, + content: '= Skeleton Document with Preamble\n' + + ':summary: This is a skeleton document with preamble\n' + + ':keywords: skeleton, preamble, empty\n' + + '\n' + + 'This is the preamble content.\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document with Preamble', + indexDTag: 'skeleton-document-with-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-with-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-with-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-with-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a skeleton document with preamble' ], + [ 't', 'skeleton' ], + [ 't', 'preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-with-preamble' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ 
+ 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + + ❯ tests/unit/metadataExtraction.test.ts (16 tests | 2 failed) 202ms + × AsciiDoc Metadata Extraction > extractDocumentMetadata should extract document metadata correctly 116ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract section metadata correctly 7ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract standalone author names and remove them from content 4ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should handle multiple standalone author names 5ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should not extract non-author lines as authors 4ms + ✓ AsciiDoc Metadata Extraction > parseAsciiDocWithMetadata should parse complete document 20ms + ✓ AsciiDoc Metadata Extraction > metadataToTags should convert metadata to Nostr tags 2ms + ✓ AsciiDoc Metadata Extraction > should handle index card format correctly 4ms + ✓ AsciiDoc Metadata Extraction > should handle empty content gracefully 4ms + ✓ AsciiDoc Metadata Extraction > should handle keywords as tags 4ms + ✓ AsciiDoc Metadata Extraction > should handle both tags and keywords 4ms + ✓ AsciiDoc Metadata Extraction > should handle tags only 3ms + ✓ AsciiDoc Metadata Extraction > should handle both summary and description 7ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle section-only content correctly 8ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle minimal document header (just title) correctly 1ms + × AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly 7ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to 
deeply equal [ 'John Doe', 'Jane Smith' ] +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure without Preamble > should build 30040 event set with skeleton structure without preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document without Preamble', + version: 'Version', + summary: 'This is a skeleton document without preamble', + tags: [ 'skeleton', 'no-preamble', 'empty' ] + }, + content: '= Skeleton Document without Preamble\n' + + ':summary: This is a skeleton document without preamble\n' + + ':keywords: skeleton, no-preamble, empty\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document without Preamble', + indexDTag: 'skeleton-document-without-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-without-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-without-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-without-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton Document without Preamble' ], + [ 
'version', 'Version' ], + [ 'summary', 'This is a skeleton document without preamble' ], + [ 't', 'skeleton' ], + [ 't', 'no-preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-without-preamble' ], + [ 'title', 'Skeleton Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card format +Parsed AsciiDoc: { + metadata: { title: 'Test Index Card', version: 'Version' }, + content: '= Test Index Card\nindex card', + sections: [] +} +Creating index card format (no sections) + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card with metadata +Parsed AsciiDoc: { + metadata: { + title: 'Test Index Card with Metadata', + version: 'Version', + summary: 'This is an index card with metadata', + tags: [ 'index', 'card', 'metadata' ] + }, + content: '= Test Index Card with Metadata\n' + + ':summary: This is an index card with metadata\n' + + ':keywords: index, card, metadata\n' + + 'index card', + sections: [] +} +Index event: { + documentTitle: 'Test Index Card with Metadata', + indexDTag: 'test-index-card-with-metadata' +} +A tags: [] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'index-card' ], + [ 'title', 'Test Index Card with Metadata' ], + [ 'version', 'Version' ], + [ 'summary', 'This is an index card with metadata' ], + [ 't', 'index' ], + [ 't', 'card' ], + [ 't', 'metadata' ], + [ 'd', 'test-index-card-with-metadata' ], + [ 
'title', 'Test Index Card with Metadata' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Complex Metadata Structures > should handle complex metadata with all attribute types +Parsed AsciiDoc: { + metadata: { + title: 'Complex Metadata Document', + authors: [ + 'Jane Smith', + 'Override Author', + 'Third Author', + 'Section Author', + 'Section Co-Author' + ], + version: '2.0', + publicationDate: '2024-03-01', + summary: 'This is a complex document with all metadata types Alternative description field', + publishedBy: 'Alexandria Complex', + type: 'book', + coverImage: 'https://example.com/cover.jpg', + isbn: '978-0-123456-78-9', + source: 'https://github.com/alexandria/complex', + autoUpdate: 'yes', + tags: [ + 'additional', + 'tags', + 'here', + 'complex', + 'metadata', + 'all-types' + ] + }, + content: '= Complex Metadata Document\n' + + 'Jane Smith \n' + + '2.0, 2024-02-20, Alexandria Complex\n' + + ':summary: This is a complex document with all metadata types\n' + + ':description: Alternative description field\n' + + ':keywords: complex, metadata, all-types\n' + + ':tags: additional, tags, here\n' + + ':author: Override Author\n' + + ':author: Third Author\n' + + ':version: 3.0\n' + + ':published_on: 2024-03-01\n' + + ':published_by: Alexandria Complex\n' + + ':type: book\n' + + ':image: https://example.com/cover.jpg\n' + + ':isbn: 978-0-123456-78-9\n' + + ':source: https://github.com/alexandria/complex\n' + + ':auto-update: yes\n' + + '\n' + + 'This is the preamble content.\n' + + '\n' + + '== Section with Complex Metadata\n' + + ':author: Section Author\n' + + ':author: Section Co-Author\n' + + ':summary: This section has complex metadata\n' + + ':description: Alternative description for section\n' + + ':keywords: section, complex, metadata\n' + + ':tags: section, tags\n' + + ':type: 
chapter\n' + + ':image: https://example.com/section-image.jpg\n' + + '\n' + + 'This is the section content.', + sections: [ + { + metadata: [Object], + content: 'This is the section content.', + title: 'Section with Complex Metadata' + } + ] +} +Index event: { + documentTitle: 'Complex Metadata Document', + indexDTag: 'complex-metadata-document' +} +Creating section 0: { + title: 'Section with Complex Metadata', + dTag: 'complex-metadata-document-section-with-complex-metadata', + content: 'This is the section content.', + metadata: { + title: 'Section with Complex Metadata', + authors: [ 'Section Author', 'Section Co-Author' ], + summary: 'This section has complex metadata Alternative description for section', + type: 'chapter', + coverImage: 'https://example.com/section-image.jpg', + tags: [ 'section', 'tags', 'complex', 'metadata' ] + } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'complex' ], + [ 'title', 'Complex Metadata Document' ], + [ 'author', 'Jane Smith' ], + [ 'author', 'Override Author' ], + [ 'author', 'Third Author' ], + [ 'author', 'Section Author' ], + [ 'author', 'Section Co-Author' ], + [ 'version', '2.0' ], + [ 'published_on', '2024-03-01' ], + [ 'published_by', 'Alexandria Complex' ], + [ + 'summary', + 'This is a complex document with all metadata types Alternative description field' + ], + [ 'image', 'https://example.com/cover.jpg' ], + [ 'i', '978-0-123456-78-9' ], + [ 'source', 'https://github.com/alexandria/complex' ], + [ 'type', 'book' ], + [ 'auto-update', 'yes' ], + [ 't', 'additional' ], + [ 't', 'tags' ], + [ 't', 'here' ], + [ 't', 'complex' ], + [ 't', 'metadata' ], + [ 't', 'all-types' ], + [ 'd', 'complex-metadata-document' ], + [ 'title', 'Complex Metadata Document' ], + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' + ] + ], + pubkey: 'test-pubkey', + 
created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with only title and no sections +Parsed AsciiDoc: { + metadata: { + title: 'Document with No Sections', + version: 'Version', + summary: 'This document has no sections' + }, + content: '= Document with No Sections\n' + + ':summary: This document has no sections\n' + + '\n' + + 'This is just preamble content.', + sections: [] +} +Index event: { + documentTitle: 'Document with No Sections', + indexDTag: 'document-with-no-sections' +} +A tags: [] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with No Sections' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has no sections' ], + [ 'd', 'document-with-no-sections' ], + [ 'title', 'Document with No Sections' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with special characters in title +Parsed AsciiDoc: { + metadata: { + title: 'Document with Special Characters: Test & More!', + version: 'Version', + summary: 'This document has special characters in the title' + }, + content: '= Document with Special Characters: Test & More!\n' + + ':summary: This document has special characters in the title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 'Section 1' + } + ] +} +Index event: { + documentTitle: 'Document with Special Characters: Test & More!', + indexDTag: 'document-with-special-characters-test-more' +} +Creating section 0: { + title: 'Section 1', + dTag: 'document-with-special-characters-test-more-section-1', + content: 'Content here.', + 
metadata: { title: 'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with Special Characters: Test & More!' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has special characters in the title' ], + [ 'd', 'document-with-special-characters-test-more' ], + [ 'title', 'Document with Special Characters: Test & More!' ], + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with very long title +Parsed AsciiDoc: { + metadata: { + title: 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality', + version: 'Version', + summary: 'This document has a very long title' + }, + content: '= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality\n' + + ':summary: This document has a very long title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 'Section 1' + } + ] +} +Index event: { + documentTitle: 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality', + indexDTag: 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' +} +Creating section 0: { + title: 'Section 1', + dTag: 
'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1', + content: 'Content here.', + metadata: { title: 'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ 'version', 'Version' ], + [ 'summary', 'This document has a very long title' ], + [ + 'd', + 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' + ], + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + + ✓ tests/unit/eventInput30040.test.ts (14 tests) 333ms + +⎯⎯⎯⎯⎯⎯⎯ Failed Tests 2 ⎯⎯⎯⎯⎯⎯⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > extractDocumentMetadata should extract document metadata correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ 
tests/unit/metadataExtraction.test.ts:44:30 + 42| + 43| expect(metadata.title).toBe("Test Document with Metadata"); + 44| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 45| expect(metadata.version).toBe("1.0"); + 46| expect(metadata.publicationDate).toBe("2024-01-15"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[1/2]⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ tests/unit/metadataExtraction.test.ts:318:32 + 316| // Should extract document-level metadata + 317| expect(metadata.title).toBe("Test Document"); + 318| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 319| expect(metadata.version).toBe("1.0"); + 320| expect(metadata.publishedBy).toBe("Alexandria Test"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[2/2]⎯ + + + Test Files 1 failed | 1 passed (2) + Tests 2 failed | 28 passed (30) + Start at 13:18:57 + Duration 1.00s + + FAIL Tests failed. Watching for file changes... 
+ press h to show help, press q to quit +c RERUN src/lib/utils/asciidoc_metadata.ts + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure with Preamble > should build 30040 event set with preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document with Preamble', + authors: [ 'John Doe', 'Section Author' ], + version: '1.0', + publicationDate: '2024-01-15, Alexandria Test', + summary: 'This is a test document with preamble', + tags: [ 'test', 'preamble', 'asciidoc' ] + }, + content: '= Test Document with Preamble\n' + + 'John Doe \n' + + '1.0, 2024-01-15, Alexandria Test\n' + + ':summary: This is a test document with preamble\n' + + ':keywords: test, preamble, asciidoc\n' + + '\n' + + 'This is the preamble content that should be included.\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document with Preamble', + indexDTag: 'test-document-with-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-with-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} +Creating section 1: { + title: 'Second Section', + dTag: 'test-document-with-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A 
tags: [ + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document with Preamble' ], + [ 'author', 'John Doe' ], + [ 'author', 'Section Author' ], + [ 'version', '1.0' ], + [ 'published_on', '2024-01-15, Alexandria Test' ], + [ 'summary', 'This is a test document with preamble' ], + [ 't', 'test' ], + [ 't', 'preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-with-preamble' ], + [ 'title', 'Test Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure without Preamble > should build 30040 event set without preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document without Preamble', + authors: [ 'Section Author' ], + version: 'Version', + summary: 'This is a test document without preamble', + tags: [ 'test', 'no-preamble', 'asciidoc' ] + }, + content: '= Test Document without Preamble\n' + + ':summary: This is a test document without preamble\n' + + ':keywords: test, no-preamble, asciidoc\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is 
the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document without Preamble', + indexDTag: 'test-document-without-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-without-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} +Creating section 1: { + title: 'Second Section', + dTag: 'test-document-without-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document without Preamble' ], + [ 'author', 'Section Author' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a test document without preamble' ], + [ 't', 'test' ], + [ 't', 'no-preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-without-preamble' ], + [ 'title', 'Test Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + + ❯ tests/unit/metadataExtraction.test.ts (16 tests | 2 failed) 228ms + × AsciiDoc Metadata Extraction > extractDocumentMetadata should extract document metadata correctly 129ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract section metadata correctly 
8ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract standalone author names and remove them from content 5ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should handle multiple standalone author names 5ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should not extract non-author lines as authors 4ms + ✓ AsciiDoc Metadata Extraction > parseAsciiDocWithMetadata should parse complete document 21ms + ✓ AsciiDoc Metadata Extraction > metadataToTags should convert metadata to Nostr tags 2ms + ✓ AsciiDoc Metadata Extraction > should handle index card format correctly 5ms + ✓ AsciiDoc Metadata Extraction > should handle empty content gracefully 4ms + ✓ AsciiDoc Metadata Extraction > should handle keywords as tags 4ms + ✓ AsciiDoc Metadata Extraction > should handle both tags and keywords 5ms + ✓ AsciiDoc Metadata Extraction > should handle tags only 4ms + ✓ AsciiDoc Metadata Extraction > should handle both summary and description 8ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle section-only content correctly 10ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle minimal document header (just title) correctly 1ms + × AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly 9ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure with Preamble > should build 30040 event set with skeleton structure and preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document with Preamble', + version: 'Version', + summary: 'This is a skeleton document with preamble', + tags: [ 'skeleton', 'preamble', 'empty' ] + }, + content: '= Skeleton Document with Preamble\n' + + ':summary: This is a skeleton document with preamble\n' + + ':keywords: skeleton, preamble, empty\n' + + '\n' + + 'This 
is the preamble content.\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document with Preamble', + indexDTag: 'skeleton-document-with-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-with-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-with-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-with-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a skeleton document with preamble' ], + [ 't', 'skeleton' ], + [ 't', 'preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-with-preamble' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} 
+=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure without Preamble > should build 30040 event set with skeleton structure without preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document without Preamble', + version: 'Version', + summary: 'This is a skeleton document without preamble', + tags: [ 'skeleton', 'no-preamble', 'empty' ] + }, + content: '= Skeleton Document without Preamble\n' + + ':summary: This is a skeleton document without preamble\n' + + ':keywords: skeleton, no-preamble, empty\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document without Preamble', + indexDTag: 'skeleton-document-without-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-without-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-without-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-without-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton Document without Preamble' ], + [ 'version', 
'Version' ], + [ 'summary', 'This is a skeleton document without preamble' ], + [ 't', 'skeleton' ], + [ 't', 'no-preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-without-preamble' ], + [ 'title', 'Skeleton Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card format +Parsed AsciiDoc: { + metadata: { title: 'Test Index Card', version: 'Version' }, + content: '= Test Index Card\nindex card', + sections: [] +} +Creating index card format (no sections) + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card with metadata +Parsed AsciiDoc: { + metadata: { + title: 'Test Index Card with Metadata', + version: 'Version', + summary: 'This is an index card with metadata', + tags: [ 'index', 'card', 'metadata' ] + }, + content: '= Test Index Card with Metadata\n' + + ':summary: This is an index card with metadata\n' + + ':keywords: index, card, metadata\n' + + 'index card', + sections: [] +} +Index event: { + documentTitle: 'Test Index Card with Metadata', + indexDTag: 'test-index-card-with-metadata' +} +A tags: [] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'index-card' ], + [ 'title', 'Test Index Card with Metadata' ], + [ 'version', 'Version' ], + [ 'summary', 'This is an index card with metadata' ], + [ 't', 'index' ], + [ 't', 'card' ], + [ 't', 'metadata' ], + [ 'd', 'test-index-card-with-metadata' ], + [ 'title', 'Test 
Index Card with Metadata' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Complex Metadata Structures > should handle complex metadata with all attribute types +Parsed AsciiDoc: { + metadata: { + title: 'Complex Metadata Document', + authors: [ + 'Jane Smith', + 'Override Author', + 'Third Author', + 'Section Author', + 'Section Co-Author' + ], + version: '2.0', + publicationDate: '2024-03-01', + summary: 'This is a complex document with all metadata types Alternative description field', + publishedBy: 'Alexandria Complex', + type: 'book', + coverImage: 'https://example.com/cover.jpg', + isbn: '978-0-123456-78-9', + source: 'https://github.com/alexandria/complex', + autoUpdate: 'yes', + tags: [ + 'additional', + 'tags', + 'here', + 'complex', + 'metadata', + 'all-types' + ] + }, + content: '= Complex Metadata Document\n' + + 'Jane Smith \n' + + '2.0, 2024-02-20, Alexandria Complex\n' + + ':summary: This is a complex document with all metadata types\n' + + ':description: Alternative description field\n' + + ':keywords: complex, metadata, all-types\n' + + ':tags: additional, tags, here\n' + + ':author: Override Author\n' + + ':author: Third Author\n' + + ':version: 3.0\n' + + ':published_on: 2024-03-01\n' + + ':published_by: Alexandria Complex\n' + + ':type: book\n' + + ':image: https://example.com/cover.jpg\n' + + ':isbn: 978-0-123456-78-9\n' + + ':source: https://github.com/alexandria/complex\n' + + ':auto-update: yes\n' + + '\n' + + 'This is the preamble content.\n' + + '\n' + + '== Section with Complex Metadata\n' + + ':author: Section Author\n' + + ':author: Section Co-Author\n' + + ':summary: This section has complex metadata\n' + + ':description: Alternative description for section\n' + + ':keywords: section, complex, metadata\n' + + ':tags: section, tags\n' + + ':type: chapter\n' + + 
':image: https://example.com/section-image.jpg\n' + + '\n' + + 'This is the section content.', + sections: [ + { + metadata: [Object], + content: 'This is the section content.', + title: 'Section with Complex Metadata' + } + ] +} +Index event: { + documentTitle: 'Complex Metadata Document', + indexDTag: 'complex-metadata-document' +} +Creating section 0: { + title: 'Section with Complex Metadata', + dTag: 'complex-metadata-document-section-with-complex-metadata', + content: 'This is the section content.', + metadata: { + title: 'Section with Complex Metadata', + authors: [ 'Section Author', 'Section Co-Author' ], + summary: 'This section has complex metadata Alternative description for section', + type: 'chapter', + coverImage: 'https://example.com/section-image.jpg', + tags: [ 'section', 'tags', 'complex', 'metadata' ] + } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'complex' ], + [ 'title', 'Complex Metadata Document' ], + [ 'author', 'Jane Smith' ], + [ 'author', 'Override Author' ], + [ 'author', 'Third Author' ], + [ 'author', 'Section Author' ], + [ 'author', 'Section Co-Author' ], + [ 'version', '2.0' ], + [ 'published_on', '2024-03-01' ], + [ 'published_by', 'Alexandria Complex' ], + [ + 'summary', + 'This is a complex document with all metadata types Alternative description field' + ], + [ 'image', 'https://example.com/cover.jpg' ], + [ 'i', '978-0-123456-78-9' ], + [ 'source', 'https://github.com/alexandria/complex' ], + [ 'type', 'book' ], + [ 'auto-update', 'yes' ], + [ 't', 'additional' ], + [ 't', 'tags' ], + [ 't', 'here' ], + [ 't', 'complex' ], + [ 't', 'metadata' ], + [ 't', 'all-types' ], + [ 'd', 'complex-metadata-document' ], + [ 'title', 'Complex Metadata Document' ], + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' + ] + ], + pubkey: 'test-pubkey', + created_at: 
1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with only title and no sections +Parsed AsciiDoc: { + metadata: { + title: 'Document with No Sections', + version: 'Version', + summary: 'This document has no sections' + }, + content: '= Document with No Sections\n' + + ':summary: This document has no sections\n' + + '\n' + + 'This is just preamble content.', + sections: [] +} +Index event: { + documentTitle: 'Document with No Sections', + indexDTag: 'document-with-no-sections' +} +A tags: [] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with No Sections' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has no sections' ], + [ 'd', 'document-with-no-sections' ], + [ 'title', 'Document with No Sections' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with special characters in title +Parsed AsciiDoc: { + metadata: { + title: 'Document with Special Characters: Test & More!', + version: 'Version', + summary: 'This document has special characters in the title' + }, + content: '= Document with Special Characters: Test & More!\n' + + ':summary: This document has special characters in the title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 'Section 1' + } + ] +} +Index event: { + documentTitle: 'Document with Special Characters: Test & More!', + indexDTag: 'document-with-special-characters-test-more' +} +Creating section 0: { + title: 'Section 1', + dTag: 'document-with-special-characters-test-more-section-1', + content: 'Content here.', + metadata: { title: 
'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with Special Characters: Test & More!' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has special characters in the title' ], + [ 'd', 'document-with-special-characters-test-more' ], + [ 'title', 'Document with Special Characters: Test & More!' ], + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with very long title +Parsed AsciiDoc: { + metadata: { + title: 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality', + version: 'Version', + summary: 'This document has a very long title' + }, + content: '= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality\n' + + ':summary: This document has a very long title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 'Section 1' + } + ] +} +Index event: { + documentTitle: 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality', + indexDTag: 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' +} +Creating section 0: { + title: 'Section 1', + dTag: 
'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1', + content: 'Content here.', + metadata: { title: 'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ 'version', 'Version' ], + [ 'summary', 'This document has a very long title' ], + [ + 'd', + 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' + ], + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + + ✓ tests/unit/eventInput30040.test.ts (14 tests) 424ms + +⎯⎯⎯⎯⎯⎯⎯ Failed Tests 2 ⎯⎯⎯⎯⎯⎯⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > extractDocumentMetadata should extract document metadata correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ 
tests/unit/metadataExtraction.test.ts:44:30 + 42| + 43| expect(metadata.title).toBe("Test Document with Metadata"); + 44| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 45| expect(metadata.version).toBe("1.0"); + 46| expect(metadata.publicationDate).toBe("2024-01-15"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[1/2]⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ tests/unit/metadataExtraction.test.ts:318:32 + 316| // Should extract document-level metadata + 317| expect(metadata.title).toBe("Test Document"); + 318| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 319| expect(metadata.version).toBe("1.0"); + 320| expect(metadata.publishedBy).toBe("Alexandria Test"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[2/2]⎯ + + + Test Files 1 failed | 1 passed (2) + Tests 2 failed | 28 passed (30) + Start at 13:19:15 + Duration 1.04s + + FAIL Tests failed. Watching for file changes... 
+ press h to show help, press q to quit +c RERUN src/lib/utils/asciidoc_metadata.ts + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure with Preamble > should build 30040 event set with preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document with Preamble', + authors: [ 'John Doe', 'Section Author' ], + version: '1.0', + publicationDate: '2024-01-15, Alexandria Test', + summary: 'This is a test document with preamble', + tags: [ 'test', 'preamble', 'asciidoc' ] + }, + content: '= Test Document with Preamble\n' + + 'John Doe \n' + + '1.0, 2024-01-15, Alexandria Test\n' + + ':summary: This is a test document with preamble\n' + + ':keywords: test, preamble, asciidoc\n' + + '\n' + + 'This is the preamble content that should be included.\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document with Preamble', + indexDTag: 'test-document-with-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-with-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} +Creating section 1: { + title: 'Second Section', + dTag: 'test-document-with-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A 
tags: [ + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document with Preamble' ], + [ 'author', 'John Doe' ], + [ 'author', 'Section Author' ], + [ 'version', '1.0' ], + [ 'published_on', '2024-01-15, Alexandria Test' ], + [ 'summary', 'This is a test document with preamble' ], + [ 't', 'test' ], + [ 't', 'preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-with-preamble' ], + [ 'title', 'Test Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure without Preamble > should build 30040 event set without preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document without Preamble', + authors: [ 'Section Author' ], + version: 'Version', + summary: 'This is a test document without preamble', + tags: [ 'test', 'no-preamble', 'asciidoc' ] + }, + content: '= Test Document without Preamble\n' + + ':summary: This is a test document without preamble\n' + + ':keywords: test, no-preamble, asciidoc\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is 
the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document without Preamble', + indexDTag: 'test-document-without-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-without-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} +Creating section 1: { + title: 'Second Section', + dTag: 'test-document-without-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document without Preamble' ], + [ 'author', 'Section Author' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a test document without preamble' ], + [ 't', 'test' ], + [ 't', 'no-preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-without-preamble' ], + [ 'title', 'Test Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure with Preamble > should build 30040 event set with skeleton structure and preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document with Preamble', + version: 'Version', + summary: 'This is a skeleton document with preamble', + tags: [ 'skeleton', 'preamble', 
'empty' ] + }, + content: '= Skeleton Document with Preamble\n' + + ':summary: This is a skeleton document with preamble\n' + + ':keywords: skeleton, preamble, empty\n' + + '\n' + + 'This is the preamble content.\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document with Preamble', + indexDTag: 'skeleton-document-with-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-with-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-with-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-with-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a skeleton document with preamble' ], + [ 't', 'skeleton' ], + [ 't', 'preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-with-preamble' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ 
+ 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure without Preamble > should build 30040 event set with skeleton structure without preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document without Preamble', + version: 'Version', + summary: 'This is a skeleton document without preamble', + tags: [ 'skeleton', 'no-preamble', 'empty' ] + }, + content: '= Skeleton Document without Preamble\n' + + ':summary: This is a skeleton document without preamble\n' + + ':keywords: skeleton, no-preamble, empty\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document without Preamble', + indexDTag: 'skeleton-document-without-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-without-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-without-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-without-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + 
'30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton Document without Preamble' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a skeleton document without preamble' ], + [ 't', 'skeleton' ], + [ 't', 'no-preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-without-preamble' ], + [ 'title', 'Skeleton Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + + ❯ tests/unit/metadataExtraction.test.ts (16 tests | 2 failed) 246ms + × AsciiDoc Metadata Extraction > extractDocumentMetadata should extract document metadata correctly 154ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract section metadata correctly 8ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract standalone author names and remove them from content 5ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should handle multiple standalone author names 5ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should not extract non-author lines as authors 4ms + ✓ AsciiDoc Metadata Extraction > parseAsciiDocWithMetadata should parse complete document 21ms + ✓ AsciiDoc Metadata Extraction > metadataToTags should convert metadata to Nostr tags 2ms + ✓ AsciiDoc Metadata Extraction > should handle index card format correctly 4ms + ✓ AsciiDoc Metadata Extraction > should handle empty content gracefully 4ms + ✓ AsciiDoc Metadata Extraction 
> should handle keywords as tags 5ms + ✓ AsciiDoc Metadata Extraction > should handle both tags and keywords 4ms + ✓ AsciiDoc Metadata Extraction > should handle tags only 4ms + ✓ AsciiDoc Metadata Extraction > should handle both summary and description 7ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle section-only content correctly 8ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle minimal document header (just title) correctly 1ms + × AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly 7ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card format +Parsed AsciiDoc: { + metadata: { title: 'Test Index Card', version: 'Version' }, + content: '= Test Index Card\nindex card', + sections: [] +} +Creating index card format (no sections) + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card with metadata +Parsed AsciiDoc: { + metadata: { + title: 'Test Index Card with Metadata', + version: 'Version', + summary: 'This is an index card with metadata', + tags: [ 'index', 'card', 'metadata' ] + }, + content: '= Test Index Card with Metadata\n' + + ':summary: This is an index card with metadata\n' + + ':keywords: index, card, metadata\n' + + 'index card', + sections: [] +} +Index event: { + documentTitle: 'Test Index Card with Metadata', + indexDTag: 'test-index-card-with-metadata' +} +A tags: [] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'index-card' ], + [ 'title', 'Test Index Card with Metadata' ], + [ 'version', 'Version' ], + [ 'summary', 'This is an index card with metadata' ], + [ 't', 'index' ], + [ 't', 'card' ], + [ 't', 'metadata' ], + [ 'd', 
'test-index-card-with-metadata' ], + [ 'title', 'Test Index Card with Metadata' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Complex Metadata Structures > should handle complex metadata with all attribute types +Parsed AsciiDoc: { + metadata: { + title: 'Complex Metadata Document', + authors: [ + 'Jane Smith', + 'Override Author', + 'Third Author', + 'Section Author', + 'Section Co-Author' + ], + version: '2.0', + publicationDate: '2024-03-01', + summary: 'This is a complex document with all metadata types Alternative description field', + publishedBy: 'Alexandria Complex', + type: 'book', + coverImage: 'https://example.com/cover.jpg', + isbn: '978-0-123456-78-9', + source: 'https://github.com/alexandria/complex', + autoUpdate: 'yes', + tags: [ + 'additional', + 'tags', + 'here', + 'complex', + 'metadata', + 'all-types' + ] + }, + content: '= Complex Metadata Document\n' + + 'Jane Smith \n' + + '2.0, 2024-02-20, Alexandria Complex\n' + + ':summary: This is a complex document with all metadata types\n' + + ':description: Alternative description field\n' + + ':keywords: complex, metadata, all-types\n' + + ':tags: additional, tags, here\n' + + ':author: Override Author\n' + + ':author: Third Author\n' + + ':version: 3.0\n' + + ':published_on: 2024-03-01\n' + + ':published_by: Alexandria Complex\n' + + ':type: book\n' + + ':image: https://example.com/cover.jpg\n' + + ':isbn: 978-0-123456-78-9\n' + + ':source: https://github.com/alexandria/complex\n' + + ':auto-update: yes\n' + + '\n' + + 'This is the preamble content.\n' + + '\n' + + '== Section with Complex Metadata\n' + + ':author: Section Author\n' + + ':author: Section Co-Author\n' + + ':summary: This section has complex metadata\n' + + ':description: Alternative description for section\n' + + ':keywords: section, complex, metadata\n' + + 
':tags: section, tags\n' + + ':type: chapter\n' + + ':image: https://example.com/section-image.jpg\n' + + '\n' + + 'This is the section content.', + sections: [ + { + metadata: [Object], + content: 'This is the section content.', + title: 'Section with Complex Metadata' + } + ] +} +Index event: { + documentTitle: 'Complex Metadata Document', + indexDTag: 'complex-metadata-document' +} +Creating section 0: { + title: 'Section with Complex Metadata', + dTag: 'complex-metadata-document-section-with-complex-metadata', + content: 'This is the section content.', + metadata: { + title: 'Section with Complex Metadata', + authors: [ 'Section Author', 'Section Co-Author' ], + summary: 'This section has complex metadata Alternative description for section', + type: 'chapter', + coverImage: 'https://example.com/section-image.jpg', + tags: [ 'section', 'tags', 'complex', 'metadata' ] + } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'complex' ], + [ 'title', 'Complex Metadata Document' ], + [ 'author', 'Jane Smith' ], + [ 'author', 'Override Author' ], + [ 'author', 'Third Author' ], + [ 'author', 'Section Author' ], + [ 'author', 'Section Co-Author' ], + [ 'version', '2.0' ], + [ 'published_on', '2024-03-01' ], + [ 'published_by', 'Alexandria Complex' ], + [ + 'summary', + 'This is a complex document with all metadata types Alternative description field' + ], + [ 'image', 'https://example.com/cover.jpg' ], + [ 'i', '978-0-123456-78-9' ], + [ 'source', 'https://github.com/alexandria/complex' ], + [ 'type', 'book' ], + [ 'auto-update', 'yes' ], + [ 't', 'additional' ], + [ 't', 'tags' ], + [ 't', 'here' ], + [ 't', 'complex' ], + [ 't', 'metadata' ], + [ 't', 'all-types' ], + [ 'd', 'complex-metadata-document' ], + [ 'title', 'Complex Metadata Document' ], + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' 
+ ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with only title and no sections +Parsed AsciiDoc: { + metadata: { + title: 'Document with No Sections', + version: 'Version', + summary: 'This document has no sections' + }, + content: '= Document with No Sections\n' + + ':summary: This document has no sections\n' + + '\n' + + 'This is just preamble content.', + sections: [] +} +Index event: { + documentTitle: 'Document with No Sections', + indexDTag: 'document-with-no-sections' +} +A tags: [] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with No Sections' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has no sections' ], + [ 'd', 'document-with-no-sections' ], + [ 'title', 'Document with No Sections' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with special characters in title +Parsed AsciiDoc: { + metadata: { + title: 'Document with Special Characters: Test & More!', + version: 'Version', + summary: 'This document has special characters in the title' + }, + content: '= Document with Special Characters: Test & More!\n' + + ':summary: This document has special characters in the title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 'Section 1' + } + ] +} +Index event: { + documentTitle: 'Document with Special Characters: Test & More!', + indexDTag: 'document-with-special-characters-test-more' +} +Creating section 0: { + title: 'Section 1', + dTag: 'document-with-special-characters-test-more-section-1', + 
content: 'Content here.', + metadata: { title: 'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with Special Characters: Test & More!' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has special characters in the title' ], + [ 'd', 'document-with-special-characters-test-more' ], + [ 'title', 'Document with Special Characters: Test & More!' ], + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with very long title +Parsed AsciiDoc: { + metadata: { + title: 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality', + version: 'Version', + summary: 'This document has a very long title' + }, + content: '= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality\n' + + ':summary: This document has a very long title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 'Section 1' + } + ] +} +Index event: { + documentTitle: 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality', + indexDTag: 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' +} +Creating section 0: { + title: 'Section 
1', + dTag: 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1', + content: 'Content here.', + metadata: { title: 'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ 'version', 'Version' ], + [ 'summary', 'This document has a very long title' ], + [ + 'd', + 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' + ], + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + + ✓ tests/unit/eventInput30040.test.ts (14 tests) 374ms + +⎯⎯⎯⎯⎯⎯⎯ Failed Tests 2 ⎯⎯⎯⎯⎯⎯⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > extractDocumentMetadata should extract document metadata correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ 
tests/unit/metadataExtraction.test.ts:44:30 + 42| + 43| expect(metadata.title).toBe("Test Document with Metadata"); + 44| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 45| expect(metadata.version).toBe("1.0"); + 46| expect(metadata.publicationDate).toBe("2024-01-15"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[1/2]⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ tests/unit/metadataExtraction.test.ts:318:32 + 316| // Should extract document-level metadata + 317| expect(metadata.title).toBe("Test Document"); + 318| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 319| expect(metadata.version).toBe("1.0"); + 320| expect(metadata.publishedBy).toBe("Alexandria Test"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[2/2]⎯ + + + Test Files 1 failed | 1 passed (2) + Tests 2 failed | 28 passed (30) + Start at 13:20:56 + Duration 1.17s + + FAIL Tests failed. Watching for file changes... 
+ press h to show help, press q to quit +c RERUN src/lib/utils/asciidoc_metadata.ts + + ❯ tests/unit/metadataExtraction.test.ts (16 tests | 2 failed) 270ms + × AsciiDoc Metadata Extraction > extractDocumentMetadata should extract document metadata correctly 158ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract section metadata correctly 8ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should extract standalone author names and remove them from content 6ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should handle multiple standalone author names 6ms + ✓ AsciiDoc Metadata Extraction > extractSectionMetadata should not extract non-author lines as authors 6ms + ✓ AsciiDoc Metadata Extraction > parseAsciiDocWithMetadata should parse complete document 26ms + ✓ AsciiDoc Metadata Extraction > metadataToTags should convert metadata to Nostr tags 2ms + ✓ AsciiDoc Metadata Extraction > should handle index card format correctly 5ms + ✓ AsciiDoc Metadata Extraction > should handle empty content gracefully 5ms + ✓ AsciiDoc Metadata Extraction > should handle keywords as tags 5ms + ✓ AsciiDoc Metadata Extraction > should handle both tags and keywords 4ms + ✓ AsciiDoc Metadata Extraction > should handle tags only 4ms + ✓ AsciiDoc Metadata Extraction > should handle both summary and description 9ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle section-only content correctly 9ms + ✓ AsciiDoc Metadata Extraction > Smart metadata extraction > should handle minimal document header (just title) correctly 1ms + × AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly 11ms + → expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure with 
Preamble > should build 30040 event set with preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document with Preamble', + authors: [ 'John Doe', 'Section Author' ], + version: '1.0', + publicationDate: '2024-01-15, Alexandria Test', + summary: 'This is a test document with preamble', + tags: [ 'test', 'preamble', 'asciidoc' ] + }, + content: '= Test Document with Preamble\n' + + 'John Doe \n' + + '1.0, 2024-01-15, Alexandria Test\n' + + ':summary: This is a test document with preamble\n' + + ':keywords: test, preamble, asciidoc\n' + + '\n' + + 'This is the preamble content that should be included.\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document with Preamble', + indexDTag: 'test-document-with-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-with-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} +Creating section 1: { + title: 'Second Section', + dTag: 'test-document-with-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] +] +Final index 
event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document with Preamble' ], + [ 'author', 'John Doe' ], + [ 'author', 'Section Author' ], + [ 'version', '1.0' ], + [ 'published_on', '2024-01-15, Alexandria Test' ], + [ 'summary', 'This is a test document with preamble' ], + [ 't', 'test' ], + [ 't', 'preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-with-preamble' ], + [ 'title', 'Test Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-with-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Normal Structure without Preamble > should build 30040 event set without preamble content +Parsed AsciiDoc: { + metadata: { + title: 'Test Document without Preamble', + authors: [ 'Section Author' ], + version: 'Version', + summary: 'This is a test document without preamble', + tags: [ 'test', 'no-preamble', 'asciidoc' ] + }, + content: '= Test Document without Preamble\n' + + ':summary: This is a test document without preamble\n' + + ':keywords: test, no-preamble, asciidoc\n' + + '\n' + + '== First Section\n' + + ':author: Section Author\n' + + ':summary: This is the first section\n' + + '\n' + + 'This is the content of the first section.\n' + + '\n' + + '== Second Section\n' + + ':summary: This is the second section\n' + + '\n' + + 'This is the content of the second section.', + sections: [ + { + metadata: [Object], + content: 'This is the content of the first section.', + title: 'First Section' + }, + { + metadata: [Object], + content: 'This is the content of the second section.', + title: 'Second Section' + } + ] +} +Index event: { + documentTitle: 'Test Document without Preamble', + indexDTag: 
'test-document-without-preamble' +} +Creating section 0: { + title: 'First Section', + dTag: 'test-document-without-preamble-first-section', + content: 'This is the content of the first section.', + metadata: { + title: 'First Section', + authors: [ 'Section Author' ], + summary: 'This is the first section' + } +} +Creating section 1: { + title: 'Second Section', + dTag: 'test-document-without-preamble-second-section', + content: 'This is the content of the second section.', + metadata: { title: 'Second Section', summary: 'This is the second section' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'article' ], + [ 'title', 'Test Document without Preamble' ], + [ 'author', 'Section Author' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a test document without preamble' ], + [ 't', 'test' ], + [ 't', 'no-preamble' ], + [ 't', 'asciidoc' ], + [ 'd', 'test-document-without-preamble' ], + [ 'title', 'Test Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-first-section' + ], + [ + 'a', + '30041:test-pubkey:test-document-without-preamble-second-section' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure with Preamble > should build 30040 event set with skeleton structure and preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document with Preamble', + version: 'Version', + summary: 'This is a skeleton document with preamble', + tags: [ 'skeleton', 'preamble', 'empty' ] + }, + content: '= Skeleton Document with Preamble\n' + + ':summary: This is a skeleton document with preamble\n' + + ':keywords: skeleton, 
preamble, empty\n' + + '\n' + + 'This is the preamble content.\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document with Preamble', + indexDTag: 'skeleton-document-with-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-with-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-with-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-with-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a skeleton document with preamble' ], + [ 't', 'skeleton' ], + [ 't', 'preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-with-preamble' ], + [ 'title', 'Skeleton Document with Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-with-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 
'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Skeleton Structure without Preamble > should build 30040 event set with skeleton structure without preamble +Parsed AsciiDoc: { + metadata: { + title: 'Skeleton Document without Preamble', + version: 'Version', + summary: 'This is a skeleton document without preamble', + tags: [ 'skeleton', 'no-preamble', 'empty' ] + }, + content: '= Skeleton Document without Preamble\n' + + ':summary: This is a skeleton document without preamble\n' + + ':keywords: skeleton, no-preamble, empty\n' + + '\n' + + '== Empty Section 1\n' + + '\n' + + '== Empty Section 2\n' + + '\n' + + '== Empty Section 3', + sections: [ + { metadata: [Object], content: '', title: 'Empty Section 1' }, + { metadata: [Object], content: '', title: 'Empty Section 2' }, + { metadata: [Object], content: '', title: 'Empty Section 3' } + ] +} +Index event: { + documentTitle: 'Skeleton Document without Preamble', + indexDTag: 'skeleton-document-without-preamble' +} +Creating section 0: { + title: 'Empty Section 1', + dTag: 'skeleton-document-without-preamble-empty-section-1', + content: '', + metadata: { title: 'Empty Section 1' } +} +Creating section 1: { + title: 'Empty Section 2', + dTag: 'skeleton-document-without-preamble-empty-section-2', + content: '', + metadata: { title: 'Empty Section 2' } +} +Creating section 2: { + title: 'Empty Section 3', + dTag: 'skeleton-document-without-preamble-empty-section-3', + content: '', + metadata: { title: 'Empty Section 3' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'skeleton' ], + [ 'title', 'Skeleton 
Document without Preamble' ], + [ 'version', 'Version' ], + [ 'summary', 'This is a skeleton document without preamble' ], + [ 't', 'skeleton' ], + [ 't', 'no-preamble' ], + [ 't', 'empty' ], + [ 'd', 'skeleton-document-without-preamble' ], + [ 'title', 'Skeleton Document without Preamble' ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-1' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-2' + ], + [ + 'a', + '30041:test-pubkey:skeleton-document-without-preamble-empty-section-3' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card format +Parsed AsciiDoc: { + metadata: { title: 'Test Index Card', version: 'Version' }, + content: '= Test Index Card\nindex card', + sections: [] +} +Creating index card format (no sections) + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Index Card Format > should build 30040 event set for index card with metadata +Parsed AsciiDoc: { + metadata: { + title: 'Test Index Card with Metadata', + version: 'Version', + summary: 'This is an index card with metadata', + tags: [ 'index', 'card', 'metadata' ] + }, + content: '= Test Index Card with Metadata\n' + + ':summary: This is an index card with metadata\n' + + ':keywords: index, card, metadata\n' + + 'index card', + sections: [] +} +Index event: { + documentTitle: 'Test Index Card with Metadata', + indexDTag: 'test-index-card-with-metadata' +} +A tags: [] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'index-card' ], + [ 'title', 'Test Index Card with Metadata' ], + [ 'version', 'Version' ], + [ 'summary', 'This is an index card with metadata' ], + [ 't', 'index' ], + [ 't', 'card' ], + [ 't', 'metadata' ], + [ 'd', 
'test-index-card-with-metadata' ], + [ 'title', 'Test Index Card with Metadata' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Complex Metadata Structures > should handle complex metadata with all attribute types +Parsed AsciiDoc: { + metadata: { + title: 'Complex Metadata Document', + authors: [ + 'Jane Smith', + 'Override Author', + 'Third Author', + 'Section Author', + 'Section Co-Author' + ], + version: '2.0', + publicationDate: '2024-03-01', + summary: 'This is a complex document with all metadata types Alternative description field', + publishedBy: 'Alexandria Complex', + type: 'book', + coverImage: 'https://example.com/cover.jpg', + isbn: '978-0-123456-78-9', + source: 'https://github.com/alexandria/complex', + autoUpdate: 'yes', + tags: [ + 'additional', + 'tags', + 'here', + 'complex', + 'metadata', + 'all-types' + ] + }, + content: '= Complex Metadata Document\n' + + 'Jane Smith \n' + + '2.0, 2024-02-20, Alexandria Complex\n' + + ':summary: This is a complex document with all metadata types\n' + + ':description: Alternative description field\n' + + ':keywords: complex, metadata, all-types\n' + + ':tags: additional, tags, here\n' + + ':author: Override Author\n' + + ':author: Third Author\n' + + ':version: 3.0\n' + + ':published_on: 2024-03-01\n' + + ':published_by: Alexandria Complex\n' + + ':type: book\n' + + ':image: https://example.com/cover.jpg\n' + + ':isbn: 978-0-123456-78-9\n' + + ':source: https://github.com/alexandria/complex\n' + + ':auto-update: yes\n' + + '\n' + + 'This is the preamble content.\n' + + '\n' + + '== Section with Complex Metadata\n' + + ':author: Section Author\n' + + ':author: Section Co-Author\n' + + ':summary: This section has complex metadata\n' + + ':description: Alternative description for section\n' + + ':keywords: section, complex, metadata\n' + + 
':tags: section, tags\n' + + ':type: chapter\n' + + ':image: https://example.com/section-image.jpg\n' + + '\n' + + 'This is the section content.', + sections: [ + { + metadata: [Object], + content: 'This is the section content.', + title: 'Section with Complex Metadata' + } + ] +} +Index event: { + documentTitle: 'Complex Metadata Document', + indexDTag: 'complex-metadata-document' +} +Creating section 0: { + title: 'Section with Complex Metadata', + dTag: 'complex-metadata-document-section-with-complex-metadata', + content: 'This is the section content.', + metadata: { + title: 'Section with Complex Metadata', + authors: [ 'Section Author', 'Section Co-Author' ], + summary: 'This section has complex metadata Alternative description for section', + type: 'chapter', + coverImage: 'https://example.com/section-image.jpg', + tags: [ 'section', 'tags', 'complex', 'metadata' ] + } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'type', 'complex' ], + [ 'title', 'Complex Metadata Document' ], + [ 'author', 'Jane Smith' ], + [ 'author', 'Override Author' ], + [ 'author', 'Third Author' ], + [ 'author', 'Section Author' ], + [ 'author', 'Section Co-Author' ], + [ 'version', '2.0' ], + [ 'published_on', '2024-03-01' ], + [ 'published_by', 'Alexandria Complex' ], + [ + 'summary', + 'This is a complex document with all metadata types Alternative description field' + ], + [ 'image', 'https://example.com/cover.jpg' ], + [ 'i', '978-0-123456-78-9' ], + [ 'source', 'https://github.com/alexandria/complex' ], + [ 'type', 'book' ], + [ 'auto-update', 'yes' ], + [ 't', 'additional' ], + [ 't', 'tags' ], + [ 't', 'here' ], + [ 't', 'complex' ], + [ 't', 'metadata' ], + [ 't', 'all-types' ], + [ 'd', 'complex-metadata-document' ], + [ 'title', 'Complex Metadata Document' ], + [ + 'a', + '30041:test-pubkey:complex-metadata-document-section-with-complex-metadata' 
+ ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with only title and no sections +Parsed AsciiDoc: { + metadata: { + title: 'Document with No Sections', + version: 'Version', + summary: 'This document has no sections' + }, + content: '= Document with No Sections\n' + + ':summary: This document has no sections\n' + + '\n' + + 'This is just preamble content.', + sections: [] +} +Index event: { + documentTitle: 'Document with No Sections', + indexDTag: 'document-with-no-sections' +} +A tags: [] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with No Sections' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has no sections' ], + [ 'd', 'document-with-no-sections' ], + [ 'title', 'Document with No Sections' ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with special characters in title +Parsed AsciiDoc: { + metadata: { + title: 'Document with Special Characters: Test & More!', + version: 'Version', + summary: 'This document has special characters in the title' + }, + content: '= Document with Special Characters: Test & More!\n' + + ':summary: This document has special characters in the title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 'Section 1' + } + ] +} +Index event: { + documentTitle: 'Document with Special Characters: Test & More!', + indexDTag: 'document-with-special-characters-test-more' +} +Creating section 0: { + title: 'Section 1', + dTag: 'document-with-special-characters-test-more-section-1', + 
content: 'Content here.', + metadata: { title: 'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ 'title', 'Document with Special Characters: Test & More!' ], + [ 'version', 'Version' ], + [ 'summary', 'This document has special characters in the title' ], + [ 'd', 'document-with-special-characters-test-more' ], + [ 'title', 'Document with Special Characters: Test & More!' ], + [ + 'a', + '30041:test-pubkey:document-with-special-characters-test-more-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + +stdout | tests/unit/eventInput30040.test.ts > EventInput 30040 Publishing > Edge Cases > should handle document with very long title +Parsed AsciiDoc: { + metadata: { + title: 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality', + version: 'Version', + summary: 'This document has a very long title' + }, + content: '= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality\n' + + ':summary: This document has a very long title\n' + + '\n' + + '== Section 1\n' + + '\n' + + 'Content here.', + sections: [ + { + metadata: [Object], + content: 'Content here.', + title: 'Section 1' + } + ] +} +Index event: { + documentTitle: 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality', + indexDTag: 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' +} +Creating section 0: { + title: 'Section 
1', + dTag: 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1', + content: 'Content here.', + metadata: { title: 'Section 1' } +} +A tags: [ + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] +] +Final index event: { + kind: 30040, + content: '', + tags: [ + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ 'version', 'Version' ], + [ 'summary', 'This document has a very long title' ], + [ + 'd', + 'this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality' + ], + [ + 'title', + 'This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality' + ], + [ + 'a', + '30041:test-pubkey:this-is-a-very-long-document-title-that-should-be-handled-properly-by-the-system-and-should-not-cause-any-issues-with-the-d-tag-generation-or-any-other-functionality-section-1' + ] + ], + pubkey: 'test-pubkey', + created_at: 1234567890, + id: 'mock-event-id', + sig: 'mock-signature' +} +=== build30040EventSet completed === + + ✓ tests/unit/eventInput30040.test.ts (14 tests) 517ms + +⎯⎯⎯⎯⎯⎯⎯ Failed Tests 2 ⎯⎯⎯⎯⎯⎯⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > extractDocumentMetadata should extract document metadata correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ 
tests/unit/metadataExtraction.test.ts:44:30 + 42| + 43| expect(metadata.title).toBe("Test Document with Metadata"); + 44| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 45| expect(metadata.version).toBe("1.0"); + 46| expect(metadata.publicationDate).toBe("2024-01-15"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[1/2]⎯ + + FAIL tests/unit/metadataExtraction.test.ts > AsciiDoc Metadata Extraction > Smart metadata extraction > should handle document with full header correctly +AssertionError: expected [ 'John Doe', 'Jane Smith', …(1) ] to deeply equal [ 'John Doe', 'Jane Smith' ] + +- Expected ++ Received + + [ + "John Doe", + "Jane Smith", ++ "Section Author", + ] + + ❯ tests/unit/metadataExtraction.test.ts:318:32 + 316| // Should extract document-level metadata + 317| expect(metadata.title).toBe("Test Document"); + 318| expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + | ^ + 319| expect(metadata.version).toBe("1.0"); + 320| expect(metadata.publishedBy).toBe("Alexandria Test"); + +⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯[2/2]⎯ + + + Test Files 1 failed | 1 passed (2) + Tests 2 failed | 28 passed (30) + Start at 13:21:50 + Duration 1.36s + + FAIL Tests failed. Watching for file changes... 
+ press h to show help, press q to quit diff --git a/tests/e2e/my_notes_layout.pw.spec.ts b/tests/e2e/my_notes_layout.pw.spec.ts index 23db168..b45e403 100644 --- a/tests/e2e/my_notes_layout.pw.spec.ts +++ b/tests/e2e/my_notes_layout.pw.spec.ts @@ -1,4 +1,4 @@ -import { test, expect, type Page } from '@playwright/test'; +import { expect, type Page, test } from "@playwright/test"; // Utility to check for horizontal scroll bar async function hasHorizontalScroll(page: Page, selector: string) { @@ -9,16 +9,16 @@ async function hasHorizontalScroll(page: Page, selector: string) { }, selector); } -test.describe('My Notes Layout', () => { +test.describe("My Notes Layout", () => { test.beforeEach(async ({ page }) => { - await page.goto('/my-notes'); + await page.goto("/my-notes"); await page.waitForSelector('h1:text("My Notes")'); }); - test('no horizontal scroll bar for all tag type and tag filter combinations', async ({ page }) => { + test("no horizontal scroll bar for all tag type and tag filter combinations", async ({ page }) => { // Helper to check scroll for current state async function assertNoScroll() { - const hasScroll = await hasHorizontalScroll(page, 'main, body, html'); + const hasScroll = await hasHorizontalScroll(page, "main, body, html"); expect(hasScroll).toBeFalsy(); } @@ -26,9 +26,11 @@ test.describe('My Notes Layout', () => { await assertNoScroll(); // Get all tag type buttons - const tagTypeButtons = await page.locator('aside button').all(); + const tagTypeButtons = await page.locator("aside button").all(); // Only consider tag type buttons (first N) - const tagTypeCount = await page.locator('aside > div.flex.flex-wrap.gap-2.mb-6 > button').count(); + const tagTypeCount = await page.locator( + "aside > div.flex.flex-wrap.gap-2.mb-6 > button", + ).count(); // For each single tag type for (let i = 0; i < tagTypeCount; i++) { // Click tag type button @@ -36,7 +38,9 @@ test.describe('My Notes Layout', () => { await page.waitForTimeout(100); // Wait for UI 
update await assertNoScroll(); // Get tag filter buttons (after tag type buttons) - const tagFilterButtons = await page.locator('aside > div.flex.flex-wrap.gap-2.mb-4 > button').all(); + const tagFilterButtons = await page.locator( + "aside > div.flex.flex-wrap.gap-2.mb-4 > button", + ).all(); // Try all single tag filter selections for (let j = 0; j < tagFilterButtons.length; j++) { await tagFilterButtons[j].click(); @@ -72,7 +76,9 @@ test.describe('My Notes Layout', () => { await page.waitForTimeout(100); await assertNoScroll(); // Get tag filter buttons for this combination - const tagFilterButtons = await page.locator('aside > div.flex.flex-wrap.gap-2.mb-4 > button').all(); + const tagFilterButtons = await page.locator( + "aside > div.flex.flex-wrap.gap-2.mb-4 > button", + ).all(); // Try all single tag filter selections for (let k = 0; k < tagFilterButtons.length; k++) { await tagFilterButtons[k].click(); @@ -100,4 +106,4 @@ test.describe('My Notes Layout', () => { } } }); -}); \ No newline at end of file +}); diff --git a/tests/unit/ZettelEditor.test.ts b/tests/unit/ZettelEditor.test.ts index 3490286..3bfe172 100644 --- a/tests/unit/ZettelEditor.test.ts +++ b/tests/unit/ZettelEditor.test.ts @@ -1,37 +1,45 @@ -import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { afterEach, beforeEach, describe, expect, it, vi } from "vitest"; import type { AsciiDocMetadata } from "../../src/lib/utils/asciidoc_metadata"; // Mock all Svelte components and dependencies vi.mock("flowbite-svelte", () => ({ Textarea: vi.fn().mockImplementation((props) => { return { - $$render: () => ``, - $$bind: { value: props.bind, oninput: props.oninput } + $$render: () => + ``, + $$bind: { value: props.bind, oninput: props.oninput }, }; }), Button: vi.fn().mockImplementation((props) => { return { - $$render: () => ``, - $$bind: { onclick: props.onclick } + $$render: () => + ``, + $$bind: { onclick: props.onclick }, }; - }) + }), })); 
vi.mock("flowbite-svelte-icons", () => ({ EyeOutline: vi.fn().mockImplementation(() => ({ - $$render: () => `` - })) + $$render: () => ``, + })), })); vi.mock("asciidoctor", () => ({ default: vi.fn(() => ({ convert: vi.fn((content, options) => { // Mock AsciiDoctor conversion - return simple HTML - return content.replace(/^==\s+(.+)$/gm, '

$1

') - .replace(/\*\*(.+?)\*\*/g, '$1') - .replace(/\*(.+?)\*/g, '$1'); - }) - })) + return content.replace(/^==\s+(.+)$/gm, "

$1

") + .replace(/\*\*(.+?)\*\*/g, "$1") + .replace(/\*(.+?)\*/g, "$1"); + }), + })), })); // Mock sessionStorage @@ -41,21 +49,21 @@ const mockSessionStorage = { removeItem: vi.fn(), clear: vi.fn(), }; -Object.defineProperty(global, 'sessionStorage', { +Object.defineProperty(global, "sessionStorage", { value: mockSessionStorage, - writable: true + writable: true, }); // Mock window object for DOM manipulation -Object.defineProperty(global, 'window', { +Object.defineProperty(global, "window", { value: { sessionStorage: mockSessionStorage, document: { querySelector: vi.fn(), createElement: vi.fn(), - } + }, }, - writable: true + writable: true, }); // Mock DOM methods @@ -64,14 +72,14 @@ const mockCreateElement = vi.fn(); const mockAddEventListener = vi.fn(); const mockRemoveEventListener = vi.fn(); -Object.defineProperty(global, 'document', { +Object.defineProperty(global, "document", { value: { querySelector: mockQuerySelector, createElement: mockCreateElement, addEventListener: mockAddEventListener, removeEventListener: mockRemoveEventListener, }, - writable: true + writable: true, }); describe("ZettelEditor Component Logic", () => { @@ -90,8 +98,9 @@ describe("ZettelEditor Component Logic", () => { describe("Publication Format Detection Logic", () => { it("should detect document header format", () => { - const contentWithDocumentHeader = "= Document Title\n\n== Section 1\nContent"; - + const contentWithDocumentHeader = + "= Document Title\n\n== Section 1\nContent"; + // Test the regex pattern used in the component const hasDocumentHeader = contentWithDocumentHeader.match(/^=\s+/m); expect(hasDocumentHeader).toBeTruthy(); @@ -99,12 +108,12 @@ describe("ZettelEditor Component Logic", () => { it("should detect index card format", () => { const contentWithIndexCard = "index card\n\n== Section 1\nContent"; - + // Test the logic used in the component const lines = contentWithIndexCard.split(/\r?\n/); let hasIndexCard = false; for (const line of lines) { - if 
(line.trim().toLowerCase() === 'index card') { + if (line.trim().toLowerCase() === "index card") { hasIndexCard = true; break; } @@ -113,8 +122,9 @@ describe("ZettelEditor Component Logic", () => { }); it("should not detect publication format for normal section content", () => { - const normalContent = "== Section 1\nContent\n\n== Section 2\nMore content"; - + const normalContent = + "== Section 1\nContent\n\n== Section 2\nMore content"; + // Test the logic used in the component const lines = normalContent.split(/\r?\n/); let hasPublicationHeader = false; @@ -123,7 +133,7 @@ describe("ZettelEditor Component Logic", () => { hasPublicationHeader = true; break; } - if (line.trim().toLowerCase() === 'index card') { + if (line.trim().toLowerCase() === "index card") { hasPublicationHeader = true; break; } @@ -135,26 +145,30 @@ describe("ZettelEditor Component Logic", () => { describe("Content Parsing Logic", () => { it("should parse sections with document header", () => { const content = "== Section 1\n:author: Test Author\n\nContent 1"; - + // Test the parsing logic const hasDocumentHeader = content.match(/^=\s+/m); expect(hasDocumentHeader).toBeFalsy(); // This content doesn't have a document header - + // Test section splitting logic - const sectionStrings = content.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); + const sectionStrings = content.split(/(?=^==\s+)/gm).filter(( + section: string, + ) => section.trim()); expect(sectionStrings).toHaveLength(1); expect(sectionStrings[0]).toContain("== Section 1"); }); it("should parse sections without document header", () => { const content = "== Section 1\nContent 1"; - + // Test the parsing logic const hasDocumentHeader = content.match(/^=\s+/m); expect(hasDocumentHeader).toBeFalsy(); - + // Test section splitting logic - const sectionStrings = content.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); + const sectionStrings = content.split(/(?=^==\s+)/gm).filter(( + section: string, + 
) => section.trim()); expect(sectionStrings).toHaveLength(1); expect(sectionStrings[0]).toContain("== Section 1"); }); @@ -168,49 +182,70 @@ describe("ZettelEditor Component Logic", () => { describe("Content Conversion Logic", () => { it("should convert document title to section title", () => { - const contentWithDocumentHeader = "= Document Title\n\n== Section 1\nContent"; - + const contentWithDocumentHeader = + "= Document Title\n\n== Section 1\nContent"; + // Test the conversion logic - let convertedContent = contentWithDocumentHeader.replace(/^=\s+(.+)$/gm, '== $1'); - convertedContent = convertedContent.replace(/^index card$/gim, ''); - const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, '\n\n'); - + let convertedContent = contentWithDocumentHeader.replace( + /^=\s+(.+)$/gm, + "== $1", + ); + convertedContent = convertedContent.replace(/^index card$/gim, ""); + const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, "\n\n"); + expect(finalContent).toBe("== Document Title\n\n== Section 1\nContent"); }); it("should remove index card line", () => { const contentWithIndexCard = "index card\n\n== Section 1\nContent"; - + // Test the conversion logic - let convertedContent = contentWithIndexCard.replace(/^=\s+(.+)$/gm, '== $1'); - convertedContent = convertedContent.replace(/^index card$/gim, ''); - const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, '\n\n'); - + let convertedContent = contentWithIndexCard.replace( + /^=\s+(.+)$/gm, + "== $1", + ); + convertedContent = convertedContent.replace(/^index card$/gim, ""); + const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, "\n\n"); + expect(finalContent).toBe("\n\n== Section 1\nContent"); }); it("should clean up double newlines", () => { - const contentWithExtraNewlines = "= Document Title\n\n\n== Section 1\nContent"; - + const contentWithExtraNewlines = + "= Document Title\n\n\n== Section 1\nContent"; + // Test the conversion logic - let convertedContent = 
contentWithExtraNewlines.replace(/^=\s+(.+)$/gm, '== $1'); - convertedContent = convertedContent.replace(/^index card$/gim, ''); - const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, '\n\n'); - + let convertedContent = contentWithExtraNewlines.replace( + /^=\s+(.+)$/gm, + "== $1", + ); + convertedContent = convertedContent.replace(/^index card$/gim, ""); + const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, "\n\n"); + expect(finalContent).toBe("== Document Title\n\n== Section 1\nContent"); }); }); describe("SessionStorage Integration", () => { it("should store content in sessionStorage when switching to publication editor", () => { - const contentWithDocumentHeader = "= Document Title\n\n== Section 1\nContent"; - + const contentWithDocumentHeader = + "= Document Title\n\n== Section 1\nContent"; + // Test the sessionStorage logic - mockSessionStorage.setItem('zettelEditorContent', contentWithDocumentHeader); - mockSessionStorage.setItem('zettelEditorSource', 'publication-format'); - - expect(mockSessionStorage.setItem).toHaveBeenCalledWith('zettelEditorContent', contentWithDocumentHeader); - expect(mockSessionStorage.setItem).toHaveBeenCalledWith('zettelEditorSource', 'publication-format'); + mockSessionStorage.setItem( + "zettelEditorContent", + contentWithDocumentHeader, + ); + mockSessionStorage.setItem("zettelEditorSource", "publication-format"); + + expect(mockSessionStorage.setItem).toHaveBeenCalledWith( + "zettelEditorContent", + contentWithDocumentHeader, + ); + expect(mockSessionStorage.setItem).toHaveBeenCalledWith( + "zettelEditorSource", + "publication-format", + ); }); }); @@ -219,7 +254,7 @@ describe("ZettelEditor Component Logic", () => { const sections = [{ title: "Section 1", content: "Content 1", tags: [] }]; const eventCount = sections.length; const eventText = `${eventCount} event${eventCount !== 1 ? 
"s" : ""}`; - + expect(eventCount).toBe(1); expect(eventText).toBe("1 event"); }); @@ -227,11 +262,11 @@ describe("ZettelEditor Component Logic", () => { it("should calculate correct event count for multiple sections", () => { const sections = [ { title: "Section 1", content: "Content 1", tags: [] }, - { title: "Section 2", content: "Content 2", tags: [] } + { title: "Section 2", content: "Content 2", tags: [] }, ]; const eventCount = sections.length; const eventText = `${eventCount} event${eventCount !== 1 ? "s" : ""}`; - + expect(eventCount).toBe(2); expect(eventText).toBe("2 events"); }); @@ -240,11 +275,17 @@ describe("ZettelEditor Component Logic", () => { describe("Tag Processing Logic", () => { it("should process tags correctly", () => { // Mock the metadataToTags function - const mockMetadataToTags = vi.fn().mockReturnValue([["author", "Test Author"]]); - - const mockMetadata = { title: "Section 1", author: "Test Author" } as AsciiDocMetadata; + const mockMetadataToTags = vi.fn().mockReturnValue([[ + "author", + "Test Author", + ]]); + + const mockMetadata = { + title: "Section 1", + author: "Test Author", + } as AsciiDocMetadata; const tags = mockMetadataToTags(mockMetadata); - + expect(tags).toEqual([["author", "Test Author"]]); expect(mockMetadataToTags).toHaveBeenCalledWith(mockMetadata); }); @@ -252,10 +293,10 @@ describe("ZettelEditor Component Logic", () => { it("should handle empty tags", () => { // Mock the metadataToTags function const mockMetadataToTags = vi.fn().mockReturnValue([]); - + const mockMetadata = { title: "Section 1" } as AsciiDocMetadata; const tags = mockMetadataToTags(mockMetadata); - + expect(tags).toEqual([]); }); }); @@ -264,11 +305,11 @@ describe("ZettelEditor Component Logic", () => { it("should process AsciiDoc content correctly", () => { // Mock the asciidoctor conversion const mockConvert = vi.fn((content, options) => { - return content.replace(/^==\s+(.+)$/gm, '

$1

') - .replace(/\*\*(.+?)\*\*/g, '$1') - .replace(/\*(.+?)\*/g, '$1'); + return content.replace(/^==\s+(.+)$/gm, "

$1

") + .replace(/\*\*(.+?)\*\*/g, "$1") + .replace(/\*(.+?)\*/g, "$1"); }); - + const content = "== Test Section\n\nThis is **bold** and *italic* text."; const processedContent = mockConvert(content, { standalone: false, @@ -278,10 +319,10 @@ describe("ZettelEditor Component Logic", () => { sectids: true, }, }); - - expect(processedContent).toContain('

Test Section

'); - expect(processedContent).toContain('bold'); - expect(processedContent).toContain('italic'); + + expect(processedContent).toContain("

Test Section

"); + expect(processedContent).toContain("bold"); + expect(processedContent).toContain("italic"); }); }); @@ -291,9 +332,9 @@ describe("ZettelEditor Component Logic", () => { const mockParseFunction = vi.fn().mockImplementation(() => { throw new Error("Parsing error"); }); - + const content = "== Section 1\nContent 1"; - + // Should not throw error when called expect(() => { try { @@ -321,12 +362,12 @@ describe("ZettelEditor Component Logic", () => { onContentChange: vi.fn(), onPreviewToggle: vi.fn(), }; - - expect(expectedProps).toHaveProperty('content'); - expect(expectedProps).toHaveProperty('placeholder'); - expect(expectedProps).toHaveProperty('showPreview'); - expect(expectedProps).toHaveProperty('onContentChange'); - expect(expectedProps).toHaveProperty('onPreviewToggle'); + + expect(expectedProps).toHaveProperty("content"); + expect(expectedProps).toHaveProperty("placeholder"); + expect(expectedProps).toHaveProperty("showPreview"); + expect(expectedProps).toHaveProperty("onContentChange"); + expect(expectedProps).toHaveProperty("onPreviewToggle"); }); }); @@ -334,12 +375,12 @@ describe("ZettelEditor Component Logic", () => { it("should integrate with ZettelParser utilities", () => { // Mock the parseAsciiDocSections function const mockParseAsciiDocSections = vi.fn().mockReturnValue([ - { title: "Section 1", content: "Content 1", tags: [] } + { title: "Section 1", content: "Content 1", tags: [] }, ]); - + const content = "== Section 1\nContent 1"; const sections = mockParseAsciiDocSections(content, 2); - + expect(sections).toHaveLength(1); expect(sections[0].title).toBe("Section 1"); }); @@ -348,21 +389,21 @@ describe("ZettelEditor Component Logic", () => { // Mock the utility functions const mockExtractDocumentMetadata = vi.fn().mockReturnValue({ metadata: { title: "Document Title" } as AsciiDocMetadata, - content: "Document content" + content: "Document content", }); - + const mockExtractSectionMetadata = vi.fn().mockReturnValue({ metadata: { title: 
"Section Title" } as AsciiDocMetadata, content: "Section content", - title: "Section Title" + title: "Section Title", }); - + const documentContent = "= Document Title\nDocument content"; const sectionContent = "== Section Title\nSection content"; - + const documentResult = mockExtractDocumentMetadata(documentContent); const sectionResult = mockExtractSectionMetadata(sectionContent); - + expect(documentResult.metadata.title).toBe("Document Title"); expect(sectionResult.title).toBe("Section Title"); }); @@ -370,27 +411,35 @@ describe("ZettelEditor Component Logic", () => { describe("Content Validation", () => { it("should validate content structure", () => { - const validContent = "== Section 1\nContent here\n\n== Section 2\nMore content"; + const validContent = + "== Section 1\nContent here\n\n== Section 2\nMore content"; const invalidContent = "Just some text without sections"; - + // Test section detection - const validSections = validContent.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); - const invalidSections = invalidContent.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); - + const validSections = validContent.split(/(?=^==\s+)/gm).filter(( + section: string, + ) => section.trim()); + const invalidSections = invalidContent.split(/(?=^==\s+)/gm).filter(( + section: string, + ) => section.trim()); + expect(validSections.length).toBeGreaterThan(0); // The invalid content will have one section (the entire content) since it doesn't start with == expect(invalidSections.length).toBe(1); }); it("should handle mixed content types", () => { - const mixedContent = "= Document Title\n\n== Section 1\nContent\n\n== Section 2\nMore content"; - + const mixedContent = + "= Document Title\n\n== Section 1\nContent\n\n== Section 2\nMore content"; + // Test document header detection const hasDocumentHeader = mixedContent.match(/^=\s+/m); expect(hasDocumentHeader).toBeTruthy(); - + // Test section extraction - const sections = 
mixedContent.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); + const sections = mixedContent.split(/(?=^==\s+)/gm).filter(( + section: string, + ) => section.trim()); expect(sections.length).toBeGreaterThan(0); }); }); @@ -398,13 +447,13 @@ describe("ZettelEditor Component Logic", () => { describe("String Manipulation", () => { it("should handle string replacements correctly", () => { const originalContent = "= Title\n\n== Section\nContent"; - + // Test various string manipulations const convertedContent = originalContent - .replace(/^=\s+(.+)$/gm, '== $1') - .replace(/^index card$/gim, '') - .replace(/\n\s*\n\s*\n/g, '\n\n'); - + .replace(/^=\s+(.+)$/gm, "== $1") + .replace(/^index card$/gim, "") + .replace(/\n\s*\n\s*\n/g, "\n\n"); + expect(convertedContent).toBe("== Title\n\n== Section\nContent"); }); @@ -414,16 +463,16 @@ describe("ZettelEditor Component Logic", () => { "index card\n\n== Section\nContent", // Index card "= Title\nindex card\n== Section\nContent", // Both ]; - - edgeCases.forEach(content => { + + edgeCases.forEach((content) => { const converted = content - .replace(/^=\s+(.+)$/gm, '== $1') - .replace(/^index card$/gim, '') - .replace(/\n\s*\n\s*\n/g, '\n\n'); - + .replace(/^=\s+(.+)$/gm, "== $1") + .replace(/^index card$/gim, "") + .replace(/\n\s*\n\s*\n/g, "\n\n"); + expect(converted).toBeDefined(); - expect(typeof converted).toBe('string'); + expect(typeof converted).toBe("string"); }); }); }); -}); \ No newline at end of file +}); diff --git a/tests/unit/eventInput30040.test.ts b/tests/unit/eventInput30040.test.ts index c7dadc3..b7687bd 100644 --- a/tests/unit/eventInput30040.test.ts +++ b/tests/unit/eventInput30040.test.ts @@ -1,6 +1,8 @@ -import { describe, it, expect, vi, beforeEach } from "vitest"; -import { build30040EventSet, validate30040EventSet } from "../../src/lib/utils/event_input_utils"; -import { extractDocumentMetadata, parseAsciiDocWithMetadata } from "../../src/lib/utils/asciidoc_metadata"; +import { 
beforeEach, describe, expect, it, vi } from "vitest"; +import { + build30040EventSet, + validate30040EventSet, +} from "../../src/lib/utils/event_input_utils"; // Mock NDK and other dependencies vi.mock("@nostr-dev-kit/ndk", () => ({ @@ -16,12 +18,12 @@ vi.mock("@nostr-dev-kit/ndk", () => ({ })), })); -vi.mock("../../src/lib/ndk", () => ({ - ndkInstance: { - subscribe: vi.fn(), - }, - getNdk: vi.fn(() => ({})), -})); +// Mock NDK context +const mockNdk = { + subscribe: vi.fn(), + fetchEvents: vi.fn(), + pool: { relays: new Map() }, +}; vi.mock("svelte/store", () => ({ get: vi.fn(() => ({})), @@ -60,16 +62,30 @@ This is the content of the second section.`; const tags: [string, string][] = [["type", "article"]]; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); // Test index event expect(indexEvent.kind).toBe(30040); expect(indexEvent.content).toBe(""); - expect(indexEvent.tags).toContainEqual(["d", "test-document-with-preamble"]); - expect(indexEvent.tags).toContainEqual(["title", "Test Document with Preamble"]); + expect(indexEvent.tags).toContainEqual([ + "d", + "test-document-with-preamble", + ]); + expect(indexEvent.tags).toContainEqual([ + "title", + "Test Document with Preamble", + ]); expect(indexEvent.tags).toContainEqual(["author", "John Doe"]); expect(indexEvent.tags).toContainEqual(["version", "1.0"]); - expect(indexEvent.tags).toContainEqual(["summary", "This is a test document with preamble"]); + expect(indexEvent.tags).toContainEqual([ + "summary", + "This is a test document with preamble", + ]); expect(indexEvent.tags).toContainEqual(["t", "test"]); expect(indexEvent.tags).toContainEqual(["t", "preamble"]); expect(indexEvent.tags).toContainEqual(["t", "asciidoc"]); @@ -80,22 +96,47 @@ This is the content of the second section.`; // First section expect(sectionEvents[0].kind).toBe(30041); - 
expect(sectionEvents[0].content).toBe("This is the content of the first section."); - expect(sectionEvents[0].tags).toContainEqual(["d", "test-document-with-preamble-first-section"]); + expect(sectionEvents[0].content).toBe( + "This is the content of the first section.", + ); + expect(sectionEvents[0].tags).toContainEqual([ + "d", + "test-document-with-preamble-first-section", + ]); expect(sectionEvents[0].tags).toContainEqual(["title", "First Section"]); - expect(sectionEvents[0].tags).toContainEqual(["author", "Section Author"]); - expect(sectionEvents[0].tags).toContainEqual(["summary", "This is the first section"]); + expect(sectionEvents[0].tags).toContainEqual([ + "author", + "Section Author", + ]); + expect(sectionEvents[0].tags).toContainEqual([ + "summary", + "This is the first section", + ]); // Second section expect(sectionEvents[1].kind).toBe(30041); - expect(sectionEvents[1].content).toBe("This is the content of the second section."); - expect(sectionEvents[1].tags).toContainEqual(["d", "test-document-with-preamble-second-section"]); + expect(sectionEvents[1].content).toBe( + "This is the content of the second section.", + ); + expect(sectionEvents[1].tags).toContainEqual([ + "d", + "test-document-with-preamble-second-section", + ]); expect(sectionEvents[1].tags).toContainEqual(["title", "Second Section"]); - expect(sectionEvents[1].tags).toContainEqual(["summary", "This is the second section"]); + expect(sectionEvents[1].tags).toContainEqual([ + "summary", + "This is the second section", + ]); // Test a-tags in index event - expect(indexEvent.tags).toContainEqual(["a", "30041:test-pubkey:test-document-with-preamble-first-section"]); - expect(indexEvent.tags).toContainEqual(["a", "30041:test-pubkey:test-document-with-preamble-second-section"]); + expect(indexEvent.tags).toContainEqual([ + "a", + "30041:test-pubkey:test-document-with-preamble-first-section", + ]); + expect(indexEvent.tags).toContainEqual([ + "a", + 
"30041:test-pubkey:test-document-with-preamble-second-section", + ]); }); }); @@ -118,32 +159,65 @@ This is the content of the second section.`; const tags: [string, string][] = [["type", "article"]]; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); // Test index event expect(indexEvent.kind).toBe(30040); expect(indexEvent.content).toBe(""); - expect(indexEvent.tags).toContainEqual(["d", "test-document-without-preamble"]); - expect(indexEvent.tags).toContainEqual(["title", "Test Document without Preamble"]); - expect(indexEvent.tags).toContainEqual(["summary", "This is a test document without preamble"]); + expect(indexEvent.tags).toContainEqual([ + "d", + "test-document-without-preamble", + ]); + expect(indexEvent.tags).toContainEqual([ + "title", + "Test Document without Preamble", + ]); + expect(indexEvent.tags).toContainEqual([ + "summary", + "This is a test document without preamble", + ]); // Test section events expect(sectionEvents).toHaveLength(2); // First section expect(sectionEvents[0].kind).toBe(30041); - expect(sectionEvents[0].content).toBe("This is the content of the first section."); - expect(sectionEvents[0].tags).toContainEqual(["d", "test-document-without-preamble-first-section"]); + expect(sectionEvents[0].content).toBe( + "This is the content of the first section.", + ); + expect(sectionEvents[0].tags).toContainEqual([ + "d", + "test-document-without-preamble-first-section", + ]); expect(sectionEvents[0].tags).toContainEqual(["title", "First Section"]); - expect(sectionEvents[0].tags).toContainEqual(["author", "Section Author"]); - expect(sectionEvents[0].tags).toContainEqual(["summary", "This is the first section"]); + expect(sectionEvents[0].tags).toContainEqual([ + "author", + "Section Author", + ]); + expect(sectionEvents[0].tags).toContainEqual([ + "summary", + "This is the first section", 
+ ]); // Second section expect(sectionEvents[1].kind).toBe(30041); - expect(sectionEvents[1].content).toBe("This is the content of the second section."); - expect(sectionEvents[1].tags).toContainEqual(["d", "test-document-without-preamble-second-section"]); + expect(sectionEvents[1].content).toBe( + "This is the content of the second section.", + ); + expect(sectionEvents[1].tags).toContainEqual([ + "d", + "test-document-without-preamble-second-section", + ]); expect(sectionEvents[1].tags).toContainEqual(["title", "Second Section"]); - expect(sectionEvents[1].tags).toContainEqual(["summary", "This is the second section"]); + expect(sectionEvents[1].tags).toContainEqual([ + "summary", + "This is the second section", + ]); }); }); @@ -163,24 +237,44 @@ This is the preamble content. const tags: [string, string][] = [["type", "skeleton"]]; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); // Test index event expect(indexEvent.kind).toBe(30040); expect(indexEvent.content).toBe(""); - expect(indexEvent.tags).toContainEqual(["d", "skeleton-document-with-preamble"]); - expect(indexEvent.tags).toContainEqual(["title", "Skeleton Document with Preamble"]); - expect(indexEvent.tags).toContainEqual(["summary", "This is a skeleton document with preamble"]); + expect(indexEvent.tags).toContainEqual([ + "d", + "skeleton-document-with-preamble", + ]); + expect(indexEvent.tags).toContainEqual([ + "title", + "Skeleton Document with Preamble", + ]); + expect(indexEvent.tags).toContainEqual([ + "summary", + "This is a skeleton document with preamble", + ]); // Test section events expect(sectionEvents).toHaveLength(3); // All sections should have empty content - sectionEvents.forEach((section, index) => { + sectionEvents.forEach((section: any, index: number) => { expect(section.kind).toBe(30041); expect(section.content).toBe(""); - 
expect(section.tags).toContainEqual(["d", `skeleton-document-with-preamble-empty-section-${index + 1}`]); - expect(section.tags).toContainEqual(["title", `Empty Section ${index + 1}`]); + expect(section.tags).toContainEqual([ + "d", + `skeleton-document-with-preamble-empty-section-${index + 1}`, + ]); + expect(section.tags).toContainEqual([ + "title", + `Empty Section ${index + 1}`, + ]); }); }); }); @@ -199,24 +293,44 @@ This is the preamble content. const tags: [string, string][] = [["type", "skeleton"]]; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); // Test index event expect(indexEvent.kind).toBe(30040); expect(indexEvent.content).toBe(""); - expect(indexEvent.tags).toContainEqual(["d", "skeleton-document-without-preamble"]); - expect(indexEvent.tags).toContainEqual(["title", "Skeleton Document without Preamble"]); - expect(indexEvent.tags).toContainEqual(["summary", "This is a skeleton document without preamble"]); + expect(indexEvent.tags).toContainEqual([ + "d", + "skeleton-document-without-preamble", + ]); + expect(indexEvent.tags).toContainEqual([ + "title", + "Skeleton Document without Preamble", + ]); + expect(indexEvent.tags).toContainEqual([ + "summary", + "This is a skeleton document without preamble", + ]); // Test section events expect(sectionEvents).toHaveLength(3); // All sections should have empty content - sectionEvents.forEach((section, index) => { + sectionEvents.forEach((section: any, index: number) => { expect(section.kind).toBe(30041); expect(section.content).toBe(""); - expect(section.tags).toContainEqual(["d", `skeleton-document-without-preamble-empty-section-${index + 1}`]); - expect(section.tags).toContainEqual(["title", `Empty Section ${index + 1}`]); + expect(section.tags).toContainEqual([ + "d", + `skeleton-document-without-preamble-empty-section-${index + 1}`, + ]); + 
expect(section.tags).toContainEqual([ + "title", + `Empty Section ${index + 1}`, + ]); }); }); }); @@ -228,7 +342,12 @@ index card`; const tags: [string, string][] = [["type", "index-card"]]; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); // Test index event expect(indexEvent.kind).toBe(30040); @@ -249,14 +368,28 @@ index card`; const tags: [string, string][] = [["type", "index-card"]]; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); // Test index event expect(indexEvent.kind).toBe(30040); expect(indexEvent.content).toBe(""); - expect(indexEvent.tags).toContainEqual(["d", "test-index-card-with-metadata"]); - expect(indexEvent.tags).toContainEqual(["title", "Test Index Card with Metadata"]); - expect(indexEvent.tags).toContainEqual(["summary", "This is an index card with metadata"]); + expect(indexEvent.tags).toContainEqual([ + "d", + "test-index-card-with-metadata", + ]); + expect(indexEvent.tags).toContainEqual([ + "title", + "Test Index Card with Metadata", + ]); + expect(indexEvent.tags).toContainEqual([ + "summary", + "This is an index card with metadata", + ]); expect(indexEvent.tags).toContainEqual(["t", "index"]); expect(indexEvent.tags).toContainEqual(["t", "card"]); expect(indexEvent.tags).toContainEqual(["t", "metadata"]); @@ -303,23 +436,46 @@ This is the section content.`; const tags: [string, string][] = [["type", "complex"]]; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); // Test index event metadata expect(indexEvent.kind).toBe(30040); - expect(indexEvent.tags).toContainEqual(["d", 
"complex-metadata-document"]); - expect(indexEvent.tags).toContainEqual(["title", "Complex Metadata Document"]); + expect(indexEvent.tags).toContainEqual([ + "d", + "complex-metadata-document", + ]); + expect(indexEvent.tags).toContainEqual([ + "title", + "Complex Metadata Document", + ]); expect(indexEvent.tags).toContainEqual(["author", "Jane Smith"]); // Should use header line author expect(indexEvent.tags).toContainEqual(["author", "Override Author"]); // Additional author from attribute expect(indexEvent.tags).toContainEqual(["author", "Third Author"]); // Additional author from attribute expect(indexEvent.tags).toContainEqual(["version", "2.0"]); // Should use revision line version - expect(indexEvent.tags).toContainEqual(["summary", "This is a complex document with all metadata types Alternative description field"]); + expect(indexEvent.tags).toContainEqual([ + "summary", + "This is a complex document with all metadata types Alternative description field", + ]); expect(indexEvent.tags).toContainEqual(["published_on", "2024-03-01"]); - expect(indexEvent.tags).toContainEqual(["published_by", "Alexandria Complex"]); + expect(indexEvent.tags).toContainEqual([ + "published_by", + "Alexandria Complex", + ]); expect(indexEvent.tags).toContainEqual(["type", "book"]); - expect(indexEvent.tags).toContainEqual(["image", "https://example.com/cover.jpg"]); + expect(indexEvent.tags).toContainEqual([ + "image", + "https://example.com/cover.jpg", + ]); expect(indexEvent.tags).toContainEqual(["i", "978-0-123456-78-9"]); - expect(indexEvent.tags).toContainEqual(["source", "https://github.com/alexandria/complex"]); + expect(indexEvent.tags).toContainEqual([ + "source", + "https://github.com/alexandria/complex", + ]); expect(indexEvent.tags).toContainEqual(["auto-update", "yes"]); expect(indexEvent.tags).toContainEqual(["t", "complex"]); expect(indexEvent.tags).toContainEqual(["t", "metadata"]); @@ -332,13 +488,31 @@ This is the section content.`; 
expect(sectionEvents).toHaveLength(1); expect(sectionEvents[0].kind).toBe(30041); expect(sectionEvents[0].content).toBe("This is the section content."); - expect(sectionEvents[0].tags).toContainEqual(["d", "complex-metadata-document-section-with-complex-metadata"]); - expect(sectionEvents[0].tags).toContainEqual(["title", "Section with Complex Metadata"]); - expect(sectionEvents[0].tags).toContainEqual(["author", "Section Author"]); - expect(sectionEvents[0].tags).toContainEqual(["author", "Section Co-Author"]); - expect(sectionEvents[0].tags).toContainEqual(["summary", "This section has complex metadata Alternative description for section"]); + expect(sectionEvents[0].tags).toContainEqual([ + "d", + "complex-metadata-document-section-with-complex-metadata", + ]); + expect(sectionEvents[0].tags).toContainEqual([ + "title", + "Section with Complex Metadata", + ]); + expect(sectionEvents[0].tags).toContainEqual([ + "author", + "Section Author", + ]); + expect(sectionEvents[0].tags).toContainEqual([ + "author", + "Section Co-Author", + ]); + expect(sectionEvents[0].tags).toContainEqual([ + "summary", + "This section has complex metadata Alternative description for section", + ]); expect(sectionEvents[0].tags).toContainEqual(["type", "chapter"]); - expect(sectionEvents[0].tags).toContainEqual(["image", "https://example.com/section-image.jpg"]); + expect(sectionEvents[0].tags).toContainEqual([ + "image", + "https://example.com/section-image.jpg", + ]); expect(sectionEvents[0].tags).toContainEqual(["t", "section"]); expect(sectionEvents[0].tags).toContainEqual(["t", "complex"]); expect(sectionEvents[0].tags).toContainEqual(["t", "metadata"]); @@ -387,7 +561,9 @@ index card`; const validation = validate30040EventSet(content); expect(validation.valid).toBe(false); - expect(validation.reason).toContain("30040 events must have a document title"); + expect(validation.reason).toContain( + "30040 events must have a document title", + ); }); }); @@ -400,11 +576,22 @@ This is 
just preamble content.`; const tags: [string, string][] = []; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); expect(indexEvent.kind).toBe(30040); - expect(indexEvent.tags).toContainEqual(["d", "document-with-no-sections"]); - expect(indexEvent.tags).toContainEqual(["title", "Document with No Sections"]); + expect(indexEvent.tags).toContainEqual([ + "d", + "document-with-no-sections", + ]); + expect(indexEvent.tags).toContainEqual([ + "title", + "Document with No Sections", + ]); expect(sectionEvents).toHaveLength(0); }); @@ -418,16 +605,28 @@ Content here.`; const tags: [string, string][] = []; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); expect(indexEvent.kind).toBe(30040); - expect(indexEvent.tags).toContainEqual(["d", "document-with-special-characters-test-more"]); - expect(indexEvent.tags).toContainEqual(["title", "Document with Special Characters: Test & More!"]); + expect(indexEvent.tags).toContainEqual([ + "d", + "document-with-special-characters-test-more", + ]); + expect(indexEvent.tags).toContainEqual([ + "title", + "Document with Special Characters: Test & More!", + ]); expect(sectionEvents).toHaveLength(1); }); it("should handle document with very long title", () => { - const content = `= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality + const content = + `= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality :summary: This document has a very long title == Section 1 @@ -436,11 +635,19 @@ Content here.`; const tags: 
[string, string][] = []; - const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); + const { indexEvent, sectionEvents } = build30040EventSet( + content, + tags, + baseEvent, + mockNdk as any, + ); expect(indexEvent.kind).toBe(30040); - expect(indexEvent.tags).toContainEqual(["title", "This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality"]); + expect(indexEvent.tags).toContainEqual([ + "title", + "This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality", + ]); expect(sectionEvents).toHaveLength(1); }); }); -}); \ No newline at end of file +}); diff --git a/tests/unit/latexRendering.test.ts b/tests/unit/latexRendering.test.ts deleted file mode 100644 index ed38f4d..0000000 --- a/tests/unit/latexRendering.test.ts +++ /dev/null @@ -1,83 +0,0 @@ -import { describe, it, expect } from "vitest"; -import { parseAdvancedmarkup } from "../../src/lib/utils/markup/advancedMarkupParser"; -import { readFileSync } from "fs"; -import { join } from "path"; - -describe("LaTeX and AsciiMath Rendering in Inline Code Blocks", () => { - const jsonPath = join(__dirname, "../../test_data/LaTeXtestfile.json"); - const raw = readFileSync(jsonPath, "utf-8"); - // Extract the markdown content field from the JSON event - const content = JSON.parse(raw).content; - - it("renders LaTeX inline and display math correctly", async () => { - const html = await parseAdvancedmarkup(content); - // Test basic LaTeX examples from the test document - expect(html).toMatch(/\$\\sqrt\{x\}\$<\/span>/); - expect(html).toMatch(/
\$\$\\sqrt\{x\}\$\$<\/div>/); - expect(html).toMatch( - /\$\\mathbb\{N\} = \\{ a \\in \\mathbb\{Z\} : a > 0 \\}\$<\/span>/, - ); - expect(html).toMatch( - /
\$\$P \\left\( A=2 \\, \\middle\| \\, \\dfrac\{A\^2\}\{B\}>4 \\right\)\$\$<\/div>/, - ); - }); - - it("renders AsciiMath inline and display math correctly", async () => { - const html = await parseAdvancedmarkup(content); - // Test AsciiMath examples - expect(html).toMatch(/\$E=mc\^2\$<\/span>/); - expect(html).toMatch( - /
\$\$sum_\(k=1\)\^n k = 1\+2\+ cdots \+n=\(n\(n\+1\)\)\/2\$\$<\/div>/, - ); - expect(html).toMatch( - /
\$\$int_0\^1 x\^2 dx\$\$<\/div>/, - ); - }); - - it("renders LaTeX array and matrix environments as math", async () => { - const html = await parseAdvancedmarkup(content); - // Test array and matrix environments - expect(html).toMatch( - /
\$\$[\s\S]*\\begin\{array\}\{ccccc\}[\s\S]*\\end\{array\}[\s\S]*\$\$<\/div>/, - ); - expect(html).toMatch( - /
\$\$[\s\S]*\\begin\{bmatrix\}[\s\S]*\\end\{bmatrix\}[\s\S]*\$\$<\/div>/, - ); - }); - - it("handles unsupported LaTeX environments gracefully", async () => { - const html = await parseAdvancedmarkup(content); - // Should show a message and plaintext for tabular - expect(html).toMatch(/
/); - expect(html).toMatch( - /Unrendered, as it is LaTeX typesetting, not a formula:/, - ); - expect(html).toMatch(/\\\\begin\{tabular\}/); - }); - - it("renders mixed LaTeX and AsciiMath correctly", async () => { - const html = await parseAdvancedmarkup(content); - // Test mixed content - expect(html).toMatch( - /\$\\frac\{1\}\{2\}\$<\/span>/, - ); - expect(html).toMatch(/\$1\/2\$<\/span>/); - expect(html).toMatch( - /
\$\$\\sum_\{i=1\}\^n x_i\$\$<\/div>/, - ); - expect(html).toMatch( - /
\$\$sum_\(i=1\)\^n x_i\$\$<\/div>/, - ); - }); - - it("handles edge cases and regular code blocks", async () => { - const html = await parseAdvancedmarkup(content); - // Test regular code blocks (should remain as code, not math) - expect(html).toMatch(/]*>\$19\.99<\/code>/); - expect(html).toMatch(/]*>echo "Price: \$100"<\/code>/); - expect(html).toMatch( - /]*>const price = \\`\$\$\{amount\}\\`<\/code>/, - ); - expect(html).toMatch(/]*>color: \$primary-color<\/code>/); - }); -}); diff --git a/tests/unit/mathProcessing.test.ts b/tests/unit/mathProcessing.test.ts new file mode 100644 index 0000000..acf7378 --- /dev/null +++ b/tests/unit/mathProcessing.test.ts @@ -0,0 +1,186 @@ +import { describe, expect, it } from "vitest"; +import { parseAdvancedmarkup } from "../../src/lib/utils/markup/advancedMarkupParser.ts"; + +describe("Math Processing in Advanced Markup Parser", () => { + it("should process inline math inside code blocks", async () => { + const input = "Here is some inline math: `$x^2 + y^2 = z^2$` in a sentence."; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain('\\(x^2 + y^2 = z^2\\)'); + expect(result).toContain("Here is some inline math:"); + expect(result).toContain("in a sentence."); + }); + + it("should process display math inside code blocks", async () => { + const input = "Here is a display equation:\n\n`$$\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n$$`\n\nThis is after the equation."; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain('\\[\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n\\]'); + expect(result).toContain('

Here is a display equation:

'); + expect(result).toContain('

This is after the equation.

'); + }); + + it("should process both inline and display math in the same code block", async () => { + const input = "Mixed math: `$\\alpha$ and $$\\beta = \\frac{1}{2}$$` in one block."; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain('\\(\\alpha\\)'); + expect(result).toContain('\\[\\beta = \\frac{1}{2}\\]'); + expect(result).toContain("Mixed math:"); + expect(result).toContain("in one block."); + }); + + it("should NOT process math outside of code blocks", async () => { + const input = "This math $x^2 + y^2 = z^2$ should not be processed."; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain("$x^2 + y^2 = z^2$"); + expect(result).not.toContain(''); + expect(result).not.toContain(''); + }); + + it("should NOT process display math outside of code blocks", async () => { + const input = "This display math $$\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n$$ should not be processed."; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain("$$\n\\int_{-\\infty}^{\\infty} e^{-x^2} dx = \\sqrt{\\pi}\n$$"); + expect(result).not.toContain(''); + expect(result).not.toContain(''); + }); + + it("should handle code blocks without math normally", async () => { + const input = "Here is some code: `console.log('hello world')` that should not be processed."; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain("`console.log('hello world')`"); + expect(result).not.toContain(''); + expect(result).not.toContain(''); + }); + + it("should handle complex math expressions with nested structures", async () => { + const input = "Complex math: `$$\\begin{pmatrix} a & b \\\\ c & d \\end{pmatrix} \\cdot \\begin{pmatrix} x \\\\ y \\end{pmatrix} = \\begin{pmatrix} ax + by \\\\ cx + dy \\end{pmatrix}$$`"; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain(''); + expect(result).toContain("\\begin{pmatrix}"); + 
expect(result).toContain("\\end{pmatrix}"); + expect(result).toContain("\\cdot"); + }); + + it("should handle inline math with special characters", async () => { + const input = "Special chars: `$\\alpha, \\beta, \\gamma, \\delta$` and `$\\sum_{i=1}^{n} x_i$`"; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain('\\(\\alpha, \\beta, \\gamma, \\delta\\)'); + expect(result).toContain('\\(\\sum_{i=1}^{n} x_i\\)'); + }); + + it("should handle multiple math expressions in separate code blocks", async () => { + const input = "First: `$E = mc^2$` and second: `$$F = G\\frac{m_1 m_2}{r^2}$$`"; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain('\\(E = mc^2\\)'); + expect(result).toContain('\\[F = G\\frac{m_1 m_2}{r^2}\\]'); + }); + + it("should handle math expressions with line breaks in display mode", async () => { + const input = "Multi-line: `$$\n\\begin{align}\nx &= a + b \\\\\ny &= c + d\n\\end{align}\n$$`"; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain(''); + expect(result).toContain("\\begin{align}"); + expect(result).toContain("\\end{align}"); + expect(result).toContain("x &= a + b"); + expect(result).toContain("y &= c + d"); + }); + + it("should handle edge case with empty math expressions", async () => { + const input = "Empty math: `$$` and `$`"; + const result = await parseAdvancedmarkup(input); + + // Should not crash and should preserve the original content + expect(result).toContain("`$$`"); + expect(result).toContain("`$`"); + }); + + it("should handle mixed content with regular text, code, and math", async () => { + const input = `This is a paragraph with regular text. 
+ +Here is some code: \`console.log('hello')\` + +And here is math: \`$\\pi \\approx 3.14159$\` + +And display math: \`$$\n\\int_0^1 x^2 dx = \\frac{1}{3}\n$$\` + +And more regular text.`; + + const result = await parseAdvancedmarkup(input); + + // Should preserve regular text + expect(result).toContain("This is a paragraph with regular text."); + expect(result).toContain("And more regular text."); + + // Should preserve regular code blocks + expect(result).toContain("`console.log('hello')`"); + + // Should process math + expect(result).toContain('\\(\\pi \\approx 3.14159\\)'); + expect(result).toContain(''); + expect(result).toContain("\\int_0^1 x^2 dx = \\frac{1}{3}"); + }); + + it("should handle math expressions with dollar signs in the content", async () => { + const input = "Price math: `$\\text{Price} = \\$19.99$`"; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain(''); + expect(result).toContain("\\text{Price} = \\$19.99"); + }); + + it("should handle display math with dollar signs in the content", async () => { + const input = "Price display: `$$\n\\text{Total} = \\$19.99 + \\$5.99 = \\$25.98\n$$`"; + const result = await parseAdvancedmarkup(input); + + expect(result).toContain(''); + expect(result).toContain("\\text{Total} = \\$19.99 + \\$5.99 = \\$25.98"); + }); + + it("should handle JSON content with escaped backslashes", async () => { + // Simulate content from JSON where backslashes are escaped + const jsonContent = "Math from JSON: `$\\\\alpha + \\\\beta = \\\\gamma$`"; + const result = await parseAdvancedmarkup(jsonContent); + + expect(result).toContain(''); + expect(result).toContain("\\\\alpha + \\\\beta = \\\\gamma"); + }); + + it("should handle JSON content with escaped display math", async () => { + // Simulate content from JSON where backslashes are escaped + const jsonContent = "Display math from JSON: `$$\\\\int_0^1 x^2 dx = \\\\frac{1}{3}$$`"; + const result = await parseAdvancedmarkup(jsonContent); + + 
expect(result).toContain(''); + expect(result).toContain("\\\\int_0^1 x^2 dx = \\\\frac{1}{3}"); + }); + + it("should handle JSON content with escaped dollar signs", async () => { + // Simulate content from JSON where dollar signs are escaped + const jsonContent = "Price math from JSON: `$\\\\text{Price} = \\\\\\$19.99$`"; + const result = await parseAdvancedmarkup(jsonContent); + + expect(result).toContain(''); + expect(result).toContain("\\\\text{Price} = \\\\\\$19.99"); + }); + + it("should handle complex JSON content with multiple escaped characters", async () => { + // Simulate complex content from JSON + const jsonContent = "Complex JSON math: `$$\\\\begin{pmatrix} a & b \\\\\\\\ c & d \\\\end{pmatrix} \\\\cdot \\\\begin{pmatrix} x \\\\\\\\ y \\\\end{pmatrix}$$`"; + const result = await parseAdvancedmarkup(jsonContent); + + expect(result).toContain(''); + expect(result).toContain("\\\\begin{pmatrix}"); + expect(result).toContain("\\\\end{pmatrix}"); + expect(result).toContain("\\\\cdot"); + expect(result).toContain("\\\\\\\\"); + }); +}); diff --git a/tests/unit/metadataExtraction.test.ts b/tests/unit/metadataExtraction.test.ts index 65a50b8..01c7e6e 100644 --- a/tests/unit/metadataExtraction.test.ts +++ b/tests/unit/metadataExtraction.test.ts @@ -1,10 +1,10 @@ -import { describe, it, expect } from "vitest"; -import { - extractDocumentMetadata, - extractSectionMetadata, - parseAsciiDocWithMetadata, +import { describe, expect, it } from "vitest"; +import { + extractDocumentMetadata, + extractSectionMetadata, + extractSmartMetadata, metadataToTags, - extractSmartMetadata + parseAsciiDocWithMetadata, } from "../../src/lib/utils/asciidoc_metadata.ts"; describe("AsciiDoc Metadata Extraction", () => { @@ -39,13 +39,15 @@ This is the content of the second section.`; it("extractDocumentMetadata should extract document metadata correctly", () => { const { metadata, content } = extractDocumentMetadata(testContent); - + expect(metadata.title).toBe("Test Document with 
Metadata"); expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); expect(metadata.version).toBe("1.0"); expect(metadata.publicationDate).toBe("2024-01-15"); expect(metadata.publishedBy).toBe("Alexandria Test"); - expect(metadata.summary).toBe("This is a test document for metadata extraction"); + expect(metadata.summary).toBe( + "This is a test document for metadata extraction", + ); expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); expect(metadata.type).toBe("article"); expect(metadata.tags).toEqual(["test", "metadata", "asciidoc"]); @@ -53,7 +55,7 @@ This is the content of the second section.`; expect(metadata.isbn).toBe("978-0-123456-78-9"); expect(metadata.source).toBe("https://github.com/alexandria/test"); expect(metadata.autoUpdate).toBe("yes"); - + // Content should not include the header metadata expect(content).toContain("This is the preamble content"); expect(content).toContain("== First Section"); @@ -70,7 +72,7 @@ This is the content of the second section.`; This is the content of the first section.`; const { metadata, content, title } = extractSectionMetadata(sectionContent); - + expect(title).toBe("First Section"); expect(metadata.authors).toEqual(["Section Author"]); expect(metadata.summary).toBe("This is the first section"); @@ -86,7 +88,7 @@ Stella Some context text`; const { metadata, content, title } = extractSectionMetadata(sectionContent); - + expect(title).toBe("Section Header1"); expect(metadata.authors).toEqual(["Stella"]); expect(metadata.summary).toBe("Some summary"); @@ -102,7 +104,7 @@ Stella Some context text`; const { metadata, content, title } = extractSectionMetadata(sectionContent); - + expect(title).toBe("Section Header1"); expect(metadata.authors).toEqual(["Stella", "John Doe"]); expect(metadata.summary).toBe("Some summary"); @@ -118,22 +120,26 @@ This is not an author line Some context text`; const { metadata, content, title } = extractSectionMetadata(sectionContent); - + expect(title).toBe("Section Header1"); 
expect(metadata.authors).toEqual(["Stella"]); expect(metadata.summary).toBe("Some summary"); - expect(content.trim()).toBe("This is not an author line\nSome context text"); + expect(content.trim()).toBe( + "This is not an author line\nSome context text", + ); }); it("parseAsciiDocWithMetadata should parse complete document", () => { const parsed = parseAsciiDocWithMetadata(testContent); - + expect(parsed.metadata.title).toBe("Test Document with Metadata"); expect(parsed.sections).toHaveLength(2); expect(parsed.sections[0].title).toBe("First Section"); expect(parsed.sections[1].title).toBe("Second Section"); expect(parsed.sections[0].metadata.authors).toEqual(["Section Author"]); - expect(parsed.sections[1].metadata.summary).toBe("This is the second section"); + expect(parsed.sections[1].metadata.summary).toBe( + "This is the second section", + ); }); it("metadataToTags should convert metadata to Nostr tags", () => { @@ -142,11 +148,11 @@ Some context text`; authors: ["Author 1", "Author 2"], version: "1.0", summary: "Test summary", - tags: ["tag1", "tag2"] + tags: ["tag1", "tag2"], }; - + const tags = metadataToTags(metadata); - + expect(tags).toContainEqual(["title", "Test Title"]); expect(tags).toContainEqual(["author", "Author 1"]); expect(tags).toContainEqual(["author", "Author 2"]); @@ -161,16 +167,16 @@ Some context text`; index card`; const { metadata, content } = extractDocumentMetadata(indexCardContent); - + expect(metadata.title).toBe("Test Index Card"); expect(content.trim()).toBe("index card"); }); it("should handle empty content gracefully", () => { const emptyContent = ""; - + const { metadata, content } = extractDocumentMetadata(emptyContent); - + expect(metadata.title).toBeUndefined(); expect(content).toBe(""); }); @@ -182,7 +188,7 @@ index card`; Some content here.`; const { metadata } = extractDocumentMetadata(contentWithKeywords); - + expect(metadata.tags).toEqual(["keyword1", "keyword2", "keyword3"]); }); @@ -194,7 +200,7 @@ Some content here.`; 
Some content here.`; const { metadata } = extractDocumentMetadata(contentWithBoth); - + // Both tags and keywords are valid, both should be accumulated expect(metadata.tags).toEqual(["tag1", "tag2", "keyword1", "keyword2"]); }); @@ -206,7 +212,7 @@ Some content here.`; Content here.`; const { metadata } = extractDocumentMetadata(contentWithTags); - + expect(metadata.tags).toEqual(["tag1", "tag2", "tag3"]); }); @@ -221,15 +227,19 @@ Content here.`; Content here.`; - const { metadata: summaryMetadata } = extractDocumentMetadata(contentWithSummary); - const { metadata: descriptionMetadata } = extractDocumentMetadata(contentWithDescription); - + const { metadata: summaryMetadata } = extractDocumentMetadata( + contentWithSummary, + ); + const { metadata: descriptionMetadata } = extractDocumentMetadata( + contentWithDescription, + ); + expect(summaryMetadata.summary).toBe("This is a summary"); expect(descriptionMetadata.summary).toBe("This is a description"); }); - describe('Smart metadata extraction', () => { - it('should handle section-only content correctly', () => { + describe("Smart metadata extraction", () => { + it("should handle section-only content correctly", () => { const sectionOnlyContent = `== First Section :author: Section Author :description: This is the first section @@ -244,20 +254,20 @@ This is the content of the first section. 
This is the content of the second section.`; const { metadata, content } = extractSmartMetadata(sectionOnlyContent); - + // Should extract title from first section - expect(metadata.title).toBe('First Section'); - + expect(metadata.title).toBe("First Section"); + // Should not have document-level metadata since there's no document header expect(metadata.authors).toBeUndefined(); expect(metadata.version).toBeUndefined(); expect(metadata.publicationDate).toBeUndefined(); - + // Content should be preserved expect(content).toBe(sectionOnlyContent); }); - it('should handle minimal document header (just title) correctly', () => { + it("should handle minimal document header (just title) correctly", () => { const minimalDocumentHeader = `= Test Document == First Section @@ -273,22 +283,22 @@ This is the content of the first section. This is the content of the second section.`; const { metadata, content } = extractSmartMetadata(minimalDocumentHeader); - + // Should extract title from document header - expect(metadata.title).toBe('Test Document'); - + expect(metadata.title).toBe("Test Document"); + // Should not have document-level metadata since there's no other metadata expect(metadata.authors).toBeUndefined(); // Note: version might be set from section attributes like :type: chapter expect(metadata.publicationDate).toBeUndefined(); - + // Content should preserve the title line for 30040 events - expect(content).toContain('= Test Document'); - expect(content).toContain('== First Section'); - expect(content).toContain('== Second Section'); + expect(content).toContain("= Test Document"); + expect(content).toContain("== First Section"); + expect(content).toContain("== Second Section"); }); - it('should handle document with full header correctly', () => { + it("should handle document with full header correctly", () => { const documentWithHeader = `= Test Document John Doe 1.0, 2024-01-15: Alexandria Test @@ -302,21 +312,21 @@ John Doe This is the content.`; const { metadata, 
content } = extractSmartMetadata(documentWithHeader); - + // Should extract document-level metadata - expect(metadata.title).toBe('Test Document'); - expect(metadata.authors).toEqual(['John Doe', 'Jane Smith']); - expect(metadata.version).toBe('1.0'); - expect(metadata.publishedBy).toBe('Alexandria Test'); - expect(metadata.publicationDate).toBe('2024-01-15'); - expect(metadata.summary).toBe('This is a test document'); - + expect(metadata.title).toBe("Test Document"); + expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); + expect(metadata.version).toBe("1.0"); + expect(metadata.publishedBy).toBe("Alexandria Test"); + expect(metadata.publicationDate).toBe("2024-01-15"); + expect(metadata.summary).toBe("This is a test document"); + // Content should be cleaned - expect(content).not.toContain('= Test Document'); - expect(content).not.toContain('John Doe '); - expect(content).not.toContain('1.0, 2024-01-15: Alexandria Test'); - expect(content).not.toContain(':summary: This is a test document'); - expect(content).not.toContain(':author: Jane Smith'); + expect(content).not.toContain("= Test Document"); + expect(content).not.toContain("John Doe "); + expect(content).not.toContain("1.0, 2024-01-15: Alexandria Test"); + expect(content).not.toContain(":summary: This is a test document"); + expect(content).not.toContain(":author: Jane Smith"); }); }); -}); \ No newline at end of file +}); diff --git a/tests/unit/nostr_identifiers.test.ts b/tests/unit/nostr_identifiers.test.ts index d4c2d1f..a70c7bf 100644 --- a/tests/unit/nostr_identifiers.test.ts +++ b/tests/unit/nostr_identifiers.test.ts @@ -1,106 +1,112 @@ -import { describe, it, expect } from 'vitest'; -import { - isEventId, - isCoordinate, - parseCoordinate, +import { describe, expect, it } from "vitest"; +import { createCoordinate, - isNostrIdentifier -} from '../../src/lib/utils/nostr_identifiers'; + isCoordinate, + isEventId, + isNostrIdentifier, + parseCoordinate, +} from 
"../../src/lib/utils/nostr_identifiers"; -describe('Nostr Identifier Validation', () => { - describe('isEventId', () => { - it('should validate correct hex event IDs', () => { - const validId = 'a'.repeat(64); +describe("Nostr Identifier Validation", () => { + describe("isEventId", () => { + it("should validate correct hex event IDs", () => { + const validId = "a".repeat(64); expect(isEventId(validId)).toBe(true); - - const validIdWithMixedCase = 'A'.repeat(32) + 'f'.repeat(32); + + const validIdWithMixedCase = "A".repeat(32) + "f".repeat(32); expect(isEventId(validIdWithMixedCase)).toBe(true); }); - it('should reject invalid event IDs', () => { - expect(isEventId('')).toBe(false); - expect(isEventId('abc')).toBe(false); - expect(isEventId('a'.repeat(63))).toBe(false); // too short - expect(isEventId('a'.repeat(65))).toBe(false); // too long - expect(isEventId('g'.repeat(64))).toBe(false); // invalid hex char + it("should reject invalid event IDs", () => { + expect(isEventId("")).toBe(false); + expect(isEventId("abc")).toBe(false); + expect(isEventId("a".repeat(63))).toBe(false); // too short + expect(isEventId("a".repeat(65))).toBe(false); // too long + expect(isEventId("g".repeat(64))).toBe(false); // invalid hex char }); }); - describe('isCoordinate', () => { - it('should validate correct coordinates', () => { - const validCoordinate = `30040:${'a'.repeat(64)}:chapter-1`; + describe("isCoordinate", () => { + it("should validate correct coordinates", () => { + const validCoordinate = `30040:${"a".repeat(64)}:chapter-1`; expect(isCoordinate(validCoordinate)).toBe(true); - - const coordinateWithColonsInDTag = `30041:${'b'.repeat(64)}:chapter:with:colons`; + + const coordinateWithColonsInDTag = `30041:${ + "b".repeat(64) + }:chapter:with:colons`; expect(isCoordinate(coordinateWithColonsInDTag)).toBe(true); }); - it('should reject invalid coordinates', () => { - expect(isCoordinate('')).toBe(false); - expect(isCoordinate('abc')).toBe(false); - 
expect(isCoordinate('30040:abc:chapter-1')).toBe(false); // invalid pubkey - expect(isCoordinate('30040:abc')).toBe(false); // missing d-tag - expect(isCoordinate('abc:def:ghi')).toBe(false); // invalid kind - expect(isCoordinate('-1:abc:def')).toBe(false); // negative kind + it("should reject invalid coordinates", () => { + expect(isCoordinate("")).toBe(false); + expect(isCoordinate("abc")).toBe(false); + expect(isCoordinate("30040:abc:chapter-1")).toBe(false); // invalid pubkey + expect(isCoordinate("30040:abc")).toBe(false); // missing d-tag + expect(isCoordinate("abc:def:ghi")).toBe(false); // invalid kind + expect(isCoordinate("-1:abc:def")).toBe(false); // negative kind }); }); - describe('parseCoordinate', () => { - it('should parse valid coordinates correctly', () => { - const coordinate = `30040:${'a'.repeat(64)}:chapter-1`; + describe("parseCoordinate", () => { + it("should parse valid coordinates correctly", () => { + const coordinate = `30040:${"a".repeat(64)}:chapter-1`; const parsed = parseCoordinate(coordinate); - + expect(parsed).toEqual({ kind: 30040, - pubkey: 'a'.repeat(64), - dTag: 'chapter-1' + pubkey: "a".repeat(64), + dTag: "chapter-1", }); }); - it('should handle d-tags with colons', () => { - const coordinate = `30041:${'b'.repeat(64)}:chapter:with:colons`; + it("should handle d-tags with colons", () => { + const coordinate = `30041:${"b".repeat(64)}:chapter:with:colons`; const parsed = parseCoordinate(coordinate); - + expect(parsed).toEqual({ kind: 30041, - pubkey: 'b'.repeat(64), - dTag: 'chapter:with:colons' + pubkey: "b".repeat(64), + dTag: "chapter:with:colons", }); }); - it('should return null for invalid coordinates', () => { - expect(parseCoordinate('')).toBeNull(); - expect(parseCoordinate('abc')).toBeNull(); - expect(parseCoordinate('30040:abc:chapter-1')).toBeNull(); + it("should return null for invalid coordinates", () => { + expect(parseCoordinate("")).toBeNull(); + expect(parseCoordinate("abc")).toBeNull(); + 
expect(parseCoordinate("30040:abc:chapter-1")).toBeNull(); }); }); - describe('createCoordinate', () => { - it('should create valid coordinates', () => { - const coordinate = createCoordinate(30040, 'a'.repeat(64), 'chapter-1'); - expect(coordinate).toBe(`30040:${'a'.repeat(64)}:chapter-1`); + describe("createCoordinate", () => { + it("should create valid coordinates", () => { + const coordinate = createCoordinate(30040, "a".repeat(64), "chapter-1"); + expect(coordinate).toBe(`30040:${"a".repeat(64)}:chapter-1`); }); - it('should handle d-tags with colons', () => { - const coordinate = createCoordinate(30041, 'b'.repeat(64), 'chapter:with:colons'); - expect(coordinate).toBe(`30041:${'b'.repeat(64)}:chapter:with:colons`); + it("should handle d-tags with colons", () => { + const coordinate = createCoordinate( + 30041, + "b".repeat(64), + "chapter:with:colons", + ); + expect(coordinate).toBe(`30041:${"b".repeat(64)}:chapter:with:colons`); }); }); - describe('isNostrIdentifier', () => { - it('should accept valid event IDs', () => { - expect(isNostrIdentifier('a'.repeat(64))).toBe(true); + describe("isNostrIdentifier", () => { + it("should accept valid event IDs", () => { + expect(isNostrIdentifier("a".repeat(64))).toBe(true); }); - it('should accept valid coordinates', () => { - const coordinate = `30040:${'a'.repeat(64)}:chapter-1`; + it("should accept valid coordinates", () => { + const coordinate = `30040:${"a".repeat(64)}:chapter-1`; expect(isNostrIdentifier(coordinate)).toBe(true); }); - it('should reject invalid identifiers', () => { - expect(isNostrIdentifier('')).toBe(false); - expect(isNostrIdentifier('abc')).toBe(false); - expect(isNostrIdentifier('30040:abc:chapter-1')).toBe(false); + it("should reject invalid identifiers", () => { + expect(isNostrIdentifier("")).toBe(false); + expect(isNostrIdentifier("abc")).toBe(false); + expect(isNostrIdentifier("30040:abc:chapter-1")).toBe(false); }); }); -}); \ No newline at end of file +}); diff --git 
a/tests/unit/relayDeduplication.test.ts b/tests/unit/relayDeduplication.test.ts index 9344cc2..4ea6b91 100644 --- a/tests/unit/relayDeduplication.test.ts +++ b/tests/unit/relayDeduplication.test.ts @@ -1,11 +1,11 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import type { NDKEvent } from '@nostr-dev-kit/ndk'; -import { - deduplicateContentEvents, +import { beforeEach, describe, expect, it, vi } from "vitest"; +import type { NDKEvent } from "@nostr-dev-kit/ndk"; +import { deduplicateAndCombineEvents, + deduplicateContentEvents, + getEventCoordinate, isReplaceableEvent, - getEventCoordinate -} from '../../src/lib/utils/eventDeduplication'; +} from "../../src/lib/utils/eventDeduplication"; // Mock NDKEvent for testing class MockNDKEvent { @@ -16,162 +16,264 @@ class MockNDKEvent { content: string; tags: string[][]; - constructor(id: string, kind: number, pubkey: string, created_at: number, dTag: string, content: string = '') { + constructor( + id: string, + kind: number, + pubkey: string, + created_at: number, + dTag: string, + content: string = "", + ) { this.id = id; this.kind = kind; this.pubkey = pubkey; this.created_at = created_at; this.content = content; - this.tags = [['d', dTag]]; + this.tags = [["d", dTag]]; } tagValue(tagName: string): string | undefined { - const tag = this.tags.find(t => t[0] === tagName); + const tag = this.tags.find((t) => t[0] === tagName); return tag ? 
tag[1] : undefined; } } -describe('Relay Deduplication Behavior Tests', () => { +describe("Relay Deduplication Behavior Tests", () => { let mockEvents: MockNDKEvent[]; beforeEach(() => { // Create test events with different timestamps mockEvents = [ // Older version of a publication content event - new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old content'), + new MockNDKEvent( + "event1", + 30041, + "pubkey1", + 1000, + "chapter-1", + "Old content", + ), // Newer version of the same publication content event - new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'Updated content'), + new MockNDKEvent( + "event2", + 30041, + "pubkey1", + 2000, + "chapter-1", + "Updated content", + ), // Different publication content event - new MockNDKEvent('event3', 30041, 'pubkey1', 1500, 'chapter-2', 'Different content'), + new MockNDKEvent( + "event3", + 30041, + "pubkey1", + 1500, + "chapter-2", + "Different content", + ), // Publication index event (should not be deduplicated) - new MockNDKEvent('event4', 30040, 'pubkey1', 1200, 'book-1', 'Index content'), + new MockNDKEvent( + "event4", + 30040, + "pubkey1", + 1200, + "book-1", + "Index content", + ), // Regular text note (should not be deduplicated) - new MockNDKEvent('event5', 1, 'pubkey1', 1300, '', 'Regular note'), + new MockNDKEvent("event5", 1, "pubkey1", 1300, "", "Regular note"), ]; }); - describe('Addressable Event Deduplication', () => { - it('should keep only the most recent version of addressable events by coordinate', () => { + describe("Addressable Event Deduplication", () => { + it("should keep only the most recent version of addressable events by coordinate", () => { // Test the deduplication logic for content events - const eventSets = [new Set(mockEvents.filter(e => e.kind === 30041) as NDKEvent[])]; + const eventSets = [ + new Set(mockEvents.filter((e) => e.kind === 30041) as NDKEvent[]), + ]; const result = deduplicateContentEvents(eventSets); - + // Should have 2 unique 
coordinates: chapter-1 and chapter-2 expect(result.size).toBe(2); - + // Should keep the newer version of chapter-1 - const chapter1Event = result.get('30041:pubkey1:chapter-1'); - expect(chapter1Event?.id).toBe('event2'); - expect(chapter1Event?.content).toBe('Updated content'); - + const chapter1Event = result.get("30041:pubkey1:chapter-1"); + expect(chapter1Event?.id).toBe("event2"); + expect(chapter1Event?.content).toBe("Updated content"); + // Should keep chapter-2 - const chapter2Event = result.get('30041:pubkey1:chapter-2'); - expect(chapter2Event?.id).toBe('event3'); + const chapter2Event = result.get("30041:pubkey1:chapter-2"); + expect(chapter2Event?.id).toBe("event3"); }); - it('should handle events with missing d-tags gracefully', () => { - const eventWithoutDTag = new MockNDKEvent('event6', 30041, 'pubkey1', 1400, '', 'No d-tag'); + it("should handle events with missing d-tags gracefully", () => { + const eventWithoutDTag = new MockNDKEvent( + "event6", + 30041, + "pubkey1", + 1400, + "", + "No d-tag", + ); eventWithoutDTag.tags = []; // Remove d-tag - + const eventSets = [new Set([eventWithoutDTag] as NDKEvent[])]; const result = deduplicateContentEvents(eventSets); - + // Should not include events without d-tags expect(result.size).toBe(0); }); - it('should handle events with missing timestamps', () => { - const eventWithoutTimestamp = new MockNDKEvent('event7', 30041, 'pubkey1', 0, 'chapter-3', 'No timestamp'); - const eventWithTimestamp = new MockNDKEvent('event8', 30041, 'pubkey1', 1500, 'chapter-3', 'With timestamp'); - - const eventSets = [new Set([eventWithoutTimestamp, eventWithTimestamp] as NDKEvent[])]; + it("should handle events with missing timestamps", () => { + const eventWithoutTimestamp = new MockNDKEvent( + "event7", + 30041, + "pubkey1", + 0, + "chapter-3", + "No timestamp", + ); + const eventWithTimestamp = new MockNDKEvent( + "event8", + 30041, + "pubkey1", + 1500, + "chapter-3", + "With timestamp", + ); + + const eventSets = [ + 
new Set([eventWithoutTimestamp, eventWithTimestamp] as NDKEvent[]), + ]; const result = deduplicateContentEvents(eventSets); - + // Should prefer the event with timestamp - const chapter3Event = result.get('30041:pubkey1:chapter-3'); - expect(chapter3Event?.id).toBe('event8'); + const chapter3Event = result.get("30041:pubkey1:chapter-3"); + expect(chapter3Event?.id).toBe("event8"); }); }); - describe('Mixed Event Type Deduplication', () => { - it('should only deduplicate addressable events (kinds 30000-39999)', () => { + describe("Mixed Event Type Deduplication", () => { + it("should only deduplicate addressable events (kinds 30000-39999)", () => { const result = deduplicateAndCombineEvents( [mockEvents[4]] as NDKEvent[], // Regular text note new Set([mockEvents[3]] as NDKEvent[]), // Publication index - new Set([mockEvents[0], mockEvents[1], mockEvents[2]] as NDKEvent[]) // Content events + new Set([mockEvents[0], mockEvents[1], mockEvents[2]] as NDKEvent[]), // Content events ); - + // Should have 4 events total: // - 1 regular text note (not deduplicated) // - 1 publication index (not deduplicated) // - 2 unique content events (deduplicated from 3) expect(result.length).toBe(4); - + // Verify the content events were deduplicated - const contentEvents = result.filter(e => e.kind === 30041); + const contentEvents = result.filter((e) => e.kind === 30041); expect(contentEvents.length).toBe(2); - + // Verify the newer version was kept - const newerEvent = contentEvents.find(e => e.id === 'event2'); + const newerEvent = contentEvents.find((e) => e.id === "event2"); expect(newerEvent).toBeDefined(); }); - it('should handle non-addressable events correctly', () => { + it("should handle non-addressable events correctly", () => { const regularEvents = [ - new MockNDKEvent('note1', 1, 'pubkey1', 1000, '', 'Note 1'), - new MockNDKEvent('note2', 1, 'pubkey1', 2000, '', 'Note 2'), - new MockNDKEvent('profile1', 0, 'pubkey1', 1500, '', 'Profile 1'), + new MockNDKEvent("note1", 
1, "pubkey1", 1000, "", "Note 1"), + new MockNDKEvent("note2", 1, "pubkey1", 2000, "", "Note 2"), + new MockNDKEvent("profile1", 0, "pubkey1", 1500, "", "Profile 1"), ]; - + const result = deduplicateAndCombineEvents( regularEvents as NDKEvent[], new Set(), - new Set() + new Set(), ); - + // All regular events should be included (no deduplication) expect(result.length).toBe(3); }); }); - describe('Coordinate System Validation', () => { - it('should correctly identify event coordinates', () => { - const event = new MockNDKEvent('test', 30041, 'pubkey123', 1000, 'test-chapter'); + describe("Coordinate System Validation", () => { + it("should correctly identify event coordinates", () => { + const event = new MockNDKEvent( + "test", + 30041, + "pubkey123", + 1000, + "test-chapter", + ); const coordinate = getEventCoordinate(event as NDKEvent); - - expect(coordinate).toBe('30041:pubkey123:test-chapter'); + + expect(coordinate).toBe("30041:pubkey123:test-chapter"); }); - it('should handle d-tags with colons correctly', () => { - const event = new MockNDKEvent('test', 30041, 'pubkey123', 1000, 'chapter:with:colons'); + it("should handle d-tags with colons correctly", () => { + const event = new MockNDKEvent( + "test", + 30041, + "pubkey123", + 1000, + "chapter:with:colons", + ); const coordinate = getEventCoordinate(event as NDKEvent); - - expect(coordinate).toBe('30041:pubkey123:chapter:with:colons'); + + expect(coordinate).toBe("30041:pubkey123:chapter:with:colons"); }); - it('should return null for non-replaceable events', () => { - const event = new MockNDKEvent('test', 1, 'pubkey123', 1000, ''); + it("should return null for non-replaceable events", () => { + const event = new MockNDKEvent("test", 1, "pubkey123", 1000, ""); const coordinate = getEventCoordinate(event as NDKEvent); - + expect(coordinate).toBeNull(); }); }); - describe('Replaceable Event Detection', () => { - it('should correctly identify replaceable events', () => { - const addressableEvent = new 
MockNDKEvent('test', 30041, 'pubkey123', 1000, 'test'); - const regularEvent = new MockNDKEvent('test', 1, 'pubkey123', 1000, ''); - + describe("Replaceable Event Detection", () => { + it("should correctly identify replaceable events", () => { + const addressableEvent = new MockNDKEvent( + "test", + 30041, + "pubkey123", + 1000, + "test", + ); + const regularEvent = new MockNDKEvent("test", 1, "pubkey123", 1000, ""); + expect(isReplaceableEvent(addressableEvent as NDKEvent)).toBe(true); expect(isReplaceableEvent(regularEvent as NDKEvent)).toBe(false); }); - it('should handle edge cases of replaceable event ranges', () => { - const event29999 = new MockNDKEvent('test', 29999, 'pubkey123', 1000, 'test'); - const event30000 = new MockNDKEvent('test', 30000, 'pubkey123', 1000, 'test'); - const event39999 = new MockNDKEvent('test', 39999, 'pubkey123', 1000, 'test'); - const event40000 = new MockNDKEvent('test', 40000, 'pubkey123', 1000, 'test'); - + it("should handle edge cases of replaceable event ranges", () => { + const event29999 = new MockNDKEvent( + "test", + 29999, + "pubkey123", + 1000, + "test", + ); + const event30000 = new MockNDKEvent( + "test", + 30000, + "pubkey123", + 1000, + "test", + ); + const event39999 = new MockNDKEvent( + "test", + 39999, + "pubkey123", + 1000, + "test", + ); + const event40000 = new MockNDKEvent( + "test", + 40000, + "pubkey123", + 1000, + "test", + ); + expect(isReplaceableEvent(event29999 as NDKEvent)).toBe(false); expect(isReplaceableEvent(event30000 as NDKEvent)).toBe(true); expect(isReplaceableEvent(event39999 as NDKEvent)).toBe(true); @@ -179,279 +281,429 @@ describe('Relay Deduplication Behavior Tests', () => { }); }); - describe('Edge Cases', () => { - it('should handle empty event sets', () => { + describe("Edge Cases", () => { + it("should handle empty event sets", () => { const result = deduplicateContentEvents([]); expect(result.size).toBe(0); }); - it('should handle events with null/undefined values', () => { + 
it("should handle events with null/undefined values", () => { const invalidEvent = { id: undefined, kind: 30041, - pubkey: 'pubkey1', + pubkey: "pubkey1", created_at: 1000, tagValue: () => undefined, // Return undefined for d-tag } as unknown as NDKEvent; - + const eventSets = [new Set([invalidEvent])]; const result = deduplicateContentEvents(eventSets); - + // Should handle gracefully without crashing expect(result.size).toBe(0); }); - it('should handle events from different authors with same d-tag', () => { - const event1 = new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'same-chapter', 'Author 1'); - const event2 = new MockNDKEvent('event2', 30041, 'pubkey2', 1000, 'same-chapter', 'Author 2'); - + it("should handle events from different authors with same d-tag", () => { + const event1 = new MockNDKEvent( + "event1", + 30041, + "pubkey1", + 1000, + "same-chapter", + "Author 1", + ); + const event2 = new MockNDKEvent( + "event2", + 30041, + "pubkey2", + 1000, + "same-chapter", + "Author 2", + ); + const eventSets = [new Set([event1, event2] as NDKEvent[])]; const result = deduplicateContentEvents(eventSets); - + // Should have 2 events (different coordinates due to different authors) expect(result.size).toBe(2); - expect(result.has('30041:pubkey1:same-chapter')).toBe(true); - expect(result.has('30041:pubkey2:same-chapter')).toBe(true); + expect(result.has("30041:pubkey1:same-chapter")).toBe(true); + expect(result.has("30041:pubkey2:same-chapter")).toBe(true); }); }); }); -describe('Relay Behavior Simulation', () => { - it('should simulate what happens when relays return duplicate events', () => { +describe("Relay Behavior Simulation", () => { + it("should simulate what happens when relays return duplicate events", () => { // Simulate a relay that returns multiple versions of the same event const relayEvents = [ - new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old version'), - new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'New 
version'), - new MockNDKEvent('event3', 30041, 'pubkey1', 1500, 'chapter-1', 'Middle version'), + new MockNDKEvent( + "event1", + 30041, + "pubkey1", + 1000, + "chapter-1", + "Old version", + ), + new MockNDKEvent( + "event2", + 30041, + "pubkey1", + 2000, + "chapter-1", + "New version", + ), + new MockNDKEvent( + "event3", + 30041, + "pubkey1", + 1500, + "chapter-1", + "Middle version", + ), ]; - + // This simulates what a "bad" relay might return const eventSets = [new Set(relayEvents as NDKEvent[])]; const result = deduplicateContentEvents(eventSets); - + // Should only keep the newest version expect(result.size).toBe(1); - const keptEvent = result.get('30041:pubkey1:chapter-1'); - expect(keptEvent?.id).toBe('event2'); - expect(keptEvent?.content).toBe('New version'); + const keptEvent = result.get("30041:pubkey1:chapter-1"); + expect(keptEvent?.id).toBe("event2"); + expect(keptEvent?.content).toBe("New version"); }); - it('should simulate multiple relays returning different versions', () => { + it("should simulate multiple relays returning different versions", () => { // Simulate multiple relays returning different versions const relay1Events = [ - new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Relay 1 version'), + new MockNDKEvent( + "event1", + 30041, + "pubkey1", + 1000, + "chapter-1", + "Relay 1 version", + ), ]; - + const relay2Events = [ - new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'Relay 2 version'), + new MockNDKEvent( + "event2", + 30041, + "pubkey1", + 2000, + "chapter-1", + "Relay 2 version", + ), + ]; + + const eventSets = [ + new Set(relay1Events as NDKEvent[]), + new Set(relay2Events as NDKEvent[]), ]; - - const eventSets = [new Set(relay1Events as NDKEvent[]), new Set(relay2Events as NDKEvent[])]; const result = deduplicateContentEvents(eventSets); - + // Should keep the newest version from any relay expect(result.size).toBe(1); - const keptEvent = result.get('30041:pubkey1:chapter-1'); - 
expect(keptEvent?.id).toBe('event2'); - expect(keptEvent?.content).toBe('Relay 2 version'); + const keptEvent = result.get("30041:pubkey1:chapter-1"); + expect(keptEvent?.id).toBe("event2"); + expect(keptEvent?.content).toBe("Relay 2 version"); }); }); -describe('Real Relay Deduplication Tests', () => { +describe("Real Relay Deduplication Tests", () => { // These tests actually query real relays to see if they deduplicate // Note: These are integration tests and may be flaky due to network conditions - - it('should detect if relays are returning duplicate replaceable events', async () => { - // This test queries real relays to see if they return duplicates - // We'll use a known author who has published multiple versions of content - - // Known author with multiple publication content events - const testAuthor = 'npub1z4m7gkva6yxgvdyclc7zp0qt69x9zgn8lu8sllg06wx6432h77qs0k97ks'; - - // Query for publication content events (kind 30041) from this author - // We expect relays to return only the most recent version of each d-tag - - // This is a placeholder - in a real test, we would: - // 1. Query multiple relays for the same author's 30041 events - // 2. Check if any relay returns multiple events with the same d-tag - // 3. Verify that if duplicates exist, our deduplication logic handles them - - console.log('Note: This test would require actual relay queries to verify deduplication behavior'); - console.log('To run this test properly, we would need to:'); - console.log('1. Query real relays for replaceable events'); - console.log('2. Check if relays return duplicates'); - console.log('3. Verify our deduplication logic works on real data'); - - // For now, we'll just assert that our logic is ready to handle real data - expect(true).toBe(true); - }, 30000); // 30 second timeout for network requests - - it('should verify that our deduplication logic works on real relay data', async () => { - // This test would: - // 1. Fetch real events from relays - // 2. 
Apply our deduplication logic - // 3. Verify that the results are correct - - console.log('Note: This test would require actual relay queries'); - console.log('To implement this test, we would need to:'); - console.log('1. Set up NDK with real relays'); - console.log('2. Fetch events for a known author with multiple versions'); - console.log('3. Apply deduplication and verify results'); - - expect(true).toBe(true); - }, 30000); + + it( + "should detect if relays are returning duplicate replaceable events", + async () => { + // This test queries real relays to see if they return duplicates + // We'll use a known author who has published multiple versions of content + + // Known author with multiple publication content events + const testAuthor = + "npub1z4m7gkva6yxgvdyclc7zp0qt69x9zgn8lu8sllg06wx6432h77qs0k97ks"; + + // Query for publication content events (kind 30041) from this author + // We expect relays to return only the most recent version of each d-tag + + // This is a placeholder - in a real test, we would: + // 1. Query multiple relays for the same author's 30041 events + // 2. Check if any relay returns multiple events with the same d-tag + // 3. Verify that if duplicates exist, our deduplication logic handles them + + console.log( + "Note: This test would require actual relay queries to verify deduplication behavior", + ); + console.log("To run this test properly, we would need to:"); + console.log("1. Query real relays for replaceable events"); + console.log("2. Check if relays return duplicates"); + console.log("3. Verify our deduplication logic works on real data"); + + // For now, we'll just assert that our logic is ready to handle real data + expect(true).toBe(true); + }, + 30000, + ); // 30 second timeout for network requests + + it( + "should verify that our deduplication logic works on real relay data", + async () => { + // This test would: + // 1. Fetch real events from relays + // 2. Apply our deduplication logic + // 3. 
Verify that the results are correct + + console.log("Note: This test would require actual relay queries"); + console.log("To implement this test, we would need to:"); + console.log("1. Set up NDK with real relays"); + console.log("2. Fetch events for a known author with multiple versions"); + console.log("3. Apply deduplication and verify results"); + + expect(true).toBe(true); + }, + 30000, + ); }); -describe('Practical Relay Behavior Analysis', () => { - it('should document what we know about relay deduplication behavior', () => { +describe("Practical Relay Behavior Analysis", () => { + it("should document what we know about relay deduplication behavior", () => { // This test documents our current understanding of relay behavior // based on the code analysis and the comment from onedev - - console.log('\n=== RELAY DEDUPLICATION BEHAVIOR ANALYSIS ==='); - console.log('\nBased on the code analysis and the comment from onedev:'); - console.log('\n1. THEORETICAL BEHAVIOR:'); - console.log(' - Relays SHOULD handle deduplication for replaceable events'); - console.log(' - Only the most recent version of each coordinate should be stored'); - console.log(' - Client-side deduplication should only be needed for cached/local events'); - - console.log('\n2. REALITY CHECK:'); - console.log(' - Not all relays implement deduplication correctly'); - console.log(' - Some relays may return multiple versions of the same event'); - console.log(' - Network conditions and relay availability can cause inconsistencies'); - - console.log('\n3. ALEXANDRIA\'S APPROACH:'); - console.log(' - Implements client-side deduplication as a safety net'); - console.log(' - Uses coordinate system (kind:pubkey:d-tag) for addressable events'); - console.log(' - Keeps the most recent version based on created_at timestamp'); - console.log(' - Only applies to replaceable events (kinds 30000-39999)'); - - console.log('\n4. 
WHY KEEP THE DEDUPLICATION:'); - console.log(' - Defensive programming against imperfect relay implementations'); - console.log(' - Handles multiple relay sources with different data'); - console.log(' - Works with cached events that might be outdated'); - console.log(' - Ensures consistent user experience regardless of relay behavior'); - - console.log('\n5. TESTING STRATEGY:'); - console.log(' - Unit tests verify our deduplication logic works correctly'); - console.log(' - Integration tests would verify relay behavior (when network allows)'); - console.log(' - Monitoring can help determine if relays improve over time'); - + + console.log("\n=== RELAY DEDUPLICATION BEHAVIOR ANALYSIS ==="); + console.log("\nBased on the code analysis and the comment from onedev:"); + console.log("\n1. THEORETICAL BEHAVIOR:"); + console.log( + " - Relays SHOULD handle deduplication for replaceable events", + ); + console.log( + " - Only the most recent version of each coordinate should be stored", + ); + console.log( + " - Client-side deduplication should only be needed for cached/local events", + ); + + console.log("\n2. REALITY CHECK:"); + console.log(" - Not all relays implement deduplication correctly"); + console.log( + " - Some relays may return multiple versions of the same event", + ); + console.log( + " - Network conditions and relay availability can cause inconsistencies", + ); + + console.log("\n3. ALEXANDRIA'S APPROACH:"); + console.log(" - Implements client-side deduplication as a safety net"); + console.log( + " - Uses coordinate system (kind:pubkey:d-tag) for addressable events", + ); + console.log( + " - Keeps the most recent version based on created_at timestamp", + ); + console.log(" - Only applies to replaceable events (kinds 30000-39999)"); + + console.log("\n4. 
WHY KEEP THE DEDUPLICATION:"); + console.log( + " - Defensive programming against imperfect relay implementations", + ); + console.log(" - Handles multiple relay sources with different data"); + console.log(" - Works with cached events that might be outdated"); + console.log( + " - Ensures consistent user experience regardless of relay behavior", + ); + + console.log("\n5. TESTING STRATEGY:"); + console.log( + " - Unit tests verify our deduplication logic works correctly", + ); + console.log( + " - Integration tests would verify relay behavior (when network allows)", + ); + console.log( + " - Monitoring can help determine if relays improve over time", + ); + // This test documents our understanding rather than asserting specific behavior expect(true).toBe(true); }); - it('should provide recommendations for when to remove deduplication', () => { - console.log('\n=== RECOMMENDATIONS FOR REMOVING DEDUPLICATION ==='); - console.log('\nThe deduplication logic should be kept until:'); - console.log('\n1. RELAY STANDARDS:'); - console.log(' - NIP-33 (replaceable events) is widely implemented by relays'); - console.log(' - Relays consistently return only the most recent version'); - console.log(' - No major relay implementations return duplicates'); - - console.log('\n2. TESTING EVIDENCE:'); - console.log(' - Real-world testing shows relays don\'t return duplicates'); - console.log(' - Multiple relay operators confirm deduplication behavior'); - console.log(' - No user reports of duplicate content issues'); - - console.log('\n3. MONITORING:'); - console.log(' - Add logging to track when deduplication is actually used'); - console.log(' - Monitor relay behavior over time'); - console.log(' - Collect metrics on duplicate events found'); - - console.log('\n4. 
GRADUAL REMOVAL:'); - console.log(' - Make deduplication configurable (on/off)'); - console.log(' - Test with deduplication disabled in controlled environments'); - console.log(' - Monitor for issues before removing completely'); - - console.log('\n5. FALLBACK STRATEGY:'); - console.log(' - Keep deduplication as a fallback option'); - console.log(' - Allow users to enable it if they experience issues'); - console.log(' - Maintain the code for potential future use'); - + it("should provide recommendations for when to remove deduplication", () => { + console.log("\n=== RECOMMENDATIONS FOR REMOVING DEDUPLICATION ==="); + console.log("\nThe deduplication logic should be kept until:"); + console.log("\n1. RELAY STANDARDS:"); + console.log( + " - NIP-33 (replaceable events) is widely implemented by relays", + ); + console.log(" - Relays consistently return only the most recent version"); + console.log(" - No major relay implementations return duplicates"); + + console.log("\n2. TESTING EVIDENCE:"); + console.log(" - Real-world testing shows relays don't return duplicates"); + console.log(" - Multiple relay operators confirm deduplication behavior"); + console.log(" - No user reports of duplicate content issues"); + + console.log("\n3. MONITORING:"); + console.log( + " - Add logging to track when deduplication is actually used", + ); + console.log(" - Monitor relay behavior over time"); + console.log(" - Collect metrics on duplicate events found"); + + console.log("\n4. GRADUAL REMOVAL:"); + console.log(" - Make deduplication configurable (on/off)"); + console.log( + " - Test with deduplication disabled in controlled environments", + ); + console.log(" - Monitor for issues before removing completely"); + + console.log("\n5. 
FALLBACK STRATEGY:"); + console.log(" - Keep deduplication as a fallback option"); + console.log(" - Allow users to enable it if they experience issues"); + console.log(" - Maintain the code for potential future use"); + expect(true).toBe(true); }); }); -describe('Logging and Monitoring Tests', () => { - it('should verify that logging works when duplicates are found', () => { +describe("Logging and Monitoring Tests", () => { + it("should verify that logging works when duplicates are found", () => { // Mock console.log to capture output - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - + const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {}); + // Create events with duplicates const duplicateEvents = [ - new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old version'), - new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'New version'), - new MockNDKEvent('event3', 30041, 'pubkey1', 1500, 'chapter-1', 'Middle version'), + new MockNDKEvent( + "event1", + 30041, + "pubkey1", + 1000, + "chapter-1", + "Old version", + ), + new MockNDKEvent( + "event2", + 30041, + "pubkey1", + 2000, + "chapter-1", + "New version", + ), + new MockNDKEvent( + "event3", + 30041, + "pubkey1", + 1500, + "chapter-1", + "Middle version", + ), ]; - + const eventSets = [new Set(duplicateEvents as NDKEvent[])]; const result = deduplicateContentEvents(eventSets); - + // Verify the deduplication worked expect(result.size).toBe(1); - + // Verify that logging was called expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining('[eventDeduplication] Found 2 duplicate events out of 3 total events') + expect.stringContaining( + "[eventDeduplication] Found 2 duplicate events out of 3 total events", + ), ); expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining('[eventDeduplication] Reduced to 1 unique coordinates') + expect.stringContaining( + "[eventDeduplication] Reduced to 1 unique coordinates", + ), ); - + // 
Restore console.log consoleSpy.mockRestore(); }); - it('should verify that logging works when no duplicates are found', () => { + it("should verify that logging works when no duplicates are found", () => { // Mock console.log to capture output - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - + const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {}); + // Create events without duplicates const uniqueEvents = [ - new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Content 1'), - new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-2', 'Content 2'), + new MockNDKEvent( + "event1", + 30041, + "pubkey1", + 1000, + "chapter-1", + "Content 1", + ), + new MockNDKEvent( + "event2", + 30041, + "pubkey1", + 2000, + "chapter-2", + "Content 2", + ), ]; - + const eventSets = [new Set(uniqueEvents as NDKEvent[])]; const result = deduplicateContentEvents(eventSets); - + // Verify no deduplication was needed expect(result.size).toBe(2); - + // Verify that logging was called with "no duplicates" message expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining('[eventDeduplication] No duplicates found in 2 events') + expect.stringContaining( + "[eventDeduplication] No duplicates found in 2 events", + ), ); - + // Restore console.log consoleSpy.mockRestore(); }); - it('should verify that deduplicateAndCombineEvents logging works', () => { + it("should verify that deduplicateAndCombineEvents logging works", () => { // Mock console.log to capture output - const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); - + const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {}); + // Create events with duplicates const duplicateEvents = [ - new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old version'), - new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'New version'), + new MockNDKEvent( + "event1", + 30041, + "pubkey1", + 1000, + "chapter-1", + "Old 
version", + ), + new MockNDKEvent( + "event2", + 30041, + "pubkey1", + 2000, + "chapter-1", + "New version", + ), ]; - + const result = deduplicateAndCombineEvents( [] as NDKEvent[], new Set(), - new Set(duplicateEvents as NDKEvent[]) + new Set(duplicateEvents as NDKEvent[]), ); - + // Verify the deduplication worked expect(result.length).toBe(1); - + // Verify that logging was called expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining('[eventDeduplication] deduplicateAndCombineEvents: Found 1 duplicate coordinates') + expect.stringContaining( + "[eventDeduplication] deduplicateAndCombineEvents: Found 1 duplicate coordinates", + ), ); - + // Restore console.log consoleSpy.mockRestore(); }); -}); \ No newline at end of file +}); diff --git a/tests/unit/tagExpansion.test.ts b/tests/unit/tagExpansion.test.ts index 65e71fa..e47f74b 100644 --- a/tests/unit/tagExpansion.test.ts +++ b/tests/unit/tagExpansion.test.ts @@ -1,11 +1,10 @@ -import { describe, it, expect, vi, beforeEach } from 'vitest'; -import type { NDKEvent } from '@nostr-dev-kit/ndk'; -import { +import { beforeEach, describe, expect, it, vi } from "vitest"; +import type { NDKEvent } from "@nostr-dev-kit/ndk"; +import { + fetchProfilesForNewEvents, fetchTaggedEventsFromRelays, findTaggedEventsInFetched, - fetchProfilesForNewEvents, - type TagExpansionResult -} from '../../src/lib/utils/tag_event_fetch'; +} from "../../src/lib/utils/tag_event_fetch"; // Mock NDKEvent for testing class MockNDKEvent { @@ -16,7 +15,14 @@ class MockNDKEvent { content: string; tags: string[][]; - constructor(id: string, kind: number, pubkey: string, created_at: number, content: string = '', tags: string[][] = []) { + constructor( + id: string, + kind: number, + pubkey: string, + created_at: number, + content: string = "", + tags: string[][] = [], + ) { this.id = id; this.kind = kind; this.pubkey = pubkey; @@ -26,151 +32,203 @@ class MockNDKEvent { } tagValue(tagName: string): string | undefined { - const tag = 
this.tags.find(t => t[0] === tagName); + const tag = this.tags.find((t) => t[0] === tagName); return tag ? tag[1] : undefined; } getMatchingTags(tagName: string): string[][] { - return this.tags.filter(tag => tag[0] === tagName); + return this.tags.filter((tag) => tag[0] === tagName); } } // Mock NDK instance const mockNDK = { - fetchEvents: vi.fn() -}; + fetchEvents: vi.fn(), + pool: {}, + debug: false, + mutedIds: new Set(), + queuesZapConfig: {}, + // Add other required properties as needed for the mock +} as any; // Mock the ndkInstance store -vi.mock('../../src/lib/ndk', () => ({ +// TODO: Replace with getNdkContext mock. +vi.mock("../../src/lib/ndk", () => ({ ndkInstance: { subscribe: vi.fn((fn) => { fn(mockNDK); return { unsubscribe: vi.fn() }; - }) - } + }), + }, })); // Mock the profile cache utilities -vi.mock('../../src/lib/utils/profileCache', () => ({ +vi.mock("../../src/lib/utils/profileCache", () => ({ extractPubkeysFromEvents: vi.fn((events: NDKEvent[]) => { const pubkeys = new Set(); - events.forEach(event => { + events.forEach((event) => { if (event.pubkey) pubkeys.add(event.pubkey); }); return pubkeys; }), - batchFetchProfiles: vi.fn(async (pubkeys: string[], onProgress: (fetched: number, total: number) => void) => { - // Simulate progress updates - onProgress(0, pubkeys.length); - onProgress(pubkeys.length, pubkeys.length); - return []; - }) + batchFetchProfiles: vi.fn( + async ( + pubkeys: string[], + ndk: any, + onProgress?: (fetched: number, total: number) => void, + ) => { + // Simulate progress updates + if (onProgress) { + onProgress(0, pubkeys.length); + onProgress(pubkeys.length, pubkeys.length); + } + return []; + }, + ), })); -describe('Tag Expansion Tests', () => { +describe("Tag Expansion Tests", () => { let mockPublications: MockNDKEvent[]; let mockContentEvents: MockNDKEvent[]; let mockAllEvents: MockNDKEvent[]; beforeEach(() => { vi.clearAllMocks(); - + // Create test publication index events (kind 30040) mockPublications = [ - 
new MockNDKEvent('pub1', 30040, 'author1', 1000, 'Book 1', [ - ['t', 'bitcoin'], - ['t', 'cryptocurrency'], - ['a', '30041:author1:chapter-1'], - ['a', '30041:author1:chapter-2'] + new MockNDKEvent("pub1", 30040, "author1", 1000, "Book 1", [ + ["t", "bitcoin"], + ["t", "cryptocurrency"], + ["a", "30041:author1:chapter-1"], + ["a", "30041:author1:chapter-2"], + ]), + new MockNDKEvent("pub2", 30040, "author2", 1100, "Book 2", [ + ["t", "bitcoin"], + ["t", "blockchain"], + ["a", "30041:author2:chapter-1"], ]), - new MockNDKEvent('pub2', 30040, 'author2', 1100, 'Book 2', [ - ['t', 'bitcoin'], - ['t', 'blockchain'], - ['a', '30041:author2:chapter-1'] + new MockNDKEvent("pub3", 30040, "author3", 1200, "Book 3", [ + ["t", "ethereum"], + ["a", "30041:author3:chapter-1"], ]), - new MockNDKEvent('pub3', 30040, 'author3', 1200, 'Book 3', [ - ['t', 'ethereum'], - ['a', '30041:author3:chapter-1'] - ]) ]; // Create test content events (kind 30041) mockContentEvents = [ - new MockNDKEvent('content1', 30041, 'author1', 1000, 'Chapter 1 content', [['d', 'chapter-1']]), - new MockNDKEvent('content2', 30041, 'author1', 1100, 'Chapter 2 content', [['d', 'chapter-2']]), - new MockNDKEvent('content3', 30041, 'author2', 1200, 'Author 2 Chapter 1', [['d', 'chapter-1']]), - new MockNDKEvent('content4', 30041, 'author3', 1300, 'Author 3 Chapter 1', [['d', 'chapter-1']]) + new MockNDKEvent( + "content1", + 30041, + "author1", + 1000, + "Chapter 1 content", + [["d", "chapter-1"]], + ), + new MockNDKEvent( + "content2", + 30041, + "author1", + 1100, + "Chapter 2 content", + [["d", "chapter-2"]], + ), + new MockNDKEvent( + "content3", + 30041, + "author2", + 1200, + "Author 2 Chapter 1", + [["d", "chapter-1"]], + ), + new MockNDKEvent( + "content4", + 30041, + "author3", + 1300, + "Author 3 Chapter 1", + [["d", "chapter-1"]], + ), ]; // Combine all events for testing mockAllEvents = [...mockPublications, ...mockContentEvents]; }); - describe('fetchTaggedEventsFromRelays', () => { - it('should 
fetch publications with matching tags from relays', async () => { + describe("fetchTaggedEventsFromRelays", () => { + it("should fetch publications with matching tags from relays", async () => { // Mock the NDK fetch to return publications with 'bitcoin' tag - const bitcoinPublications = mockPublications.filter(pub => - pub.tags.some(tag => tag[0] === 't' && tag[1] === 'bitcoin') + const bitcoinPublications = mockPublications.filter((pub) => + pub.tags.some((tag) => tag[0] === "t" && tag[1] === "bitcoin") + ); + mockNDK.fetchEvents.mockResolvedValueOnce( + new Set(bitcoinPublications as NDKEvent[]), + ); + mockNDK.fetchEvents.mockResolvedValueOnce( + new Set(mockContentEvents as NDKEvent[]), ); - mockNDK.fetchEvents.mockResolvedValueOnce(new Set(bitcoinPublications as NDKEvent[])); - mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockContentEvents as NDKEvent[])); - const existingEventIds = new Set(['existing-event']); + const existingEventIds = new Set(["existing-event"]); const baseEvents: NDKEvent[] = []; const debug = vi.fn(); const result = await fetchTaggedEventsFromRelays( - ['bitcoin'], + ["bitcoin"], existingEventIds, baseEvents, - debug + mockNDK as any, + debug, ); // Should fetch publications with bitcoin tag expect(mockNDK.fetchEvents).toHaveBeenCalledWith({ kinds: [30040], - "#t": ['bitcoin'], - limit: 30 + "#t": ["bitcoin"], + limit: 30, }); // Should return the matching publications expect(result.publications).toHaveLength(2); - expect(result.publications.map(p => p.id)).toContain('pub1'); - expect(result.publications.map(p => p.id)).toContain('pub2'); + expect(result.publications.map((p: any) => p.id)).toContain("pub1"); + expect(result.publications.map((p: any) => p.id)).toContain("pub2"); // Should fetch content events for the publications expect(mockNDK.fetchEvents).toHaveBeenCalledWith({ kinds: [30041, 30818], - "#d": ['chapter-1', 'chapter-2'] + "#d": ["chapter-1", "chapter-2"], }); }); - it('should filter out existing events to avoid 
duplicates', async () => { - mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockPublications as NDKEvent[])); - mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockContentEvents as NDKEvent[])); + it("should filter out existing events to avoid duplicates", async () => { + mockNDK.fetchEvents.mockResolvedValueOnce( + new Set(mockPublications as NDKEvent[]), + ); + mockNDK.fetchEvents.mockResolvedValueOnce( + new Set(mockContentEvents as NDKEvent[]), + ); - const existingEventIds = new Set(['pub1']); // pub1 already exists + const existingEventIds = new Set(["pub1"]); // pub1 already exists const baseEvents: NDKEvent[] = []; const debug = vi.fn(); const result = await fetchTaggedEventsFromRelays( - ['bitcoin'], + ["bitcoin"], existingEventIds, baseEvents, - debug + mockNDK as any, + debug, ); // Should exclude pub1 since it already exists expect(result.publications).toHaveLength(2); - expect(result.publications.map(p => p.id)).not.toContain('pub1'); - expect(result.publications.map(p => p.id)).toContain('pub2'); - expect(result.publications.map(p => p.id)).toContain('pub3'); + expect(result.publications.map((p: any) => p.id)).not.toContain("pub1"); + expect(result.publications.map((p: any) => p.id)).toContain("pub2"); + expect(result.publications.map((p: any) => p.id)).toContain("pub3"); }); - it('should handle empty tag array gracefully', async () => { + it("should handle empty tag array gracefully", async () => { // Mock empty result for empty tags mockNDK.fetchEvents.mockResolvedValueOnce(new Set()); - + const existingEventIds = new Set(); const baseEvents: NDKEvent[] = []; const debug = vi.fn(); @@ -179,7 +237,8 @@ describe('Tag Expansion Tests', () => { [], existingEventIds, baseEvents, - debug + mockNDK as any, + debug, ); expect(result.publications).toHaveLength(0); @@ -187,103 +246,110 @@ describe('Tag Expansion Tests', () => { }); }); - describe('findTaggedEventsInFetched', () => { - it('should find publications with matching tags in already fetched 
events', () => { - const existingEventIds = new Set(['existing-event']); + describe("findTaggedEventsInFetched", () => { + it("should find publications with matching tags in already fetched events", () => { + const existingEventIds = new Set(["existing-event"]); const baseEvents: NDKEvent[] = []; const debug = vi.fn(); const result = findTaggedEventsInFetched( mockAllEvents as NDKEvent[], - ['bitcoin'], + ["bitcoin"], existingEventIds, baseEvents, - debug + debug, ); // Should find publications with bitcoin tag expect(result.publications).toHaveLength(2); - expect(result.publications.map(p => p.id)).toContain('pub1'); - expect(result.publications.map(p => p.id)).toContain('pub2'); + expect(result.publications.map((p: any) => p.id)).toContain("pub1"); + expect(result.publications.map((p: any) => p.id)).toContain("pub2"); // Should find content events for those publications expect(result.contentEvents).toHaveLength(4); - expect(result.contentEvents.map(c => c.id)).toContain('content1'); - expect(result.contentEvents.map(c => c.id)).toContain('content2'); - expect(result.contentEvents.map(c => c.id)).toContain('content3'); - expect(result.contentEvents.map(c => c.id)).toContain('content4'); + expect(result.contentEvents.map((c: any) => c.id)).toContain("content1"); + expect(result.contentEvents.map((c: any) => c.id)).toContain("content2"); + expect(result.contentEvents.map((c: any) => c.id)).toContain("content3"); + expect(result.contentEvents.map((c: any) => c.id)).toContain("content4"); }); - it('should exclude base events from search results', () => { - const existingEventIds = new Set(['pub1']); // pub1 is a base event + it("should exclude base events from search results", () => { + const existingEventIds = new Set(["pub1"]); // pub1 is a base event const baseEvents: NDKEvent[] = []; const debug = vi.fn(); const result = findTaggedEventsInFetched( mockAllEvents as NDKEvent[], - ['bitcoin'], + ["bitcoin"], existingEventIds, baseEvents, - debug + debug, ); // Should 
exclude pub1 since it's a base event expect(result.publications).toHaveLength(1); - expect(result.publications.map(p => p.id)).not.toContain('pub1'); - expect(result.publications.map(p => p.id)).toContain('pub2'); + expect(result.publications.map((p: any) => p.id)).not.toContain("pub1"); + expect(result.publications.map((p: any) => p.id)).toContain("pub2"); }); - it('should handle multiple tags (OR logic)', () => { + it("should handle multiple tags (OR logic)", () => { const existingEventIds = new Set(); const baseEvents: NDKEvent[] = []; const debug = vi.fn(); const result = findTaggedEventsInFetched( mockAllEvents as NDKEvent[], - ['bitcoin', 'ethereum'], + ["bitcoin", "ethereum"], existingEventIds, baseEvents, - debug + debug, ); // Should find publications with either bitcoin OR ethereum tags expect(result.publications).toHaveLength(3); - expect(result.publications.map(p => p.id)).toContain('pub1'); // bitcoin - expect(result.publications.map(p => p.id)).toContain('pub2'); // bitcoin - expect(result.publications.map(p => p.id)).toContain('pub3'); // ethereum + expect(result.publications.map((p: any) => p.id)).toContain("pub1"); // bitcoin + expect(result.publications.map((p: any) => p.id)).toContain("pub2"); // bitcoin + expect(result.publications.map((p: any) => p.id)).toContain("pub3"); // ethereum }); - it('should handle events without tags gracefully', () => { - const eventWithoutTags = new MockNDKEvent('no-tags', 30040, 'author4', 1000, 'No tags'); + it("should handle events without tags gracefully", () => { + const eventWithoutTags = new MockNDKEvent( + "no-tags", + 30040, + "author4", + 1000, + "No tags", + ); const allEventsWithNoTags = [...mockAllEvents, eventWithoutTags]; - + const existingEventIds = new Set(); const baseEvents: NDKEvent[] = []; const debug = vi.fn(); const result = findTaggedEventsInFetched( allEventsWithNoTags as NDKEvent[], - ['bitcoin'], + ["bitcoin"], existingEventIds, baseEvents, - debug + debug, ); // Should not include events 
without tags - expect(result.publications.map(p => p.id)).not.toContain('no-tags'); + expect(result.publications.map((p: any) => p.id)).not.toContain("no-tags"); }); }); - describe('fetchProfilesForNewEvents', () => { - it('should extract pubkeys and fetch profiles for new events', async () => { + describe("fetchProfilesForNewEvents", () => { + it("should extract pubkeys and fetch profiles for new events", async () => { const onProgressUpdate = vi.fn(); const debug = vi.fn(); await fetchProfilesForNewEvents( mockPublications as NDKEvent[], mockContentEvents as NDKEvent[], + mockNDK as any, onProgressUpdate, - debug + debug, ); // Should call progress update with initial state @@ -296,15 +362,16 @@ describe('Tag Expansion Tests', () => { expect(onProgressUpdate).toHaveBeenCalledWith(null); }); - it('should handle empty event arrays gracefully', async () => { + it("should handle empty event arrays gracefully", async () => { const onProgressUpdate = vi.fn(); const debug = vi.fn(); await fetchProfilesForNewEvents( [], [], + mockNDK as any, onProgressUpdate, - debug + debug, ); // Should not call progress update for empty arrays @@ -312,27 +379,32 @@ describe('Tag Expansion Tests', () => { }); }); - describe('Tag Expansion Integration', () => { - it('should demonstrate the complete tag expansion flow', async () => { + describe("Tag Expansion Integration", () => { + it("should demonstrate the complete tag expansion flow", async () => { // This test simulates the complete flow from the visualize page - + // Step 1: Mock relay fetch for 'bitcoin' tag - const bitcoinPublications = mockPublications.filter(pub => - pub.tags.some(tag => tag[0] === 't' && tag[1] === 'bitcoin') + const bitcoinPublications = mockPublications.filter((pub) => + pub.tags.some((tag) => tag[0] === "t" && tag[1] === "bitcoin") + ); + mockNDK.fetchEvents.mockResolvedValueOnce( + new Set(bitcoinPublications as NDKEvent[]), + ); + mockNDK.fetchEvents.mockResolvedValueOnce( + new Set(mockContentEvents as 
NDKEvent[]), ); - mockNDK.fetchEvents.mockResolvedValueOnce(new Set(bitcoinPublications as NDKEvent[])); - mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockContentEvents as NDKEvent[])); - const existingEventIds = new Set(['base-event']); + const existingEventIds = new Set(["base-event"]); const baseEvents: NDKEvent[] = []; const debug = vi.fn(); // Step 2: Fetch from relays const relayResult = await fetchTaggedEventsFromRelays( - ['bitcoin'], + ["bitcoin"], existingEventIds, baseEvents, - debug + mockNDK as any, + debug, ); expect(relayResult.publications).toHaveLength(2); @@ -341,10 +413,10 @@ describe('Tag Expansion Tests', () => { // Step 3: Search in fetched events const searchResult = findTaggedEventsInFetched( mockAllEvents as NDKEvent[], - ['bitcoin'], + ["bitcoin"], existingEventIds, baseEvents, - debug + debug, ); expect(searchResult.publications).toHaveLength(2); @@ -355,21 +427,29 @@ describe('Tag Expansion Tests', () => { await fetchProfilesForNewEvents( relayResult.publications, relayResult.contentEvents, + mockNDK as any, onProgressUpdate, - debug + debug, ); expect(onProgressUpdate).toHaveBeenCalledWith(null); }); }); - describe('Edge Cases and Error Handling', () => { - it('should handle malformed a-tags gracefully', () => { - const malformedPublication = new MockNDKEvent('malformed', 30040, 'author1', 1000, 'Malformed', [ - ['t', 'bitcoin'], - ['a', 'invalid-tag-format'], // Missing parts - ['a', '30041:author1:chapter-1'] // Valid format - ]); + describe("Edge Cases and Error Handling", () => { + it("should handle malformed a-tags gracefully", () => { + const malformedPublication = new MockNDKEvent( + "malformed", + 30040, + "author1", + 1000, + "Malformed", + [ + ["t", "bitcoin"], + ["a", "invalid-tag-format"], // Missing parts + ["a", "30041:author1:chapter-1"], // Valid format + ], + ); const allEventsWithMalformed = [...mockAllEvents, malformedPublication]; const existingEventIds = new Set(); @@ -378,10 +458,10 @@ describe('Tag Expansion 
Tests', () => { const result = findTaggedEventsInFetched( allEventsWithMalformed as NDKEvent[], - ['bitcoin'], + ["bitcoin"], existingEventIds, baseEvents, - debug + debug, ); // Should still work and include the publication with valid a-tags @@ -389,32 +469,50 @@ describe('Tag Expansion Tests', () => { expect(result.contentEvents.length).toBeGreaterThan(0); }); - it('should handle events with d-tags containing colons', () => { - const publicationWithColonDTag = new MockNDKEvent('colon-pub', 30040, 'author1', 1000, 'Colon d-tag', [ - ['t', 'bitcoin'], - ['a', '30041:author1:chapter:with:colons'] - ]); + it("should handle events with d-tags containing colons", () => { + const publicationWithColonDTag = new MockNDKEvent( + "colon-pub", + 30040, + "author1", + 1000, + "Colon d-tag", + [ + ["t", "bitcoin"], + ["a", "30041:author1:chapter:with:colons"], + ], + ); - const contentWithColonDTag = new MockNDKEvent('colon-content', 30041, 'author1', 1100, 'Content with colon d-tag', [ - ['d', 'chapter:with:colons'] - ]); + const contentWithColonDTag = new MockNDKEvent( + "colon-content", + 30041, + "author1", + 1100, + "Content with colon d-tag", + [ + ["d", "chapter:with:colons"], + ], + ); - const allEventsWithColons = [...mockAllEvents, publicationWithColonDTag, contentWithColonDTag]; + const allEventsWithColons = [ + ...mockAllEvents, + publicationWithColonDTag, + contentWithColonDTag, + ]; const existingEventIds = new Set(); const baseEvents: NDKEvent[] = []; const debug = vi.fn(); const result = findTaggedEventsInFetched( allEventsWithColons as NDKEvent[], - ['bitcoin'], + ["bitcoin"], existingEventIds, baseEvents, - debug + debug, ); // Should handle d-tags with colons correctly expect(result.publications).toHaveLength(3); - expect(result.contentEvents.map(c => c.id)).toContain('colon-content'); + expect(result.contentEvents.map((c: any) => c.id)).toContain("colon-content"); }); }); -}); \ No newline at end of file +}); diff --git a/vite.config.ts b/vite.config.ts 
index 82206c3..a81279c 100644 --- a/vite.config.ts +++ b/vite.config.ts @@ -33,7 +33,7 @@ export default defineConfig({ }, build: { rollupOptions: { - external: ["bech32"], + // Removed bech32 from externals since it's needed on client side }, }, test: { @@ -42,17 +42,24 @@ export default defineConfig({ define: { // Expose the app version as a global variable "import.meta.env.APP_VERSION": JSON.stringify(getAppVersionString()), + // Enable debug logging for relays when needed + "process.env.DEBUG_RELAYS": JSON.stringify( + process.env.DEBUG_RELAYS || "false", + ), }, optimizeDeps: { esbuildOptions: { define: { - global: 'globalThis', + global: "globalThis", }, }, }, server: { fs: { - allow: ['..'], + allow: [".."], + }, + hmr: { + overlay: false, // Disable HMR overlay to prevent ESM URL scheme errors }, }, });