Browse Source

Refactor markup generator for embedded events into Svelte snippets

- Eliminate a component that is no longer needed.
- Reduce duplicate code.
- Tidy up code along the way.
- Ran `deno fmt` to auto-format code (hence the large diff).
master
buttercat1791 7 months ago
parent
commit
d8c64260b3
  1. 40
      README.md
  2. 2617
      deno.lock
  3. 7
      playwright.config.ts
  4. 83
      src/app.css
  5. 4
      src/app.d.ts
  6. 10
      src/app.html
  7. 29
      src/lib/components/CommentViewer.svelte
  8. 33
      src/lib/components/EmbeddedEvent.svelte
  9. 194
      src/lib/components/EventDetails.svelte
  10. 69
      src/lib/components/Notifications.svelte
  11. 1
      src/lib/components/publications/PublicationSection.svelte
  12. 10
      src/lib/components/publications/table_of_contents.svelte.ts
  13. 321
      src/lib/components/util/Notifications.svelte
  14. 7
      src/lib/consts.ts
  15. 85
      src/lib/data_structures/docs/relay_selector_design.md
  16. 310
      src/lib/data_structures/publication_tree.ts
  17. 65
      src/lib/data_structures/websocket_pool.ts
  18. 2
      src/lib/navigator/EventNetwork/types.ts
  19. 2
      src/lib/navigator/EventNetwork/utils/common.ts
  20. 325
      src/lib/navigator/EventNetwork/utils/forceSimulation.ts
  21. 439
      src/lib/navigator/EventNetwork/utils/networkBuilder.ts
  22. 100
      src/lib/navigator/EventNetwork/utils/personNetworkBuilder.ts
  23. 83
      src/lib/navigator/EventNetwork/utils/starForceSimulation.ts
  24. 186
      src/lib/navigator/EventNetwork/utils/starNetworkBuilder.ts
  25. 29
      src/lib/navigator/EventNetwork/utils/tagNetworkBuilder.ts
  26. 298
      src/lib/ndk.ts
  27. 8
      src/lib/parser.ts
  28. 28
      src/lib/services/event_search_service.ts
  29. 57
      src/lib/services/publisher.ts
  30. 20
      src/lib/services/search_state_manager.ts
  31. 2
      src/lib/state.ts
  32. 2
      src/lib/stores/authStore.Svelte.ts
  33. 26
      src/lib/stores/networkStore.ts
  34. 119
      src/lib/stores/userStore.ts
  35. 43
      src/lib/stores/visualizationConfig.ts
  36. 24
      src/lib/utils.ts
  37. 10
      src/lib/utils/ZettelParser.ts
  38. 273
      src/lib/utils/asciidoc_metadata.ts
  39. 4
      src/lib/utils/community_checker.ts
  40. 59
      src/lib/utils/displayLimits.ts
  41. 101
      src/lib/utils/eventColors.ts
  42. 152
      src/lib/utils/eventDeduplication.ts
  43. 72
      src/lib/utils/event_input_utils.ts
  44. 71
      src/lib/utils/event_kind_utils.ts
  45. 78
      src/lib/utils/event_search.ts
  46. 12
      src/lib/utils/image_utils.ts
  47. 76
      src/lib/utils/kind24_utils.ts
  48. 58
      src/lib/utils/markup/MarkupInfo.md
  49. 3
      src/lib/utils/markup/advancedAsciidoctorPostProcessor.ts
  50. 44
      src/lib/utils/markup/advancedMarkupParser.ts
  51. 24
      src/lib/utils/markup/asciidoctorPostProcessor.ts
  52. 51
      src/lib/utils/markup/basicMarkupParser.ts
  53. 56
      src/lib/utils/markup/embeddedMarkupParser.ts
  54. 147
      src/lib/utils/markup/markupServices.ts
  55. 4
      src/lib/utils/markup/tikzRenderer.ts
  56. 2
      src/lib/utils/mime.ts
  57. 106
      src/lib/utils/network_detection.ts
  58. 41
      src/lib/utils/nostrEventService.ts
  59. 124
      src/lib/utils/nostrUtils.ts
  60. 40
      src/lib/utils/nostr_identifiers.ts
  61. 306
      src/lib/utils/notification_utils.ts
  62. 22
      src/lib/utils/npubCache.ts
  63. 69
      src/lib/utils/profileCache.ts
  64. 32
      src/lib/utils/profile_search.ts
  65. 7
      src/lib/utils/relayDiagnostics.ts
  66. 84
      src/lib/utils/relay_info_service.ts
  67. 380
      src/lib/utils/relay_management.ts
  68. 16
      src/lib/utils/search_result_formatter.ts
  69. 14
      src/lib/utils/search_utility.ts
  70. 272
      src/lib/utils/subscription_search.ts
  71. 117
      src/lib/utils/tag_event_fetch.ts
  72. 91
      src/lib/utils/websocket_utils.ts
  73. 141
      src/routes/+layout.ts
  74. 69
      src/routes/events/+page.svelte
  75. 5
      src/routes/proxy+layout.ts
  76. 4
      src/routes/publication/+page.server.ts
  77. 15
      src/routes/publication/[type]/[identifier]/+layout.server.ts
  78. 52
      src/routes/publication/[type]/[identifier]/+page.ts
  79. 10
      src/routes/visualize/+page.ts
  80. 8
      src/styles/notifications.css
  81. 20
      src/styles/publications.css
  82. 6
      src/styles/scrollbar.css
  83. 28
      src/styles/visualize.css
  84. 85
      test_data/LaTeXtestfile.md
  85. 26
      tests/e2e/my_notes_layout.pw.spec.ts
  86. 275
      tests/unit/ZettelEditor.test.ts
  87. 337
      tests/unit/eventInput30040.test.ts
  88. 2
      tests/unit/latexRendering.test.ts
  89. 124
      tests/unit/metadataExtraction.test.ts
  90. 132
      tests/unit/nostr_identifiers.test.ts
  91. 742
      tests/unit/relayDeduplication.test.ts
  92. 353
      tests/unit/tagExpansion.test.ts
  93. 8
      vite.config.ts

40
README.md

@ -3,19 +3,31 @@
# Alexandria # Alexandria
Alexandria is a reader and writer for curated publications, including e-books. Alexandria is a reader and writer for curated publications, including e-books.
For a thorough introduction, please refer to our [project documention](https://next-alexandria.gitcitadel.eu/publication?d=gitcitadel-project-documentation-by-stella-v-1), viewable on Alexandria, or to the Alexandria [About page](https://next-alexandria.gitcitadel.eu/about). For a thorough introduction, please refer to our
[project documention](https://next-alexandria.gitcitadel.eu/publication?d=gitcitadel-project-documentation-by-stella-v-1),
viewable on Alexandria, or to the Alexandria
[About page](https://next-alexandria.gitcitadel.eu/about).
It also contains a [universal event viewer](https://next-alexandria.gitcitadel.eu/events), with which you can search our relays, some aggregator relays, and your own relay list, to find and view event data. It also contains a
[universal event viewer](https://next-alexandria.gitcitadel.eu/events), with
which you can search our relays, some aggregator relays, and your own relay
list, to find and view event data.
## Issues and Patches ## Issues and Patches
If you would like to suggest a feature or report a bug, please use the [Alexandria Contact page](https://next-alexandria.gitcitadel.eu/contact). If you would like to suggest a feature or report a bug, please use the
[Alexandria Contact page](https://next-alexandria.gitcitadel.eu/contact).
You can also contact us [on Nostr](https://next-alexandria.gitcitadel.eu/events?id=nprofile1qqsggm4l0xs23qfjwnkfwf6fqcs66s3lz637gaxhl4nwd2vtle8rnfqprfmhxue69uhhg6r9vehhyetnwshxummnw3erztnrdaks5zhueg), directly. You can also contact us
[on Nostr](https://next-alexandria.gitcitadel.eu/events?id=nprofile1qqsggm4l0xs23qfjwnkfwf6fqcs66s3lz637gaxhl4nwd2vtle8rnfqprfmhxue69uhhg6r9vehhyetnwshxummnw3erztnrdaks5zhueg),
directly.
## Developing ## Developing
Make sure that you have [Node.js](https://nodejs.org/en/download/package-manager) (v22 or above) or [Deno](https://docs.deno.com/runtime/getting_started/installation/) (v2) installed. Make sure that you have
[Node.js](https://nodejs.org/en/download/package-manager) (v22 or above) or
[Deno](https://docs.deno.com/runtime/getting_started/installation/) (v2)
installed.
Once you've cloned this repo, install dependencies with NPM: Once you've cloned this repo, install dependencies with NPM:
@ -43,7 +55,8 @@ deno task dev
## Building ## Building
Alexandria is configured to run on a Node server. The [Node adapter](https://svelte.dev/docs/kit/adapter-node) works on Deno as well. Alexandria is configured to run on a Node server. The
[Node adapter](https://svelte.dev/docs/kit/adapter-node) works on Deno as well.
To build a production version of your app with Node, use: To build a production version of your app with Node, use:
@ -71,7 +84,8 @@ deno task preview
## Docker + Deno ## Docker + Deno
This application is configured to use the Deno runtime. A Docker container is provided to handle builds and deployments. This application is configured to use the Deno runtime. A Docker container is
provided to handle builds and deployments.
To build the app for local development: To build the app for local development:
@ -87,9 +101,11 @@ docker run -d -p 3000:3000 local-alexandria
## Testing ## Testing
_These tests are under development, but will run. They will later be added to the container._ _These tests are under development, but will run. They will later be added to
the container._
To run the Vitest suite we've built, install the program locally and run the tests. To run the Vitest suite we've built, install the program locally and run the
tests.
```bash ```bash
npm run test npm run test
@ -103,4 +119,8 @@ npx playwright test
## Markup Support ## Markup Support
Alexandria supports both Markdown and AsciiDoc markup for different content types. For a detailed list of supported tags and features in the basic and advanced markdown parsers, as well as information about AsciiDoc usage for publications and wikis, see [MarkupInfo.md](./src/lib/utils/markup/MarkupInfo.md). Alexandria supports both Markdown and AsciiDoc markup for different content
types. For a detailed list of supported tags and features in the basic and
advanced markdown parsers, as well as information about AsciiDoc usage for
publications and wikis, see
[MarkupInfo.md](./src/lib/utils/markup/MarkupInfo.md).

2617
deno.lock

File diff suppressed because it is too large Load Diff

7
playwright.config.ts

@ -27,7 +27,7 @@ export default defineConfig({
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
use: { use: {
/* Base URL to use in actions like `await page.goto('/')`. */ /* Base URL to use in actions like `await page.goto('/')`. */
baseURL: 'http://localhost:5173', baseURL: "http://localhost:5173",
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */ /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
trace: "on-first-retry", trace: "on-first-retry",
@ -49,7 +49,6 @@ export default defineConfig({
name: "webkit", name: "webkit",
use: { ...devices["Desktop Safari"] }, use: { ...devices["Desktop Safari"] },
}, },
/* Test against mobile viewports. */ /* Test against mobile viewports. */
// { // {
// name: 'Mobile Chrome', // name: 'Mobile Chrome',
@ -73,8 +72,8 @@ export default defineConfig({
/* Run your local dev server before starting the tests */ /* Run your local dev server before starting the tests */
webServer: { webServer: {
command: 'npm run dev', command: "npm run dev",
url: 'http://localhost:5173', url: "http://localhost:5173",
reuseExistingServer: !process.env.CI, reuseExistingServer: !process.env.CI,
}, },

83
src/app.css

@ -28,7 +28,9 @@
} }
div[role="tooltip"] button.btn-leather { div[role="tooltip"] button.btn-leather {
@apply hover:text-primary-600 dark:hover:text-primary-400 hover:border-primary-600 dark:hover:border-primary-400 hover:bg-gray-200 dark:hover:bg-gray-700; @apply hover:text-primary-600 dark:hover:text-primary-400
hover:border-primary-600 dark:hover:border-primary-400 hover:bg-gray-200
dark:hover:bg-gray-700;
} }
.image-border { .image-border {
@ -36,8 +38,10 @@
} }
div.card-leather { div.card-leather {
@apply shadow-none text-primary-1000 border-s-4 bg-highlight border-primary-200 has-[:hover]:border-primary-700; @apply shadow-none text-primary-1000 border-s-4 bg-highlight
@apply dark:bg-primary-1000 dark:border-primary-800 dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500; border-primary-200 has-[:hover]:border-primary-700;
@apply dark:bg-primary-1000 dark:border-primary-800
dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500;
} }
div.card-leather h1, div.card-leather h1,
@ -46,11 +50,13 @@
div.card-leather h4, div.card-leather h4,
div.card-leather h5, div.card-leather h5,
div.card-leather h6 { div.card-leather h6 {
@apply text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400; @apply text-gray-900 hover:text-primary-600 dark:text-gray-100
dark:hover:text-primary-400;
} }
div.card-leather .font-thin { div.card-leather .font-thin {
@apply text-gray-900 hover:text-primary-700 dark:text-gray-100 dark:hover:text-primary-300; @apply text-gray-900 hover:text-primary-700 dark:text-gray-100
dark:hover:text-primary-300;
} }
main { main {
@ -74,7 +80,8 @@
div.note-leather, div.note-leather,
p.note-leather, p.note-leather,
section.note-leather { section.note-leather {
@apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 p-2 rounded; @apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100
p-2 rounded;
} }
.edit div.note-leather:hover:not(:has(.note-leather:hover)), .edit div.note-leather:hover:not(:has(.note-leather:hover)),
@ -117,7 +124,8 @@
} }
div.modal-leather > div { div.modal-leather > div {
@apply bg-primary-0 dark:bg-primary-950 border-b-[1px] border-primary-100 dark:border-primary-600; @apply bg-primary-0 dark:bg-primary-950 border-b-[1px] border-primary-100
dark:border-primary-600;
} }
div.modal-leather > div > h1, div.modal-leather > div > h1,
@ -126,11 +134,14 @@
div.modal-leather > div > h4, div.modal-leather > div > h4,
div.modal-leather > div > h5, div.modal-leather > div > h5,
div.modal-leather > div > h6 { div.modal-leather > div > h6 {
@apply text-gray-900 hover:text-gray-900 dark:text-gray-100 dark:hover:text-gray-100; @apply text-gray-900 hover:text-gray-900 dark:text-gray-100
dark:hover:text-gray-100;
} }
div.modal-leather button { div.modal-leather button {
@apply bg-primary-0 hover:bg-primary-0 dark:bg-primary-950 dark:hover:bg-primary-950 text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400; @apply bg-primary-0 hover:bg-primary-0 dark:bg-primary-950
dark:hover:bg-primary-950 text-gray-900 hover:text-primary-600
dark:text-gray-100 dark:hover:text-primary-400;
} }
/* Navbar */ /* Navbar */
@ -143,7 +154,8 @@
} }
nav.navbar-leather svg { nav.navbar-leather svg {
@apply fill-gray-900 hover:fill-primary-600 dark:fill-gray-100 dark:hover:fill-primary-400; @apply fill-gray-900 hover:fill-primary-600 dark:fill-gray-100
dark:hover:fill-primary-400;
} }
nav.navbar-leather h1, nav.navbar-leather h1,
@ -152,7 +164,8 @@
nav.navbar-leather h4, nav.navbar-leather h4,
nav.navbar-leather h5, nav.navbar-leather h5,
nav.navbar-leather h6 { nav.navbar-leather h6 {
@apply text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400; @apply text-gray-900 hover:text-primary-600 dark:text-gray-100
dark:hover:text-primary-400;
} }
div.skeleton-leather div { div.skeleton-leather div {
@ -201,16 +214,16 @@
.network-node-content { .network-node-content {
@apply fill-primary-100; @apply fill-primary-100;
} }
/* Person link colors */ /* Person link colors */
.person-link-signed { .person-link-signed {
@apply stroke-green-500; @apply stroke-green-500;
} }
.person-link-referenced { .person-link-referenced {
@apply stroke-blue-400; @apply stroke-blue-400;
} }
/* Person anchor node */ /* Person anchor node */
.person-anchor-node { .person-anchor-node {
@apply fill-green-400 stroke-green-600; @apply fill-green-400 stroke-green-600;
@ -272,11 +285,13 @@
/* Lists */ /* Lists */
.ol-leather li a, .ol-leather li a,
.ul-leather li a { .ul-leather li a {
@apply text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400; @apply text-gray-900 hover:text-primary-600 dark:text-gray-100
dark:hover:text-primary-400;
} }
.link { .link {
@apply underline cursor-pointer hover:text-primary-600 dark:hover:text-primary-400; @apply underline cursor-pointer hover:text-primary-600
dark:hover:text-primary-400;
} }
/* Card with transition */ /* Card with transition */
@ -290,11 +305,14 @@
} }
.tags span { .tags span {
@apply bg-primary-50 text-primary-800 text-sm font-medium me-2 px-2.5 py-0.5 rounded-sm dark:bg-primary-900 dark:text-primary-200; @apply bg-primary-50 text-primary-800 text-sm font-medium me-2 px-2.5 py-0.5
rounded-sm dark:bg-primary-900 dark:text-primary-200;
} }
.npub-badge { .npub-badge {
@apply inline-flex space-x-1 items-center text-primary-600 dark:text-primary-500 hover:underline me-2 px-2 py-0.5 rounded-sm border border-primary-600 dark:border-primary-500; @apply inline-flex space-x-1 items-center text-primary-600
dark:text-primary-500 hover:underline me-2 px-2 py-0.5 rounded-sm border
border-primary-600 dark:border-primary-500;
svg { svg {
@apply fill-primary-600 dark:fill-primary-500; @apply fill-primary-600 dark:fill-primary-500;
@ -305,14 +323,19 @@
@layer components { @layer components {
/* Legend */ /* Legend */
.leather-legend { .leather-legend {
@apply relative m-4 sm:m-0 sm:absolute sm:top-1 sm:left-1 flex-shrink-0 p-2 rounded; @apply relative m-4 sm:m-0 sm:absolute sm:top-1 sm:left-1 flex-shrink-0 p-2
@apply shadow-none text-primary-1000 border border-s-4 bg-highlight border-primary-200 has-[:hover]:border-primary-700; rounded;
@apply dark:bg-primary-1000 dark:border-primary-800 dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500; @apply shadow-none text-primary-1000 border border-s-4 bg-highlight
border-primary-200 has-[:hover]:border-primary-700;
@apply dark:bg-primary-1000 dark:border-primary-800
dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500;
} }
/* Tooltip */ /* Tooltip */
.tooltip-leather { .tooltip-leather {
@apply fixed p-4 rounded shadow-lg bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 border border-gray-200 dark:border-gray-700 transition-colors duration-200; @apply fixed p-4 rounded shadow-lg bg-primary-0 dark:bg-primary-1000
text-gray-900 dark:text-gray-100 border border-gray-200
dark:border-gray-700 transition-colors duration-200;
max-width: 400px; max-width: 400px;
z-index: 1000; z-index: 1000;
} }
@ -536,13 +559,15 @@
input[type="tel"], input[type="tel"],
input[type="url"], input[type="url"],
textarea { textarea {
@apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 border-s-4 border-primary-200 rounded shadow-none px-4 py-2; @apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100
border-s-4 border-primary-200 rounded shadow-none px-4 py-2;
@apply focus:border-primary-600 dark:focus:border-primary-400; @apply focus:border-primary-600 dark:focus:border-primary-400;
} }
/* Table of Contents highlighting */ /* Table of Contents highlighting */
.toc-highlight { .toc-highlight {
@apply bg-primary-200 dark:bg-primary-700 border-l-4 border-primary-600 dark:border-primary-400 font-medium; @apply bg-primary-200 dark:bg-primary-700 border-l-4 border-primary-600
dark:border-primary-400 font-medium;
transition: all 0.2s ease-in-out; transition: all 0.2s ease-in-out;
} }
@ -551,14 +576,8 @@
} }
/* Override prose first-line bold styling */ /* Override prose first-line bold styling */
.prose p:first-line { .prose p:first-line,
font-weight: normal !important; .prose-sm p:first-line,
}
.prose-sm p:first-line {
font-weight: normal !important;
}
.prose-invert p:first-line { .prose-invert p:first-line {
font-weight: normal !important; font-weight: normal !important;
} }

4
src/app.d.ts vendored

@ -23,7 +23,9 @@ declare global {
var MathJax: any; var MathJax: any;
var nostr: NDKNip07Signer & { var nostr: NDKNip07Signer & {
getRelays: () => Promise<Record<string, Record<string, boolean | undefined>>>; getRelays: () => Promise<
Record<string, Record<string, boolean | undefined>>
>;
// deno-lint-ignore no-explicit-any // deno-lint-ignore no-explicit-any
signEvent: (event: any) => Promise<any>; signEvent: (event: any) => Promise<any>;
}; };

10
src/app.html

@ -1,4 +1,4 @@
<!doctype html> <!DOCTYPE html>
<html lang="en"> <html lang="en">
<head> <head>
<meta charset="utf-8" /> <meta charset="utf-8" />
@ -26,14 +26,18 @@
}, },
}; };
</script> </script>
<script src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script> <script
src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"
></script>
<!-- highlight.js for code highlighting --> <!-- highlight.js for code highlighting -->
<link <link
rel="stylesheet" rel="stylesheet"
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github-dark.min.css" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github-dark.min.css"
/> />
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"></script> <script
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"
></script>
%sveltekit.head% %sveltekit.head%
</head> </head>

29
src/lib/components/CommentViewer.svelte

@ -6,9 +6,7 @@
import { goto } from "$app/navigation"; import { goto } from "$app/navigation";
import { onMount } from "svelte"; import { onMount } from "svelte";
import type { NDKEvent } from "@nostr-dev-kit/ndk"; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import { userBadge } from "$lib/snippets/UserSnippets.svelte"; import EmbeddedEvent from "./EmbeddedEvent.svelte";
import { parseBasicmarkup } from "$lib/utils/markup/basicMarkupParser";
import { parseRepostContent, parseContent as parseNotificationContent } from "$lib/utils/notification_utils";
const { event } = $props<{ event: NDKEvent }>(); const { event } = $props<{ event: NDKEvent }>();
@ -654,19 +652,6 @@
return `${actualLevel * 16}px`; return `${actualLevel * 16}px`;
} }
async function parseContent(content: string, eventKind?: number): Promise<string> {
if (!content) return "";
// Use parseRepostContent for kind 6 and 16 events (reposts)
if (eventKind === 6 || eventKind === 16) {
return await parseRepostContent(content);
} else {
return await parseNotificationContent(content);
}
}
// AI-NOTE: 2025-01-24 - Get highlight source information // AI-NOTE: 2025-01-24 - Get highlight source information
function getHighlightSource(highlightEvent: NDKEvent): { type: string; value: string; url?: string } | null { function getHighlightSource(highlightEvent: NDKEvent): { type: string; value: string; url?: string } | null {
// Check for e-tags (nostr events) // Check for e-tags (nostr events)
@ -785,11 +770,7 @@
<div class="text-sm text-gray-600 dark:text-gray-400 mb-2"> <div class="text-sm text-gray-600 dark:text-gray-400 mb-2">
<span class="font-medium">Comment:</span> <span class="font-medium">Comment:</span>
</div> </div>
{#await parseContent(node.event.getMatchingTags("comment")[0]?.[1] || "") then parsedContent} <EmbeddedEvent nostrIdentifier={node.event.getMatchingTags("comment")[0]?.[1]} nestingLevel={0} />
{@html parsedContent}
{:catch}
{@html node.event.getMatchingTags("comment")[0]?.[1] || ""}
{/await}
</div> </div>
{:else} {:else}
<!-- Simple highlight --> <!-- Simple highlight -->
@ -829,11 +810,7 @@
</div> </div>
{:else} {:else}
<!-- Regular comment content --> <!-- Regular comment content -->
{#await parseContent(node.event.content || "", node.event.kind) then parsedContent} <EmbeddedEvent nostrIdentifier={node.event.id} nestingLevel={0} />
{@html parsedContent}
{:catch}
{@html node.event.content || ""}
{/await}
{/if} {/if}
</div> </div>
</div> </div>

33
src/lib/components/EmbeddedEvent.svelte

@ -4,16 +4,14 @@
import { fetchEventWithFallback } from "$lib/utils/nostrUtils"; import { fetchEventWithFallback } from "$lib/utils/nostrUtils";
import { getUserMetadata, toNpub } from "$lib/utils/nostrUtils"; import { getUserMetadata, toNpub } from "$lib/utils/nostrUtils";
import { userBadge } from "$lib/snippets/UserSnippets.svelte"; import { userBadge } from "$lib/snippets/UserSnippets.svelte";
import { parseBasicmarkup } from "$lib/utils/markup/basicMarkupParser"; import { parsedContent } from "$lib/components/util/Notifications.svelte";
import { parseEmbeddedMarkup } from "$lib/utils/markup/embeddedMarkupParser"; import { naddrEncode } from "$lib/utils";
import { parseRepostContent } from "$lib/utils/notification_utils";
import EmbeddedEventRenderer from "./EmbeddedEventRenderer.svelte";
import { neventEncode, naddrEncode } from "$lib/utils";
import { activeInboxRelays, ndkInstance } from "$lib/ndk"; import { activeInboxRelays, ndkInstance } from "$lib/ndk";
import { goto } from "$app/navigation"; import { goto } from "$app/navigation";
import { getEventType } from "$lib/utils/mime"; import { getEventType } from "$lib/utils/mime";
import { nip19 } from "nostr-tools"; import { nip19 } from "nostr-tools";
import { get } from "svelte/store"; import { get } from "svelte/store";
import { repostKinds } from "$lib/consts";
const { const {
nostrIdentifier, nostrIdentifier,
@ -36,7 +34,6 @@
} | null>(null); } | null>(null);
let loading = $state(true); let loading = $state(true);
let error = $state<string | null>(null); let error = $state<string | null>(null);
let parsedContent = $state("");
let authorDisplayName = $state<string | undefined>(undefined); let authorDisplayName = $state<string | undefined>(undefined);
// Maximum nesting level allowed // Maximum nesting level allowed
@ -120,16 +117,6 @@
} }
} }
// Parse content if available
if (event?.content) {
if (event.kind === 6 || event.kind === 16) {
parsedContent = await parseRepostContent(event.content);
} else {
// Use embedded markup parser for nested events
parsedContent = await parseEmbeddedMarkup(event.content, nestingLevel + 1);
}
}
// Parse profile if it's a profile event // Parse profile if it's a profile event
if (event?.kind === 0) { if (event?.kind === 0) {
try { try {
@ -196,10 +183,6 @@
} }
} }
function getNeventUrl(event: NDKEvent): string {
return neventEncode(event, $activeInboxRelays);
}
function getNaddrUrl(event: NDKEvent): string { function getNaddrUrl(event: NDKEvent): string {
return naddrEncode(event, $activeInboxRelays); return naddrEncode(event, $activeInboxRelays);
} }
@ -303,17 +286,15 @@
{/if} {/if}
<!-- Content for text events --> <!-- Content for text events -->
{#if event.kind === 1 && parsedContent} {#if event.kind === 1 || repostKinds.includes(event.kind)}
<div class="prose prose-sm dark:prose-invert max-w-none text-gray-900 dark:text-gray-100 min-w-0 overflow-hidden"> <div class="prose prose-sm dark:prose-invert max-w-none text-gray-900 dark:text-gray-100 min-w-0 overflow-hidden">
<EmbeddedEventRenderer content={parsedContent.slice(0, 300)} nestingLevel={nestingLevel + 1} /> {@render parsedContent(event.content.slice(0, 300))}
{#if parsedContent.length > 300} {#if event.content.length > 300}
<span class="text-gray-500 dark:text-gray-400">...</span> <span class="text-gray-500 dark:text-gray-400">...</span>
{/if} {/if}
</div> </div>
{/if}
<!-- Profile content --> <!-- Profile content -->
{#if event.kind === 0 && profile} {:else if event.kind === 0 && profile}
<div class="space-y-2 min-w-0 overflow-hidden"> <div class="space-y-2 min-w-0 overflow-hidden">
{#if profile.picture} {#if profile.picture}
<img <img

194
src/lib/components/EventDetails.svelte

@ -1,13 +1,9 @@
<script lang="ts"> <script lang="ts">
import { parseBasicmarkup } from "$lib/utils/markup/basicMarkupParser";
import { parseEmbeddedMarkup } from "$lib/utils/markup/embeddedMarkupParser";
import EmbeddedEventRenderer from "./EmbeddedEventRenderer.svelte";
import { getMimeTags } from "$lib/utils/mime"; import { getMimeTags } from "$lib/utils/mime";
import { userBadge } from "$lib/snippets/UserSnippets.svelte"; import { userBadge } from "$lib/snippets/UserSnippets.svelte";
import { toNpub } from "$lib/utils/nostrUtils"; import { toNpub } from "$lib/utils/nostrUtils";
import { neventEncode, naddrEncode, nprofileEncode } from "$lib/utils"; import { neventEncode, naddrEncode, nprofileEncode } from "$lib/utils";
import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk"; import { activeInboxRelays } from "$lib/ndk";
import { searchRelays } from "$lib/consts";
import type { NDKEvent } from "$lib/utils/nostrUtils"; import type { NDKEvent } from "$lib/utils/nostrUtils";
import { getMatchingTags } from "$lib/utils/nostrUtils"; import { getMatchingTags } from "$lib/utils/nostrUtils";
import ProfileHeader from "$components/cards/ProfileHeader.svelte"; import ProfileHeader from "$components/cards/ProfileHeader.svelte";
@ -18,13 +14,11 @@
import { navigateToEvent } from "$lib/utils/nostrEventService"; import { navigateToEvent } from "$lib/utils/nostrEventService";
import ContainingIndexes from "$lib/components/util/ContainingIndexes.svelte"; import ContainingIndexes from "$lib/components/util/ContainingIndexes.svelte";
import Notifications from "$lib/components/Notifications.svelte"; import Notifications from "$lib/components/Notifications.svelte";
import { parseRepostContent } from "$lib/utils/notification_utils"; import EmbeddedEvent from "./EmbeddedEvent.svelte";
import RelayActions from "$lib/components/RelayActions.svelte";
const { const {
event, event,
profile = null, profile = null,
searchValue = null,
} = $props<{ } = $props<{
event: NDKEvent; event: NDKEvent;
profile?: { profile?: {
@ -37,20 +31,11 @@
lud16?: string; lud16?: string;
nip05?: string; nip05?: string;
} | null; } | null;
searchValue?: string | null;
}>(); }>();
let showFullContent = $state(false);
let parsedContent = $state("");
let contentProcessing = $state(false);
let authorDisplayName = $state<string | undefined>(undefined); let authorDisplayName = $state<string | undefined>(undefined);
let showFullContent = $state(false);
// Determine if content should be truncated let shouldTruncate = $derived(event.content.length > 250 && !showFullContent);
let shouldTruncate = $state(false);
$effect(() => {
shouldTruncate = event.content.length > 250 && !showFullContent;
});
function getEventTitle(event: NDKEvent): string { function getEventTitle(event: NDKEvent): string {
// First try to get title from title tag // First try to get title from title tag
@ -92,109 +77,11 @@
return getMatchingTags(event, "summary")[0]?.[1] || ""; return getMatchingTags(event, "summary")[0]?.[1] || "";
} }
function getEventHashtags(event: NDKEvent): string[] {
return getMatchingTags(event, "t").map((tag: string[]) => tag[1]);
}
function getEventTypeDisplay(event: NDKEvent): string { function getEventTypeDisplay(event: NDKEvent): string {
const [mTag, MTag] = getMimeTags(event.kind || 0); const [mTag, MTag] = getMimeTags(event.kind || 0);
return MTag[1].split("/")[1] || `Event Kind ${event.kind}`; return MTag[1].split("/")[1] || `Event Kind ${event.kind}`;
} }
/**
 * Renders a single Nostr event tag as an HTML fragment.
 *
 * - "a" tags are linked via a NIP-19 naddr when the embedded pubkey is valid.
 * - "e" and "note" tags are linked via a NIP-19 nevent when the ID is valid hex.
 * - "d" tags link to a d-tag search on the events page.
 * - Anything else (or any encoding failure) falls back to a plain monospace pill.
 *
 * @param tag - Raw tag array, e.g. ["e", "<64-hex-id>"].
 * @returns An HTML string (anchor or span) for direct injection.
 */
function renderTag(tag: string[]): string {
  // Fallback rendering: a plain monospace pill for tags we cannot link.
  const fallbackSpan = (label: string): string =>
    `<span class='bg-primary-50 text-primary-800 px-2 py-1 rounded text-xs font-mono'>${label}</span>`;
  // 64-char hex check shared by pubkeys and event IDs.
  const isHex64 = (value: string): boolean => /^[0-9a-fA-F]{64}$/.test(value);

  // Shared renderer for "e" and "note" tags — both carry a hex event ID and
  // differ only in the displayed prefix.
  const renderEventIdTag = (prefix: string, eventId: string): string => {
    if (!isHex64(eventId)) {
      console.warn(`Invalid event ID in ${prefix} tag in renderTag:`, eventId);
      return fallbackSpan(`${prefix}:${eventId}`);
    }
    try {
      // Minimal stand-in event: neventEncode only needs the ID here.
      const mockEvent = {
        id: eventId,
        kind: 1,
        content: "",
        tags: [],
        pubkey: "",
        sig: "",
      } as any;
      const nevent = neventEncode(mockEvent, $activeInboxRelays);
      return `<a href='/events?id=${nevent}' class='underline text-primary-700'>${prefix}:${eventId}</a>`;
    } catch (error) {
      console.warn(
        `Failed to encode nevent for ${prefix} tag in renderTag:`,
        eventId,
        error,
      );
      return fallbackSpan(`${prefix}:${eventId}`);
    }
  };

  if (tag[0] === "a" && tag.length > 1) {
    // "a" tag value has the form "<kind>:<pubkey>:<d-identifier>".
    const parts = tag[1].split(":");
    if (parts.length >= 3) {
      const [kind, pubkey, d] = parts;
      // Validate that pubkey is a valid hex string before encoding.
      if (pubkey && isHex64(pubkey)) {
        try {
          const mockEvent = {
            kind: +kind,
            pubkey,
            tags: [["d", d]],
            content: "",
            id: "",
            sig: "",
          } as any;
          const naddr = naddrEncode(mockEvent, $activeInboxRelays);
          return `<a href='/events?id=${naddr}' class='underline text-primary-700'>a:${tag[1]}</a>`;
        } catch (error) {
          console.warn(
            "Failed to encode naddr for a tag in renderTag:",
            tag[1],
            error,
          );
          return fallbackSpan(`a:${tag[1]}`);
        }
      }
      console.warn("Invalid pubkey in a tag in renderTag:", pubkey);
      return fallbackSpan(`a:${tag[1]}`);
    }
    console.warn("Invalid a tag format in renderTag:", tag[1]);
    return fallbackSpan(`a:${tag[1]}`);
  }

  if ((tag[0] === "e" || tag[0] === "note") && tag.length > 1) {
    // 'note' tags are the same as 'e' tags but with a different prefix.
    return renderEventIdTag(tag[0], tag[1]);
  }

  if (tag[0] === "d" && tag.length > 1) {
    // 'd' tags are used for identifiers in addressable events.
    return `<a href='/events?d=${encodeURIComponent(tag[1])}' class='underline text-primary-700'>d:${tag[1]}</a>`;
  }

  return fallbackSpan(`${tag[0]}:${tag[1]}`);
}
function getTagButtonInfo(tag: string[]): { function getTagButtonInfo(tag: string[]): {
text: string; text: string;
gotoValue?: string; gotoValue?: string;
@ -303,52 +190,12 @@
return { text: `${tag[0]}:${tag[1]}` }; return { text: `${tag[0]}:${tag[1]}` };
} }
// Encodes the event via neventEncode, passing the active inbox relays
// (presumably as relay hints in the identifier — confirm against encoder).
function getNeventUrl(event: NDKEvent): string {
  return neventEncode(event, $activeInboxRelays);
}
// Encodes an addressable event via naddrEncode with the same relay list.
function getNaddrUrl(event: NDKEvent): string {
  return naddrEncode(event, $activeInboxRelays);
}
// Encodes a hex pubkey via nprofileEncode with the same relay list.
function getNprofileUrl(pubkey: string): string {
  return nprofileEncode(pubkey, $activeInboxRelays);
}
// Re-parse the event's content into HTML whenever the event changes.
// Kind 0 is skipped (its content is not markup — presumably JSON profile
// metadata; confirm). While a parse is in flight, contentProcessing gates the
// "Processing content..." UI state.
$effect(() => {
  if (event && event.kind !== 0 && event.content) {
    contentProcessing = true;
    // Use parseRepostContent for kind 6 and 16 events (reposts)
    if (event.kind === 6 || event.kind === 16) {
      parseRepostContent(event.content).then((html) => {
        parsedContent = html;
        contentProcessing = false;
      }).catch((error) => {
        // Parse failures are logged, not surfaced; the flag is always cleared
        // so the UI never sticks in the "processing" state.
        console.error('Error parsing repost content:', error);
        contentProcessing = false;
      });
    } else {
      // Use embedded markup parser for better Nostr event support
      parseEmbeddedMarkup(event.content, 0).then((html) => {
        parsedContent = html;
        contentProcessing = false;
      }).catch((error) => {
        console.error('Error parsing embedded markup:', error);
        contentProcessing = false;
      });
    }
  } else {
    // No parseable content: clear any previous result.
    contentProcessing = false;
    parsedContent = "";
  }
});
$effect(() => { $effect(() => {
if (!event?.pubkey) { if (!event?.pubkey) {
authorDisplayName = undefined; authorDisplayName = undefined;
return; return;
} }
getUserMetadata(toNpub(event.pubkey) as string).then((profile) => { getUserMetadata(toNpub(event.pubkey) as string).then((profile) => {
authorDisplayName = authorDisplayName =
profile.displayName || profile.displayName ||
@ -403,13 +250,6 @@
return ids; return ids;
} }
// True when `value` refers to the same search as the active `searchValue`,
// comparing case-insensitively and ignoring an optional "nostr:" prefix.
function isCurrentSearch(value: string): boolean {
  if (!searchValue) {
    return false;
  }
  const normalize = (input: string): string =>
    input.replace(/^nostr:/, "").toLowerCase();
  return normalize(value) === normalize(searchValue);
}
onMount(() => { onMount(() => {
function handleInternalLinkClick(event: MouseEvent) { function handleInternalLinkClick(event: MouseEvent) {
const target = event.target as HTMLElement; const target = event.target as HTMLElement;
@ -468,8 +308,6 @@
</div> </div>
{/if} {/if}
<!-- Containing Publications --> <!-- Containing Publications -->
<ContainingIndexes {event} /> <ContainingIndexes {event} />
@ -479,19 +317,15 @@
<div class="flex flex-col space-y-1 min-w-0"> <div class="flex flex-col space-y-1 min-w-0">
<span class="text-gray-700 dark:text-gray-300 font-semibold">Content:</span> <span class="text-gray-700 dark:text-gray-300 font-semibold">Content:</span>
<div class="prose dark:prose-invert max-w-none text-gray-900 dark:text-gray-100 break-words overflow-wrap-anywhere min-w-0"> <div class="prose dark:prose-invert max-w-none text-gray-900 dark:text-gray-100 break-words overflow-wrap-anywhere min-w-0">
{#if contentProcessing} <div class={shouldTruncate ? 'max-h-32 overflow-hidden' : ''}>
<div class="text-gray-500 dark:text-gray-400 italic">Processing content...</div> <EmbeddedEvent nostrIdentifier={event.id} nestingLevel={0} />
{:else} </div>
<div class={shouldTruncate ? 'max-h-32 overflow-hidden' : ''}> {#if shouldTruncate}
<EmbeddedEventRenderer content={parsedContent} nestingLevel={0} /> <button
</div> class="mt-2 text-primary-700 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-200"
{#if shouldTruncate} onclick={() => (showFullContent = true)}>Show more</button
<button >
class="mt-2 text-primary-700 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-200" {/if}
onclick={() => (showFullContent = true)}>Show more</button
>
{/if}
{/if}
</div> </div>
</div> </div>
</div> </div>

69
src/lib/components/Notifications.svelte

@ -1,37 +1,28 @@
<script lang="ts"> <script lang="ts">
import "../../styles/notifications.css"; import "../../styles/notifications.css";
import { onMount } from "svelte";
import { Heading, P } from "flowbite-svelte"; import { Heading, P } from "flowbite-svelte";
import type { NDKEvent } from "$lib/utils/nostrUtils"; import type { NDKEvent } from "$lib/utils/nostrUtils";
import { userStore } from "$lib/stores/userStore"; import { userStore } from "$lib/stores/userStore";
import { userPubkey, isLoggedIn } from "$lib/stores/authStore.Svelte"; import { ndkInstance } from "$lib/ndk";
import { ndkInstance, activeInboxRelays } from "$lib/ndk";
import { goto } from "$app/navigation"; import { goto } from "$app/navigation";
import { get } from "svelte/store"; import { get } from "svelte/store";
import { nip19 } from "nostr-tools"; import { nip19 } from "nostr-tools";
import { communityRelays, localRelays, anonymousRelays, searchRelays } from "$lib/consts"; import { anonymousRelays } from "$lib/consts";
import { createKind24Reply, getKind24RelaySet } from "$lib/utils/kind24_utils"; import { getKind24RelaySet } from "$lib/utils/kind24_utils";
import { createSignedEvent } from "$lib/utils/nostrEventService"; import { createSignedEvent } from "$lib/utils/nostrEventService";
import RelayDisplay from "$lib/components/RelayDisplay.svelte";
import RelayInfoList from "$lib/components/RelayInfoList.svelte";
import { Modal, Button } from "flowbite-svelte"; import { Modal, Button } from "flowbite-svelte";
import { searchProfiles } from "$lib/utils/search_utility"; import { searchProfiles } from "$lib/utils/search_utility";
import type { NostrProfile } from "$lib/utils/search_types"; import type { NostrProfile } from "$lib/utils/search_types";
import { PlusOutline, ReplyOutline, UserOutline } from "flowbite-svelte-icons"; import { PlusOutline, ReplyOutline, UserOutline } from "flowbite-svelte-icons";
import { import {
truncateContent,
truncateRenderedContent,
parseContent,
parseRepostContent,
renderQuotedContent,
getNotificationType, getNotificationType,
fetchAuthorProfiles fetchAuthorProfiles,
} from "$lib/utils/notification_utils"; quotedContent,
} from "$lib/components/util/Notifications.svelte";
import { buildCompleteRelaySet } from "$lib/utils/relay_management"; import { buildCompleteRelaySet } from "$lib/utils/relay_management";
import { formatDate, neventEncode } from "$lib/utils"; import { formatDate, neventEncode } from "$lib/utils";
import { toNpub, getUserMetadata, NDKRelaySetFromNDK } from "$lib/utils/nostrUtils"; import { NDKRelaySetFromNDK } from "$lib/utils/nostrUtils";
import { userBadge } from "$lib/snippets/UserSnippets.svelte"; import EmbeddedEvent from "./EmbeddedEvent.svelte";
import EmbeddedEventRenderer from "./EmbeddedEventRenderer.svelte";
const { event } = $props<{ event: NDKEvent }>(); const { event } = $props<{ event: NDKEvent }>();
@ -60,7 +51,6 @@
let notificationMode = $state<"to-me" | "from-me" | "public-messages">("to-me"); let notificationMode = $state<"to-me" | "from-me" | "public-messages">("to-me");
let authorProfiles = $state<Map<string, { name?: string; displayName?: string; picture?: string }>>(new Map()); let authorProfiles = $state<Map<string, { name?: string; displayName?: string; picture?: string }>>(new Map());
let filteredByUser = $state<string | null>(null); let filteredByUser = $state<string | null>(null);
// New Message Modal state // New Message Modal state
let showNewMessageModal = $state(false); let showNewMessageModal = $state(false);
@ -69,7 +59,6 @@
let newMessageRelays = $state<string[]>([]); let newMessageRelays = $state<string[]>([]);
let isComposingMessage = $state(false); let isComposingMessage = $state(false);
let replyToMessage = $state<NDKEvent | null>(null); let replyToMessage = $state<NDKEvent | null>(null);
let quotedContent = $state<string>("");
// Recipient Selection Modal state // Recipient Selection Modal state
let showRecipientModal = $state(false); let showRecipientModal = $state(false);
@ -166,8 +155,6 @@
filteredByUser = null; filteredByUser = null;
} }
// AI-NOTE: New Message Modal Functions // AI-NOTE: New Message Modal Functions
function openNewMessageModal(messageToReplyTo?: NDKEvent) { function openNewMessageModal(messageToReplyTo?: NDKEvent) {
showNewMessageModal = true; showNewMessageModal = true;
@ -178,12 +165,7 @@
replyToMessage = messageToReplyTo || null; replyToMessage = messageToReplyTo || null;
// If replying, set up the quote and pre-select all original recipients plus sender // If replying, set up the quote and pre-select all original recipients plus sender
if (messageToReplyTo) { if (messageToReplyTo) {
// Store clean content for UI display (no markdown formatting)
quotedContent = messageToReplyTo.content.length > 200
? messageToReplyTo.content.slice(0, 200) + "..."
: messageToReplyTo.content;
// Collect all recipients: original sender + all p-tag recipients // Collect all recipients: original sender + all p-tag recipients
const recipientPubkeys = new Set<string>(); const recipientPubkeys = new Set<string>();
@ -218,8 +200,6 @@
}).filter(recipient => recipient.pubkey); // Ensure we have valid pubkeys }).filter(recipient => recipient.pubkey); // Ensure we have valid pubkeys
console.log(`Pre-loaded ${selectedRecipients.length} recipients for reply:`, selectedRecipients.map(r => r.displayName || r.name || r.pubkey?.slice(0, 8))); console.log(`Pre-loaded ${selectedRecipients.length} recipients for reply:`, selectedRecipients.map(r => r.displayName || r.name || r.pubkey?.slice(0, 8)));
} else {
quotedContent = "";
} }
} }
@ -230,7 +210,6 @@
newMessageRelays = []; newMessageRelays = [];
isComposingMessage = false; isComposingMessage = false;
replyToMessage = null; replyToMessage = null;
quotedContent = "";
} }
// AI-NOTE: Recipient Selection Modal Functions // AI-NOTE: Recipient Selection Modal Functions
@ -580,8 +559,6 @@
} }
} }
// Check if user is viewing their own profile // Check if user is viewing their own profile
$effect(() => { $effect(() => {
if ($userStore.signedIn && $userStore.pubkey && event.pubkey) { if ($userStore.signedIn && $userStore.pubkey && event.pubkey) {
@ -607,8 +584,6 @@
} }
}); });
// AI-NOTE: Refactored to avoid blocking $effect with async operations // AI-NOTE: Refactored to avoid blocking $effect with async operations
// Calculate relay set when recipients change - non-blocking approach // Calculate relay set when recipients change - non-blocking approach
$effect(() => { $effect(() => {
@ -838,21 +813,13 @@
{#if message.getMatchingTags("q").length > 0} {#if message.getMatchingTags("q").length > 0}
<div class="text-sm text-gray-800 dark:text-gray-200 mb-2 leading-relaxed"> <div class="text-sm text-gray-800 dark:text-gray-200 mb-2 leading-relaxed">
{#await renderQuotedContent(message, publicMessages) then quotedHtml} {@render quotedContent(message, publicMessages)}
{@html quotedHtml}
{:catch}
<!-- Fallback if quoted content fails to render -->
{/await}
</div> </div>
{/if} {/if}
{#if message.content} {#if message.content}
<div class="text-sm text-gray-800 dark:text-gray-200 mb-2 leading-relaxed"> <div class="text-sm text-gray-800 dark:text-gray-200 mb-2 leading-relaxed">
<div class="px-2"> <div class="px-2">
{#await ((message.kind === 6 || message.kind === 16) ? parseRepostContent(message.content) : parseContent(message.content)) then parsedContent} <EmbeddedEvent nostrIdentifier={message.id} nestingLevel={0} />
<EmbeddedEventRenderer content={parsedContent} nestingLevel={0} />
{:catch}
{@html message.content}
{/await}
</div> </div>
</div> </div>
{/if} {/if}
@ -929,11 +896,7 @@
{#if notification.content} {#if notification.content}
<div class="text-sm text-gray-800 dark:text-gray-200 mb-2 leading-relaxed"> <div class="text-sm text-gray-800 dark:text-gray-200 mb-2 leading-relaxed">
<div class="px-2"> <div class="px-2">
{#await ((notification.kind === 6 || notification.kind === 16) ? parseRepostContent(notification.content) : parseContent(notification.content)) then parsedContent} <EmbeddedEvent nostrIdentifier={notification.id} nestingLevel={0} />
<EmbeddedEventRenderer content={parsedContent} nestingLevel={0} />
{:catch}
{@html truncateContent(notification.content)}
{/await}
</div> </div>
</div> </div>
{/if} {/if}
@ -964,15 +927,11 @@
</div> </div>
<!-- Quoted Content Display --> <!-- Quoted Content Display -->
{#if quotedContent} {#if replyToMessage}
<div class="quoted-content mb-4 p-3 rounded-r-lg"> <div class="quoted-content mb-4 p-3 rounded-r-lg">
<div class="text-sm text-gray-600 dark:text-gray-400 mb-1">Replying to:</div> <div class="text-sm text-gray-600 dark:text-gray-400 mb-1">Replying to:</div>
<div class="text-sm text-gray-800 dark:text-gray-200"> <div class="text-sm text-gray-800 dark:text-gray-200">
{#await parseContent(quotedContent) then parsedContent} <EmbeddedEvent nostrIdentifier={replyToMessage.id} nestingLevel={0} />
<EmbeddedEventRenderer content={parsedContent} nestingLevel={0} />
{:catch}
{@html quotedContent}
{/await}
</div> </div>
</div> </div>
{/if} {/if}

1
src/lib/components/publications/PublicationSection.svelte

@ -1,5 +1,4 @@
<script lang="ts"> <script lang="ts">
import type { PublicationTree } from "$lib/data_structures/publication_tree";
import { import {
contentParagraph, contentParagraph,
sectionHeading, sectionHeading,

10
src/lib/components/publications/table_of_contents.svelte.ts

@ -159,7 +159,7 @@ export class TableOfContents {
// Handle any other nodes that have already been resolved in parallel. // Handle any other nodes that have already been resolved in parallel.
await Promise.all( await Promise.all(
Array.from(this.#publicationTree.resolvedAddresses).map((address) => Array.from(this.#publicationTree.resolvedAddresses).map((address) =>
this.#buildTocEntryFromResolvedNode(address), this.#buildTocEntryFromResolvedNode(address)
), ),
); );
@ -274,10 +274,10 @@ export class TableOfContents {
}); });
entry.children.sort((a, b) => { entry.children.sort((a, b) => {
const aOrdinal = const aOrdinal = addressToOrdinal.get(a.address) ??
addressToOrdinal.get(a.address) ?? Number.MAX_SAFE_INTEGER; Number.MAX_SAFE_INTEGER;
const bOrdinal = const bOrdinal = addressToOrdinal.get(b.address) ??
addressToOrdinal.get(b.address) ?? Number.MAX_SAFE_INTEGER; Number.MAX_SAFE_INTEGER;
return aOrdinal - bOrdinal; return aOrdinal - bOrdinal;
}); });
} }

321
src/lib/components/util/Notifications.svelte

@ -0,0 +1,321 @@
<script module lang="ts">
import type { NDKEvent } from "$lib/utils/nostrUtils";
import { NDKRelaySetFromNDK, toNpub, getUserMetadata } from "$lib/utils/nostrUtils";
import { get } from "svelte/store";
import { ndkInstance } from "$lib/ndk";
import { searchRelays } from "$lib/consts";
import { userStore, type UserState } from "$lib/stores/userStore";
import { buildCompleteRelaySet } from "$lib/utils/relay_management";
import { nip19 } from "nostr-tools";
import type NDK from "@nostr-dev-kit/ndk";
import { parseEmbeddedMarkup } from "$lib/utils/markup/embeddedMarkupParser";
export {
parsedContent,
repostContent,
quotedContent,
truncateContent,
truncateRenderedContent,
getNotificationType,
fetchAuthorProfiles
};
/**
 * Truncates plain-text content to at most `maxLength` characters,
 * appending "..." when anything was cut off.
 *
 * @param content - The text to truncate.
 * @param maxLength - Maximum number of characters to keep (default 300).
 * @returns The original string, or its prefix plus "...".
 */
function truncateContent(content: string, maxLength: number = 300): string {
  const needsTruncation = content.length > maxLength;
  return needsTruncation ? `${content.slice(0, maxLength)}...` : content;
}
/**
 * Truncates rendered HTML content while keeping quote boxes intact.
 *
 * Quote boxes (divs wired to the 'jump-to-message' event) are swapped out for
 * placeholders before truncation and restored afterwards, so they survive the
 * cut even when surrounding text is shortened.
 *
 * NOTE(review): if the text itself is sliced, a placeholder past the cutoff can
 * be cut mid-token and its quote box silently dropped — confirm whether that is
 * acceptable for long messages.
 *
 * @param renderedHtml - Already-rendered HTML string.
 * @param maxLength - Target maximum length (default 300).
 */
function truncateRenderedContent(renderedHtml: string, maxLength: number = 300): string {
  if (renderedHtml.length <= maxLength) {
    return renderedHtml;
  }
  const PLACEHOLDER = "|||QUOTEBOX|||";
  if (renderedHtml.includes("jump-to-message")) {
    // Pull every quote box out, truncate the remaining text, then put the
    // boxes back in their original positions.
    const quoteBoxPattern = /<div class="block w-fit my-2 px-3 py-2 bg-gray-200[^>]*onclick="window\.dispatchEvent\(new CustomEvent\('jump-to-message'[^>]*>[^<]*<\/div>/g;
    const quoteBoxes = renderedHtml.match(quoteBoxPattern) || [];
    let textOnly = renderedHtml.replace(quoteBoxPattern, PLACEHOLDER);
    if (textOnly.length > maxLength) {
      // Budget what is left for text after accounting for the boxes,
      // but always keep at least 50 characters of text.
      const budget = maxLength - quoteBoxes.join("").length;
      const cutoff = budget > 50 ? budget : 50;
      textOnly = `${textOnly.slice(0, cutoff)}...`;
    }
    let restored = textOnly;
    for (const box of quoteBoxes) {
      restored = restored.replace(PLACEHOLDER, box);
    }
    return restored;
  }
  if (!renderedHtml.includes("<")) {
    // Plain text: a simple slice is safe.
    return `${renderedHtml.slice(0, maxLength)}...`;
  }
  // HTML without quote boxes: avoid cutting in the middle of a tag by
  // backing up to the last unclosed '<' if the slice landed inside one.
  const truncated = renderedHtml.slice(0, maxLength);
  const lastOpen = truncated.lastIndexOf("<");
  const lastClose = truncated.lastIndexOf(">");
  if (lastOpen > lastClose) {
    return `${renderedHtml.slice(0, lastOpen)}...`;
  }
  return `${truncated}...`;
}
/**
 * Maps an event kind to its human-readable notification label.
 * Unrecognized kinds fall back to "Kind <n>".
 */
function getNotificationType(event: NDKEvent): string {
  const labels: Record<number, string> = {
    1: "Reply",
    1111: "Custom Reply",
    9802: "Highlight",
    6: "Repost",
    16: "Generic Repost",
    24: "Public Message",
  };
  return labels[event.kind ?? -1] ?? `Kind ${event.kind}`;
}
/**
 * Fetches author profiles for a list of events.
 *
 * For each unique author pubkey, tries (in order): the local metadata cache,
 * each configured search relay one at a time, and finally the user's complete
 * inbox+outbox relay set. All per-pubkey lookups run concurrently; failures
 * are logged and simply leave that pubkey out of the result.
 *
 * @param events - Events whose authors' profiles should be resolved.
 * @returns Map from hex pubkey to the subset of profile fields the UI needs.
 */
async function fetchAuthorProfiles(events: NDKEvent[]): Promise<Map<string, { name?: string; displayName?: string; picture?: string }>> {
  const authorProfiles = new Map<string, { name?: string; displayName?: string; picture?: string }>();
  // Deduplicate authors so each pubkey is looked up once.
  const uniquePubkeys = new Set<string>();
  events.forEach(event => {
    if (event.pubkey) uniquePubkeys.add(event.pubkey);
  });
  const profilePromises = Array.from(uniquePubkeys).map(async (pubkey) => {
    try {
      const npub = toNpub(pubkey);
      if (!npub) return;
      // Try cache first
      let profile = await getUserMetadata(npub, false);
      // Only accept a cached profile that actually has displayable fields.
      if (profile && (profile.name || profile.displayName || profile.picture)) {
        authorProfiles.set(pubkey, profile);
        return;
      }
      // Try search relays
      for (const relay of searchRelays) {
        try {
          const ndk: NDK | undefined = get(ndkInstance);
          if (!ndk) break;
          const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], ndk);
          // Kind 0 is the profile-metadata event for this author.
          const profileEvent = await ndk.fetchEvent(
            { kinds: [0], authors: [pubkey] },
            undefined,
            relaySet
          );
          if (profileEvent) {
            const profileData = JSON.parse(profileEvent.content);
            // Tolerate both snake_case and camelCase field spellings.
            authorProfiles.set(pubkey, {
              name: profileData.name,
              displayName: profileData.display_name || profileData.displayName,
              picture: profileData.picture || profileData.image
            });
            return;
          }
        } catch (error) {
          // A single relay failing should not abort the remaining relays.
          console.warn(`[fetchAuthorProfiles] Failed to fetch profile from ${relay}:`, error);
        }
      }
      // Try all available relays as fallback
      try {
        const ndk: NDK | undefined = get(ndkInstance);
        if (!ndk) return;
        const userStoreValue: UserState = get(userStore);
        const user = userStoreValue.signedIn && userStoreValue.pubkey ? ndk.getUser({ pubkey: userStoreValue.pubkey }) : null;
        const relaySet = await buildCompleteRelaySet(ndk, user);
        const allRelays = [...relaySet.inboxRelays, ...relaySet.outboxRelays];
        if (allRelays.length > 0) {
          const ndkRelaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
          const profileEvent = await ndk.fetchEvent(
            { kinds: [0], authors: [pubkey] },
            undefined,
            ndkRelaySet
          );
          if (profileEvent) {
            const profileData = JSON.parse(profileEvent.content);
            authorProfiles.set(pubkey, {
              name: profileData.name,
              displayName: profileData.display_name || profileData.displayName,
              picture: profileData.picture || profileData.image
            });
          }
        }
      } catch (error) {
        console.warn(`[fetchAuthorProfiles] Failed to fetch profile from all relays:`, error);
      }
    } catch (error) {
      // Best-effort: an unresolvable author just has no entry in the map.
      console.warn(`[fetchAuthorProfiles] Error processing profile for ${pubkey}:`, error);
    }
  });
  await Promise.all(profilePromises);
  return authorProfiles;
}
/**
 * Locates the event referenced by a "q" tag, preferring already-loaded
 * messages before querying relays.
 *
 * @param eventId - Hex event ID taken from the quoting event's "q" tag.
 * @param publicMessages - Locally cached events searched before hitting relays.
 * @returns The quoted event, or undefined when the ID is malformed or not found.
 */
async function findQuotedMessage(eventId: string, publicMessages: NDKEvent[]): Promise<NDKEvent | undefined> {
  // Validate eventId format (should be 64 character hex string)
  const isValidEventId = /^[a-fA-F0-9]{64}$/.test(eventId);
  if (!isValidEventId) return undefined;
  // First try to find in local messages
  let quotedMessage = publicMessages.find(msg => msg.id === eventId);
  // If not found locally, fetch from relays
  if (!quotedMessage) {
    try {
      const ndk: NDK | undefined = get(ndkInstance);
      if (ndk) {
        // Query the user's full inbox+outbox relay set plus the shared search
        // relays so the lookup covers every known source.
        const userStoreValue: UserState = get(userStore);
        const user = userStoreValue.signedIn && userStoreValue.pubkey ? ndk.getUser({ pubkey: userStoreValue.pubkey }) : null;
        const relaySet = await buildCompleteRelaySet(ndk, user);
        const allRelays = [...relaySet.inboxRelays, ...relaySet.outboxRelays, ...searchRelays];
        if (allRelays.length > 0) {
          const ndkRelaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
          const fetchedEvent = await ndk.fetchEvent({ ids: [eventId], limit: 1 }, undefined, ndkRelaySet);
          // fetchEvent yields null when nothing matched; normalize to undefined.
          quotedMessage = fetchedEvent || undefined;
        }
      }
    } catch (error) {
      // Best-effort lookup: a relay failure is treated the same as "not found".
      console.warn(`[findQuotedMessage] Failed to fetch quoted event ${eventId}:`, error);
    }
  }
  return quotedMessage;
}
</script>
<!-- parsedContent: asynchronously parses a string of embedded Nostr markup and
     injects the resulting HTML. NOTE(review): {@html} assumes the parser output
     is safe to inject — confirm parseEmbeddedMarkup sanitizes its output. -->
{#snippet parsedContent(content: string)}
  {#await parseEmbeddedMarkup(content, 0) then parsed}
    {@html parsed}
  {/await}
{/snippet}
<!-- repostContent: renders repost content (kinds 6/16), whose `content` field
     is expected to hold the original event serialized as JSON. When the JSON
     parse fails, the raw content is rendered as embedded markup instead. -->
{#snippet repostContent(content: string)}
  {@const originalEvent = (() => {
    try {
      return JSON.parse(content);
    } catch {
      return null;
    }
  })()}
  {#if originalEvent}
    <!-- Pull display fields out of the embedded original event, with defaults
         for anything missing from the JSON. -->
    {@const originalContent = originalEvent.content || ""}
    {@const originalAuthor = originalEvent.pubkey || ""}
    {@const originalCreatedAt = originalEvent.created_at || 0}
    {@const originalKind = originalEvent.kind || 1}
    {@const formattedDate = originalCreatedAt ? new Date(originalCreatedAt * 1000).toLocaleDateString() : "Unknown date"}
    {@const shortAuthor = originalAuthor ? `${originalAuthor.slice(0, 8)}...${originalAuthor.slice(-4)}` : "Unknown"}
    <div class="embedded-repost bg-gray-50 dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4 my-2">
      <!-- Event header -->
      <div class="flex items-center justify-between mb-3 min-w-0">
        <div class="flex items-center space-x-2 min-w-0">
          <span class="text-xs text-gray-500 dark:text-gray-400 font-mono flex-shrink-0">
            Kind {originalKind}
          </span>
          <span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0">
            (repost)
          </span>
          <!-- NOTE(review): this separator span is empty — possibly a lost
               glyph (e.g. a bullet) from the original source; confirm. -->
          <span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0"></span>
          <span class="text-xs text-gray-600 dark:text-gray-400 flex-shrink-0">Author:</span>
          <span class="text-xs text-gray-700 dark:text-gray-300 font-mono">
            {shortAuthor}
          </span>
          <span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0"></span>
          <span class="text-xs text-gray-500 dark:text-gray-400">
            {formattedDate}
          </span>
        </div>
        <button
          class="text-xs text-primary-600 dark:text-primary-500 hover:underline flex-shrink-0"
          onclick={() => window.location.href=`/events?id=${originalEvent.id || 'unknown'}`}
        >
          View full event →
        </button>
      </div>
      <!-- Reposted content -->
      <div class="text-sm text-gray-800 dark:text-gray-200 leading-relaxed">
        {#await parseEmbeddedMarkup(originalContent, 0) then parsedOriginalContent}
          {@html parsedOriginalContent}
        {/await}
      </div>
    </div>
  {:else}
    <!-- Fallback: content was not valid JSON, render it as plain markup. -->
    {#await parseEmbeddedMarkup(content, 0) then parsedContent}
      {@html parsedContent}
    {/await}
  {/if}
{/snippet}
<!-- quotedContent: renders the message referenced by the first "q" tag of
     `message` as a clickable quote box. Resolution order: local
     `publicMessages`, then relays (via findQuotedMessage). Unresolved but
     valid IDs link out to the events page; invalid IDs show an error box. -->
{#snippet quotedContent(message: NDKEvent, publicMessages: NDKEvent[])}
  {@const qTags = message.getMatchingTags("q")}
  {#if qTags.length > 0}
    <!-- Only the first "q" tag is rendered; additional quotes are ignored. -->
    {@const qTag = qTags[0]}
    {@const eventId = qTag[1]}
    {#if eventId}
      {#await findQuotedMessage(eventId, publicMessages) then quotedMessage}
        {#if quotedMessage}
          <!-- Found: show up to 200 chars of the quoted content; clicking
               dispatches 'jump-to-message' so the list can scroll to it.
               (This local const intentionally shadows the snippet name.) -->
          {@const quotedContent = quotedMessage.content ? quotedMessage.content.slice(0, 200) : "No content"}
          {#await parseEmbeddedMarkup(quotedContent, 0) then parsedContent}
            <button type="button" class="block text-left w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded cursor-pointer hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors text-sm text-gray-600 dark:text-gray-300" onclick={() => window.dispatchEvent(new CustomEvent('jump-to-message', { detail: eventId }))}>
              {@html parsedContent}
            </button>
          {/await}
        {:else}
          {@const isValidEventId = /^[a-fA-F0-9]{64}$/.test(eventId)}
          {#if isValidEventId}
            <!-- Not found locally or on relays, but the ID is well-formed:
                 offer a link to the event page instead. -->
            {@const nevent = (() => {
              try {
                return nip19.neventEncode({ id: eventId });
              } catch (error) {
                console.warn(`[quotedContent] Failed to encode nevent for ${eventId}:`, error);
                return null;
              }
            })()}
            {#if nevent}
              <button type="button" class="block text-left w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded cursor-pointer hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors text-sm text-gray-600 dark:text-gray-300" onclick={() => window.location.href=`/events?id=${nevent}`}>
                Quoted message not found. Click to view event {eventId.slice(0, 8)}...
              </button>
            {:else}
              <div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded text-sm text-gray-600 dark:text-gray-300">
                Quoted message not found. Event ID: {eventId.slice(0, 8)}...
              </div>
            {/if}
          {:else}
            <div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded text-sm text-gray-600 dark:text-gray-300">
              Invalid quoted message reference
            </div>
          {/if}
        {/if}
      {/await}
    {/if}
  {/if}
{/snippet}

7
src/lib/consts.ts

@ -3,6 +3,7 @@
export const wikiKind = 30818; export const wikiKind = 30818;
export const indexKind = 30040; export const indexKind = 30040;
export const zettelKinds = [30041, 30818, 30023]; export const zettelKinds = [30041, 30818, 30023];
export const repostKinds = [6, 16];
export const communityRelays = [ export const communityRelays = [
"wss://theforest.nostr1.com", "wss://theforest.nostr1.com",
@ -16,7 +17,7 @@ export const searchRelays = [
"wss://nostr.wine", "wss://nostr.wine",
"wss://relay.damus.io", "wss://relay.damus.io",
"wss://relay.nostr.band", "wss://relay.nostr.band",
"wss://freelay.sovbit.host" "wss://freelay.sovbit.host",
]; ];
export const secondaryRelays = [ export const secondaryRelays = [
@ -32,7 +33,7 @@ export const secondaryRelays = [
export const anonymousRelays = [ export const anonymousRelays = [
"wss://freelay.sovbit.host", "wss://freelay.sovbit.host",
"wss://thecitadel.nostr1.com" "wss://thecitadel.nostr1.com",
]; ];
export const lowbandwidthRelays = [ export const lowbandwidthRelays = [
@ -44,7 +45,7 @@ export const lowbandwidthRelays = [
export const localRelays: string[] = [ export const localRelays: string[] = [
"ws://localhost:8080", "ws://localhost:8080",
"ws://localhost:4869", "ws://localhost:4869",
"ws://localhost:3334" "ws://localhost:3334",
]; ];
export enum FeedType { export enum FeedType {

85
src/lib/data_structures/docs/relay_selector_design.md

@ -1,6 +1,11 @@
# Relay Selector Class Design # Relay Selector Class Design
The relay selector will be a singleton that tracks, rates, and ranks Nostr relays to help the application determine which relay should be used to handle each request. It will weight relays based on observed characteristics, then use these weights to implement a weighted round robin algorithm for selecting relays, with some additional modifications to account for domain-specific features of Nostr. The relay selector will be a singleton that tracks, rates, and ranks Nostr
relays to help the application determine which relay should be used to handle
each request. It will weight relays based on observed characteristics, then use
these weights to implement a weighted round robin algorithm for selecting
relays, with some additional modifications to account for domain-specific
features of Nostr.
## Relay Weights ## Relay Weights
@ -9,63 +14,92 @@ The relay selector will be a singleton that tracks, rates, and ranks Nostr relay
Relays are broadly divided into three categories: Relays are broadly divided into three categories:
1. **Public**: no authorization is required 1. **Public**: no authorization is required
2. **Private Write**: authorization is required to write to this relay, but not to read 2. **Private Write**: authorization is required to write to this relay, but not
3. **Private Read and Write**: authorization is required to use any features of this relay to read
3. **Private Read and Write**: authorization is required to use any features of
this relay
The broadest level of relay selection is based on these categories. The broadest level of relay selection is based on these categories.
- For users that are not logged in, public relays are used exclusively. - For users that are not logged in, public relays are used exclusively.
- For logged-in users, public and private read relays are initially rated equally for read operations. - For logged-in users, public and private read relays are initially rated
- For logged-in users, private write relays are preferred above public relays for write operations. equally for read operations.
- For logged-in users, private write relays are preferred above public relays
for write operations.
### User Preferences ### User Preferences
The relay selector will respect user relay preferences while still attempting to optimize for responsiveness and success rate. The relay selector will respect user relay preferences while still attempting to
optimize for responsiveness and success rate.
- User inbox relays will be stored in a separate list from general-purpose relays, and weighted and sorted separately using the same algorithm as the general-purpose relay list.
- Local relays (beginning with `wss://localhost` or `ws://localhost`) will be stored _unranked_ in a separate list, and used when the relay selector is operating on a web browser (as opposed to a server). - User inbox relays will be stored in a separate list from general-purpose
- When a caller requests relays from the relay selector, the selector will return: relays, and weighted and sorted separately using the same algorithm as the
general-purpose relay list.
- Local relays (beginning with `wss://localhost` or `ws://localhost`) will be
stored _unranked_ in a separate list, and used when the relay selector is
operating on a web browser (as opposed to a server).
- When a caller requests relays from the relay selector, the selector will
return:
- The highest-ranked general-purpose relay - The highest-ranked general-purpose relay
- The highest-ranked user inbox relay - The highest-ranked user inbox relay
- (If on browser) any local relays - (If on browser) any local relays
### Weighted Metrics ### Weighted Metrics
Several weighted metrics are used to compute a relay's score. The score is used to rank relays to determine which to prefer when fetching events. Several weighted metrics are used to compute a relay's score. The score is used
to rank relays to determine which to prefer when fetching events.
#### Response Time

The response time weight of each relay is computed according to the logarithmic
function $`r(t) = -log(t) + 1`$, where $`t`$ is the median response time in
seconds. This function has a few features which make it useful:

- $`r(1) = 1`$, making a response time of 1s the neutral point. This causes the
  algorithm to prefer relays that respond in under 1s.
- $`r(0.3) \approx 1.5`$ and $`r(3) \approx 0.5`$. This clusters the 0.5 to 1.5
  weight range in the 300ms to 3s response time range, which is a sufficiently
  rapid response time to keep users from switching context.
- The function has a long tail, so it doesn't discount slower response times too
  heavily, too quickly.
#### Success Rate

The success rate $`s(x)`$ is computed as the fraction of total requests sent to
the relay that returned at least one event in response. The optimal score is 1,
meaning the relay successfully responds to 100% of requests.
#### Trust Level

Certain relays may be assigned a constant "trust level" score $`T`$. This
modifier is a number in the range $`[-0.5, 0.5]`$ that indicates how much a
relay is trusted by the GitCitadel organization.

A few factors contribute to a higher trust rating:

- Effective filtering of spam and abusive content.
- Good data transparency, including such policies as honoring deletion requests.
- Event aggregation policies that aim at synchronization with the broader relay
  network.
#### Preferred Vendors

Certain relays may be assigned a constant "preferred vendor" score $`V`$. This
modifier is a number in the range $`[0, 0.5]`$. It is used to increase the
priority of GitCitadel's preferred relay vendors.
### Overall Weight

The overall weight of a relay is calculated as
$`w(t, x) = r(t) \times s(x) + T + V`$. The `RelaySelector` class maintains a
list of relays sorted by their overall weights. The weights may be updated at
runtime when $`t`$ or $`x`$ change. On update, the relay list is re-sorted to
account for the new weights.
## Algorithm

The relay weights contribute to a weighted round robin (WRR) algorithm for relay
selection. Pseudocode for the algorithm is given below:

```pseudocode
Constants and Variables:
@ -86,11 +120,13 @@ Function getRelay:
## Class Methods

The `RelaySelector` class should expose the following methods to support updates
to relay weights. Pseudocode for each method is given below.

### Add Response Time Datum

This function updates the class state by side effect. Locking should be used in
concurrent use cases.

```pseudocode
Constants and Variables:
@ -123,7 +159,8 @@ Function addResponseTimeDatum:
### Add Success Rate Datum

This function updates the class state by side effect. Locking should be used in
concurrent use cases.

```pseudocode
Constants and Variables:

310
src/lib/data_structures/publication_tree.ts

@ -2,7 +2,10 @@ import { Lazy } from "./lazy.ts";
import type { NDKEvent } from "@nostr-dev-kit/ndk"; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type NDK from "@nostr-dev-kit/ndk"; import type NDK from "@nostr-dev-kit/ndk";
import { fetchEventById } from "../utils/websocket_utils.ts"; import { fetchEventById } from "../utils/websocket_utils.ts";
import { fetchEventWithFallback, NDKRelaySetFromNDK } from "../utils/nostrUtils.ts"; import {
fetchEventWithFallback,
NDKRelaySetFromNDK,
} from "../utils/nostrUtils.ts";
import { get } from "svelte/store"; import { get } from "svelte/store";
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts"; import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
import { searchRelays, secondaryRelays } from "../consts.ts"; import { searchRelays, secondaryRelays } from "../consts.ts";
@ -50,7 +53,7 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
* A map of addresses in the tree to their corresponding events. * A map of addresses in the tree to their corresponding events.
*/ */
#events: Map<string, NDKEvent>; #events: Map<string, NDKEvent>;
/** /**
* Simple cache for fetched events to avoid re-fetching. * Simple cache for fetched events to avoid re-fetching.
*/ */
@ -486,7 +489,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
continue; continue;
} }
if (this.#cursor.target && this.#cursor.target.status === PublicationTreeNodeStatus.Error) { if (
this.#cursor.target &&
this.#cursor.target.status === PublicationTreeNodeStatus.Error
) {
return { done: false, value: null }; return { done: false, value: null };
} }
@ -494,7 +500,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
} }
} while (this.#cursor.tryMoveToParent()); } while (this.#cursor.tryMoveToParent());
if (this.#cursor.target && this.#cursor.target.status === PublicationTreeNodeStatus.Error) { if (
this.#cursor.target &&
this.#cursor.target.status === PublicationTreeNodeStatus.Error
) {
return { done: false, value: null }; return { done: false, value: null };
} }
@ -533,7 +542,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
} }
} while (this.#cursor.tryMoveToParent()); } while (this.#cursor.tryMoveToParent());
if (this.#cursor.target && this.#cursor.target.status === PublicationTreeNodeStatus.Error) { if (
this.#cursor.target &&
this.#cursor.target.status === PublicationTreeNodeStatus.Error
) {
return { done: false, value: null }; return { done: false, value: null };
} }
@ -588,47 +600,84 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
.filter((tag) => tag[0] === "a") .filter((tag) => tag[0] === "a")
.map((tag) => tag[1]); .map((tag) => tag[1]);
console.debug(`[PublicationTree] Current event ${currentEvent.id} has ${currentEvent.tags.length} tags:`, currentEvent.tags); console.debug(
console.debug(`[PublicationTree] Found ${currentChildAddresses.length} a-tags in current event:`, currentChildAddresses); `[PublicationTree] Current event ${currentEvent.id} has ${currentEvent.tags.length} tags:`,
currentEvent.tags,
);
console.debug(
`[PublicationTree] Found ${currentChildAddresses.length} a-tags in current event:`,
currentChildAddresses,
);
// If no a-tags found, try e-tags as fallback // If no a-tags found, try e-tags as fallback
if (currentChildAddresses.length === 0) { if (currentChildAddresses.length === 0) {
const eTags = currentEvent.tags const eTags = currentEvent.tags
.filter((tag) => tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1])); .filter((tag) =>
tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1])
console.debug(`[PublicationTree] Found ${eTags.length} e-tags for current event ${currentEvent.id}:`, eTags.map(tag => tag[1])); );
console.debug(
`[PublicationTree] Found ${eTags.length} e-tags for current event ${currentEvent.id}:`,
eTags.map((tag) => tag[1]),
);
// For e-tags with hex IDs, fetch the referenced events to get their addresses // For e-tags with hex IDs, fetch the referenced events to get their addresses
const eTagPromises = eTags.map(async (tag) => { const eTagPromises = eTags.map(async (tag) => {
try { try {
console.debug(`[PublicationTree] Fetching event for e-tag ${tag[1]} in depthFirstRetrieve`); console.debug(
`[PublicationTree] Fetching event for e-tag ${
tag[1]
} in depthFirstRetrieve`,
);
const referencedEvent = await fetchEventById(tag[1]); const referencedEvent = await fetchEventById(tag[1]);
if (referencedEvent) { if (referencedEvent) {
// Construct the proper address format from the referenced event // Construct the proper address format from the referenced event
const dTag = referencedEvent.tags.find(tag => tag[0] === "d")?.[1]; const dTag = referencedEvent.tags.find((tag) => tag[0] === "d")
?.[1];
if (dTag) { if (dTag) {
const address = `${referencedEvent.kind}:${referencedEvent.pubkey}:${dTag}`; const address =
console.debug(`[PublicationTree] Constructed address from e-tag in depthFirstRetrieve: ${address}`); `${referencedEvent.kind}:${referencedEvent.pubkey}:${dTag}`;
console.debug(
`[PublicationTree] Constructed address from e-tag in depthFirstRetrieve: ${address}`,
);
return address; return address;
} else { } else {
console.debug(`[PublicationTree] Referenced event ${tag[1]} has no d-tag in depthFirstRetrieve`); console.debug(
`[PublicationTree] Referenced event ${
tag[1]
} has no d-tag in depthFirstRetrieve`,
);
} }
} else { } else {
console.debug(`[PublicationTree] Failed to fetch event for e-tag ${tag[1]} in depthFirstRetrieve - event not found`); console.debug(
`[PublicationTree] Failed to fetch event for e-tag ${
tag[1]
} in depthFirstRetrieve - event not found`,
);
} }
return null; return null;
} catch (error) { } catch (error) {
console.warn(`[PublicationTree] Failed to fetch event for e-tag ${tag[1]} in depthFirstRetrieve:`, error); console.warn(
`[PublicationTree] Failed to fetch event for e-tag ${
tag[1]
} in depthFirstRetrieve:`,
error,
);
return null; return null;
} }
}); });
const resolvedAddresses = await Promise.all(eTagPromises); const resolvedAddresses = await Promise.all(eTagPromises);
const validAddresses = resolvedAddresses.filter(addr => addr !== null) as string[]; const validAddresses = resolvedAddresses.filter((addr) =>
addr !== null
console.debug(`[PublicationTree] Resolved ${validAddresses.length} valid addresses from e-tags in depthFirstRetrieve:`, validAddresses); ) as string[];
console.debug(
`[PublicationTree] Resolved ${validAddresses.length} valid addresses from e-tags in depthFirstRetrieve:`,
validAddresses,
);
if (validAddresses.length > 0) { if (validAddresses.length > 0) {
currentChildAddresses.push(...validAddresses); currentChildAddresses.push(...validAddresses);
} }
@ -646,9 +695,9 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
// Augment the tree with the children of the current event. // Augment the tree with the children of the current event.
const childPromises = currentChildAddresses const childPromises = currentChildAddresses
.filter(childAddress => !this.#nodes.has(childAddress)) .filter((childAddress) => !this.#nodes.has(childAddress))
.map(childAddress => this.#addNode(childAddress, currentNode!)); .map((childAddress) => this.#addNode(childAddress, currentNode!));
await Promise.all(childPromises); await Promise.all(childPromises);
// Push the popped address's children onto the stack for the next iteration. // Push the popped address's children onto the stack for the next iteration.
@ -663,7 +712,7 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
#addNode(address: string, parentNode: PublicationTreeNode) { #addNode(address: string, parentNode: PublicationTreeNode) {
const lazyNode = new Lazy<PublicationTreeNode>(() => const lazyNode = new Lazy<PublicationTreeNode>(() =>
this.#resolveNode(address, parentNode), this.#resolveNode(address, parentNode)
); );
parentNode.children!.push(lazyNode); parentNode.children!.push(lazyNode);
this.#nodes.set(address, lazyNode); this.#nodes.set(address, lazyNode);
@ -686,10 +735,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
): Promise<PublicationTreeNode> { ): Promise<PublicationTreeNode> {
// Check cache first // Check cache first
let event = this.#eventCache.get(address); let event = this.#eventCache.get(address);
if (!event) { if (!event) {
const [kind, pubkey, dTag] = address.split(":"); const [kind, pubkey, dTag] = address.split(":");
// AI-NOTE: 2025-01-24 - Enhanced event fetching with comprehensive fallback // AI-NOTE: 2025-01-24 - Enhanced event fetching with comprehensive fallback
// First try to fetch using the enhanced fetchEventWithFallback function // First try to fetch using the enhanced fetchEventWithFallback function
// which includes search relay fallback logic // which includes search relay fallback logic
@ -698,33 +747,50 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
authors: [pubkey], authors: [pubkey],
"#d": [dTag], "#d": [dTag],
}, 5000) // 5 second timeout for publication events }, 5000) // 5 second timeout for publication events
.then(fetchedEvent => { .then((fetchedEvent) => {
if (fetchedEvent) { if (fetchedEvent) {
// Cache the event if found // Cache the event if found
this.#eventCache.set(address, fetchedEvent); this.#eventCache.set(address, fetchedEvent);
event = fetchedEvent; event = fetchedEvent;
} }
if (!event) { if (!event) {
console.warn( console.warn(
`[PublicationTree] Event with address ${address} not found on primary relays, trying search relays.`, `[PublicationTree] Event with address ${address} not found on primary relays, trying search relays.`,
); );
// If still not found, try a more aggressive search using search relays // If still not found, try a more aggressive search using search relays
return this.#trySearchRelayFallback(address, kind, pubkey, dTag, parentNode); return this.#trySearchRelayFallback(
address,
kind,
pubkey,
dTag,
parentNode,
);
} }
return this.#buildNodeFromEvent(event, address, parentNode); return this.#buildNodeFromEvent(event, address, parentNode);
}) })
.catch(error => { .catch((error) => {
console.warn(`[PublicationTree] Error fetching event for address ${address}:`, error); console.warn(
`[PublicationTree] Error fetching event for address ${address}:`,
// Try search relay fallback even on error error,
return this.#trySearchRelayFallback(address, kind, pubkey, dTag, parentNode); );
// Try search relay fallback even on error
return this.#trySearchRelayFallback(
address,
kind,
pubkey,
dTag,
parentNode,
);
}); });
} }
return Promise.resolve(this.#buildNodeFromEvent(event, address, parentNode)); return Promise.resolve(
this.#buildNodeFromEvent(event, address, parentNode),
);
} }
/** /**
@ -732,54 +798,75 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
* This method tries to find events on search relays when they're not found on primary relays * This method tries to find events on search relays when they're not found on primary relays
*/ */
async #trySearchRelayFallback( async #trySearchRelayFallback(
address: string, address: string,
kind: string, kind: string,
pubkey: string, pubkey: string,
dTag: string, dTag: string,
parentNode: PublicationTreeNode parentNode: PublicationTreeNode,
): Promise<PublicationTreeNode> { ): Promise<PublicationTreeNode> {
try { try {
console.log(`[PublicationTree] Trying search relay fallback for address: ${address}`); console.log(
`[PublicationTree] Trying search relay fallback for address: ${address}`,
);
// Get current relay configuration // Get current relay configuration
const inboxRelays = get(activeInboxRelays); const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays); const outboxRelays = get(activeOutboxRelays);
// Create a comprehensive relay set including search relays // Create a comprehensive relay set including search relays
const allRelays = [...inboxRelays, ...outboxRelays, ...searchRelays, ...secondaryRelays]; const allRelays = [
...inboxRelays,
...outboxRelays,
...searchRelays,
...secondaryRelays,
];
const uniqueRelays = [...new Set(allRelays)]; // Remove duplicates const uniqueRelays = [...new Set(allRelays)]; // Remove duplicates
console.log(`[PublicationTree] Trying ${uniqueRelays.length} relays for fallback search:`, uniqueRelays); console.log(
`[PublicationTree] Trying ${uniqueRelays.length} relays for fallback search:`,
uniqueRelays,
);
// Try each relay individually with a shorter timeout // Try each relay individually with a shorter timeout
for (const relay of uniqueRelays) { for (const relay of uniqueRelays) {
try { try {
const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], this.#ndk); const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], this.#ndk);
const fetchedEvent = await this.#ndk.fetchEvent({ const fetchedEvent = await this.#ndk.fetchEvent(
kinds: [parseInt(kind)], {
authors: [pubkey], kinds: [parseInt(kind)],
"#d": [dTag], authors: [pubkey],
}, undefined, relaySet).withTimeout(3000); // 3 second timeout per relay "#d": [dTag],
},
undefined,
relaySet,
).withTimeout(3000); // 3 second timeout per relay
if (fetchedEvent) { if (fetchedEvent) {
console.log(`[PublicationTree] Found event ${fetchedEvent.id} on search relay: ${relay}`); console.log(
`[PublicationTree] Found event ${fetchedEvent.id} on search relay: ${relay}`,
);
// Cache the event // Cache the event
this.#eventCache.set(address, fetchedEvent); this.#eventCache.set(address, fetchedEvent);
this.#events.set(address, fetchedEvent); this.#events.set(address, fetchedEvent);
return this.#buildNodeFromEvent(fetchedEvent, address, parentNode); return this.#buildNodeFromEvent(fetchedEvent, address, parentNode);
} }
} catch (error) { } catch (error) {
console.debug(`[PublicationTree] Failed to fetch from relay ${relay}:`, error); console.debug(
`[PublicationTree] Failed to fetch from relay ${relay}:`,
error,
);
continue; // Try next relay continue; // Try next relay
} }
} }
// If we get here, the event was not found on any relay // If we get here, the event was not found on any relay
console.warn(`[PublicationTree] Event with address ${address} not found on any relay after fallback search.`); console.warn(
`[PublicationTree] Event with address ${address} not found on any relay after fallback search.`,
);
return { return {
type: PublicationTreeNodeType.Leaf, type: PublicationTreeNodeType.Leaf,
status: PublicationTreeNodeStatus.Error, status: PublicationTreeNodeStatus.Error,
@ -787,10 +874,12 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
parent: parentNode, parent: parentNode,
children: [], children: [],
}; };
} catch (error) { } catch (error) {
console.error(`[PublicationTree] Error in search relay fallback for ${address}:`, error); console.error(
`[PublicationTree] Error in search relay fallback for ${address}:`,
error,
);
return { return {
type: PublicationTreeNodeType.Leaf, type: PublicationTreeNodeType.Leaf,
status: PublicationTreeNodeStatus.Error, status: PublicationTreeNodeStatus.Error,
@ -806,9 +895,9 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
* This extracts the common logic for building nodes from events * This extracts the common logic for building nodes from events
*/ */
#buildNodeFromEvent( #buildNodeFromEvent(
event: NDKEvent, event: NDKEvent,
address: string, address: string,
parentNode: PublicationTreeNode parentNode: PublicationTreeNode,
): PublicationTreeNode { ): PublicationTreeNode {
this.#events.set(address, event); this.#events.set(address, event);
@ -816,46 +905,68 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
.filter((tag) => tag[0] === "a") .filter((tag) => tag[0] === "a")
.map((tag) => tag[1]); .map((tag) => tag[1]);
console.debug(`[PublicationTree] Event ${event.id} has ${event.tags.length} tags:`, event.tags); console.debug(
console.debug(`[PublicationTree] Found ${childAddresses.length} a-tags:`, childAddresses); `[PublicationTree] Event ${event.id} has ${event.tags.length} tags:`,
event.tags,
);
console.debug(
`[PublicationTree] Found ${childAddresses.length} a-tags:`,
childAddresses,
);
// If no a-tags found, try e-tags as fallback // If no a-tags found, try e-tags as fallback
if (childAddresses.length === 0) { if (childAddresses.length === 0) {
const eTags = event.tags const eTags = event.tags
.filter((tag) => tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1])); .filter((tag) =>
tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1])
console.debug(`[PublicationTree] Found ${eTags.length} e-tags for event ${event.id}:`, eTags.map(tag => tag[1])); );
console.debug(
`[PublicationTree] Found ${eTags.length} e-tags for event ${event.id}:`,
eTags.map((tag) => tag[1]),
);
// For e-tags with hex IDs, fetch the referenced events to get their addresses // For e-tags with hex IDs, fetch the referenced events to get their addresses
const eTagPromises = eTags.map(async (tag) => { const eTagPromises = eTags.map(async (tag) => {
try { try {
console.debug(`[PublicationTree] Fetching event for e-tag ${tag[1]}`); console.debug(`[PublicationTree] Fetching event for e-tag ${tag[1]}`);
const referencedEvent = await fetchEventById(tag[1]); const referencedEvent = await fetchEventById(tag[1]);
if (referencedEvent) { if (referencedEvent) {
// Construct the proper address format from the referenced event // Construct the proper address format from the referenced event
const dTag = referencedEvent.tags.find(tag => tag[0] === "d")?.[1]; const dTag = referencedEvent.tags.find((tag) => tag[0] === "d")
?.[1];
if (dTag) { if (dTag) {
const address = `${referencedEvent.kind}:${referencedEvent.pubkey}:${dTag}`; const address =
console.debug(`[PublicationTree] Constructed address from e-tag: ${address}`); `${referencedEvent.kind}:${referencedEvent.pubkey}:${dTag}`;
console.debug(
`[PublicationTree] Constructed address from e-tag: ${address}`,
);
return address; return address;
} else { } else {
console.debug(`[PublicationTree] Referenced event ${tag[1]} has no d-tag`); console.debug(
`[PublicationTree] Referenced event ${tag[1]} has no d-tag`,
);
} }
} else { } else {
console.debug(`[PublicationTree] Failed to fetch event for e-tag ${tag[1]}`); console.debug(
`[PublicationTree] Failed to fetch event for e-tag ${tag[1]}`,
);
} }
return null; return null;
} catch (error) { } catch (error) {
console.warn(`[PublicationTree] Failed to fetch event for e-tag ${tag[1]}:`, error); console.warn(
`[PublicationTree] Failed to fetch event for e-tag ${tag[1]}:`,
error,
);
return null; return null;
} }
}); });
// Note: We can't await here since this is a synchronous method // Note: We can't await here since this is a synchronous method
// The e-tag resolution will happen when the children are processed // The e-tag resolution will happen when the children are processed
// For now, we'll add the e-tags as potential child addresses // For now, we'll add the e-tags as potential child addresses
const eTagAddresses = eTags.map(tag => tag[1]); const eTagAddresses = eTags.map((tag) => tag[1]);
childAddresses.push(...eTagAddresses); childAddresses.push(...eTagAddresses);
} }
@ -868,11 +979,14 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
}; };
// Add children asynchronously // Add children asynchronously
const childPromises = childAddresses.map(address => const childPromises = childAddresses.map((address) =>
this.addEventByAddress(address, event) this.addEventByAddress(address, event)
); );
Promise.all(childPromises).catch(error => { Promise.all(childPromises).catch((error) => {
console.warn(`[PublicationTree] Error adding children for ${address}:`, error); console.warn(
`[PublicationTree] Error adding children for ${address}:`,
error,
);
}); });
this.#nodeResolvedObservers.forEach((observer) => observer(address)); this.#nodeResolvedObservers.forEach((observer) => observer(address));
@ -881,10 +995,14 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
} }
#getNodeType(event: NDKEvent): PublicationTreeNodeType { #getNodeType(event: NDKEvent): PublicationTreeNodeType {
if (event.kind === 30040 && ( if (
event.tags.some((tag) => tag[0] === "a") || event.kind === 30040 && (
event.tags.some((tag) => tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1])) event.tags.some((tag) => tag[0] === "a") ||
)) { event.tags.some((tag) =>
tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1])
)
)
) {
return PublicationTreeNodeType.Branch; return PublicationTreeNodeType.Branch;
} }

65
src/lib/data_structures/websocket_pool.ts

@ -42,7 +42,10 @@ export class WebSocketPool {
* @param maxConnections - The maximum number of simultaneous WebSocket connections. Defaults to * @param maxConnections - The maximum number of simultaneous WebSocket connections. Defaults to
* 16. * 16.
*/ */
private constructor(idleTimeoutMs: number = 60000, maxConnections: number = 16) { private constructor(
idleTimeoutMs: number = 60000,
maxConnections: number = 16,
) {
this.#idleTimeoutMs = idleTimeoutMs; this.#idleTimeoutMs = idleTimeoutMs;
this.#maxConnections = maxConnections; this.#maxConnections = maxConnections;
} }
@ -71,15 +74,17 @@ export class WebSocketPool {
} }
if (limit == null || isNaN(limit)) { if (limit == null || isNaN(limit)) {
throw new Error('[WebSocketPool] Connection limit must be a number.'); throw new Error("[WebSocketPool] Connection limit must be a number.");
} }
if (limit <= 0) { if (limit <= 0) {
throw new Error('[WebSocketPool] Connection limit must be greater than 0.'); throw new Error(
"[WebSocketPool] Connection limit must be greater than 0.",
);
} }
if (!Number.isInteger(limit)) { if (!Number.isInteger(limit)) {
throw new Error('[WebSocketPool] Connection limit must be an integer.'); throw new Error("[WebSocketPool] Connection limit must be an integer.");
} }
this.#maxConnections = limit; this.#maxConnections = limit;
@ -106,15 +111,15 @@ export class WebSocketPool {
} }
if (timeoutMs == null || isNaN(timeoutMs)) { if (timeoutMs == null || isNaN(timeoutMs)) {
throw new Error('[WebSocketPool] Idle timeout must be a number.'); throw new Error("[WebSocketPool] Idle timeout must be a number.");
} }
if (timeoutMs <= 0) { if (timeoutMs <= 0) {
throw new Error('[WebSocketPool] Idle timeout must be greater than 0.'); throw new Error("[WebSocketPool] Idle timeout must be greater than 0.");
} }
if (!Number.isInteger(timeoutMs)) { if (!Number.isInteger(timeoutMs)) {
throw new Error('[WebSocketPool] Idle timeout must be an integer.'); throw new Error("[WebSocketPool] Idle timeout must be an integer.");
} }
this.#idleTimeoutMs = timeoutMs; this.#idleTimeoutMs = timeoutMs;
@ -151,9 +156,9 @@ export class WebSocketPool {
if (this.#pool.size >= this.#maxConnections) { if (this.#pool.size >= this.#maxConnections) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
this.#waitingQueue.push({ this.#waitingQueue.push({
url: normalizedUrl, url: normalizedUrl,
resolve: (handle) => resolve(handle.ws), resolve: (handle) => resolve(handle.ws),
reject, reject,
}); });
}); });
@ -163,7 +168,7 @@ export class WebSocketPool {
return newHandle.ws; return newHandle.ws;
} catch (error) { } catch (error) {
throw new Error( throw new Error(
`[WebSocketPool] Failed to acquire connection for ${normalizedUrl}: ${error}` `[WebSocketPool] Failed to acquire connection for ${normalizedUrl}: ${error}`,
); );
} }
} }
@ -179,7 +184,9 @@ export class WebSocketPool {
const normalizedUrl = this.#normalizeUrl(ws.url); const normalizedUrl = this.#normalizeUrl(ws.url);
const handle = this.#pool.get(normalizedUrl); const handle = this.#pool.get(normalizedUrl);
if (!handle) { if (!handle) {
throw new Error('[WebSocketPool] Attempted to release an unmanaged WebSocket connection.'); throw new Error(
"[WebSocketPool] Attempted to release an unmanaged WebSocket connection.",
);
} }
if (--handle.refCount === 0) { if (--handle.refCount === 0) {
@ -191,8 +198,10 @@ export class WebSocketPool {
* Closes all WebSocket connections and "drains" the pool. * Closes all WebSocket connections and "drains" the pool.
*/ */
public drain(): void { public drain(): void {
console.debug(`[WebSocketPool] Draining pool with ${this.#pool.size} connections and ${this.#waitingQueue.length} waiting requests`); console.debug(
`[WebSocketPool] Draining pool with ${this.#pool.size} connections and ${this.#waitingQueue.length} waiting requests`,
);
// Clear all idle timers first // Clear all idle timers first
for (const handle of this.#pool.values()) { for (const handle of this.#pool.values()) {
this.#clearIdleTimer(handle); this.#clearIdleTimer(handle);
@ -200,7 +209,7 @@ export class WebSocketPool {
// Reject all waiting requests // Reject all waiting requests
for (const { reject } of this.#waitingQueue) { for (const { reject } of this.#waitingQueue) {
reject(new Error('[WebSocketPool] Draining pool.')); reject(new Error("[WebSocketPool] Draining pool."));
} }
this.#waitingQueue = []; this.#waitingQueue = [];
@ -211,8 +220,8 @@ export class WebSocketPool {
} }
} }
this.#pool.clear(); this.#pool.clear();
console.debug('[WebSocketPool] Pool drained successfully'); console.debug("[WebSocketPool] Pool drained successfully");
} }
// #endregion // #endregion
@ -239,7 +248,9 @@ export class WebSocketPool {
this.#removeSocket(handle); this.#removeSocket(handle);
this.#processWaitingQueue(); this.#processWaitingQueue();
reject( reject(
new Error(`[WebSocketPool] WebSocket connection failed for ${url}: ${event.type}`) new Error(
`[WebSocketPool] WebSocket connection failed for ${url}: ${event.type}`,
),
); );
}; };
} catch (error) { } catch (error) {
@ -251,7 +262,7 @@ export class WebSocketPool {
#removeSocket(handle: WebSocketHandle): void { #removeSocket(handle: WebSocketHandle): void {
this.#clearIdleTimer(handle); this.#clearIdleTimer(handle);
// Clean up event listeners to prevent memory leaks // Clean up event listeners to prevent memory leaks
// AI-NOTE: Code that checks out connections should clean up its own listener callbacks before // AI-NOTE: Code that checks out connections should clean up its own listener callbacks before
// releasing the connection to the pool. // releasing the connection to the pool.
@ -261,11 +272,13 @@ export class WebSocketPool {
handle.ws.onclose = null; handle.ws.onclose = null;
handle.ws.onmessage = null; handle.ws.onmessage = null;
} }
const url = this.#normalizeUrl(handle.ws.url); const url = this.#normalizeUrl(handle.ws.url);
this.#pool.delete(url); this.#pool.delete(url);
console.debug(`[WebSocketPool] Removed socket for ${url}, pool size: ${this.#pool.size}`); console.debug(
`[WebSocketPool] Removed socket for ${url}, pool size: ${this.#pool.size}`,
);
this.#processWaitingQueue(); this.#processWaitingQueue();
} }
@ -283,7 +296,9 @@ export class WebSocketPool {
handle.idleTimer = setTimeout(() => { handle.idleTimer = setTimeout(() => {
const refCount = handle.refCount; const refCount = handle.refCount;
if (refCount === 0 && handle.ws.readyState === WebSocket.OPEN) { if (refCount === 0 && handle.ws.readyState === WebSocket.OPEN) {
console.debug(`[WebSocketPool] Closing idle connection to ${handle.ws.url}`); console.debug(
`[WebSocketPool] Closing idle connection to ${handle.ws.url}`,
);
handle.ws.close(); handle.ws.close();
this.#removeSocket(handle); this.#removeSocket(handle);
} }
@ -331,7 +346,7 @@ export class WebSocketPool {
#checkOut(handle: WebSocketHandle): void { #checkOut(handle: WebSocketHandle): void {
if (handle.refCount == null) { if (handle.refCount == null) {
throw new Error('[WebSocketPool] Handle refCount unexpectedly null.'); throw new Error("[WebSocketPool] Handle refCount unexpectedly null.");
} }
++handle.refCount; ++handle.refCount;
@ -346,10 +361,10 @@ export class WebSocketPool {
// The logic to remove a trailing slash for connection coalescing can be kept, // The logic to remove a trailing slash for connection coalescing can be kept,
// but should be done on the normalized string. // but should be done on the normalized string.
if (urlObj.pathname !== '/' && normalized.endsWith('/')) { if (urlObj.pathname !== "/" && normalized.endsWith("/")) {
normalized = normalized.slice(0, -1); normalized = normalized.slice(0, -1);
} }
return normalized; return normalized;
} catch { } catch {
// If URL is invalid, return it as-is and let WebSocket constructor handle the error. // If URL is invalid, return it as-is and let WebSocket constructor handle the error.

2
src/lib/navigator/EventNetwork/types.ts

@ -53,7 +53,7 @@ export interface NetworkNode extends SimulationNodeDatum {
tagType?: string; // Type of tag (t, p, e, etc.) tagType?: string; // Type of tag (t, p, e, etc.)
tagValue?: string; // The tag value tagValue?: string; // The tag value
connectedNodes?: string[]; // IDs of nodes that have this tag connectedNodes?: string[]; // IDs of nodes that have this tag
// Person anchor specific fields // Person anchor specific fields
isPersonAnchor?: boolean; // Whether this is a person anchor node isPersonAnchor?: boolean; // Whether this is a person anchor node
pubkey?: string; // The person's public key pubkey?: string; // The person's public key

2
src/lib/navigator/EventNetwork/utils/common.ts

@ -38,4 +38,4 @@ export function createDebugFunction(prefix: string) {
console.log(`[${prefix}]`, ...args); console.log(`[${prefix}]`, ...args);
} }
}; };
} }

325
src/lib/navigator/EventNetwork/utils/forceSimulation.ts

@ -1,11 +1,11 @@
/** /**
* D3 Force Simulation Utilities * D3 Force Simulation Utilities
* *
* This module provides utilities for creating and managing D3 force-directed * This module provides utilities for creating and managing D3 force-directed
* graph simulations for the event network visualization. * graph simulations for the event network visualization.
*/ */
import type { NetworkNode, NetworkLink } from "../types"; import type { NetworkLink, NetworkNode } from "../types";
import * as d3 from "d3"; import * as d3 from "d3";
import { createDebugFunction } from "./common"; import { createDebugFunction } from "./common";
@ -21,18 +21,18 @@ const debug = createDebugFunction("ForceSimulation");
* Provides type safety for simulation operations * Provides type safety for simulation operations
*/ */
export interface Simulation<NodeType, LinkType> { export interface Simulation<NodeType, LinkType> {
nodes(): NodeType[]; nodes(): NodeType[];
nodes(nodes: NodeType[]): this; nodes(nodes: NodeType[]): this;
alpha(): number; alpha(): number;
alpha(alpha: number): this; alpha(alpha: number): this;
alphaTarget(): number; alphaTarget(): number;
alphaTarget(target: number): this; alphaTarget(target: number): this;
restart(): this; restart(): this;
stop(): this; stop(): this;
tick(): this; tick(): this;
on(type: string, listener: (this: this) => void): this; on(type: string, listener: (this: this) => void): this;
force(name: string): any; force(name: string): any;
force(name: string, force: any): this; force(name: string, force: any): this;
} }
/** /**
@ -40,175 +40,192 @@ export interface Simulation<NodeType, LinkType> {
* Provides type safety for drag operations * Provides type safety for drag operations
*/ */
export interface D3DragEvent<GElement extends Element, Datum, Subject> { export interface D3DragEvent<GElement extends Element, Datum, Subject> {
active: number; active: number;
sourceEvent: any; sourceEvent: any;
subject: Subject; subject: Subject;
x: number; x: number;
y: number; y: number;
dx: number; dx: number;
dy: number; dy: number;
identifier: string | number; identifier: string | number;
} }
/** /**
* Updates a node's velocity by applying a force * Updates a node's velocity by applying a force
* *
* @param node - The node to update * @param node - The node to update
* @param deltaVx - Change in x velocity * @param deltaVx - Change in x velocity
* @param deltaVy - Change in y velocity * @param deltaVy - Change in y velocity
*/ */
export function updateNodeVelocity( export function updateNodeVelocity(
node: NetworkNode, node: NetworkNode,
deltaVx: number, deltaVx: number,
deltaVy: number deltaVy: number,
) { ) {
debug("Updating node velocity", { debug("Updating node velocity", {
nodeId: node.id, nodeId: node.id,
currentVx: node.vx, currentVx: node.vx,
currentVy: node.vy, currentVy: node.vy,
deltaVx, deltaVx,
deltaVy deltaVy,
}); });
if (typeof node.vx === "number" && typeof node.vy === "number") { if (typeof node.vx === "number" && typeof node.vy === "number") {
node.vx = node.vx - deltaVx; node.vx = node.vx - deltaVx;
node.vy = node.vy - deltaVy; node.vy = node.vy - deltaVy;
debug("New velocity", { nodeId: node.id, vx: node.vx, vy: node.vy }); debug("New velocity", { nodeId: node.id, vx: node.vx, vy: node.vy });
} else { } else {
debug("Node velocity not defined", { nodeId: node.id }); debug("Node velocity not defined", { nodeId: node.id });
} }
} }
/** /**
* Applies a logarithmic gravity force pulling the node toward the center * Applies a logarithmic gravity force pulling the node toward the center
* *
* The logarithmic scale ensures that nodes far from the center experience * The logarithmic scale ensures that nodes far from the center experience
* stronger gravity, preventing them from drifting too far away. * stronger gravity, preventing them from drifting too far away.
* *
* @param node - The node to apply gravity to * @param node - The node to apply gravity to
* @param centerX - X coordinate of the center * @param centerX - X coordinate of the center
* @param centerY - Y coordinate of the center * @param centerY - Y coordinate of the center
* @param alpha - Current simulation alpha (cooling factor) * @param alpha - Current simulation alpha (cooling factor)
*/ */
export function applyGlobalLogGravity( export function applyGlobalLogGravity(
node: NetworkNode, node: NetworkNode,
centerX: number, centerX: number,
centerY: number, centerY: number,
alpha: number, alpha: number,
) { ) {
// Tag anchors and person anchors should not be affected by gravity // Tag anchors and person anchors should not be affected by gravity
if (node.isTagAnchor || node.isPersonAnchor) return; if (node.isTagAnchor || node.isPersonAnchor) return;
const dx = (node.x ?? 0) - centerX; const dx = (node.x ?? 0) - centerX;
const dy = (node.y ?? 0) - centerY; const dy = (node.y ?? 0) - centerY;
const distance = Math.sqrt(dx * dx + dy * dy); const distance = Math.sqrt(dx * dx + dy * dy);
if (distance === 0) return; if (distance === 0) return;
const force = Math.log(distance + 1) * GRAVITY_STRENGTH * alpha; const force = Math.log(distance + 1) * GRAVITY_STRENGTH * alpha;
updateNodeVelocity(node, (dx / distance) * force, (dy / distance) * force); updateNodeVelocity(node, (dx / distance) * force, (dy / distance) * force);
} }
/** /**
* Applies gravity between connected nodes * Applies gravity between connected nodes
* *
* This creates a cohesive force that pulls connected nodes toward their * This creates a cohesive force that pulls connected nodes toward their
* collective center of gravity, creating more meaningful clusters. * collective center of gravity, creating more meaningful clusters.
* *
* @param node - The node to apply connected gravity to * @param node - The node to apply connected gravity to
* @param links - All links in the network * @param links - All links in the network
* @param alpha - Current simulation alpha (cooling factor) * @param alpha - Current simulation alpha (cooling factor)
*/ */
export function applyConnectedGravity( export function applyConnectedGravity(
node: NetworkNode, node: NetworkNode,
links: NetworkLink[], links: NetworkLink[],
alpha: number, alpha: number,
) { ) {
// Tag anchors and person anchors should not be affected by connected gravity // Tag anchors and person anchors should not be affected by connected gravity
if (node.isTagAnchor || node.isPersonAnchor) return; if (node.isTagAnchor || node.isPersonAnchor) return;
// Find all nodes connected to this node (excluding tag anchors and person anchors)
const connectedNodes = links
.filter(link => link.source.id === node.id || link.target.id === node.id)
.map(link => link.source.id === node.id ? link.target : link.source)
.filter(n => !n.isTagAnchor && !n.isPersonAnchor);
if (connectedNodes.length === 0) return; // Find all nodes connected to this node (excluding tag anchors and person anchors)
const connectedNodes = links
.filter((link) => link.source.id === node.id || link.target.id === node.id)
.map((link) => link.source.id === node.id ? link.target : link.source)
.filter((n) => !n.isTagAnchor && !n.isPersonAnchor);
// Calculate center of gravity of connected nodes if (connectedNodes.length === 0) return;
const cogX = d3.mean(connectedNodes, (n: NetworkNode) => n.x);
const cogY = d3.mean(connectedNodes, (n: NetworkNode) => n.y);
if (cogX === undefined || cogY === undefined) return; // Calculate center of gravity of connected nodes
const cogX = d3.mean(connectedNodes, (n: NetworkNode) => n.x);
const cogY = d3.mean(connectedNodes, (n: NetworkNode) => n.y);
// Calculate force direction and magnitude if (cogX === undefined || cogY === undefined) return;
const dx = (node.x ?? 0) - cogX;
const dy = (node.y ?? 0) - cogY;
const distance = Math.sqrt(dx * dx + dy * dy);
if (distance === 0) return; // Calculate force direction and magnitude
const dx = (node.x ?? 0) - cogX;
const dy = (node.y ?? 0) - cogY;
const distance = Math.sqrt(dx * dx + dy * dy);
// Apply force proportional to distance if (distance === 0) return;
const force = distance * CONNECTED_GRAVITY_STRENGTH * alpha;
updateNodeVelocity(node, (dx / distance) * force, (dy / distance) * force); // Apply force proportional to distance
const force = distance * CONNECTED_GRAVITY_STRENGTH * alpha;
updateNodeVelocity(node, (dx / distance) * force, (dy / distance) * force);
} }
/** /**
* Sets up drag behavior for nodes * Sets up drag behavior for nodes
* *
* This enables interactive dragging of nodes in the visualization. * This enables interactive dragging of nodes in the visualization.
* *
* @param simulation - The D3 force simulation * @param simulation - The D3 force simulation
* @param warmupClickEnergy - Alpha target when dragging starts (0-1) * @param warmupClickEnergy - Alpha target when dragging starts (0-1)
* @returns D3 drag behavior configured for the simulation * @returns D3 drag behavior configured for the simulation
*/ */
export function setupDragHandlers( export function setupDragHandlers(
simulation: Simulation<NetworkNode, NetworkLink>, simulation: Simulation<NetworkNode, NetworkLink>,
warmupClickEnergy: number = 0.9 warmupClickEnergy: number = 0.9,
) { ) {
return d3 return d3
.drag() .drag()
.on("start", (event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>, d: NetworkNode) => { .on(
// Tag anchors and person anchors retain their anchor behavior "start",
if (d.isTagAnchor || d.isPersonAnchor) { (
// Still allow dragging but maintain anchor status event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>,
d.fx = d.x; d: NetworkNode,
d.fy = d.y; ) => {
return; // Tag anchors and person anchors retain their anchor behavior
} if (d.isTagAnchor || d.isPersonAnchor) {
// Still allow dragging but maintain anchor status
// Warm up simulation if it's cooled down d.fx = d.x;
if (!event.active) { d.fy = d.y;
simulation.alphaTarget(warmupClickEnergy).restart(); return;
} }
// Fix node position at current location
d.fx = d.x; // Warm up simulation if it's cooled down
d.fy = d.y; if (!event.active) {
}) simulation.alphaTarget(warmupClickEnergy).restart();
.on("drag", (event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>, d: NetworkNode) => { }
// Update position for all nodes including anchors // Fix node position at current location
d.fx = d.x;
// Update fixed position to mouse position d.fy = d.y;
d.fx = event.x; },
d.fy = event.y; )
}) .on(
.on("end", (event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>, d: NetworkNode) => { "drag",
(
// Cool down simulation when drag ends event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>,
if (!event.active) { d: NetworkNode,
simulation.alphaTarget(0); ) => {
} // Update position for all nodes including anchors
// Keep all nodes fixed after dragging // Update fixed position to mouse position
// This allows users to manually position any node type d.fx = event.x;
d.fx = d.x; d.fy = event.y;
d.fy = d.y; },
}); )
.on(
"end",
(
event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>,
d: NetworkNode,
) => {
// Cool down simulation when drag ends
if (!event.active) {
simulation.alphaTarget(0);
}
// Keep all nodes fixed after dragging
// This allows users to manually position any node type
d.fx = d.x;
d.fy = d.y;
},
);
} }
/** /**
* Creates a D3 force simulation for the network * Creates a D3 force simulation for the network
* *
* @param nodes - Array of network nodes * @param nodes - Array of network nodes
* @param links - Array of network links * @param links - Array of network links
* @param nodeRadius - Radius of node circles * @param nodeRadius - Radius of node circles
@ -216,34 +233,34 @@ export function setupDragHandlers(
* @returns Configured D3 force simulation * @returns Configured D3 force simulation
*/ */
export function createSimulation( export function createSimulation(
nodes: NetworkNode[], nodes: NetworkNode[],
links: NetworkLink[], links: NetworkLink[],
nodeRadius: number, nodeRadius: number,
linkDistance: number linkDistance: number,
): Simulation<NetworkNode, NetworkLink> { ): Simulation<NetworkNode, NetworkLink> {
debug("Creating simulation", { debug("Creating simulation", {
nodeCount: nodes.length, nodeCount: nodes.length,
linkCount: links.length, linkCount: links.length,
nodeRadius, nodeRadius,
linkDistance linkDistance,
}); });
try { try {
// Create the simulation with nodes // Create the simulation with nodes
const simulation = d3 const simulation = d3
.forceSimulation(nodes) .forceSimulation(nodes)
.force( .force(
"link", "link",
d3.forceLink(links) d3.forceLink(links)
.id((d: NetworkNode) => d.id) .id((d: NetworkNode) => d.id)
.distance(linkDistance * 0.1) .distance(linkDistance * 0.1),
) )
.force("collide", d3.forceCollide().radius(nodeRadius * 4)); .force("collide", d3.forceCollide().radius(nodeRadius * 4));
debug("Simulation created successfully"); debug("Simulation created successfully");
return simulation; return simulation;
} catch (error) { } catch (error) {
console.error("Error creating simulation:", error); console.error("Error creating simulation:", error);
throw error; throw error;
} }
} }

439
src/lib/navigator/EventNetwork/utils/networkBuilder.ts

@ -1,16 +1,16 @@
/** /**
* Network Builder Utilities * Network Builder Utilities
* *
* This module provides utilities for building a network graph from Nostr events. * This module provides utilities for building a network graph from Nostr events.
* It handles the creation of nodes and links, and the processing of event relationships. * It handles the creation of nodes and links, and the processing of event relationships.
*/ */
import type { NDKEvent } from "@nostr-dev-kit/ndk"; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { NetworkNode, NetworkLink, GraphData, GraphState } from "../types"; import type { GraphData, GraphState, NetworkLink, NetworkNode } from "../types";
import { nip19 } from "nostr-tools"; import { nip19 } from "nostr-tools";
import { communityRelays } from "$lib/consts"; import { communityRelays } from "$lib/consts";
import { getMatchingTags } from '$lib/utils/nostrUtils'; import { getMatchingTags } from "$lib/utils/nostrUtils";
import { getDisplayNameSync } from '$lib/utils/profileCache'; import { getDisplayNameSync } from "$lib/utils/profileCache";
import { createDebugFunction } from "./common"; import { createDebugFunction } from "./common";
// Configuration // Configuration
@ -22,165 +22,173 @@ const debug = createDebugFunction("NetworkBuilder");
/** /**
* Creates a NetworkNode from an NDKEvent * Creates a NetworkNode from an NDKEvent
* *
* Extracts relevant information from the event and creates a node representation * Extracts relevant information from the event and creates a node representation
* for the visualization. * for the visualization.
* *
* @param event - The Nostr event to convert to a node * @param event - The Nostr event to convert to a node
* @param level - The hierarchy level of the node (default: 0) * @param level - The hierarchy level of the node (default: 0)
* @returns A NetworkNode object representing the event * @returns A NetworkNode object representing the event
*/ */
export function createNetworkNode( export function createNetworkNode(
event: NDKEvent, event: NDKEvent,
level: number = 0 level: number = 0,
): NetworkNode { ): NetworkNode {
debug("Creating network node", { eventId: event.id, kind: event.kind, level }); debug("Creating network node", {
eventId: event.id,
const isContainer = event.kind === INDEX_EVENT_KIND; kind: event.kind,
const nodeType = isContainer ? "Index" : event.kind === CONTENT_EVENT_KIND || event.kind === 30818 ? "Content" : `Kind ${event.kind}`; level,
});
// Create the base node with essential properties const isContainer = event.kind === INDEX_EVENT_KIND;
const node: NetworkNode = { const nodeType = isContainer
? "Index"
: event.kind === CONTENT_EVENT_KIND || event.kind === 30818
? "Content"
: `Kind ${event.kind}`;
// Create the base node with essential properties
const node: NetworkNode = {
id: event.id,
event,
isContainer,
level,
title: event.getMatchingTags("title")?.[0]?.[1] || "Untitled",
content: event.content || "",
author: event.pubkey ? getDisplayNameSync(event.pubkey) : "",
kind: event.kind !== undefined ? event.kind : CONTENT_EVENT_KIND, // Default to content event kind only if truly undefined
type: nodeType as "Index" | "Content" | "TagAnchor",
};
// Add NIP-19 identifiers if possible
if (event.kind && event.pubkey) {
try {
const dTag = event.getMatchingTags("d")?.[0]?.[1] || "";
// Create naddr (NIP-19 address) for the event
node.naddr = nip19.naddrEncode({
pubkey: event.pubkey,
identifier: dTag,
kind: event.kind,
relays: communityRelays,
});
// Create nevent (NIP-19 event reference) for the event
node.nevent = nip19.neventEncode({
id: event.id, id: event.id,
event, relays: communityRelays,
isContainer, kind: event.kind,
level, });
title: event.getMatchingTags("title")?.[0]?.[1] || "Untitled", } catch (error) {
content: event.content || "", console.warn("Failed to generate identifiers for node:", error);
author: event.pubkey ? getDisplayNameSync(event.pubkey) : "",
kind: event.kind !== undefined ? event.kind : CONTENT_EVENT_KIND, // Default to content event kind only if truly undefined
type: nodeType as "Index" | "Content" | "TagAnchor",
};
// Add NIP-19 identifiers if possible
if (event.kind && event.pubkey) {
try {
const dTag = event.getMatchingTags("d")?.[0]?.[1] || "";
// Create naddr (NIP-19 address) for the event
node.naddr = nip19.naddrEncode({
pubkey: event.pubkey,
identifier: dTag,
kind: event.kind,
relays: communityRelays,
});
// Create nevent (NIP-19 event reference) for the event
node.nevent = nip19.neventEncode({
id: event.id,
relays: communityRelays,
kind: event.kind,
});
} catch (error) {
console.warn("Failed to generate identifiers for node:", error);
}
} }
}
return node; return node;
} }
/** /**
* Creates a map of event IDs to events for quick lookup * Creates a map of event IDs to events for quick lookup
* *
* @param events - Array of Nostr events * @param events - Array of Nostr events
* @returns Map of event IDs to events * @returns Map of event IDs to events
*/ */
export function createEventMap(events: NDKEvent[]): Map<string, NDKEvent> { export function createEventMap(events: NDKEvent[]): Map<string, NDKEvent> {
debug("Creating event map", { eventCount: events.length }); debug("Creating event map", { eventCount: events.length });
const eventMap = new Map<string, NDKEvent>(); const eventMap = new Map<string, NDKEvent>();
events.forEach((event) => { events.forEach((event) => {
if (event.id) { if (event.id) {
eventMap.set(event.id, event); eventMap.set(event.id, event);
} }
}); });
debug("Event map created", { mapSize: eventMap.size }); debug("Event map created", { mapSize: eventMap.size });
return eventMap; return eventMap;
} }
/** /**
* Extracts an event ID from an 'a' tag * Extracts an event ID from an 'a' tag
* *
* @param tag - The tag array from a Nostr event * @param tag - The tag array from a Nostr event
* @returns The event ID or null if not found * @returns The event ID or null if not found
*/ */
export function extractEventIdFromATag(tag: string[]): string | null { export function extractEventIdFromATag(tag: string[]): string | null {
return tag[3] || null; return tag[3] || null;
} }
/** /**
* Generates a deterministic color for an event based on its ID * Generates a deterministic color for an event based on its ID
* *
* This creates visually distinct colors for different index events * This creates visually distinct colors for different index events
* while ensuring the same event always gets the same color. * while ensuring the same event always gets the same color.
* *
* @param eventId - The event ID to generate a color for * @param eventId - The event ID to generate a color for
* @returns An HSL color string * @returns An HSL color string
*/ */
export function getEventColor(eventId: string): string { export function getEventColor(eventId: string): string {
// Use first 4 characters of event ID as a hex number // Use first 4 characters of event ID as a hex number
const num = parseInt(eventId.slice(0, 4), 16); const num = parseInt(eventId.slice(0, 4), 16);
// Convert to a hue value (0-359) // Convert to a hue value (0-359)
const hue = num % 360; const hue = num % 360;
// Use fixed saturation and lightness for pastel colors // Use fixed saturation and lightness for pastel colors
const saturation = 70; const saturation = 70;
const lightness = 75; const lightness = 75;
return `hsl(${hue}, ${saturation}%, ${lightness}%)`; return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
} }
/** /**
* Initializes the graph state from a set of events * Initializes the graph state from a set of events
* *
* Creates nodes for all events and identifies referenced events. * Creates nodes for all events and identifies referenced events.
* *
* @param events - Array of Nostr events * @param events - Array of Nostr events
* @returns Initial graph state * @returns Initial graph state
*/ */
export function initializeGraphState(events: NDKEvent[]): GraphState { export function initializeGraphState(events: NDKEvent[]): GraphState {
debug("Initializing graph state", { eventCount: events.length }); debug("Initializing graph state", { eventCount: events.length });
const nodeMap = new Map<string, NetworkNode>(); const nodeMap = new Map<string, NetworkNode>();
const eventMap = createEventMap(events); const eventMap = createEventMap(events);
// Create initial nodes for all events // Create initial nodes for all events
events.forEach((event) => { events.forEach((event) => {
if (!event.id) return; if (!event.id) return;
const node = createNetworkNode(event); const node = createNetworkNode(event);
nodeMap.set(event.id, node); nodeMap.set(event.id, node);
});
debug("Node map created", { nodeCount: nodeMap.size });
// Build set of referenced event IDs to identify root events
const referencedIds = new Set<string>();
events.forEach((event) => {
const aTags = getMatchingTags(event, "a");
debug("Processing a-tags for event", {
eventId: event.id,
aTagCount: aTags.length,
}); });
debug("Node map created", { nodeCount: nodeMap.size });
aTags.forEach((tag) => {
// Build set of referenced event IDs to identify root events const id = extractEventIdFromATag(tag);
const referencedIds = new Set<string>(); if (id) referencedIds.add(id);
events.forEach((event) => {
const aTags = getMatchingTags(event, "a");
debug("Processing a-tags for event", {
eventId: event.id,
aTagCount: aTags.length
});
aTags.forEach((tag) => {
const id = extractEventIdFromATag(tag);
if (id) referencedIds.add(id);
});
}); });
debug("Referenced IDs set created", { referencedCount: referencedIds.size }); });
debug("Referenced IDs set created", { referencedCount: referencedIds.size });
return {
nodeMap, return {
links: [], nodeMap,
eventMap, links: [],
referencedIds, eventMap,
}; referencedIds,
};
} }
/** /**
* Processes a sequence of nodes referenced by an index event * Processes a sequence of nodes referenced by an index event
* *
* Creates links between the index and its content, and between sequential content nodes. * Creates links between the index and its content, and between sequential content nodes.
* Also processes nested indices recursively up to the maximum level. * Also processes nested indices recursively up to the maximum level.
* *
* @param sequence - Array of nodes in the sequence * @param sequence - Array of nodes in the sequence
* @param indexEvent - The index event referencing the sequence * @param indexEvent - The index event referencing the sequence
* @param level - Current hierarchy level * @param level - Current hierarchy level
@ -188,156 +196,157 @@ export function initializeGraphState(events: NDKEvent[]): GraphState {
* @param maxLevel - Maximum hierarchy level to process * @param maxLevel - Maximum hierarchy level to process
*/ */
export function processSequence( export function processSequence(
sequence: NetworkNode[], sequence: NetworkNode[],
indexEvent: NDKEvent, indexEvent: NDKEvent,
level: number, level: number,
state: GraphState, state: GraphState,
maxLevel: number, maxLevel: number,
): void { ): void {
// Stop if we've reached max level or have no nodes // Stop if we've reached max level or have no nodes
if (level >= maxLevel || sequence.length === 0) return; if (level >= maxLevel || sequence.length === 0) return;
// Set levels for all nodes in the sequence
sequence.forEach((node) => {
node.level = level + 1;
});
// Set levels for all nodes in the sequence // Create link from index to first content node
sequence.forEach((node) => { const indexNode = state.nodeMap.get(indexEvent.id);
node.level = level + 1; if (indexNode && sequence[0]) {
state.links.push({
source: indexNode,
target: sequence[0],
isSequential: true,
}); });
}
// Create link from index to first content node // Create sequential links between content nodes
const indexNode = state.nodeMap.get(indexEvent.id); for (let i = 0; i < sequence.length - 1; i++) {
if (indexNode && sequence[0]) { const currentNode = sequence[i];
state.links.push({ const nextNode = sequence[i + 1];
source: indexNode,
target: sequence[0],
isSequential: true,
});
}
// Create sequential links between content nodes state.links.push({
for (let i = 0; i < sequence.length - 1; i++) { source: currentNode,
const currentNode = sequence[i]; target: nextNode,
const nextNode = sequence[i + 1]; isSequential: true,
});
state.links.push({
source: currentNode,
target: nextNode,
isSequential: true,
});
// Process nested indices recursively
if (currentNode.isContainer) {
processNestedIndex(currentNode, level + 1, state, maxLevel);
}
}
// Process the last node if it's an index // Process nested indices recursively
const lastNode = sequence[sequence.length - 1]; if (currentNode.isContainer) {
if (lastNode?.isContainer) { processNestedIndex(currentNode, level + 1, state, maxLevel);
processNestedIndex(lastNode, level + 1, state, maxLevel);
} }
}
// Process the last node if it's an index
const lastNode = sequence[sequence.length - 1];
if (lastNode?.isContainer) {
processNestedIndex(lastNode, level + 1, state, maxLevel);
}
} }
/** /**
* Processes a nested index node * Processes a nested index node
* *
* @param node - The index node to process * @param node - The index node to process
* @param level - Current hierarchy level * @param level - Current hierarchy level
* @param state - Current graph state * @param state - Current graph state
* @param maxLevel - Maximum hierarchy level to process * @param maxLevel - Maximum hierarchy level to process
*/ */
export function processNestedIndex( export function processNestedIndex(
node: NetworkNode, node: NetworkNode,
level: number, level: number,
state: GraphState, state: GraphState,
maxLevel: number, maxLevel: number,
): void { ): void {
if (!node.isContainer || level >= maxLevel) return; if (!node.isContainer || level >= maxLevel) return;
const nestedEvent = state.eventMap.get(node.id); const nestedEvent = state.eventMap.get(node.id);
if (nestedEvent) { if (nestedEvent) {
processIndexEvent(nestedEvent, level, state, maxLevel); processIndexEvent(nestedEvent, level, state, maxLevel);
} }
} }
/** /**
* Processes an index event and its referenced content * Processes an index event and its referenced content
* *
* @param indexEvent - The index event to process * @param indexEvent - The index event to process
* @param level - Current hierarchy level * @param level - Current hierarchy level
* @param state - Current graph state * @param state - Current graph state
* @param maxLevel - Maximum hierarchy level to process * @param maxLevel - Maximum hierarchy level to process
*/ */
export function processIndexEvent( export function processIndexEvent(
indexEvent: NDKEvent, indexEvent: NDKEvent,
level: number, level: number,
state: GraphState, state: GraphState,
maxLevel: number, maxLevel: number,
): void { ): void {
if (level >= maxLevel) return; if (level >= maxLevel) return;
// Extract the sequence of nodes referenced by this index // Extract the sequence of nodes referenced by this index
const sequence = getMatchingTags(indexEvent, "a") const sequence = getMatchingTags(indexEvent, "a")
.map((tag) => extractEventIdFromATag(tag)) .map((tag) => extractEventIdFromATag(tag))
.filter((id): id is string => id !== null) .filter((id): id is string => id !== null)
.map((id) => state.nodeMap.get(id)) .map((id) => state.nodeMap.get(id))
.filter((node): node is NetworkNode => node !== undefined); .filter((node): node is NetworkNode => node !== undefined);
processSequence(sequence, indexEvent, level, state, maxLevel); processSequence(sequence, indexEvent, level, state, maxLevel);
} }
/** /**
* Generates a complete graph from a set of events * Generates a complete graph from a set of events
* *
* This is the main entry point for building the network visualization. * This is the main entry point for building the network visualization.
* *
* @param events - Array of Nostr events * @param events - Array of Nostr events
* @param maxLevel - Maximum hierarchy level to process * @param maxLevel - Maximum hierarchy level to process
* @returns Complete graph data for visualization * @returns Complete graph data for visualization
*/ */
export function generateGraph( export function generateGraph(
events: NDKEvent[], events: NDKEvent[],
maxLevel: number maxLevel: number,
): GraphData { ): GraphData {
debug("Generating graph", { eventCount: events.length, maxLevel }); debug("Generating graph", { eventCount: events.length, maxLevel });
// Initialize the graph state
const state = initializeGraphState(events);
// Find root events (index events not referenced by others, and all non-publication events)
const publicationKinds = [30040, 30041, 30818];
const rootEvents = events.filter(
(e) => e.id && (
// Index events not referenced by others
(e.kind === INDEX_EVENT_KIND && !state.referencedIds.has(e.id)) ||
// All non-publication events are treated as roots
(e.kind !== undefined && !publicationKinds.includes(e.kind))
)
);
debug("Found root events", {
rootCount: rootEvents.length,
rootIds: rootEvents.map(e => e.id)
});
// Process each root event
rootEvents.forEach((rootEvent) => {
debug("Processing root event", {
rootId: rootEvent.id,
kind: rootEvent.kind,
aTags: getMatchingTags(rootEvent, "a").length
});
processIndexEvent(rootEvent, 0, state, maxLevel);
});
// Create the final graph data // Initialize the graph state
const result = { const state = initializeGraphState(events);
nodes: Array.from(state.nodeMap.values()),
links: state.links, // Find root events (index events not referenced by others, and all non-publication events)
}; const publicationKinds = [30040, 30041, 30818];
const rootEvents = events.filter(
debug("Graph generation complete", { (e) =>
nodeCount: result.nodes.length, e.id && (
linkCount: result.links.length // Index events not referenced by others
(e.kind === INDEX_EVENT_KIND && !state.referencedIds.has(e.id)) ||
// All non-publication events are treated as roots
(e.kind !== undefined && !publicationKinds.includes(e.kind))
),
);
debug("Found root events", {
rootCount: rootEvents.length,
rootIds: rootEvents.map((e) => e.id),
});
// Process each root event
rootEvents.forEach((rootEvent) => {
debug("Processing root event", {
rootId: rootEvent.id,
kind: rootEvent.kind,
aTags: getMatchingTags(rootEvent, "a").length,
}); });
processIndexEvent(rootEvent, 0, state, maxLevel);
return result; });
// Create the final graph data
const result = {
nodes: Array.from(state.nodeMap.values()),
links: state.links,
};
debug("Graph generation complete", {
nodeCount: result.nodes.length,
linkCount: result.links.length,
});
return result;
} }

100
src/lib/navigator/EventNetwork/utils/personNetworkBuilder.ts

@ -5,9 +5,9 @@
*/ */
import type { NDKEvent } from "@nostr-dev-kit/ndk"; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { NetworkNode, NetworkLink } from "../types"; import type { NetworkLink, NetworkNode } from "../types";
import { getDisplayNameSync } from "$lib/utils/profileCache"; import { getDisplayNameSync } from "$lib/utils/profileCache";
import { SeededRandom, createDebugFunction } from "./common"; import { createDebugFunction, SeededRandom } from "./common";
const PERSON_ANCHOR_RADIUS = 15; const PERSON_ANCHOR_RADIUS = 15;
const PERSON_ANCHOR_PLACEMENT_RADIUS = 1000; const PERSON_ANCHOR_PLACEMENT_RADIUS = 1000;
@ -16,7 +16,6 @@ const MAX_PERSON_NODES = 20; // Default limit for person nodes
// Debug function // Debug function
const debug = createDebugFunction("PersonNetworkBuilder"); const debug = createDebugFunction("PersonNetworkBuilder");
/** /**
* Creates a deterministic seed from a string * Creates a deterministic seed from a string
*/ */
@ -42,13 +41,16 @@ export interface PersonConnection {
*/ */
export function extractUniquePersons( export function extractUniquePersons(
events: NDKEvent[], events: NDKEvent[],
followListEvents?: NDKEvent[] followListEvents?: NDKEvent[],
): Map<string, PersonConnection> { ): Map<string, PersonConnection> {
// Map of pubkey -> PersonConnection // Map of pubkey -> PersonConnection
const personMap = new Map<string, PersonConnection>(); const personMap = new Map<string, PersonConnection>();
debug("Extracting unique persons", { eventCount: events.length, followListCount: followListEvents?.length || 0 }); debug("Extracting unique persons", {
eventCount: events.length,
followListCount: followListEvents?.length || 0,
});
// First collect pubkeys from follow list events // First collect pubkeys from follow list events
const followListPubkeys = new Set<string>(); const followListPubkeys = new Set<string>();
if (followListEvents && followListEvents.length > 0) { if (followListEvents && followListEvents.length > 0) {
@ -60,10 +62,10 @@ export function extractUniquePersons(
// People in follow lists (p tags) // People in follow lists (p tags)
if (event.tags) { if (event.tags) {
event.tags event.tags
.filter(tag => { .filter((tag) => {
tag[0] === 'p' tag[0] === "p";
}) })
.forEach(tag => { .forEach((tag) => {
followListPubkeys.add(tag[1]); followListPubkeys.add(tag[1]);
}); });
} }
@ -79,7 +81,7 @@ export function extractUniquePersons(
personMap.set(event.pubkey, { personMap.set(event.pubkey, {
signedByEventIds: new Set(), signedByEventIds: new Set(),
referencedInEventIds: new Set(), referencedInEventIds: new Set(),
isFromFollowList: followListPubkeys.has(event.pubkey) isFromFollowList: followListPubkeys.has(event.pubkey),
}); });
} }
personMap.get(event.pubkey)!.signedByEventIds.add(event.id); personMap.get(event.pubkey)!.signedByEventIds.add(event.id);
@ -87,14 +89,14 @@ export function extractUniquePersons(
// Track referenced connections from "p" tags // Track referenced connections from "p" tags
if (event.tags) { if (event.tags) {
event.tags.forEach(tag => { event.tags.forEach((tag) => {
if (tag[0] === "p" && tag[1]) { if (tag[0] === "p" && tag[1]) {
const referencedPubkey = tag[1]; const referencedPubkey = tag[1];
if (!personMap.has(referencedPubkey)) { if (!personMap.has(referencedPubkey)) {
personMap.set(referencedPubkey, { personMap.set(referencedPubkey, {
signedByEventIds: new Set(), signedByEventIds: new Set(),
referencedInEventIds: new Set(), referencedInEventIds: new Set(),
isFromFollowList: followListPubkeys.has(referencedPubkey) isFromFollowList: followListPubkeys.has(referencedPubkey),
}); });
} }
personMap.get(referencedPubkey)!.referencedInEventIds.add(event.id); personMap.get(referencedPubkey)!.referencedInEventIds.add(event.id);
@ -102,7 +104,7 @@ export function extractUniquePersons(
}); });
} }
}); });
debug("Extracted persons", { personCount: personMap.size }); debug("Extracted persons", { personCount: personMap.size });
return personMap; return personMap;
@ -115,7 +117,7 @@ function buildEligiblePerson(
pubkey: string, pubkey: string,
connection: PersonConnection, connection: PersonConnection,
showSignedBy: boolean, showSignedBy: boolean,
showReferenced: boolean showReferenced: boolean,
): { ): {
pubkey: string; pubkey: string;
connection: PersonConnection; connection: PersonConnection;
@ -125,11 +127,11 @@ function buildEligiblePerson(
const connectedEventIds = new Set<string>(); const connectedEventIds = new Set<string>();
if (showSignedBy) { if (showSignedBy) {
connection.signedByEventIds.forEach(id => connectedEventIds.add(id)); connection.signedByEventIds.forEach((id) => connectedEventIds.add(id));
} }
if (showReferenced) { if (showReferenced) {
connection.referencedInEventIds.forEach(id => connectedEventIds.add(id)); connection.referencedInEventIds.forEach((id) => connectedEventIds.add(id));
} }
if (connectedEventIds.size === 0) { if (connectedEventIds.size === 0) {
@ -140,7 +142,7 @@ function buildEligiblePerson(
pubkey, pubkey,
connection, connection,
connectedEventIds, connectedEventIds,
totalConnections: connectedEventIds.size totalConnections: connectedEventIds.size,
}; };
} }
@ -155,7 +157,7 @@ function getEligiblePersons(
personMap: Map<string, PersonConnection>, personMap: Map<string, PersonConnection>,
showSignedBy: boolean, showSignedBy: boolean,
showReferenced: boolean, showReferenced: boolean,
limit: number limit: number,
): EligiblePerson[] { ): EligiblePerson[] {
// Build eligible persons and keep only top N using a min-heap or partial sort // Build eligible persons and keep only top N using a min-heap or partial sort
const eligible: EligiblePerson[] = []; const eligible: EligiblePerson[] = [];
@ -163,16 +165,20 @@ function getEligiblePersons(
for (const [pubkey, connection] of personMap) { for (const [pubkey, connection] of personMap) {
let totalConnections = 0; let totalConnections = 0;
if (showSignedBy) totalConnections += connection.signedByEventIds.size; if (showSignedBy) totalConnections += connection.signedByEventIds.size;
if (showReferenced) totalConnections += connection.referencedInEventIds.size; if (showReferenced) {
totalConnections += connection.referencedInEventIds.size;
}
if (totalConnections === 0) continue; if (totalConnections === 0) continue;
// Only build the set if this person is eligible // Only build the set if this person is eligible
const connectedEventIds = new Set<string>(); const connectedEventIds = new Set<string>();
if (showSignedBy) { if (showSignedBy) {
connection.signedByEventIds.forEach(id => connectedEventIds.add(id)); connection.signedByEventIds.forEach((id) => connectedEventIds.add(id));
} }
if (showReferenced) { if (showReferenced) {
connection.referencedInEventIds.forEach(id => connectedEventIds.add(id)); connection.referencedInEventIds.forEach((id) =>
connectedEventIds.add(id)
);
} }
eligible.push({ pubkey, connection, totalConnections, connectedEventIds }); eligible.push({ pubkey, connection, totalConnections, connectedEventIds });
@ -192,22 +198,27 @@ export function createPersonAnchorNodes(
height: number, height: number,
showSignedBy: boolean, showSignedBy: boolean,
showReferenced: boolean, showReferenced: boolean,
limit: number = MAX_PERSON_NODES limit: number = MAX_PERSON_NODES,
): { nodes: NetworkNode[], totalCount: number } { ): { nodes: NetworkNode[]; totalCount: number } {
const anchorNodes: NetworkNode[] = []; const anchorNodes: NetworkNode[] = [];
const centerX = width / 2; const centerX = width / 2;
const centerY = height / 2; const centerY = height / 2;
// Calculate eligible persons and their connection counts // Calculate eligible persons and their connection counts
const eligiblePersons = getEligiblePersons(personMap, showSignedBy, showReferenced, limit); const eligiblePersons = getEligiblePersons(
personMap,
showSignedBy,
showReferenced,
limit,
);
// Create nodes for the limited set // Create nodes for the limited set
debug("Creating person anchor nodes", { debug("Creating person anchor nodes", {
eligibleCount: eligiblePersons.length, eligibleCount: eligiblePersons.length,
limitedCount: eligiblePersons.length, limitedCount: eligiblePersons.length,
showSignedBy, showSignedBy,
showReferenced showReferenced,
}); });
eligiblePersons.forEach(({ pubkey, connection, connectedEventIds }) => { eligiblePersons.forEach(({ pubkey, connection, connectedEventIds }) => {
@ -226,7 +237,8 @@ export function createPersonAnchorNodes(
const anchorNode: NetworkNode = { const anchorNode: NetworkNode = {
id: `person-anchor-${pubkey}`, id: `person-anchor-${pubkey}`,
title: displayName, title: displayName,
content: `${connection.signedByEventIds.size} signed, ${connection.referencedInEventIds.size} referenced`, content:
`${connection.signedByEventIds.size} signed, ${connection.referencedInEventIds.size} referenced`,
author: "", author: "",
kind: 0, // Special kind for anchors kind: 0, // Special kind for anchors
type: "PersonAnchor", type: "PersonAnchor",
@ -245,11 +257,14 @@ export function createPersonAnchorNodes(
anchorNodes.push(anchorNode); anchorNodes.push(anchorNode);
}); });
debug("Created person anchor nodes", { count: anchorNodes.length, totalEligible: eligiblePersons.length }); debug("Created person anchor nodes", {
count: anchorNodes.length,
totalEligible: eligiblePersons.length,
});
return { return {
nodes: anchorNodes, nodes: anchorNodes,
totalCount: eligiblePersons.length totalCount: eligiblePersons.length,
}; };
} }
@ -264,10 +279,13 @@ export interface PersonLink extends NetworkLink {
export function createPersonLinks( export function createPersonLinks(
personAnchors: NetworkNode[], personAnchors: NetworkNode[],
nodes: NetworkNode[], nodes: NetworkNode[],
personMap: Map<string, PersonConnection> personMap: Map<string, PersonConnection>,
): PersonLink[] { ): PersonLink[] {
debug("Creating person links", { anchorCount: personAnchors.length, nodeCount: nodes.length }); debug("Creating person links", {
anchorCount: personAnchors.length,
nodeCount: nodes.length,
});
const nodeMap = new Map(nodes.map((n) => [n.id, n])); const nodeMap = new Map(nodes.map((n) => [n.id, n]));
const links: PersonLink[] = personAnchors.flatMap((anchor) => { const links: PersonLink[] = personAnchors.flatMap((anchor) => {
@ -286,11 +304,11 @@ export function createPersonLinks(
return undefined; return undefined;
} }
let connectionType: 'signed-by' | 'referenced' | undefined; let connectionType: "signed-by" | "referenced" | undefined;
if (connection.signedByEventIds.has(nodeId)) { if (connection.signedByEventIds.has(nodeId)) {
connectionType = 'signed-by'; connectionType = "signed-by";
} else if (connection.referencedInEventIds.has(nodeId)) { } else if (connection.referencedInEventIds.has(nodeId)) {
connectionType = 'referenced'; connectionType = "referenced";
} }
const link: PersonLink = { const link: PersonLink = {
@ -299,7 +317,7 @@ export function createPersonLinks(
isSequential: false, isSequential: false,
connectionType, connectionType,
}; };
return link; return link;
}).filter((link): link is PersonLink => link !== undefined); // Remove undefineds and type guard }).filter((link): link is PersonLink => link !== undefined); // Remove undefineds and type guard
}); });
@ -324,9 +342,9 @@ export interface PersonAnchorInfo {
*/ */
export function extractPersonAnchorInfo( export function extractPersonAnchorInfo(
personAnchors: NetworkNode[], personAnchors: NetworkNode[],
personMap: Map<string, PersonConnection> personMap: Map<string, PersonConnection>,
): PersonAnchorInfo[] { ): PersonAnchorInfo[] {
return personAnchors.map(anchor => { return personAnchors.map((anchor) => {
const connection = personMap.get(anchor.pubkey || ""); const connection = personMap.get(anchor.pubkey || "");
return { return {
pubkey: anchor.pubkey || "", pubkey: anchor.pubkey || "",
@ -336,4 +354,4 @@ export function extractPersonAnchorInfo(
isFromFollowList: connection?.isFromFollowList || false, isFromFollowList: connection?.isFromFollowList || false,
}; };
}); });
} }

83
src/lib/navigator/EventNetwork/utils/starForceSimulation.ts

@ -1,25 +1,25 @@
/** /**
* Star Network Force Simulation * Star Network Force Simulation
* *
* Custom force simulation optimized for star network layouts. * Custom force simulation optimized for star network layouts.
* Provides stronger connections between star centers and their content nodes, * Provides stronger connections between star centers and their content nodes,
* with specialized forces to maintain hierarchical structure. * with specialized forces to maintain hierarchical structure.
*/ */
import * as d3 from "d3"; import * as d3 from "d3";
import type { NetworkNode, NetworkLink } from "../types"; import type { NetworkLink, NetworkNode } from "../types";
import type { Simulation } from "./forceSimulation"; import type { Simulation } from "./forceSimulation";
import { createTagGravityForce } from "./tagNetworkBuilder"; import { createTagGravityForce } from "./tagNetworkBuilder";
// Configuration for star network forces // Configuration for star network forces
const STAR_CENTER_CHARGE = -300; // Stronger repulsion between star centers const STAR_CENTER_CHARGE = -300; // Stronger repulsion between star centers
const CONTENT_NODE_CHARGE = -50; // Weaker repulsion for content nodes const CONTENT_NODE_CHARGE = -50; // Weaker repulsion for content nodes
const STAR_LINK_STRENGTH = 0.5; // Moderate connection to star center const STAR_LINK_STRENGTH = 0.5; // Moderate connection to star center
const INTER_STAR_LINK_STRENGTH = 0.2; // Weaker connection between stars const INTER_STAR_LINK_STRENGTH = 0.2; // Weaker connection between stars
const STAR_LINK_DISTANCE = 80; // Fixed distance from center to content const STAR_LINK_DISTANCE = 80; // Fixed distance from center to content
const INTER_STAR_DISTANCE = 200; // Distance between star centers const INTER_STAR_DISTANCE = 200; // Distance between star centers
const CENTER_GRAVITY = 0.02; // Gentle pull toward canvas center const CENTER_GRAVITY = 0.02; // Gentle pull toward canvas center
const STAR_CENTER_WEIGHT = 10; // Weight multiplier for star centers const STAR_CENTER_WEIGHT = 10; // Weight multiplier for star centers
/** /**
* Creates a custom force simulation for star networks * Creates a custom force simulation for star networks
@ -28,15 +28,18 @@ export function createStarSimulation(
nodes: NetworkNode[], nodes: NetworkNode[],
links: NetworkLink[], links: NetworkLink[],
width: number, width: number,
height: number height: number,
): Simulation<NetworkNode, NetworkLink> { ): Simulation<NetworkNode, NetworkLink> {
// Create the simulation // Create the simulation
const simulation = d3.forceSimulation(nodes) as any const simulation = d3.forceSimulation(nodes) as any;
simulation simulation
.force("center", d3.forceCenter(width / 2, height / 2).strength(CENTER_GRAVITY)) .force(
"center",
d3.forceCenter(width / 2, height / 2).strength(CENTER_GRAVITY),
)
.velocityDecay(0.2) // Lower decay for more responsive simulation .velocityDecay(0.2) // Lower decay for more responsive simulation
.alphaDecay(0.0001) // Much slower alpha decay to prevent freezing .alphaDecay(0.0001) // Much slower alpha decay to prevent freezing
.alphaMin(0.001); // Keep minimum energy to prevent complete freeze .alphaMin(0.001); // Keep minimum energy to prevent complete freeze
// Custom charge force that varies by node type // Custom charge force that varies by node type
const chargeForce = d3.forceManyBody() const chargeForce = d3.forceManyBody()
@ -91,9 +94,9 @@ export function createStarSimulation(
// Custom radial force to keep content nodes around their star center // Custom radial force to keep content nodes around their star center
simulation.force("radial", createRadialForce(nodes, links)); simulation.force("radial", createRadialForce(nodes, links));
// Add tag gravity force if there are tag anchors // Add tag gravity force if there are tag anchors
const hasTagAnchors = nodes.some(n => n.isTagAnchor); const hasTagAnchors = nodes.some((n) => n.isTagAnchor);
if (hasTagAnchors) { if (hasTagAnchors) {
simulation.force("tagGravity", createTagGravityForce(nodes, links)); simulation.force("tagGravity", createTagGravityForce(nodes, links));
} }
@ -122,9 +125,9 @@ function applyRadialForce(
nodes: NetworkNode[], nodes: NetworkNode[],
nodeToCenter: Map<string, NetworkNode>, nodeToCenter: Map<string, NetworkNode>,
targetDistance: number, targetDistance: number,
alpha: number alpha: number,
): void { ): void {
nodes.forEach(node => { nodes.forEach((node) => {
if (node.kind === 30041) { if (node.kind === 30041) {
const center = nodeToCenter.get(node.id); const center = nodeToCenter.get(node.id);
if ( if (
@ -157,7 +160,7 @@ function createRadialForce(nodes: NetworkNode[], links: NetworkLink[]): any {
// Build a map of content nodes to their star centers // Build a map of content nodes to their star centers
const nodeToCenter = new Map<string, NetworkNode>(); const nodeToCenter = new Map<string, NetworkNode>();
links.forEach(link => { links.forEach((link) => {
const source = link.source as NetworkNode; const source = link.source as NetworkNode;
const target = link.target as NetworkNode; const target = link.target as NetworkNode;
if (source.kind === 30040 && target.kind === 30041) { if (source.kind === 30040 && target.kind === 30041) {
@ -169,7 +172,7 @@ function createRadialForce(nodes: NetworkNode[], links: NetworkLink[]): any {
applyRadialForce(nodes, nodeToCenter, STAR_LINK_DISTANCE, alpha); applyRadialForce(nodes, nodeToCenter, STAR_LINK_DISTANCE, alpha);
} }
force.initialize = function(_: NetworkNode[]) { force.initialize = function (_: NetworkNode[]) {
nodes = _; nodes = _;
}; };
@ -183,14 +186,14 @@ export function applyInitialStarPositions(
nodes: NetworkNode[], nodes: NetworkNode[],
links: NetworkLink[], links: NetworkLink[],
width: number, width: number,
height: number height: number,
): void { ): void {
// Group nodes by their star centers // Group nodes by their star centers
const starGroups = new Map<string, NetworkNode[]>(); const starGroups = new Map<string, NetworkNode[]>();
const starCenters: NetworkNode[] = []; const starCenters: NetworkNode[] = [];
// Identify star centers // Identify star centers
nodes.forEach(node => { nodes.forEach((node) => {
if (node.isContainer && node.kind === 30040) { if (node.isContainer && node.kind === 30040) {
starCenters.push(node); starCenters.push(node);
starGroups.set(node.id, []); starGroups.set(node.id, []);
@ -198,7 +201,7 @@ export function applyInitialStarPositions(
}); });
// Assign content nodes to their star centers // Assign content nodes to their star centers
links.forEach(link => { links.forEach((link) => {
const source = link.source as NetworkNode; const source = link.source as NetworkNode;
const target = link.target as NetworkNode; const target = link.target as NetworkNode;
if (source.kind === 30040 && target.kind === 30041) { if (source.kind === 30040 && target.kind === 30041) {
@ -222,7 +225,7 @@ export function applyInitialStarPositions(
const centerY = height / 2; const centerY = height / 2;
const radius = Math.min(width, height) * 0.3; const radius = Math.min(width, height) * 0.3;
const angleStep = (2 * Math.PI) / starCenters.length; const angleStep = (2 * Math.PI) / starCenters.length;
starCenters.forEach((center, i) => { starCenters.forEach((center, i) => {
const angle = i * angleStep; const angle = i * angleStep;
center.x = centerX + radius * Math.cos(angle); center.x = centerX + radius * Math.cos(angle);
@ -233,9 +236,9 @@ export function applyInitialStarPositions(
// Position content nodes around their star centers // Position content nodes around their star centers
starGroups.forEach((contentNodes, centerId) => { starGroups.forEach((contentNodes, centerId) => {
const center = nodes.find(n => n.id === centerId); const center = nodes.find((n) => n.id === centerId);
if (!center) return; if (!center) return;
const angleStep = (2 * Math.PI) / Math.max(contentNodes.length, 1); const angleStep = (2 * Math.PI) / Math.max(contentNodes.length, 1);
contentNodes.forEach((node, i) => { contentNodes.forEach((node, i) => {
const angle = i * angleStep; const angle = i * angleStep;
@ -252,7 +255,11 @@ export function applyInitialStarPositions(
* @param d - The node being dragged * @param d - The node being dragged
* @param simulation - The d3 force simulation instance * @param simulation - The d3 force simulation instance
*/ */
function dragstarted(event: any, d: NetworkNode, simulation: Simulation<NetworkNode, NetworkLink>) { function dragstarted(
event: any,
d: NetworkNode,
simulation: Simulation<NetworkNode, NetworkLink>,
) {
// If no other drag is active, set a low alpha target to keep the simulation running smoothly // If no other drag is active, set a low alpha target to keep the simulation running smoothly
if (!event.active) { if (!event.active) {
simulation.alphaTarget(0.1).restart(); simulation.alphaTarget(0.1).restart();
@ -281,7 +288,11 @@ function dragged(event: any, d: NetworkNode) {
* @param d - The node being dragged * @param d - The node being dragged
* @param simulation - The d3 force simulation instance * @param simulation - The d3 force simulation instance
*/ */
function dragended(event: any, d: NetworkNode, simulation: Simulation<NetworkNode, NetworkLink>) { function dragended(
event: any,
d: NetworkNode,
simulation: Simulation<NetworkNode, NetworkLink>,
) {
// If no other drag is active, lower the alpha target to let the simulation cool down // If no other drag is active, lower the alpha target to let the simulation cool down
if (!event.active) { if (!event.active) {
simulation.alphaTarget(0); simulation.alphaTarget(0);
@ -297,12 +308,16 @@ function dragended(event: any, d: NetworkNode, simulation: Simulation<NetworkNod
* @returns The d3 drag behavior * @returns The d3 drag behavior
*/ */
export function createStarDragHandler( export function createStarDragHandler(
simulation: Simulation<NetworkNode, NetworkLink> simulation: Simulation<NetworkNode, NetworkLink>,
): any { ): any {
// These handlers are now top-level functions, so we use closures to pass simulation to them. // These handlers are now top-level functions, so we use closures to pass simulation to them.
// This is a common pattern in JavaScript/TypeScript when you need to pass extra arguments to event handlers. // This is a common pattern in JavaScript/TypeScript when you need to pass extra arguments to event handlers.
return d3.drag() return d3.drag()
.on('start', function(event: any, d: NetworkNode) { dragstarted(event, d, simulation); }) .on("start", function (event: any, d: NetworkNode) {
.on('drag', dragged) dragstarted(event, d, simulation);
.on('end', function(event: any, d: NetworkNode) { dragended(event, d, simulation); }); })
} .on("drag", dragged)
.on("end", function (event: any, d: NetworkNode) {
dragended(event, d, simulation);
});
}

186
src/lib/navigator/EventNetwork/utils/starNetworkBuilder.ts

@ -1,19 +1,23 @@
/** /**
* Star Network Builder for NKBIP-01 Events * Star Network Builder for NKBIP-01 Events
* *
* This module provides utilities for building star network visualizations specifically * This module provides utilities for building star network visualizations specifically
* for NKBIP-01 events (kinds 30040 and 30041). Unlike the sequential network builder, * for NKBIP-01 events (kinds 30040 and 30041). Unlike the sequential network builder,
* this creates star formations where index events (30040) are central nodes with * this creates star formations where index events (30040) are central nodes with
* content events (30041) arranged around them. * content events (30041) arranged around them.
*/ */
import type { NDKEvent } from "@nostr-dev-kit/ndk"; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { NetworkNode, NetworkLink, GraphData, GraphState } from "../types"; import type { GraphData, GraphState, NetworkLink, NetworkNode } from "../types";
import { getMatchingTags } from '$lib/utils/nostrUtils'; import { getMatchingTags } from "$lib/utils/nostrUtils";
import { createNetworkNode, createEventMap, extractEventIdFromATag, getEventColor } from './networkBuilder'; import {
import { createDebugFunction } from './common'; createEventMap,
import { wikiKind, indexKind, zettelKinds } from '$lib/consts'; createNetworkNode,
extractEventIdFromATag,
getEventColor,
} from "./networkBuilder";
import { createDebugFunction } from "./common";
import { indexKind, wikiKind, zettelKinds } from "$lib/consts";
// Debug function // Debug function
const debug = createDebugFunction("StarNetworkBuilder"); const debug = createDebugFunction("StarNetworkBuilder");
@ -22,14 +26,14 @@ const debug = createDebugFunction("StarNetworkBuilder");
* Represents a star network with a central index node and peripheral content nodes * Represents a star network with a central index node and peripheral content nodes
*/ */
export interface StarNetwork { export interface StarNetwork {
center: NetworkNode; // Central index node (30040) center: NetworkNode; // Central index node (30040)
peripheralNodes: NetworkNode[]; // Content nodes (30041) and connected indices (30040) peripheralNodes: NetworkNode[]; // Content nodes (30041) and connected indices (30040)
links: NetworkLink[]; // Links within this star links: NetworkLink[]; // Links within this star
} }
/** /**
* Creates a star network from an index event and its references * Creates a star network from an index event and its references
* *
* @param indexEvent - The central index event (30040) * @param indexEvent - The central index event (30040)
* @param state - Current graph state * @param state - Current graph state
* @param level - Hierarchy level for this star * @param level - Hierarchy level for this star
@ -38,10 +42,10 @@ export interface StarNetwork {
export function createStarNetwork( export function createStarNetwork(
indexEvent: NDKEvent, indexEvent: NDKEvent,
state: GraphState, state: GraphState,
level: number = 0 level: number = 0,
): StarNetwork | null { ): StarNetwork | null {
debug("Creating star network", { indexId: indexEvent.id, level }); debug("Creating star network", { indexId: indexEvent.id, level });
const centerNode = state.nodeMap.get(indexEvent.id); const centerNode = state.nodeMap.get(indexEvent.id);
if (!centerNode) { if (!centerNode) {
debug("Center node not found for index event", indexEvent.id); debug("Center node not found for index event", indexEvent.id);
@ -50,32 +54,35 @@ export function createStarNetwork(
// Set the center node level // Set the center node level
centerNode.level = level; centerNode.level = level;
// Extract referenced event IDs from 'a' tags // Extract referenced event IDs from 'a' tags
const referencedIds = getMatchingTags(indexEvent, "a") const referencedIds = getMatchingTags(indexEvent, "a")
.map(tag => extractEventIdFromATag(tag)) .map((tag) => extractEventIdFromATag(tag))
.filter((id): id is string => id !== null); .filter((id): id is string => id !== null);
debug("Found referenced IDs", { count: referencedIds.length, ids: referencedIds }); debug("Found referenced IDs", {
count: referencedIds.length,
ids: referencedIds,
});
// Get peripheral nodes (both content and nested indices) // Get peripheral nodes (both content and nested indices)
const peripheralNodes: NetworkNode[] = []; const peripheralNodes: NetworkNode[] = [];
const links: NetworkLink[] = []; const links: NetworkLink[] = [];
referencedIds.forEach(id => { referencedIds.forEach((id) => {
const node = state.nodeMap.get(id); const node = state.nodeMap.get(id);
if (node) { if (node) {
// Set the peripheral node level // Set the peripheral node level
node.level += 1; node.level += 1;
peripheralNodes.push(node); peripheralNodes.push(node);
// Create link from center to peripheral node // Create link from center to peripheral node
links.push({ links.push({
source: centerNode, source: centerNode,
target: node, target: node,
isSequential: false // Star links are not sequential isSequential: false, // Star links are not sequential
}); });
debug("Added peripheral node", { nodeId: id, nodeType: node.type }); debug("Added peripheral node", { nodeId: id, nodeType: node.type });
} }
}); });
@ -83,13 +90,13 @@ export function createStarNetwork(
return { return {
center: centerNode, center: centerNode,
peripheralNodes, peripheralNodes,
links links,
}; };
} }
/** /**
* Processes all index events to create star networks * Processes all index events to create star networks
* *
* @param events - Array of all events * @param events - Array of all events
* @param maxLevel - Maximum nesting level to process * @param maxLevel - Maximum nesting level to process
* @returns Array of star networks * @returns Array of star networks
@ -97,17 +104,17 @@ export function createStarNetwork(
export function createStarNetworks( export function createStarNetworks(
events: NDKEvent[], events: NDKEvent[],
maxLevel: number, maxLevel: number,
existingNodeMap?: Map<string, NetworkNode> existingNodeMap?: Map<string, NetworkNode>,
): StarNetwork[] { ): StarNetwork[] {
debug("Creating star networks", { eventCount: events.length, maxLevel }); debug("Creating star networks", { eventCount: events.length, maxLevel });
// Use existing node map or create new one // Use existing node map or create new one
const nodeMap = existingNodeMap || new Map<string, NetworkNode>(); const nodeMap = existingNodeMap || new Map<string, NetworkNode>();
const eventMap = createEventMap(events); const eventMap = createEventMap(events);
// Create nodes for all events if not using existing map // Create nodes for all events if not using existing map
if (!existingNodeMap) { if (!existingNodeMap) {
events.forEach(event => { events.forEach((event) => {
if (!event.id) return; if (!event.id) return;
const node = createNetworkNode(event); const node = createNetworkNode(event);
nodeMap.set(event.id, node); nodeMap.set(event.id, node);
@ -118,16 +125,16 @@ export function createStarNetworks(
nodeMap, nodeMap,
links: [], links: [],
eventMap, eventMap,
referencedIds: new Set<string>() referencedIds: new Set<string>(),
}; };
// Find all index events and non-publication events // Find all index events and non-publication events
const publicationKinds = [wikiKind, indexKind, ...zettelKinds]; const publicationKinds = [wikiKind, indexKind, ...zettelKinds];
const indexEvents = events.filter(event => event.kind === indexKind); const indexEvents = events.filter((event) => event.kind === indexKind);
const nonPublicationEvents = events.filter(event => const nonPublicationEvents = events.filter((event) =>
event.kind !== undefined && !publicationKinds.includes(event.kind) event.kind !== undefined && !publicationKinds.includes(event.kind)
); );
debug("Found index events", { count: indexEvents.length }); debug("Found index events", { count: indexEvents.length });
debug("Found non-publication events", { count: nonPublicationEvents.length }); debug("Found non-publication events", { count: nonPublicationEvents.length });
@ -135,34 +142,34 @@ export function createStarNetworks(
const processedIndices = new Set<string>(); const processedIndices = new Set<string>();
// Process all index events regardless of level // Process all index events regardless of level
indexEvents.forEach(indexEvent => { indexEvents.forEach((indexEvent) => {
if (!indexEvent.id || processedIndices.has(indexEvent.id)) return; if (!indexEvent.id || processedIndices.has(indexEvent.id)) return;
const star = createStarNetwork(indexEvent, state, 0); const star = createStarNetwork(indexEvent, state, 0);
if (star && star.peripheralNodes.length > 0) { if (star && star.peripheralNodes.length > 0) {
starNetworks.push(star); starNetworks.push(star);
processedIndices.add(indexEvent.id); processedIndices.add(indexEvent.id);
debug("Created star network", { debug("Created star network", {
centerId: star.center.id, centerId: star.center.id,
peripheralCount: star.peripheralNodes.length peripheralCount: star.peripheralNodes.length,
}); });
} }
}); });
// Add non-publication events as standalone nodes (stars with no peripherals) // Add non-publication events as standalone nodes (stars with no peripherals)
nonPublicationEvents.forEach(event => { nonPublicationEvents.forEach((event) => {
if (!event.id || !nodeMap.has(event.id)) return; if (!event.id || !nodeMap.has(event.id)) return;
const node = nodeMap.get(event.id)!; const node = nodeMap.get(event.id)!;
const star: StarNetwork = { const star: StarNetwork = {
center: node, center: node,
peripheralNodes: [], peripheralNodes: [],
links: [] links: [],
}; };
starNetworks.push(star); starNetworks.push(star);
debug("Created standalone star for non-publication event", { debug("Created standalone star for non-publication event", {
eventId: event.id, eventId: event.id,
kind: event.kind kind: event.kind,
}); });
}); });
@ -171,36 +178,40 @@ export function createStarNetworks(
/** /**
* Creates inter-star connections between star networks * Creates inter-star connections between star networks
* *
* @param starNetworks - Array of star networks * @param starNetworks - Array of star networks
* @returns Additional links connecting different star networks * @returns Additional links connecting different star networks
*/ */
export function createInterStarConnections(starNetworks: StarNetwork[]): NetworkLink[] { export function createInterStarConnections(
starNetworks: StarNetwork[],
): NetworkLink[] {
debug("Creating inter-star connections", { starCount: starNetworks.length }); debug("Creating inter-star connections", { starCount: starNetworks.length });
const interStarLinks: NetworkLink[] = []; const interStarLinks: NetworkLink[] = [];
// Create a map of center nodes for quick lookup // Create a map of center nodes for quick lookup
const centerNodeMap = new Map<string, NetworkNode>(); const centerNodeMap = new Map<string, NetworkNode>();
starNetworks.forEach(star => { starNetworks.forEach((star) => {
centerNodeMap.set(star.center.id, star.center); centerNodeMap.set(star.center.id, star.center);
}); });
// For each star, check if any of its peripheral nodes are centers of other stars // For each star, check if any of its peripheral nodes are centers of other stars
starNetworks.forEach(star => { starNetworks.forEach((star) => {
star.peripheralNodes.forEach(peripheralNode => { star.peripheralNodes.forEach((peripheralNode) => {
// If this peripheral node is the center of another star, create an inter-star link // If this peripheral node is the center of another star, create an inter-star link
if (peripheralNode.isContainer && centerNodeMap.has(peripheralNode.id)) { if (peripheralNode.isContainer && centerNodeMap.has(peripheralNode.id)) {
const targetStar = starNetworks.find(s => s.center.id === peripheralNode.id); const targetStar = starNetworks.find((s) =>
s.center.id === peripheralNode.id
);
if (targetStar) { if (targetStar) {
interStarLinks.push({ interStarLinks.push({
source: star.center, source: star.center,
target: targetStar.center, target: targetStar.center,
isSequential: false isSequential: false,
}); });
debug("Created inter-star connection", { debug("Created inter-star connection", {
from: star.center.id, from: star.center.id,
to: targetStar.center.id to: targetStar.center.id,
}); });
} }
} }
@ -212,7 +223,7 @@ export function createInterStarConnections(starNetworks: StarNetwork[]): Network
/** /**
* Applies star-specific positioning to nodes using a radial layout * Applies star-specific positioning to nodes using a radial layout
* *
* @param starNetworks - Array of star networks * @param starNetworks - Array of star networks
* @param width - Canvas width * @param width - Canvas width
* @param height - Canvas height * @param height - Canvas height
@ -220,61 +231,62 @@ export function createInterStarConnections(starNetworks: StarNetwork[]): Network
export function applyStarLayout( export function applyStarLayout(
starNetworks: StarNetwork[], starNetworks: StarNetwork[],
width: number, width: number,
height: number height: number,
): void { ): void {
debug("Applying star layout", { debug("Applying star layout", {
starCount: starNetworks.length, starCount: starNetworks.length,
dimensions: { width, height } dimensions: { width, height },
}); });
const centerX = width / 2; const centerX = width / 2;
const centerY = height / 2; const centerY = height / 2;
// If only one star, center it // If only one star, center it
if (starNetworks.length === 1) { if (starNetworks.length === 1) {
const star = starNetworks[0]; const star = starNetworks[0];
// Position center node // Position center node
star.center.x = centerX; star.center.x = centerX;
star.center.y = centerY; star.center.y = centerY;
star.center.fx = centerX; // Fix center position star.center.fx = centerX; // Fix center position
star.center.fy = centerY; star.center.fy = centerY;
// Position peripheral nodes in a circle around center // Position peripheral nodes in a circle around center
const radius = Math.min(width, height) * 0.25; const radius = Math.min(width, height) * 0.25;
const angleStep = (2 * Math.PI) / star.peripheralNodes.length; const angleStep = (2 * Math.PI) / star.peripheralNodes.length;
star.peripheralNodes.forEach((node, index) => { star.peripheralNodes.forEach((node, index) => {
const angle = index * angleStep; const angle = index * angleStep;
node.x = centerX + radius * Math.cos(angle); node.x = centerX + radius * Math.cos(angle);
node.y = centerY + radius * Math.sin(angle); node.y = centerY + radius * Math.sin(angle);
}); });
return; return;
} }
// For multiple stars, arrange them in a grid or circle // For multiple stars, arrange them in a grid or circle
const starsPerRow = Math.ceil(Math.sqrt(starNetworks.length)); const starsPerRow = Math.ceil(Math.sqrt(starNetworks.length));
const starSpacingX = width / (starsPerRow + 1); const starSpacingX = width / (starsPerRow + 1);
const starSpacingY = height / (Math.ceil(starNetworks.length / starsPerRow) + 1); const starSpacingY = height /
(Math.ceil(starNetworks.length / starsPerRow) + 1);
starNetworks.forEach((star, index) => { starNetworks.forEach((star, index) => {
const row = Math.floor(index / starsPerRow); const row = Math.floor(index / starsPerRow);
const col = index % starsPerRow; const col = index % starsPerRow;
const starCenterX = (col + 1) * starSpacingX; const starCenterX = (col + 1) * starSpacingX;
const starCenterY = (row + 1) * starSpacingY; const starCenterY = (row + 1) * starSpacingY;
// Position center node // Position center node
star.center.x = starCenterX; star.center.x = starCenterX;
star.center.y = starCenterY; star.center.y = starCenterY;
star.center.fx = starCenterX; // Fix center position star.center.fx = starCenterX; // Fix center position
star.center.fy = starCenterY; star.center.fy = starCenterY;
// Position peripheral nodes around this star's center // Position peripheral nodes around this star's center
const radius = Math.min(starSpacingX, starSpacingY) * 0.3; const radius = Math.min(starSpacingX, starSpacingY) * 0.3;
const angleStep = (2 * Math.PI) / Math.max(star.peripheralNodes.length, 1); const angleStep = (2 * Math.PI) / Math.max(star.peripheralNodes.length, 1);
star.peripheralNodes.forEach((node, nodeIndex) => { star.peripheralNodes.forEach((node, nodeIndex) => {
const angle = nodeIndex * angleStep; const angle = nodeIndex * angleStep;
node.x = starCenterX + radius * Math.cos(angle); node.x = starCenterX + radius * Math.cos(angle);
@ -285,69 +297,69 @@ export function applyStarLayout(
/** /**
* Generates a complete star network graph from events * Generates a complete star network graph from events
* *
* @param events - Array of Nostr events * @param events - Array of Nostr events
* @param maxLevel - Maximum hierarchy level to process * @param maxLevel - Maximum hierarchy level to process
* @returns Complete graph data with star network layout * @returns Complete graph data with star network layout
*/ */
export function generateStarGraph( export function generateStarGraph(
events: NDKEvent[], events: NDKEvent[],
maxLevel: number maxLevel: number,
): GraphData { ): GraphData {
debug("Generating star graph", { eventCount: events.length, maxLevel }); debug("Generating star graph", { eventCount: events.length, maxLevel });
// Guard against empty events // Guard against empty events
if (!events || events.length === 0) { if (!events || events.length === 0) {
return { nodes: [], links: [] }; return { nodes: [], links: [] };
} }
// Initialize all nodes first // Initialize all nodes first
const nodeMap = new Map<string, NetworkNode>(); const nodeMap = new Map<string, NetworkNode>();
events.forEach(event => { events.forEach((event) => {
if (!event.id) return; if (!event.id) return;
const node = createNetworkNode(event); const node = createNetworkNode(event);
nodeMap.set(event.id, node); nodeMap.set(event.id, node);
}); });
// Create star networks with the existing node map // Create star networks with the existing node map
const starNetworks = createStarNetworks(events, maxLevel, nodeMap); const starNetworks = createStarNetworks(events, maxLevel, nodeMap);
// Create inter-star connections // Create inter-star connections
const interStarLinks = createInterStarConnections(starNetworks); const interStarLinks = createInterStarConnections(starNetworks);
// Collect nodes that are part of stars // Collect nodes that are part of stars
const nodesInStars = new Set<string>(); const nodesInStars = new Set<string>();
const allLinks: NetworkLink[] = []; const allLinks: NetworkLink[] = [];
// Add nodes and links from all stars // Add nodes and links from all stars
starNetworks.forEach(star => { starNetworks.forEach((star) => {
nodesInStars.add(star.center.id); nodesInStars.add(star.center.id);
star.peripheralNodes.forEach(node => { star.peripheralNodes.forEach((node) => {
nodesInStars.add(node.id); nodesInStars.add(node.id);
}); });
allLinks.push(...star.links); allLinks.push(...star.links);
}); });
// Add inter-star links // Add inter-star links
allLinks.push(...interStarLinks); allLinks.push(...interStarLinks);
// Include orphaned nodes (those not in any star) // Include orphaned nodes (those not in any star)
const allNodes: NetworkNode[] = []; const allNodes: NetworkNode[] = [];
nodeMap.forEach((node, id) => { nodeMap.forEach((node, id) => {
allNodes.push(node); allNodes.push(node);
}); });
const result = { const result = {
nodes: allNodes, nodes: allNodes,
links: allLinks links: allLinks,
}; };
debug("Star graph generation complete", { debug("Star graph generation complete", {
nodeCount: result.nodes.length, nodeCount: result.nodes.length,
linkCount: result.links.length, linkCount: result.links.length,
starCount: starNetworks.length, starCount: starNetworks.length,
orphanedNodes: allNodes.length - nodesInStars.size orphanedNodes: allNodes.length - nodesInStars.size,
}); });
return result; return result;
} }

29
src/lib/navigator/EventNetwork/utils/tagNetworkBuilder.ts

@ -6,9 +6,9 @@
*/ */
import type { NDKEvent } from "@nostr-dev-kit/ndk"; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { NetworkNode, NetworkLink, GraphData } from "../types"; import type { GraphData, NetworkLink, NetworkNode } from "../types";
import { getDisplayNameSync } from "$lib/utils/profileCache"; import { getDisplayNameSync } from "$lib/utils/profileCache";
import { SeededRandom, createDebugFunction } from "./common"; import { createDebugFunction, SeededRandom } from "./common";
// Configuration // Configuration
const TAG_ANCHOR_RADIUS = 15; const TAG_ANCHOR_RADIUS = 15;
@ -18,7 +18,6 @@ const TAG_ANCHOR_PLACEMENT_RADIUS = 1250; // Radius from center within which to
// Debug function // Debug function
const debug = createDebugFunction("TagNetworkBuilder"); const debug = createDebugFunction("TagNetworkBuilder");
/** /**
* Creates a deterministic seed from a string * Creates a deterministic seed from a string
*/ */
@ -63,7 +62,10 @@ export function extractUniqueTagsForType(
): Map<string, Set<string>> { ): Map<string, Set<string>> {
// Map of tagValue -> Set of event IDs // Map of tagValue -> Set of event IDs
const tagMap = new Map<string, Set<string>>(); const tagMap = new Map<string, Set<string>>();
debug("Extracting unique tags for type", { tagType, eventCount: events.length }); debug("Extracting unique tags for type", {
tagType,
eventCount: events.length,
});
events.forEach((event) => { events.forEach((event) => {
if (!event.tags || !event.id) return; if (!event.tags || !event.id) return;
@ -83,7 +85,7 @@ export function extractUniqueTagsForType(
tagMap.get(tagValue)!.add(event.id); tagMap.get(tagValue)!.add(event.id);
}); });
}); });
debug("Extracted tags", { tagCount: tagMap.size }); debug("Extracted tags", { tagCount: tagMap.size });
return tagMap; return tagMap;
@ -110,7 +112,7 @@ export function createTagAnchorNodes(
); );
if (validTags.length === 0) return []; if (validTags.length === 0) return [];
// Sort all tags by number of connections (events) descending // Sort all tags by number of connections (events) descending
validTags.sort((a, b) => b[1].size - a[1].size); validTags.sort((a, b) => b[1].size - a[1].size);
@ -172,8 +174,11 @@ export function createTagLinks(
tagAnchors: NetworkNode[], tagAnchors: NetworkNode[],
nodes: NetworkNode[], nodes: NetworkNode[],
): NetworkLink[] { ): NetworkLink[] {
debug("Creating tag links", { anchorCount: tagAnchors.length, nodeCount: nodes.length }); debug("Creating tag links", {
anchorCount: tagAnchors.length,
nodeCount: nodes.length,
});
const links: NetworkLink[] = []; const links: NetworkLink[] = [];
const nodeMap = new Map(nodes.map((n) => [n.id, n])); const nodeMap = new Map(nodes.map((n) => [n.id, n]));
@ -208,13 +213,13 @@ export function enhanceGraphWithTags(
displayLimit?: number, displayLimit?: number,
): GraphData { ): GraphData {
debug("Enhancing graph with tags", { tagType, displayLimit }); debug("Enhancing graph with tags", { tagType, displayLimit });
// Extract unique tags for the specified type // Extract unique tags for the specified type
const tagMap = extractUniqueTagsForType(events, tagType); const tagMap = extractUniqueTagsForType(events, tagType);
// Create tag anchor nodes // Create tag anchor nodes
let tagAnchors = createTagAnchorNodes(tagMap, tagType, width, height); let tagAnchors = createTagAnchorNodes(tagMap, tagType, width, height);
// Apply display limit if provided // Apply display limit if provided
if (displayLimit && displayLimit > 0 && tagAnchors.length > displayLimit) { if (displayLimit && displayLimit > 0 && tagAnchors.length > displayLimit) {
// Sort by connection count (already done in createTagAnchorNodes) // Sort by connection count (already done in createTagAnchorNodes)
@ -242,7 +247,7 @@ export function enhanceGraphWithTags(
export function applyTagGravity( export function applyTagGravity(
nodes: NetworkNode[], nodes: NetworkNode[],
nodeToAnchors: Map<string, NetworkNode[]>, nodeToAnchors: Map<string, NetworkNode[]>,
alpha: number alpha: number,
): void { ): void {
nodes.forEach((node) => { nodes.forEach((node) => {
if (node.isTagAnchor) return; // Tag anchors don't move if (node.isTagAnchor) return; // Tag anchors don't move
@ -301,7 +306,7 @@ export function createTagGravityForce(
}); });
debug("Creating tag gravity force"); debug("Creating tag gravity force");
function force(alpha: number) { function force(alpha: number) {
applyTagGravity(nodes, nodeToAnchors, alpha); applyTagGravity(nodes, nodeToAnchors, alpha);
} }

298
src/lib/ndk.ts

@ -1,27 +1,27 @@
import NDK, { import NDK, {
NDKEvent,
NDKNip07Signer, NDKNip07Signer,
NDKRelay, NDKRelay,
NDKRelayAuthPolicies, NDKRelayAuthPolicies,
NDKRelaySet, NDKRelaySet,
NDKUser, NDKUser,
NDKEvent,
} from "@nostr-dev-kit/ndk"; } from "@nostr-dev-kit/ndk";
import { writable, get, type Writable } from "svelte/store"; import { get, type Writable, writable } from "svelte/store";
import { import { anonymousRelays, loginStorageKey } from "./consts.ts";
loginStorageKey,
anonymousRelays,
} from "./consts.ts";
import { import {
buildCompleteRelaySet, buildCompleteRelaySet,
testRelayConnection,
deduplicateRelayUrls, deduplicateRelayUrls,
testRelayConnection,
} from "./utils/relay_management.ts"; } from "./utils/relay_management.ts";
// Re-export testRelayConnection for components that need it // Re-export testRelayConnection for components that need it
export { testRelayConnection }; export { testRelayConnection };
import { userStore } from "./stores/userStore.ts"; import { userStore } from "./stores/userStore.ts";
import { userPubkey } from "./stores/authStore.Svelte.ts"; import { userPubkey } from "./stores/authStore.Svelte.ts";
import { startNetworkStatusMonitoring, stopNetworkStatusMonitoring } from "./stores/networkStore.ts"; import {
startNetworkStatusMonitoring,
stopNetworkStatusMonitoring,
} from "./stores/networkStore.ts";
import { WebSocketPool } from "./data_structures/websocket_pool.ts"; import { WebSocketPool } from "./data_structures/websocket_pool.ts";
export const ndkInstance: Writable<NDK> = writable(); export const ndkInstance: Writable<NDK> = writable();
@ -35,34 +35,39 @@ export const activeInboxRelays = writable<string[]>([]);
export const activeOutboxRelays = writable<string[]>([]); export const activeOutboxRelays = writable<string[]>([]);
// AI-NOTE: 2025-01-08 - Persistent relay storage to avoid recalculation // AI-NOTE: 2025-01-08 - Persistent relay storage to avoid recalculation
let persistentRelaySet: { inboxRelays: string[]; outboxRelays: string[] } | null = null; let persistentRelaySet:
| { inboxRelays: string[]; outboxRelays: string[] }
| null = null;
let relaySetLastUpdated: number = 0; let relaySetLastUpdated: number = 0;
const RELAY_SET_CACHE_DURATION = 5 * 60 * 1000; // 5 minutes const RELAY_SET_CACHE_DURATION = 5 * 60 * 1000; // 5 minutes
const RELAY_SET_STORAGE_KEY = 'alexandria/relay_set_cache'; const RELAY_SET_STORAGE_KEY = "alexandria/relay_set_cache";
/** /**
* Load persistent relay set from localStorage * Load persistent relay set from localStorage
*/ */
function loadPersistentRelaySet(): { relaySet: { inboxRelays: string[]; outboxRelays: string[] } | null; lastUpdated: number } { function loadPersistentRelaySet(): {
relaySet: { inboxRelays: string[]; outboxRelays: string[] } | null;
lastUpdated: number;
} {
// Only load from localStorage on client-side // Only load from localStorage on client-side
if (typeof window === 'undefined') return { relaySet: null, lastUpdated: 0 }; if (typeof window === "undefined") return { relaySet: null, lastUpdated: 0 };
try { try {
const stored = localStorage.getItem(RELAY_SET_STORAGE_KEY); const stored = localStorage.getItem(RELAY_SET_STORAGE_KEY);
if (!stored) return { relaySet: null, lastUpdated: 0 }; if (!stored) return { relaySet: null, lastUpdated: 0 };
const data = JSON.parse(stored); const data = JSON.parse(stored);
const now = Date.now(); const now = Date.now();
// Check if cache is expired // Check if cache is expired
if (now - data.timestamp > RELAY_SET_CACHE_DURATION) { if (now - data.timestamp > RELAY_SET_CACHE_DURATION) {
localStorage.removeItem(RELAY_SET_STORAGE_KEY); localStorage.removeItem(RELAY_SET_STORAGE_KEY);
return { relaySet: null, lastUpdated: 0 }; return { relaySet: null, lastUpdated: 0 };
} }
return { relaySet: data.relaySet, lastUpdated: data.timestamp }; return { relaySet: data.relaySet, lastUpdated: data.timestamp };
} catch (error) { } catch (error) {
console.warn('[NDK.ts] Failed to load persistent relay set:', error); console.warn("[NDK.ts] Failed to load persistent relay set:", error);
localStorage.removeItem(RELAY_SET_STORAGE_KEY); localStorage.removeItem(RELAY_SET_STORAGE_KEY);
return { relaySet: null, lastUpdated: 0 }; return { relaySet: null, lastUpdated: 0 };
} }
@ -71,18 +76,20 @@ function loadPersistentRelaySet(): { relaySet: { inboxRelays: string[]; outboxRe
/** /**
* Save persistent relay set to localStorage * Save persistent relay set to localStorage
*/ */
function savePersistentRelaySet(relaySet: { inboxRelays: string[]; outboxRelays: string[] }): void { function savePersistentRelaySet(
relaySet: { inboxRelays: string[]; outboxRelays: string[] },
): void {
// Only save to localStorage on client-side // Only save to localStorage on client-side
if (typeof window === 'undefined') return; if (typeof window === "undefined") return;
try { try {
const data = { const data = {
relaySet, relaySet,
timestamp: Date.now() timestamp: Date.now(),
}; };
localStorage.setItem(RELAY_SET_STORAGE_KEY, JSON.stringify(data)); localStorage.setItem(RELAY_SET_STORAGE_KEY, JSON.stringify(data));
} catch (error) { } catch (error) {
console.warn('[NDK.ts] Failed to save persistent relay set:', error); console.warn("[NDK.ts] Failed to save persistent relay set:", error);
} }
} }
@ -91,12 +98,12 @@ function savePersistentRelaySet(relaySet: { inboxRelays: string[]; outboxRelays:
*/ */
function clearPersistentRelaySet(): void { function clearPersistentRelaySet(): void {
// Only clear from localStorage on client-side // Only clear from localStorage on client-side
if (typeof window === 'undefined') return; if (typeof window === "undefined") return;
try { try {
localStorage.removeItem(RELAY_SET_STORAGE_KEY); localStorage.removeItem(RELAY_SET_STORAGE_KEY);
} catch (error) { } catch (error) {
console.warn('[NDK.ts] Failed to clear persistent relay set:', error); console.warn("[NDK.ts] Failed to clear persistent relay set:", error);
} }
} }
@ -230,8 +237,7 @@ class CustomRelayAuthPolicy {
export function checkEnvironmentForWebSocketDowngrade(): void { export function checkEnvironmentForWebSocketDowngrade(): void {
console.debug("[NDK.ts] Environment Check for WebSocket Protocol:"); console.debug("[NDK.ts] Environment Check for WebSocket Protocol:");
const isLocalhost = const isLocalhost = globalThis.location.hostname === "localhost" ||
globalThis.location.hostname === "localhost" ||
globalThis.location.hostname === "127.0.0.1"; globalThis.location.hostname === "127.0.0.1";
const isHttp = globalThis.location.protocol === "http:"; const isHttp = globalThis.location.protocol === "http:";
const isHttps = globalThis.location.protocol === "https:"; const isHttps = globalThis.location.protocol === "https:";
@ -281,8 +287,6 @@ export function checkWebSocketSupport(): void {
} }
} }
/** /**
* Gets the user's pubkey from local storage, if it exists. * Gets the user's pubkey from local storage, if it exists.
* @returns The user's pubkey, or null if there is no logged-in user. * @returns The user's pubkey, or null if there is no logged-in user.
@ -291,8 +295,8 @@ export function checkWebSocketSupport(): void {
*/ */
export function getPersistedLogin(): string | null { export function getPersistedLogin(): string | null {
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window === 'undefined') return null; if (typeof window === "undefined") return null;
const pubkey = localStorage.getItem(loginStorageKey); const pubkey = localStorage.getItem(loginStorageKey);
return pubkey; return pubkey;
} }
@ -305,8 +309,8 @@ export function getPersistedLogin(): string | null {
*/ */
export function persistLogin(user: NDKUser): void { export function persistLogin(user: NDKUser): void {
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window === 'undefined') return; if (typeof window === "undefined") return;
localStorage.setItem(loginStorageKey, user.pubkey); localStorage.setItem(loginStorageKey, user.pubkey);
} }
@ -316,8 +320,8 @@ export function persistLogin(user: NDKUser): void {
*/ */
export function clearLogin(): void { export function clearLogin(): void {
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window === 'undefined') return; if (typeof window === "undefined") return;
localStorage.removeItem(loginStorageKey); localStorage.removeItem(loginStorageKey);
} }
@ -333,8 +337,8 @@ function getRelayStorageKey(user: NDKUser, type: "inbox" | "outbox"): string {
export function clearPersistedRelays(user: NDKUser): void { export function clearPersistedRelays(user: NDKUser): void {
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window === 'undefined') return; if (typeof window === "undefined") return;
localStorage.removeItem(getRelayStorageKey(user, "inbox")); localStorage.removeItem(getRelayStorageKey(user, "inbox"));
localStorage.removeItem(getRelayStorageKey(user, "outbox")); localStorage.removeItem(getRelayStorageKey(user, "outbox"));
} }
@ -346,11 +350,11 @@ export function clearPersistedRelays(user: NDKUser): void {
*/ */
function ensureSecureWebSocket(url: string): string { function ensureSecureWebSocket(url: string): string {
// For localhost, always use ws:// (never wss://) // For localhost, always use ws:// (never wss://)
if (url.includes('localhost') || url.includes('127.0.0.1')) { if (url.includes("localhost") || url.includes("127.0.0.1")) {
// Convert any wss://localhost to ws://localhost // Convert any wss://localhost to ws://localhost
return url.replace(/^wss:\/\//, "ws://"); return url.replace(/^wss:\/\//, "ws://");
} }
// Replace ws:// with wss:// for remote relays // Replace ws:// with wss:// for remote relays
const secureUrl = url.replace(/^ws:\/\//, "wss://"); const secureUrl = url.replace(/^ws:\/\//, "wss://");
@ -369,7 +373,7 @@ function ensureSecureWebSocket(url: string): string {
function createRelayWithAuth(url: string, ndk: NDK): NDKRelay { function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
try { try {
// Reduce verbosity in development - only log relay creation if debug mode is enabled // Reduce verbosity in development - only log relay creation if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) { if (process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS) {
console.debug(`[NDK.ts] Creating relay with URL: ${url}`); console.debug(`[NDK.ts] Creating relay with URL: ${url}`);
} }
@ -387,7 +391,9 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
const connectionTimeout = setTimeout(() => { const connectionTimeout = setTimeout(() => {
try { try {
// Only log connection timeouts if debug mode is enabled // Only log connection timeouts if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) { if (
process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS
) {
console.debug(`[NDK.ts] Connection timeout for ${secureUrl}`); console.debug(`[NDK.ts] Connection timeout for ${secureUrl}`);
} }
relay.disconnect(); relay.disconnect();
@ -402,7 +408,9 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
relay.on("connect", () => { relay.on("connect", () => {
try { try {
// Only log successful connections if debug mode is enabled // Only log successful connections if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) { if (
process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS
) {
console.debug(`[NDK.ts] Relay connected: ${secureUrl}`); console.debug(`[NDK.ts] Relay connected: ${secureUrl}`);
} }
clearTimeout(connectionTimeout); clearTimeout(connectionTimeout);
@ -415,7 +423,9 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
relay.on("connect", () => { relay.on("connect", () => {
try { try {
// Only log successful connections if debug mode is enabled // Only log successful connections if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) { if (
process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS
) {
console.debug(`[NDK.ts] Relay connected: ${secureUrl}`); console.debug(`[NDK.ts] Relay connected: ${secureUrl}`);
} }
clearTimeout(connectionTimeout); clearTimeout(connectionTimeout);
@ -438,46 +448,66 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
return relay; return relay;
} catch (error) { } catch (error) {
// If relay creation fails, try to use an anonymous relay as fallback // If relay creation fails, try to use an anonymous relay as fallback
console.debug(`[NDK.ts] Failed to create relay for ${url}, trying anonymous relay fallback`); console.debug(
`[NDK.ts] Failed to create relay for ${url}, trying anonymous relay fallback`,
);
// Find an anonymous relay that's not the same as the failed URL // Find an anonymous relay that's not the same as the failed URL
const fallbackUrl = anonymousRelays.find(relay => relay !== url) || anonymousRelays[0]; const fallbackUrl = anonymousRelays.find((relay) => relay !== url) ||
anonymousRelays[0];
if (fallbackUrl) { if (fallbackUrl) {
console.debug(`[NDK.ts] Using anonymous relay as fallback: ${fallbackUrl}`); console.debug(
`[NDK.ts] Using anonymous relay as fallback: ${fallbackUrl}`,
);
try { try {
const fallbackRelay = new NDKRelay(fallbackUrl, NDKRelayAuthPolicies.signIn({ ndk }), ndk); const fallbackRelay = new NDKRelay(
fallbackUrl,
NDKRelayAuthPolicies.signIn({ ndk }),
ndk,
);
return fallbackRelay; return fallbackRelay;
} catch (fallbackError) { } catch (fallbackError) {
console.debug(`[NDK.ts] Fallback relay creation also failed: ${fallbackError}`); console.debug(
`[NDK.ts] Fallback relay creation also failed: ${fallbackError}`,
);
} }
} }
// If all else fails, create a minimal relay that will fail gracefully // If all else fails, create a minimal relay that will fail gracefully
console.debug(`[NDK.ts] All fallback attempts failed, creating minimal relay for ${url}`); console.debug(
`[NDK.ts] All fallback attempts failed, creating minimal relay for ${url}`,
);
const minimalRelay = new NDKRelay(url, undefined, ndk); const minimalRelay = new NDKRelay(url, undefined, ndk);
return minimalRelay; return minimalRelay;
} }
} }
/** /**
* Gets the active relay set for the current user * Gets the active relay set for the current user
* @param ndk NDK instance * @param ndk NDK instance
* @returns Promise that resolves to object with inbox and outbox relay arrays * @returns Promise that resolves to object with inbox and outbox relay arrays
*/ */
export async function getActiveRelaySet(ndk: NDK): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> { export async function getActiveRelaySet(
ndk: NDK,
): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> {
const user = get(userStore); const user = get(userStore);
console.debug('[NDK.ts] getActiveRelaySet: User state:', { signedIn: user.signedIn, hasNdkUser: !!user.ndkUser, pubkey: user.pubkey }); console.debug("[NDK.ts] getActiveRelaySet: User state:", {
signedIn: user.signedIn,
hasNdkUser: !!user.ndkUser,
pubkey: user.pubkey,
});
if (user.signedIn && user.ndkUser) { if (user.signedIn && user.ndkUser) {
console.debug('[NDK.ts] getActiveRelaySet: Building relay set for authenticated user:', user.ndkUser.pubkey); console.debug(
"[NDK.ts] getActiveRelaySet: Building relay set for authenticated user:",
user.ndkUser.pubkey,
);
return await buildCompleteRelaySet(ndk, user.ndkUser); return await buildCompleteRelaySet(ndk, user.ndkUser);
} else { } else {
console.debug('[NDK.ts] getActiveRelaySet: Building relay set for anonymous user'); console.debug(
"[NDK.ts] getActiveRelaySet: Building relay set for anonymous user",
);
return await buildCompleteRelaySet(ndk, null); return await buildCompleteRelaySet(ndk, null);
} }
} }
@ -487,61 +517,88 @@ export async function getActiveRelaySet(ndk: NDK): Promise<{ inboxRelays: string
* @param ndk NDK instance * @param ndk NDK instance
* @param forceUpdate Force update even if cached (default: false) * @param forceUpdate Force update even if cached (default: false)
*/ */
export async function updateActiveRelayStores(ndk: NDK, forceUpdate: boolean = false): Promise<void> { export async function updateActiveRelayStores(
ndk: NDK,
forceUpdate: boolean = false,
): Promise<void> {
try { try {
// AI-NOTE: 2025-01-08 - Use persistent relay set to avoid recalculation // AI-NOTE: 2025-01-08 - Use persistent relay set to avoid recalculation
const now = Date.now(); const now = Date.now();
const cacheExpired = now - relaySetLastUpdated > RELAY_SET_CACHE_DURATION; const cacheExpired = now - relaySetLastUpdated > RELAY_SET_CACHE_DURATION;
// Load from persistent storage if not already loaded // Load from persistent storage if not already loaded
if (!persistentRelaySet) { if (!persistentRelaySet) {
const loaded = loadPersistentRelaySet(); const loaded = loadPersistentRelaySet();
persistentRelaySet = loaded.relaySet; persistentRelaySet = loaded.relaySet;
relaySetLastUpdated = loaded.lastUpdated; relaySetLastUpdated = loaded.lastUpdated;
} }
if (!forceUpdate && persistentRelaySet && !cacheExpired) { if (!forceUpdate && persistentRelaySet && !cacheExpired) {
console.debug('[NDK.ts] updateActiveRelayStores: Using cached relay set'); console.debug("[NDK.ts] updateActiveRelayStores: Using cached relay set");
activeInboxRelays.set(persistentRelaySet.inboxRelays); activeInboxRelays.set(persistentRelaySet.inboxRelays);
activeOutboxRelays.set(persistentRelaySet.outboxRelays); activeOutboxRelays.set(persistentRelaySet.outboxRelays);
return; return;
} }
console.debug('[NDK.ts] updateActiveRelayStores: Starting relay store update'); console.debug(
"[NDK.ts] updateActiveRelayStores: Starting relay store update",
);
// Get the active relay set from the relay management system // Get the active relay set from the relay management system
const relaySet = await getActiveRelaySet(ndk); const relaySet = await getActiveRelaySet(ndk);
console.debug('[NDK.ts] updateActiveRelayStores: Got relay set:', relaySet); console.debug("[NDK.ts] updateActiveRelayStores: Got relay set:", relaySet);
// Cache the relay set // Cache the relay set
persistentRelaySet = relaySet; persistentRelaySet = relaySet;
relaySetLastUpdated = now; relaySetLastUpdated = now;
savePersistentRelaySet(relaySet); // Save to persistent storage savePersistentRelaySet(relaySet); // Save to persistent storage
// Update the stores with the new relay configuration // Update the stores with the new relay configuration
activeInboxRelays.set(relaySet.inboxRelays); activeInboxRelays.set(relaySet.inboxRelays);
activeOutboxRelays.set(relaySet.outboxRelays); activeOutboxRelays.set(relaySet.outboxRelays);
console.debug('[NDK.ts] updateActiveRelayStores: Updated stores with inbox:', relaySet.inboxRelays.length, 'outbox:', relaySet.outboxRelays.length); console.debug(
"[NDK.ts] updateActiveRelayStores: Updated stores with inbox:",
relaySet.inboxRelays.length,
"outbox:",
relaySet.outboxRelays.length,
);
// Add relays to NDK pool (deduplicated) // Add relays to NDK pool (deduplicated)
const allRelayUrls = deduplicateRelayUrls([...relaySet.inboxRelays, ...relaySet.outboxRelays]); const allRelayUrls = deduplicateRelayUrls([
...relaySet.inboxRelays,
...relaySet.outboxRelays,
]);
// Reduce verbosity in development - only log relay addition if debug mode is enabled // Reduce verbosity in development - only log relay addition if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) { if (process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS) {
console.debug('[NDK.ts] updateActiveRelayStores: Adding', allRelayUrls.length, 'relays to NDK pool'); console.debug(
"[NDK.ts] updateActiveRelayStores: Adding",
allRelayUrls.length,
"relays to NDK pool",
);
} }
for (const url of allRelayUrls) { for (const url of allRelayUrls) {
try { try {
const relay = createRelayWithAuth(url, ndk); const relay = createRelayWithAuth(url, ndk);
ndk.pool?.addRelay(relay); ndk.pool?.addRelay(relay);
} catch (error) { } catch (error) {
console.debug('[NDK.ts] updateActiveRelayStores: Failed to add relay', url, ':', error); console.debug(
"[NDK.ts] updateActiveRelayStores: Failed to add relay",
url,
":",
error,
);
} }
} }
console.debug('[NDK.ts] updateActiveRelayStores: Relay store update completed'); console.debug(
"[NDK.ts] updateActiveRelayStores: Relay store update completed",
);
} catch (error) { } catch (error) {
console.warn('[NDK.ts] updateActiveRelayStores: Error updating relay stores:', error); console.warn(
"[NDK.ts] updateActiveRelayStores: Error updating relay stores:",
error,
);
} }
} }
@ -551,23 +608,25 @@ export async function updateActiveRelayStores(ndk: NDK, forceUpdate: boolean = f
export function logCurrentRelayConfiguration(): void { export function logCurrentRelayConfiguration(): void {
const inboxRelays = get(activeInboxRelays); const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays); const outboxRelays = get(activeOutboxRelays);
console.log('🔌 Current Relay Configuration:'); console.log("🔌 Current Relay Configuration:");
console.log('📥 Inbox Relays:', inboxRelays); console.log("📥 Inbox Relays:", inboxRelays);
console.log('📤 Outbox Relays:', outboxRelays); console.log("📤 Outbox Relays:", outboxRelays);
console.log(`📊 Total: ${inboxRelays.length} inbox, ${outboxRelays.length} outbox`); console.log(
`📊 Total: ${inboxRelays.length} inbox, ${outboxRelays.length} outbox`,
);
} }
/** /**
* Clears the relay set cache to force a rebuild * Clears the relay set cache to force a rebuild
*/ */
export function clearRelaySetCache(): void { export function clearRelaySetCache(): void {
console.debug('[NDK.ts] Clearing relay set cache'); console.debug("[NDK.ts] Clearing relay set cache");
persistentRelaySet = null; persistentRelaySet = null;
relaySetLastUpdated = 0; relaySetLastUpdated = 0;
// Clear from localStorage as well (client-side only) // Clear from localStorage as well (client-side only)
if (typeof window !== 'undefined') { if (typeof window !== "undefined") {
localStorage.removeItem('alexandria/relay_set_cache'); localStorage.removeItem("alexandria/relay_set_cache");
} }
} }
@ -576,7 +635,7 @@ export function clearRelaySetCache(): void {
* @param ndk NDK instance * @param ndk NDK instance
*/ */
/**
 * Forces a rebuild of the relay stores after a user state change.
 * @param ndk NDK instance
 */
export async function refreshRelayStores(ndk: NDK): Promise<void> {
  console.debug("[NDK.ts] Refreshing relay stores due to user state change");
  // Drop the cached relay set first so the forced update recomputes it.
  clearRelaySetCache();
  await updateActiveRelayStores(ndk, true);
}
@ -585,8 +644,12 @@ export async function refreshRelayStores(ndk: NDK): Promise<void> {
* Updates relay stores when network condition changes * Updates relay stores when network condition changes
* @param ndk NDK instance * @param ndk NDK instance
*/ */
export async function refreshRelayStoresOnNetworkChange(ndk: NDK): Promise<void> { export async function refreshRelayStoresOnNetworkChange(
console.debug('[NDK.ts] Refreshing relay stores due to network condition change'); ndk: NDK,
): Promise<void> {
console.debug(
"[NDK.ts] Refreshing relay stores due to network condition change",
);
await updateActiveRelayStores(ndk); await updateActiveRelayStores(ndk);
} }
@ -606,10 +669,10 @@ export function startNetworkMonitoringForRelays(): void {
* @returns NDKRelaySet * @returns NDKRelaySet
*/ */
function createRelaySetFromUrls(relayUrls: string[], ndk: NDK): NDKRelaySet { function createRelaySetFromUrls(relayUrls: string[], ndk: NDK): NDKRelaySet {
const relays = relayUrls.map(url => const relays = relayUrls.map((url) =>
new NDKRelay(url, NDKRelayAuthPolicies.signIn({ ndk }), ndk) new NDKRelay(url, NDKRelayAuthPolicies.signIn({ ndk }), ndk)
); );
return new NDKRelaySet(new Set(relays), ndk); return new NDKRelaySet(new Set(relays), ndk);
} }
@ -621,11 +684,11 @@ function createRelaySetFromUrls(relayUrls: string[], ndk: NDK): NDKRelaySet {
*/ */
export async function getActiveRelaySetAsNDKRelaySet( export async function getActiveRelaySetAsNDKRelaySet(
ndk: NDK, ndk: NDK,
useInbox: boolean = true useInbox: boolean = true,
): Promise<NDKRelaySet> { ): Promise<NDKRelaySet> {
const relaySet = await getActiveRelaySet(ndk); const relaySet = await getActiveRelaySet(ndk);
const urls = useInbox ? relaySet.inboxRelays : relaySet.outboxRelays; const urls = useInbox ? relaySet.inboxRelays : relaySet.outboxRelays;
return createRelaySetFromUrls(urls, ndk); return createRelaySetFromUrls(urls, ndk);
} }
@ -650,11 +713,11 @@ export function initNdk(): NDK {
const attemptConnection = async () => { const attemptConnection = async () => {
// Only attempt connection on client-side // Only attempt connection on client-side
if (typeof window === 'undefined') { if (typeof window === "undefined") {
console.debug("[NDK.ts] Skipping NDK connection during SSR"); console.debug("[NDK.ts] Skipping NDK connection during SSR");
return; return;
} }
try { try {
await ndk.connect(); await ndk.connect();
console.debug("[NDK.ts] NDK connected successfully"); console.debug("[NDK.ts] NDK connected successfully");
@ -664,17 +727,21 @@ export function initNdk(): NDK {
startNetworkMonitoringForRelays(); startNetworkMonitoringForRelays();
} catch (error) { } catch (error) {
console.warn("[NDK.ts] Failed to connect NDK:", error); console.warn("[NDK.ts] Failed to connect NDK:", error);
// Only retry a limited number of times // Only retry a limited number of times
if (retryCount < maxRetries) { if (retryCount < maxRetries) {
retryCount++; retryCount++;
console.debug(`[NDK.ts] Attempting to reconnect (${retryCount}/${maxRetries})...`); console.debug(
`[NDK.ts] Attempting to reconnect (${retryCount}/${maxRetries})...`,
);
// Use a more reasonable retry delay and prevent memory leaks // Use a more reasonable retry delay and prevent memory leaks
setTimeout(() => { setTimeout(() => {
attemptConnection(); attemptConnection();
}, 2000 * retryCount); // Exponential backoff }, 2000 * retryCount); // Exponential backoff
} else { } else {
console.warn("[NDK.ts] Max retries reached, continuing with limited functionality"); console.warn(
"[NDK.ts] Max retries reached, continuing with limited functionality",
);
// Still try to update relay stores even if connection failed // Still try to update relay stores even if connection failed
try { try {
await updateActiveRelayStores(ndk); await updateActiveRelayStores(ndk);
@ -687,21 +754,24 @@ export function initNdk(): NDK {
}; };
// Only attempt connection on client-side // Only attempt connection on client-side
if (typeof window !== 'undefined') { if (typeof window !== "undefined") {
attemptConnection(); attemptConnection();
} }
// AI-NOTE: Set up userStore subscription after NDK initialization to prevent initialization errors // AI-NOTE: Set up userStore subscription after NDK initialization to prevent initialization errors
userStore.subscribe(async (userState) => { userStore.subscribe(async (userState) => {
ndkSignedIn.set(userState.signedIn); ndkSignedIn.set(userState.signedIn);
// Refresh relay stores when user state changes // Refresh relay stores when user state changes
const ndk = get(ndkInstance); const ndk = get(ndkInstance);
if (ndk) { if (ndk) {
try { try {
await refreshRelayStores(ndk); await refreshRelayStores(ndk);
} catch (error) { } catch (error) {
console.warn('[NDK.ts] Failed to refresh relay stores on user state change:', error); console.warn(
"[NDK.ts] Failed to refresh relay stores on user state change:",
error,
);
} }
} }
}); });
@ -715,7 +785,7 @@ export function initNdk(): NDK {
*/ */
export function cleanupNdk(): void { export function cleanupNdk(): void {
console.debug("[NDK.ts] Cleaning up NDK resources"); console.debug("[NDK.ts] Cleaning up NDK resources");
const ndk = get(ndkInstance); const ndk = get(ndkInstance);
if (ndk) { if (ndk) {
try { try {
@ -725,13 +795,13 @@ export function cleanupNdk(): void {
relay.disconnect(); relay.disconnect();
} }
} }
// Drain the WebSocket pool // Drain the WebSocket pool
WebSocketPool.instance.drain(); WebSocketPool.instance.drain();
// Stop network monitoring // Stop network monitoring
stopNetworkStatusMonitoring(); stopNetworkStatusMonitoring();
console.debug("[NDK.ts] NDK cleanup completed"); console.debug("[NDK.ts] NDK cleanup completed");
} catch (error) { } catch (error) {
console.warn("[NDK.ts] Error during NDK cleanup:", error); console.warn("[NDK.ts] Error during NDK cleanup:", error);
@ -761,7 +831,7 @@ export async function loginWithExtension(
userPubkey.set(signerUser.pubkey); userPubkey.set(signerUser.pubkey);
const user = ndk.getUser({ pubkey: signerUser.pubkey }); const user = ndk.getUser({ pubkey: signerUser.pubkey });
// Update relay stores with the new system // Update relay stores with the new system
await updateActiveRelayStores(ndk); await updateActiveRelayStores(ndk);
@ -787,22 +857,20 @@ export function logout(user: NDKUser): void {
activePubkey.set(null); activePubkey.set(null);
userPubkey.set(null); userPubkey.set(null);
ndkSignedIn.set(false); ndkSignedIn.set(false);
// Clear relay stores // Clear relay stores
activeInboxRelays.set([]); activeInboxRelays.set([]);
activeOutboxRelays.set([]); activeOutboxRelays.set([]);
// AI-NOTE: 2025-01-08 - Clear persistent relay set on logout // AI-NOTE: 2025-01-08 - Clear persistent relay set on logout
persistentRelaySet = null; persistentRelaySet = null;
relaySetLastUpdated = 0; relaySetLastUpdated = 0;
clearPersistentRelaySet(); // Clear persistent storage clearPersistentRelaySet(); // Clear persistent storage
// Stop network monitoring // Stop network monitoring
stopNetworkStatusMonitoring(); stopNetworkStatusMonitoring();
// Re-initialize with anonymous instance // Re-initialize with anonymous instance
const newNdk = initNdk(); const newNdk = initNdk();
ndkInstance.set(newNdk); ndkInstance.set(newNdk);
} }

8
src/lib/parser.ts

@ -7,11 +7,11 @@ import type {
Block, Block,
Document, Document,
Extensions, Extensions,
Section,
ProcessorOptions, ProcessorOptions,
Section,
} from "asciidoctor"; } from "asciidoctor";
import he from "he"; import he from "he";
import { writable, type Writable } from "svelte/store"; import { type Writable, writable } from "svelte/store";
import { zettelKinds } from "./consts.ts"; import { zettelKinds } from "./consts.ts";
import { getMatchingTags } from "./utils/nostrUtils.ts"; import { getMatchingTags } from "./utils/nostrUtils.ts";
@ -906,13 +906,13 @@ export default class Pharos {
["#d", nodeId], ["#d", nodeId],
...this.extractAndNormalizeWikilinks(content!), ...this.extractAndNormalizeWikilinks(content!),
]; ];
// Extract image from content if present // Extract image from content if present
const imageUrl = this.extractImageFromContent(content!); const imageUrl = this.extractImageFromContent(content!);
if (imageUrl) { if (imageUrl) {
event.tags.push(["image", imageUrl]); event.tags.push(["image", imageUrl]);
} }
event.created_at = Date.now(); event.created_at = Date.now();
event.pubkey = pubkey; event.pubkey = pubkey;

28
src/lib/services/event_search_service.ts

@ -8,33 +8,37 @@ export class EventSearchService {
*/ */
getSearchType(query: string): { type: string; term: string } | null { getSearchType(query: string): { type: string; term: string } | null {
const lowerQuery = query.toLowerCase(); const lowerQuery = query.toLowerCase();
if (lowerQuery.startsWith("d:")) { if (lowerQuery.startsWith("d:")) {
const dTag = query.slice(2).trim().toLowerCase(); const dTag = query.slice(2).trim().toLowerCase();
return dTag ? { type: "d", term: dTag } : null; return dTag ? { type: "d", term: dTag } : null;
} }
if (lowerQuery.startsWith("t:")) { if (lowerQuery.startsWith("t:")) {
const searchTerm = query.slice(2).trim(); const searchTerm = query.slice(2).trim();
return searchTerm ? { type: "t", term: searchTerm } : null; return searchTerm ? { type: "t", term: searchTerm } : null;
} }
if (lowerQuery.startsWith("n:")) { if (lowerQuery.startsWith("n:")) {
const searchTerm = query.slice(2).trim(); const searchTerm = query.slice(2).trim();
return searchTerm ? { type: "n", term: searchTerm } : null; return searchTerm ? { type: "n", term: searchTerm } : null;
} }
if (query.includes("@")) { if (query.includes("@")) {
return { type: "nip05", term: query }; return { type: "nip05", term: query };
} }
return null; return null;
} }
/** /**
* Checks if a search value matches the current event * Checks if a search value matches the current event
*/ */
isCurrentEventMatch(searchValue: string, event: any, relays: string[]): boolean { isCurrentEventMatch(
searchValue: string,
event: any,
relays: string[],
): boolean {
const currentEventId = event.id; const currentEventId = event.id;
let currentNaddr = null; let currentNaddr = null;
let currentNevent = null; let currentNevent = null;
@ -42,21 +46,23 @@ export class EventSearchService {
let currentNprofile = null; let currentNprofile = null;
try { try {
const { neventEncode, naddrEncode, nprofileEncode } = require("$lib/utils"); const { neventEncode, naddrEncode, nprofileEncode } = require(
"$lib/utils",
);
const { getMatchingTags, toNpub } = require("$lib/utils/nostrUtils"); const { getMatchingTags, toNpub } = require("$lib/utils/nostrUtils");
currentNevent = neventEncode(event, relays); currentNevent = neventEncode(event, relays);
} catch {} } catch {}
try { try {
const { naddrEncode } = require("$lib/utils"); const { naddrEncode } = require("$lib/utils");
const { getMatchingTags } = require("$lib/utils/nostrUtils"); const { getMatchingTags } = require("$lib/utils/nostrUtils");
currentNaddr = getMatchingTags(event, "d")[0]?.[1] currentNaddr = getMatchingTags(event, "d")[0]?.[1]
? naddrEncode(event, relays) ? naddrEncode(event, relays)
: null; : null;
} catch {} } catch {}
try { try {
const { toNpub } = require("$lib/utils/nostrUtils"); const { toNpub } = require("$lib/utils/nostrUtils");
currentNpub = event.kind === 0 ? toNpub(event.pubkey) : null; currentNpub = event.kind === 0 ? toNpub(event.pubkey) : null;

57
src/lib/services/publisher.ts

@ -1,8 +1,11 @@
import { get } from "svelte/store"; import { get } from "svelte/store";
import { ndkInstance } from "../ndk.ts"; import { ndkInstance } from "../ndk.ts";
import { getMimeTags } from "../utils/mime.ts"; import { getMimeTags } from "../utils/mime.ts";
import { parseAsciiDocWithMetadata, metadataToTags } from "../utils/asciidoc_metadata.ts"; import {
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; metadataToTags,
parseAsciiDocWithMetadata,
} from "../utils/asciidoc_metadata.ts";
import { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk";
import { nip19 } from "nostr-tools"; import { nip19 } from "nostr-tools";
export interface PublishResult { export interface PublishResult {
@ -97,8 +100,9 @@ export async function publishZettel(
throw new Error("Failed to publish to any relays"); throw new Error("Failed to publish to any relays");
} }
} catch (error) { } catch (error) {
const errorMessage = const errorMessage = error instanceof Error
error instanceof Error ? error.message : "Unknown error"; ? error.message
: "Unknown error";
onError?.(errorMessage); onError?.(errorMessage);
return { success: false, error: errorMessage }; return { success: false, error: errorMessage };
} }
@ -115,14 +119,14 @@ export async function publishMultipleZettels(
const { content, kind = 30041, onError } = options; const { content, kind = 30041, onError } = options;
if (!content.trim()) { if (!content.trim()) {
const error = 'Please enter some content'; const error = "Please enter some content";
onError?.(error); onError?.(error);
return [{ success: false, error }]; return [{ success: false, error }];
} }
const ndk = get(ndkInstance); const ndk = get(ndkInstance);
if (!ndk?.activeUser) { if (!ndk?.activeUser) {
const error = 'Please log in first'; const error = "Please log in first";
onError?.(error); onError?.(error);
return [{ success: false, error }]; return [{ success: false, error }];
} }
@ -130,12 +134,14 @@ export async function publishMultipleZettels(
try { try {
const parsed = parseAsciiDocWithMetadata(content); const parsed = parseAsciiDocWithMetadata(content);
if (parsed.sections.length === 0) { if (parsed.sections.length === 0) {
throw new Error('No valid sections found in content'); throw new Error("No valid sections found in content");
} }
const allRelayUrls = Array.from(ndk.pool?.relays.values() || []).map((r) => r.url); const allRelayUrls = Array.from(ndk.pool?.relays.values() || []).map((r) =>
r.url
);
if (allRelayUrls.length === 0) { if (allRelayUrls.length === 0) {
throw new Error('No relays available in NDK pool'); throw new Error("No relays available in NDK pool");
} }
const relaySet = NDKRelaySet.fromRelayUrls(allRelayUrls, ndk); const relaySet = NDKRelaySet.fromRelayUrls(allRelayUrls, ndk);
@ -164,31 +170,42 @@ export async function publishMultipleZettels(
results.push({ success: true, eventId: ndkEvent.id }); results.push({ success: true, eventId: ndkEvent.id });
publishedEvents.push(ndkEvent); publishedEvents.push(ndkEvent);
} else { } else {
results.push({ success: false, error: 'Failed to publish to any relays' }); results.push({
success: false,
error: "Failed to publish to any relays",
});
} }
} catch (err) { } catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Unknown error'; const errorMessage = err instanceof Error
? err.message
: "Unknown error";
results.push({ success: false, error: errorMessage }); results.push({ success: false, error: errorMessage });
} }
} }
// Debug: extract and log 'e' and 'a' tags from all published events // Debug: extract and log 'e' and 'a' tags from all published events
publishedEvents.forEach(ev => { publishedEvents.forEach((ev) => {
// Extract d-tag from tags // Extract d-tag from tags
const dTagEntry = ev.tags.find(t => t[0] === 'd'); const dTagEntry = ev.tags.find((t) => t[0] === "d");
const dTag = dTagEntry ? dTagEntry[1] : ''; const dTag = dTagEntry ? dTagEntry[1] : "";
const aTag = `${ev.kind}:${ev.pubkey}:${dTag}`; const aTag = `${ev.kind}:${ev.pubkey}:${dTag}`;
console.log(`Event ${ev.id} tags:`); console.log(`Event ${ev.id} tags:`);
console.log(' e:', ev.id); console.log(" e:", ev.id);
console.log(' a:', aTag); console.log(" a:", aTag);
// Print nevent and naddr using nip19 // Print nevent and naddr using nip19
const nevent = nip19.neventEncode({ id: ev.id }); const nevent = nip19.neventEncode({ id: ev.id });
const naddr = nip19.naddrEncode({ kind: ev.kind, pubkey: ev.pubkey, identifier: dTag }); const naddr = nip19.naddrEncode({
console.log(' nevent:', nevent); kind: ev.kind,
console.log(' naddr:', naddr); pubkey: ev.pubkey,
identifier: dTag,
});
console.log(" nevent:", nevent);
console.log(" naddr:", naddr);
}); });
return results; return results;
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'; const errorMessage = error instanceof Error
? error.message
: "Unknown error";
onError?.(errorMessage); onError?.(errorMessage);
return [{ success: false, error: errorMessage }]; return [{ success: false, error: errorMessage }];
} }

20
src/lib/services/search_state_manager.ts

@ -13,7 +13,7 @@ export class SearchStateManager {
searchResultCount: number | null; searchResultCount: number | null;
searchResultType: string | null; searchResultType: string | null;
}, },
onLoadingChange?: (loading: boolean) => void onLoadingChange?: (loading: boolean) => void,
): void { ): void {
if (onLoadingChange) { if (onLoadingChange) {
onLoadingChange(state.searching); onLoadingChange(state.searching);
@ -25,10 +25,16 @@ export class SearchStateManager {
*/ */
resetSearchState( resetSearchState(
callbacks: { callbacks: {
onSearchResults: (events: any[], secondOrder: any[], tTagEvents: any[], eventIds: Set<string>, addresses: Set<string>) => void; onSearchResults: (
events: any[],
secondOrder: any[],
tTagEvents: any[],
eventIds: Set<string>,
addresses: Set<string>,
) => void;
cleanupSearch: () => void; cleanupSearch: () => void;
clearTimeout: () => void; clearTimeout: () => void;
} },
): void { ): void {
callbacks.cleanupSearch(); callbacks.cleanupSearch();
callbacks.onSearchResults([], [], [], new Set(), new Set()); callbacks.onSearchResults([], [], [], new Set(), new Set());
@ -46,16 +52,18 @@ export class SearchStateManager {
cleanupSearch: () => void; cleanupSearch: () => void;
updateSearchState: (state: any) => void; updateSearchState: (state: any) => void;
resetProcessingFlags: () => void; resetProcessingFlags: () => void;
} },
): void { ): void {
const errorMessage = error instanceof Error ? error.message : defaultMessage; const errorMessage = error instanceof Error
? error.message
: defaultMessage;
callbacks.setLocalError(errorMessage); callbacks.setLocalError(errorMessage);
callbacks.cleanupSearch(); callbacks.cleanupSearch();
callbacks.updateSearchState({ callbacks.updateSearchState({
searching: false, searching: false,
searchCompleted: false, searchCompleted: false,
searchResultCount: null, searchResultCount: null,
searchResultType: null searchResultType: null,
}); });
callbacks.resetProcessingFlags(); callbacks.resetProcessingFlags();
} }

2
src/lib/state.ts

@ -1,5 +1,5 @@
import { browser } from "$app/environment"; import { browser } from "$app/environment";
import { writable, type Writable } from "svelte/store"; import { type Writable, writable } from "svelte/store";
import type { Tab } from "./types.ts"; import type { Tab } from "./types.ts";
export const pathLoaded: Writable<boolean> = writable(false); export const pathLoaded: Writable<boolean> = writable(false);

2
src/lib/stores/authStore.Svelte.ts

@ -1,4 +1,4 @@
import { writable, derived } from "svelte/store"; import { derived, writable } from "svelte/store";
/** /**
* Stores the user's public key if logged in, or null otherwise. * Stores the user's public key if logged in, or null otherwise.

26
src/lib/stores/networkStore.ts

@ -1,8 +1,14 @@
import { writable } from "svelte/store"; import { writable } from "svelte/store";
import { detectNetworkCondition, NetworkCondition, startNetworkMonitoring } from '../utils/network_detection.ts'; import {
detectNetworkCondition,
NetworkCondition,
startNetworkMonitoring,
} from "../utils/network_detection.ts";
// Network status store // Network status store
export const networkCondition = writable<NetworkCondition>(NetworkCondition.ONLINE); export const networkCondition = writable<NetworkCondition>(
NetworkCondition.ONLINE,
);
export const isNetworkChecking = writable<boolean>(false); export const isNetworkChecking = writable<boolean>(false);
// Network monitoring state // Network monitoring state
@ -16,14 +22,16 @@ export function startNetworkStatusMonitoring(): void {
return; // Already monitoring return; // Already monitoring
} }
console.debug('[networkStore.ts] Starting network status monitoring'); console.debug("[networkStore.ts] Starting network status monitoring");
stopNetworkMonitoring = startNetworkMonitoring( stopNetworkMonitoring = startNetworkMonitoring(
(condition: NetworkCondition) => { (condition: NetworkCondition) => {
console.debug(`[networkStore.ts] Network condition changed to: ${condition}`); console.debug(
`[networkStore.ts] Network condition changed to: ${condition}`,
);
networkCondition.set(condition); networkCondition.set(condition);
}, },
60000 // Check every 60 seconds to reduce spam 60000, // Check every 60 seconds to reduce spam
); );
} }
@ -32,7 +40,7 @@ export function startNetworkStatusMonitoring(): void {
*/ */
export function stopNetworkStatusMonitoring(): void { export function stopNetworkStatusMonitoring(): void {
if (stopNetworkMonitoring) { if (stopNetworkMonitoring) {
console.debug('[networkStore.ts] Stopping network status monitoring'); console.debug("[networkStore.ts] Stopping network status monitoring");
stopNetworkMonitoring(); stopNetworkMonitoring();
stopNetworkMonitoring = null; stopNetworkMonitoring = null;
} }
@ -47,9 +55,9 @@ export async function checkNetworkStatus(): Promise<void> {
const condition = await detectNetworkCondition(); const condition = await detectNetworkCondition();
networkCondition.set(condition); networkCondition.set(condition);
} catch (error) { } catch (error) {
console.warn('[networkStore.ts] Failed to check network status:', error); console.warn("[networkStore.ts] Failed to check network status:", error);
networkCondition.set(NetworkCondition.OFFLINE); networkCondition.set(NetworkCondition.OFFLINE);
} finally { } finally {
isNetworkChecking.set(false); isNetworkChecking.set(false);
} }
} }

119
src/lib/stores/userStore.ts

@ -1,14 +1,19 @@
import { writable, get } from "svelte/store"; import { get, writable } from "svelte/store";
import type { NostrProfile } from "../utils/nostrUtils.ts"; import type { NostrProfile } from "../utils/nostrUtils.ts";
import type { NDKUser, NDKSigner } from "@nostr-dev-kit/ndk"; import type { NDKSigner, NDKUser } from "@nostr-dev-kit/ndk";
import NDK, { import NDK, {
NDKNip07Signer, NDKNip07Signer,
NDKRelay,
NDKRelayAuthPolicies, NDKRelayAuthPolicies,
NDKRelaySet, NDKRelaySet,
NDKRelay,
} from "@nostr-dev-kit/ndk"; } from "@nostr-dev-kit/ndk";
import { getUserMetadata } from "../utils/nostrUtils.ts"; import { getUserMetadata } from "../utils/nostrUtils.ts";
import { ndkInstance, activeInboxRelays, activeOutboxRelays, updateActiveRelayStores } from "../ndk.ts"; import {
activeInboxRelays,
activeOutboxRelays,
ndkInstance,
updateActiveRelayStores,
} from "../ndk.ts";
import { loginStorageKey } from "../consts.ts"; import { loginStorageKey } from "../consts.ts";
import { nip19 } from "nostr-tools"; import { nip19 } from "nostr-tools";
import { userPubkey } from "../stores/authStore.Svelte.ts"; import { userPubkey } from "../stores/authStore.Svelte.ts";
@ -46,8 +51,8 @@ function persistRelays(
outboxes: Set<NDKRelay>, outboxes: Set<NDKRelay>,
): void { ): void {
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window === 'undefined') return; if (typeof window === "undefined") return;
localStorage.setItem( localStorage.setItem(
getRelayStorageKey(user, "inbox"), getRelayStorageKey(user, "inbox"),
JSON.stringify(Array.from(inboxes).map((relay) => relay.url)), JSON.stringify(Array.from(inboxes).map((relay) => relay.url)),
@ -60,10 +65,10 @@ function persistRelays(
function getPersistedRelays(user: NDKUser): [Set<string>, Set<string>] { function getPersistedRelays(user: NDKUser): [Set<string>, Set<string>] {
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window === 'undefined') { if (typeof window === "undefined") {
return [new Set<string>(), new Set<string>()]; return [new Set<string>(), new Set<string>()];
} }
const inboxes = new Set<string>( const inboxes = new Set<string>(
JSON.parse(localStorage.getItem(getRelayStorageKey(user, "inbox")) ?? "[]"), JSON.parse(localStorage.getItem(getRelayStorageKey(user, "inbox")) ?? "[]"),
); );
@ -79,7 +84,10 @@ function getPersistedRelays(user: NDKUser): [Set<string>, Set<string>] {
async function getUserPreferredRelays( async function getUserPreferredRelays(
ndk: NDK, ndk: NDK,
user: NDKUser, user: NDKUser,
fallbacks: readonly string[] = [...get(activeInboxRelays), ...get(activeOutboxRelays)], fallbacks: readonly string[] = [
...get(activeInboxRelays),
...get(activeOutboxRelays),
],
): Promise<[Set<NDKRelay>, Set<NDKRelay>]> { ): Promise<[Set<NDKRelay>, Set<NDKRelay>]> {
const relayList = await ndk.fetchEvent( const relayList = await ndk.fetchEvent(
{ {
@ -144,8 +152,8 @@ export const loginMethodStorageKey = "alexandria/login/method";
function persistLogin(user: NDKUser, method: "extension" | "amber" | "npub") { function persistLogin(user: NDKUser, method: "extension" | "amber" | "npub") {
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window === 'undefined') return; if (typeof window === "undefined") return;
localStorage.setItem(loginStorageKey, user.pubkey); localStorage.setItem(loginStorageKey, user.pubkey);
localStorage.setItem(loginMethodStorageKey, method); localStorage.setItem(loginMethodStorageKey, method);
} }
@ -165,9 +173,9 @@ export async function loginWithExtension() {
const signer = new NDKNip07Signer(); const signer = new NDKNip07Signer();
const user = await signer.user(); const user = await signer.user();
const npub = user.npub; const npub = user.npub;
console.log("Login with extension - fetching profile for npub:", npub); console.log("Login with extension - fetching profile for npub:", npub);
// Try to fetch user metadata, but don't fail if it times out // Try to fetch user metadata, but don't fail if it times out
let profile: NostrProfile | null = null; let profile: NostrProfile | null = null;
try { try {
@ -183,7 +191,7 @@ export async function loginWithExtension() {
}; };
console.log("Login with extension - using fallback profile:", profile); console.log("Login with extension - using fallback profile:", profile);
} }
// Fetch user's preferred relays // Fetch user's preferred relays
const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user); const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user);
for (const relay of persistedInboxes) { for (const relay of persistedInboxes) {
@ -193,7 +201,7 @@ export async function loginWithExtension() {
persistRelays(user, inboxes, outboxes); persistRelays(user, inboxes, outboxes);
ndk.signer = signer; ndk.signer = signer;
ndk.activeUser = user; ndk.activeUser = user;
const userState = { const userState = {
pubkey: user.pubkey, pubkey: user.pubkey,
npub, npub,
@ -209,22 +217,27 @@ export async function loginWithExtension() {
signer, signer,
signedIn: true, signedIn: true,
}; };
console.log("Login with extension - setting userStore with:", userState); console.log("Login with extension - setting userStore with:", userState);
userStore.set(userState); userStore.set(userState);
userPubkey.set(user.pubkey); userPubkey.set(user.pubkey);
// Update relay stores with the new user's relays // Update relay stores with the new user's relays
try { try {
console.debug('[userStore.ts] loginWithExtension: Updating relay stores for authenticated user'); console.debug(
"[userStore.ts] loginWithExtension: Updating relay stores for authenticated user",
);
await updateActiveRelayStores(ndk, true); // Force update to rebuild relay set for authenticated user await updateActiveRelayStores(ndk, true); // Force update to rebuild relay set for authenticated user
} catch (error) { } catch (error) {
console.warn('[userStore.ts] loginWithExtension: Failed to update relay stores:', error); console.warn(
"[userStore.ts] loginWithExtension: Failed to update relay stores:",
error,
);
} }
clearLogin(); clearLogin();
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window !== 'undefined') { if (typeof window !== "undefined") {
localStorage.removeItem("alexandria/logout/flag"); localStorage.removeItem("alexandria/logout/flag");
} }
persistLogin(user, "extension"); persistLogin(user, "extension");
@ -238,9 +251,9 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
if (!ndk) throw new Error("NDK not initialized"); if (!ndk) throw new Error("NDK not initialized");
// Only clear previous login state after successful login // Only clear previous login state after successful login
const npub = user.npub; const npub = user.npub;
console.log("Login with Amber - fetching profile for npub:", npub); console.log("Login with Amber - fetching profile for npub:", npub);
let profile: NostrProfile | null = null; let profile: NostrProfile | null = null;
try { try {
profile = await getUserMetadata(npub, true); // Force fresh fetch profile = await getUserMetadata(npub, true); // Force fresh fetch
@ -254,7 +267,7 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
}; };
console.log("Login with Amber - using fallback profile:", profile); console.log("Login with Amber - using fallback profile:", profile);
} }
const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user); const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user);
for (const relay of persistedInboxes) { for (const relay of persistedInboxes) {
ndk.addExplicitRelay(relay); ndk.addExplicitRelay(relay);
@ -263,7 +276,7 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
persistRelays(user, inboxes, outboxes); persistRelays(user, inboxes, outboxes);
ndk.signer = amberSigner; ndk.signer = amberSigner;
ndk.activeUser = user; ndk.activeUser = user;
const userState = { const userState = {
pubkey: user.pubkey, pubkey: user.pubkey,
npub, npub,
@ -279,22 +292,27 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
signer: amberSigner, signer: amberSigner,
signedIn: true, signedIn: true,
}; };
console.log("Login with Amber - setting userStore with:", userState); console.log("Login with Amber - setting userStore with:", userState);
userStore.set(userState); userStore.set(userState);
userPubkey.set(user.pubkey); userPubkey.set(user.pubkey);
// Update relay stores with the new user's relays // Update relay stores with the new user's relays
try { try {
console.debug('[userStore.ts] loginWithAmber: Updating relay stores for authenticated user'); console.debug(
"[userStore.ts] loginWithAmber: Updating relay stores for authenticated user",
);
await updateActiveRelayStores(ndk, true); // Force update to rebuild relay set for authenticated user await updateActiveRelayStores(ndk, true); // Force update to rebuild relay set for authenticated user
} catch (error) { } catch (error) {
console.warn('[userStore.ts] loginWithAmber: Failed to update relay stores:', error); console.warn(
"[userStore.ts] loginWithAmber: Failed to update relay stores:",
error,
);
} }
clearLogin(); clearLogin();
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window !== 'undefined') { if (typeof window !== "undefined") {
localStorage.removeItem("alexandria/logout/flag"); localStorage.removeItem("alexandria/logout/flag");
} }
persistLogin(user, "amber"); persistLogin(user, "amber");
@ -331,23 +349,28 @@ export async function loginWithNpub(pubkeyOrNpub: string) {
console.error("Failed to encode npub from hex pubkey:", hexPubkey, e); console.error("Failed to encode npub from hex pubkey:", hexPubkey, e);
throw e; throw e;
} }
console.log("Login with npub - fetching profile for npub:", npub); console.log("Login with npub - fetching profile for npub:", npub);
const user = ndk.getUser({ npub }); const user = ndk.getUser({ npub });
let profile: NostrProfile | null = null; let profile: NostrProfile | null = null;
// First, update relay stores to ensure we have relays available // First, update relay stores to ensure we have relays available
try { try {
console.debug('[userStore.ts] loginWithNpub: Updating relay stores for authenticated user'); console.debug(
"[userStore.ts] loginWithNpub: Updating relay stores for authenticated user",
);
await updateActiveRelayStores(ndk); await updateActiveRelayStores(ndk);
} catch (error) { } catch (error) {
console.warn('[userStore.ts] loginWithNpub: Failed to update relay stores:', error); console.warn(
"[userStore.ts] loginWithNpub: Failed to update relay stores:",
error,
);
} }
// Wait a moment for relay stores to be properly initialized // Wait a moment for relay stores to be properly initialized
await new Promise(resolve => setTimeout(resolve, 500)); await new Promise((resolve) => setTimeout(resolve, 500));
try { try {
profile = await getUserMetadata(npub, true); // Force fresh fetch profile = await getUserMetadata(npub, true); // Force fresh fetch
console.log("Login with npub - fetched profile:", profile); console.log("Login with npub - fetched profile:", profile);
@ -360,10 +383,10 @@ export async function loginWithNpub(pubkeyOrNpub: string) {
}; };
console.log("Login with npub - using fallback profile:", profile); console.log("Login with npub - using fallback profile:", profile);
} }
ndk.signer = undefined; ndk.signer = undefined;
ndk.activeUser = user; ndk.activeUser = user;
const userState = { const userState = {
pubkey: user.pubkey, pubkey: user.pubkey,
npub, npub,
@ -374,14 +397,14 @@ export async function loginWithNpub(pubkeyOrNpub: string) {
signer: null, signer: null,
signedIn: true, signedIn: true,
}; };
console.log("Login with npub - setting userStore with:", userState); console.log("Login with npub - setting userStore with:", userState);
userStore.set(userState); userStore.set(userState);
userPubkey.set(user.pubkey); userPubkey.set(user.pubkey);
clearLogin(); clearLogin();
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window !== 'undefined') { if (typeof window !== "undefined") {
localStorage.removeItem("alexandria/logout/flag"); localStorage.removeItem("alexandria/logout/flag");
} }
persistLogin(user, "npub"); persistLogin(user, "npub");
@ -393,13 +416,15 @@ export async function loginWithNpub(pubkeyOrNpub: string) {
export function logoutUser() { export function logoutUser() {
console.log("Logging out user..."); console.log("Logging out user...");
const currentUser = get(userStore); const currentUser = get(userStore);
// Only access localStorage on client-side // Only access localStorage on client-side
if (typeof window !== 'undefined') { if (typeof window !== "undefined") {
if (currentUser.ndkUser) { if (currentUser.ndkUser) {
// Clear persisted relays for the user // Clear persisted relays for the user
localStorage.removeItem(getRelayStorageKey(currentUser.ndkUser, "inbox")); localStorage.removeItem(getRelayStorageKey(currentUser.ndkUser, "inbox"));
localStorage.removeItem(getRelayStorageKey(currentUser.ndkUser, "outbox")); localStorage.removeItem(
getRelayStorageKey(currentUser.ndkUser, "outbox"),
);
} }
// Clear all possible login states from localStorage // Clear all possible login states from localStorage

43
src/lib/stores/visualizationConfig.ts

@ -1,4 +1,4 @@
import { writable, derived, get } from "svelte/store"; import { derived, get, writable } from "svelte/store";
export interface EventKindConfig { export interface EventKindConfig {
kind: number; kind: number;
@ -39,8 +39,10 @@ function createVisualizationConfig() {
eventConfigs: DEFAULT_EVENT_CONFIGS, eventConfigs: DEFAULT_EVENT_CONFIGS,
searchThroughFetched: true, searchThroughFetched: true,
}; };
const { subscribe, set, update } = writable<VisualizationConfig>(initialConfig); const { subscribe, set, update } = writable<VisualizationConfig>(
initialConfig,
);
function reset() { function reset() {
set(initialConfig); set(initialConfig);
@ -52,19 +54,19 @@ function createVisualizationConfig() {
if (config.eventConfigs.some((ec) => ec.kind === kind)) { if (config.eventConfigs.some((ec) => ec.kind === kind)) {
return config; return config;
} }
const newConfig: EventKindConfig = { kind, limit, enabled: true }; const newConfig: EventKindConfig = { kind, limit, enabled: true };
// Add nestedLevels for 30040 // Add nestedLevels for 30040
if (kind === 30040) { if (kind === 30040) {
newConfig.nestedLevels = 1; newConfig.nestedLevels = 1;
} }
// Add depth for kind 3 // Add depth for kind 3
if (kind === 3) { if (kind === 3) {
newConfig.depth = 0; newConfig.depth = 0;
} }
return { return {
...config, ...config,
eventConfigs: [...config.eventConfigs, newConfig], eventConfigs: [...config.eventConfigs, newConfig],
@ -83,7 +85,7 @@ function createVisualizationConfig() {
update((config) => ({ update((config) => ({
...config, ...config,
eventConfigs: config.eventConfigs.map((ec) => eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === kind ? { ...ec, limit } : ec, ec.kind === kind ? { ...ec, limit } : ec
), ),
})); }));
} }
@ -92,7 +94,7 @@ function createVisualizationConfig() {
update((config) => ({ update((config) => ({
...config, ...config,
eventConfigs: config.eventConfigs.map((ec) => eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === 30040 ? { ...ec, nestedLevels: levels } : ec, ec.kind === 30040 ? { ...ec, nestedLevels: levels } : ec
), ),
})); }));
} }
@ -101,7 +103,7 @@ function createVisualizationConfig() {
update((config) => ({ update((config) => ({
...config, ...config,
eventConfigs: config.eventConfigs.map((ec) => eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === 3 ? { ...ec, depth: depth } : ec, ec.kind === 3 ? { ...ec, depth: depth } : ec
), ),
})); }));
} }
@ -110,7 +112,7 @@ function createVisualizationConfig() {
update((config) => ({ update((config) => ({
...config, ...config,
eventConfigs: config.eventConfigs.map((ec) => eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === kind ? { ...ec, showAll: !ec.showAll } : ec, ec.kind === kind ? { ...ec, showAll: !ec.showAll } : ec
), ),
})); }));
} }
@ -134,7 +136,7 @@ function createVisualizationConfig() {
update((config) => ({ update((config) => ({
...config, ...config,
eventConfigs: config.eventConfigs.map((ec) => eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === kind ? { ...ec, enabled: !ec.enabled } : ec, ec.kind === kind ? { ...ec, enabled: !ec.enabled } : ec
), ),
})); }));
} }
@ -158,10 +160,12 @@ function createVisualizationConfig() {
export const visualizationConfig = createVisualizationConfig(); export const visualizationConfig = createVisualizationConfig();
// Helper to get all enabled event kinds // Helper to get all enabled event kinds
export const enabledEventKinds = derived(visualizationConfig, ($config) => export const enabledEventKinds = derived(
$config.eventConfigs visualizationConfig,
.filter((ec) => ec.enabled !== false) ($config) =>
.map((ec) => ec.kind), $config.eventConfigs
.filter((ec) => ec.enabled !== false)
.map((ec) => ec.kind),
); );
/** /**
@ -169,7 +173,10 @@ export const enabledEventKinds = derived(visualizationConfig, ($config) =>
* @param config - The VisualizationConfig object. * @param config - The VisualizationConfig object.
* @param kind - The event kind number to check. * @param kind - The event kind number to check.
*/ */
export function isKindEnabledFn(config: VisualizationConfig, kind: number): boolean { export function isKindEnabledFn(
config: VisualizationConfig,
kind: number,
): boolean {
const eventConfig = config.eventConfigs.find((ec) => ec.kind === kind); const eventConfig = config.eventConfigs.find((ec) => ec.kind === kind);
// If not found, return false. Otherwise, return true unless explicitly disabled. // If not found, return false. Otherwise, return true unless explicitly disabled.
return !!eventConfig && eventConfig.enabled !== false; return !!eventConfig && eventConfig.enabled !== false;
@ -178,5 +185,5 @@ export function isKindEnabledFn(config: VisualizationConfig, kind: number): bool
// Derived store: returns a function that checks if a kind is enabled in the current config. // Derived store: returns a function that checks if a kind is enabled in the current config.
export const isKindEnabledStore = derived( export const isKindEnabledStore = derived(
visualizationConfig, visualizationConfig,
($config) => (kind: number) => isKindEnabledFn($config, kind) ($config) => (kind: number) => isKindEnabledFn($config, kind),
); );

24
src/lib/utils.ts

@ -26,7 +26,7 @@ export function neventEncode(event: NDKEvent, relays: string[]) {
relays, relays,
author: event.pubkey, author: event.pubkey,
}); });
return nevent; return nevent;
} catch (error) { } catch (error) {
console.error(`[neventEncode] Error encoding nevent:`, error); console.error(`[neventEncode] Error encoding nevent:`, error);
@ -54,7 +54,10 @@ export function naddrEncode(event: NDKEvent, relays: string[]) {
* @param relays Optional relay list for the address * @param relays Optional relay list for the address
* @returns A tag address string * @returns A tag address string
*/ */
export function createTagAddress(event: NostrEvent, relays: string[] = []): string { export function createTagAddress(
event: NostrEvent,
relays: string[] = [],
): string {
const dTag = event.tags.find((tag: string[]) => tag[0] === "d")?.[1]; const dTag = event.tags.find((tag: string[]) => tag[0] === "d")?.[1];
if (!dTag) { if (!dTag) {
throw new Error("Event does not have a d tag"); throw new Error("Event does not have a d tag");
@ -144,10 +147,9 @@ export function next(): number {
export function scrollTabIntoView(el: string | HTMLElement, wait: boolean) { export function scrollTabIntoView(el: string | HTMLElement, wait: boolean) {
function scrollTab() { function scrollTab() {
const element = const element = typeof el === "string"
typeof el === "string" ? document.querySelector(`[id^="wikitab-v0-${el}"]`)
? document.querySelector(`[id^="wikitab-v0-${el}"]`) : el;
: el;
if (!element) return; if (!element) return;
element.scrollIntoView({ element.scrollIntoView({
@ -166,10 +168,9 @@ export function scrollTabIntoView(el: string | HTMLElement, wait: boolean) {
} }
export function isElementInViewport(el: string | HTMLElement) { export function isElementInViewport(el: string | HTMLElement) {
const element = const element = typeof el === "string"
typeof el === "string" ? document.querySelector(`[id^="wikitab-v0-${el}"]`)
? document.querySelector(`[id^="wikitab-v0-${el}"]`) : el;
: el;
if (!element) return; if (!element) return;
const rect = element.getBoundingClientRect(); const rect = element.getBoundingClientRect();
@ -179,7 +180,8 @@ export function isElementInViewport(el: string | HTMLElement) {
rect.left >= 0 && rect.left >= 0 &&
rect.bottom <= rect.bottom <=
(globalThis.innerHeight || document.documentElement.clientHeight) && (globalThis.innerHeight || document.documentElement.clientHeight) &&
rect.right <= (globalThis.innerWidth || document.documentElement.clientWidth) rect.right <=
(globalThis.innerWidth || document.documentElement.clientWidth)
); );
} }

10
src/lib/utils/ZettelParser.ts

@ -41,7 +41,7 @@ export function parseZettelSection(section: string): ZettelSection {
const trimmed = line.trim(); const trimmed = line.trim();
if (trimmed.startsWith("==")) { if (trimmed.startsWith("==")) {
title = trimmed.replace(/^==+/, "").trim(); title = trimmed.replace(/^==+/, "").trim();
// Process header metadata (everything after title until blank line) // Process header metadata (everything after title until blank line)
let j = i + 1; let j = i + 1;
while (j < lines.length && lines[j].trim() !== "") { while (j < lines.length && lines[j].trim() !== "") {
@ -54,12 +54,12 @@ export function parseZettelSection(section: string): ZettelSection {
j++; j++;
} }
} }
// Skip the blank line // Skip the blank line
if (j < lines.length && lines[j].trim() === "") { if (j < lines.length && lines[j].trim() === "") {
j++; j++;
} }
// Everything after the blank line is content // Everything after the blank line is content
for (let k = j; k < lines.length; k++) { for (let k = j; k < lines.length; k++) {
contentLines.push(lines[k]); contentLines.push(lines[k]);
@ -100,13 +100,13 @@ export function extractTags(content: string): string[][] {
for (let i = 0; i < lines.length; i++) { for (let i = 0; i < lines.length; i++) {
const line = lines[i]; const line = lines[i];
const trimmed = line.trim(); const trimmed = line.trim();
if (trimmed.startsWith("==")) { if (trimmed.startsWith("==")) {
// Process header metadata (everything after title until blank line) // Process header metadata (everything after title until blank line)
let j = i + 1; let j = i + 1;
while (j < lines.length && lines[j].trim() !== "") { while (j < lines.length && lines[j].trim() !== "") {
const headerLine = lines[j].trim(); const headerLine = lines[j].trim();
if (headerLine.startsWith(":")) { if (headerLine.startsWith(":")) {
// Parse AsciiDoc attribute format: :tagname: value // Parse AsciiDoc attribute format: :tagname: value
const match = headerLine.match(/^:([^:]+):\s*(.*)$/); const match = headerLine.match(/^:([^:]+):\s*(.*)$/);

273
src/lib/utils/asciidoc_metadata.ts

@ -1,6 +1,6 @@
/** /**
* AsciiDoc Metadata Extraction Service using Asciidoctor * AsciiDoc Metadata Extraction Service using Asciidoctor
* *
* Thin wrapper around Asciidoctor's built-in metadata extraction capabilities. * Thin wrapper around Asciidoctor's built-in metadata extraction capabilities.
* Leverages the existing Pharos parser to avoid duplication. * Leverages the existing Pharos parser to avoid duplication.
*/ */
@ -23,7 +23,7 @@ export interface AsciiDocMetadata {
source?: string; source?: string;
publishedBy?: string; publishedBy?: string;
type?: string; type?: string;
autoUpdate?: 'yes' | 'ask' | 'no'; autoUpdate?: "yes" | "ask" | "no";
} }
export type SectionMetadata = AsciiDocMetadata; export type SectionMetadata = AsciiDocMetadata;
@ -41,29 +41,29 @@ export interface ParsedAsciiDoc {
// Shared attribute mapping based on Asciidoctor standard attributes // Shared attribute mapping based on Asciidoctor standard attributes
const ATTRIBUTE_MAP: Record<string, keyof AsciiDocMetadata> = { const ATTRIBUTE_MAP: Record<string, keyof AsciiDocMetadata> = {
// Standard Asciidoctor attributes // Standard Asciidoctor attributes
'author': 'authors', "author": "authors",
'description': 'summary', "description": "summary",
'keywords': 'tags', "keywords": "tags",
'revnumber': 'version', "revnumber": "version",
'revdate': 'publicationDate', "revdate": "publicationDate",
'revremark': 'edition', "revremark": "edition",
'title': 'title', "title": "title",
// Custom attributes for Alexandria // Custom attributes for Alexandria
'published_by': 'publishedBy', "published_by": "publishedBy",
'publisher': 'publisher', "publisher": "publisher",
'summary': 'summary', "summary": "summary",
'image': 'coverImage', "image": "coverImage",
'cover': 'coverImage', "cover": "coverImage",
'isbn': 'isbn', "isbn": "isbn",
'source': 'source', "source": "source",
'type': 'type', "type": "type",
'auto-update': 'autoUpdate', "auto-update": "autoUpdate",
'version': 'version', "version": "version",
'edition': 'edition', "edition": "edition",
'published_on': 'publicationDate', "published_on": "publicationDate",
'date': 'publicationDate', "date": "publicationDate",
'version-label': 'version', "version-label": "version",
}; };
/** /**
@ -78,37 +78,41 @@ function createProcessor() {
*/ */
function extractTagsFromAttributes(attributes: Record<string, any>): string[] { function extractTagsFromAttributes(attributes: Record<string, any>): string[] {
const tags: string[] = []; const tags: string[] = [];
const attrTags = attributes['tags']; const attrTags = attributes["tags"];
const attrKeywords = attributes['keywords']; const attrKeywords = attributes["keywords"];
if (attrTags && typeof attrTags === 'string') { if (attrTags && typeof attrTags === "string") {
tags.push(...attrTags.split(',').map(tag => tag.trim())); tags.push(...attrTags.split(",").map((tag) => tag.trim()));
} }
if (attrKeywords && typeof attrKeywords === 'string') { if (attrKeywords && typeof attrKeywords === "string") {
tags.push(...attrKeywords.split(',').map(tag => tag.trim())); tags.push(...attrKeywords.split(",").map((tag) => tag.trim()));
} }
return [...new Set(tags)]; // Remove duplicates return [...new Set(tags)]; // Remove duplicates
} }
/** /**
* Maps attributes to metadata with special handling for authors and tags * Maps attributes to metadata with special handling for authors and tags
*/ */
function mapAttributesToMetadata(attributes: Record<string, any>, metadata: AsciiDocMetadata, isDocument: boolean = false): void { function mapAttributesToMetadata(
attributes: Record<string, any>,
metadata: AsciiDocMetadata,
isDocument: boolean = false,
): void {
for (const [key, value] of Object.entries(attributes)) { for (const [key, value] of Object.entries(attributes)) {
const metadataKey = ATTRIBUTE_MAP[key.toLowerCase()]; const metadataKey = ATTRIBUTE_MAP[key.toLowerCase()];
if (metadataKey && value && typeof value === 'string') { if (metadataKey && value && typeof value === "string") {
if (metadataKey === 'authors' && isDocument) { if (metadataKey === "authors" && isDocument) {
// Skip author mapping for documents since it's handled manually // Skip author mapping for documents since it's handled manually
continue; continue;
} else if (metadataKey === 'authors' && !isDocument) { } else if (metadataKey === "authors" && !isDocument) {
// For sections, append author to existing authors array // For sections, append author to existing authors array
if (!metadata.authors) { if (!metadata.authors) {
metadata.authors = []; metadata.authors = [];
} }
metadata.authors.push(value); metadata.authors.push(value);
} else if (metadataKey === 'tags') { } else if (metadataKey === "tags") {
// Skip tags mapping since it's handled by extractTagsFromAttributes // Skip tags mapping since it's handled by extractTagsFromAttributes
continue; continue;
} else { } else {
@ -121,11 +125,14 @@ function mapAttributesToMetadata(attributes: Record<string, any>, metadata: Asci
/** /**
* Extracts authors from header line (document or section) * Extracts authors from header line (document or section)
*/ */
function extractAuthorsFromHeader(sourceContent: string, isSection: boolean = false): string[] { function extractAuthorsFromHeader(
sourceContent: string,
isSection: boolean = false,
): string[] {
const authors: string[] = []; const authors: string[] = [];
const lines = sourceContent.split(/\r?\n/); const lines = sourceContent.split(/\r?\n/);
const headerPattern = isSection ? /^==\s+/ : /^=\s+/; const headerPattern = isSection ? /^==\s+/ : /^=\s+/;
for (let i = 0; i < lines.length; i++) { for (let i = 0; i < lines.length; i++) {
const line = lines[i]; const line = lines[i];
if (line.match(headerPattern)) { if (line.match(headerPattern)) {
@ -133,51 +140,60 @@ function extractAuthorsFromHeader(sourceContent: string, isSection: boolean = fa
let j = i + 1; let j = i + 1;
while (j < lines.length) { while (j < lines.length) {
const authorLine = lines[j]; const authorLine = lines[j];
// Stop if we hit a blank line or content that's not an author // Stop if we hit a blank line or content that's not an author
if (authorLine.trim() === '') { if (authorLine.trim() === "") {
break; break;
} }
if (authorLine.includes('<') && !authorLine.startsWith(':')) { if (authorLine.includes("<") && !authorLine.startsWith(":")) {
// This is an author line like "John Doe <john@example.com>" // This is an author line like "John Doe <john@example.com>"
const authorName = authorLine.split('<')[0].trim(); const authorName = authorLine.split("<")[0].trim();
if (authorName) { if (authorName) {
authors.push(authorName); authors.push(authorName);
} }
} else if (isSection && authorLine.match(/^[A-Za-z\s]+$/) && authorLine.trim() !== '' && authorLine.trim().split(/\s+/).length <= 2) { } else if (
isSection && authorLine.match(/^[A-Za-z\s]+$/) &&
authorLine.trim() !== "" && authorLine.trim().split(/\s+/).length <= 2
) {
// This is a simple author name without email (for sections) // This is a simple author name without email (for sections)
authors.push(authorLine.trim()); authors.push(authorLine.trim());
} else if (authorLine.startsWith(':')) { } else if (authorLine.startsWith(":")) {
// This is an attribute line, skip it - attributes are handled by mapAttributesToMetadata // This is an attribute line, skip it - attributes are handled by mapAttributesToMetadata
// Don't break here, continue to next line // Don't break here, continue to next line
} else { } else {
// Not an author line, stop looking // Not an author line, stop looking
break; break;
} }
j++; j++;
} }
break; break;
} }
} }
return authors; return authors;
} }
/** /**
* Strips header and attribute lines from content * Strips header and attribute lines from content
*/ */
function stripHeaderAndAttributes(content: string, isSection: boolean = false): string { function stripHeaderAndAttributes(
content: string,
isSection: boolean = false,
): string {
const lines = content.split(/\r?\n/); const lines = content.split(/\r?\n/);
let contentStart = 0; let contentStart = 0;
const headerPattern = isSection ? /^==\s+/ : /^=\s+/; const headerPattern = isSection ? /^==\s+/ : /^=\s+/;
for (let i = 0; i < lines.length; i++) { for (let i = 0; i < lines.length; i++) {
const line = lines[i]; const line = lines[i];
// Skip title line, author line, revision line, and attribute lines // Skip title line, author line, revision line, and attribute lines
if (!line.match(headerPattern) && !line.includes('<') && !line.match(/^.+,\s*.+:\s*.+$/) && if (
!line.match(/^:[^:]+:\s*.+$/) && line.trim() !== '') { !line.match(headerPattern) && !line.includes("<") &&
!line.match(/^.+,\s*.+:\s*.+$/) &&
!line.match(/^:[^:]+:\s*.+$/) && line.trim() !== ""
) {
contentStart = i; contentStart = i;
break; break;
} }
@ -185,20 +201,26 @@ function stripHeaderAndAttributes(content: string, isSection: boolean = false):
// Filter out all attribute lines and author lines from the content // Filter out all attribute lines and author lines from the content
const contentLines = lines.slice(contentStart); const contentLines = lines.slice(contentStart);
const filteredLines = contentLines.filter(line => { const filteredLines = contentLines.filter((line) => {
// Skip attribute lines // Skip attribute lines
if (line.match(/^:[^:]+:\s*.+$/)) { if (line.match(/^:[^:]+:\s*.+$/)) {
return false; return false;
} }
// Skip author lines (simple names without email) // Skip author lines (simple names without email)
if (isSection && line.match(/^[A-Za-z\s]+$/) && line.trim() !== '' && line.trim().split(/\s+/).length <= 2) { if (
isSection && line.match(/^[A-Za-z\s]+$/) && line.trim() !== "" &&
line.trim().split(/\s+/).length <= 2
) {
return false; return false;
} }
return true; return true;
}); });
// Remove extra blank lines and normalize newlines // Remove extra blank lines and normalize newlines
return filteredLines.join('\n').replace(/\n\s*\n\s*\n/g, '\n\n').replace(/\n\s*\n/g, '\n').trim(); return filteredLines.join("\n").replace(/\n\s*\n\s*\n/g, "\n\n").replace(
/\n\s*\n/g,
"\n",
).trim();
} }
/** /**
@ -207,7 +229,7 @@ function stripHeaderAndAttributes(content: string, isSection: boolean = false):
function parseSectionAttributes(sectionContent: string): Record<string, any> { function parseSectionAttributes(sectionContent: string): Record<string, any> {
const attributes: Record<string, any> = {}; const attributes: Record<string, any> = {};
const lines = sectionContent.split(/\r?\n/); const lines = sectionContent.split(/\r?\n/);
for (const line of lines) { for (const line of lines) {
const match = line.match(/^:([^:]+):\s*(.+)$/); const match = line.match(/^:([^:]+):\s*(.+)$/);
if (match) { if (match) {
@ -215,14 +237,10 @@ function parseSectionAttributes(sectionContent: string): Record<string, any> {
attributes[key.trim()] = value.trim(); attributes[key.trim()] = value.trim();
} }
} }
return attributes; return attributes;
} }
/** /**
* Extracts metadata from AsciiDoc document using Asciidoctor * Extracts metadata from AsciiDoc document using Asciidoctor
*/ */
@ -231,7 +249,9 @@ export function extractDocumentMetadata(inputContent: string): {
content: string; content: string;
} { } {
const asciidoctor = createProcessor(); const asciidoctor = createProcessor();
const document = asciidoctor.load(inputContent, { standalone: false }) as Document; const document = asciidoctor.load(inputContent, {
standalone: false,
}) as Document;
const metadata: AsciiDocMetadata = {}; const metadata: AsciiDocMetadata = {};
const attributes = document.getAttributes(); const attributes = document.getAttributes();
@ -242,13 +262,16 @@ export function extractDocumentMetadata(inputContent: string): {
// Handle multiple authors - combine header line and attributes // Handle multiple authors - combine header line and attributes
const authors = extractAuthorsFromHeader(document.getSource()); const authors = extractAuthorsFromHeader(document.getSource());
// Get authors from attributes (but avoid duplicates) // Get authors from attributes (but avoid duplicates)
const attrAuthor = attributes['author']; const attrAuthor = attributes["author"];
if (attrAuthor && typeof attrAuthor === 'string' && !authors.includes(attrAuthor)) { if (
attrAuthor && typeof attrAuthor === "string" &&
!authors.includes(attrAuthor)
) {
authors.push(attrAuthor); authors.push(attrAuthor);
} }
if (authors.length > 0) { if (authors.length > 0) {
metadata.authors = [...new Set(authors)]; // Remove duplicates metadata.authors = [...new Set(authors)]; // Remove duplicates
} }
@ -265,12 +288,12 @@ export function extractDocumentMetadata(inputContent: string): {
// Map attributes to metadata (but skip version and publishedBy if we already have them from revision) // Map attributes to metadata (but skip version and publishedBy if we already have them from revision)
mapAttributesToMetadata(attributes, metadata, true); mapAttributesToMetadata(attributes, metadata, true);
// If we got version from revision, don't override it with attribute // If we got version from revision, don't override it with attribute
if (revisionNumber) { if (revisionNumber) {
metadata.version = revisionNumber; metadata.version = revisionNumber;
} }
// If we got publishedBy from revision, don't override it with attribute // If we got publishedBy from revision, don't override it with attribute
if (revisionRemark) { if (revisionRemark) {
metadata.publishedBy = revisionRemark; metadata.publishedBy = revisionRemark;
@ -295,17 +318,19 @@ export function extractSectionMetadata(inputSectionContent: string): {
title: string; title: string;
} { } {
const asciidoctor = createProcessor(); const asciidoctor = createProcessor();
const document = asciidoctor.load(`= Temp\n\n${inputSectionContent}`, { standalone: false }) as Document; const document = asciidoctor.load(`= Temp\n\n${inputSectionContent}`, {
standalone: false,
}) as Document;
const sections = document.getSections(); const sections = document.getSections();
if (sections.length === 0) { if (sections.length === 0) {
return { metadata: {}, content: inputSectionContent, title: '' }; return { metadata: {}, content: inputSectionContent, title: "" };
} }
const section = sections[0]; const section = sections[0];
const title = section.getTitle() || ''; const title = section.getTitle() || "";
const metadata: SectionMetadata = { title }; const metadata: SectionMetadata = { title };
// Parse attributes from the section content // Parse attributes from the section content
const attributes = parseSectionAttributes(inputSectionContent); const attributes = parseSectionAttributes(inputSectionContent);
@ -335,7 +360,7 @@ export function parseAsciiDocWithMetadata(content: string): ParsedAsciiDoc {
const asciidoctor = createProcessor(); const asciidoctor = createProcessor();
const document = asciidoctor.load(content, { standalone: false }) as Document; const document = asciidoctor.load(content, { standalone: false }) as Document;
const { metadata: docMetadata } = extractDocumentMetadata(content); const { metadata: docMetadata } = extractDocumentMetadata(content);
// Parse the original content to find section attributes // Parse the original content to find section attributes
const lines = content.split(/\r?\n/); const lines = content.split(/\r?\n/);
const sectionsWithMetadata: Array<{ const sectionsWithMetadata: Array<{
@ -345,15 +370,15 @@ export function parseAsciiDocWithMetadata(content: string): ParsedAsciiDoc {
}> = []; }> = [];
let currentSection: string | null = null; let currentSection: string | null = null;
let currentSectionContent: string[] = []; let currentSectionContent: string[] = [];
for (const line of lines) { for (const line of lines) {
if (line.match(/^==\s+/)) { if (line.match(/^==\s+/)) {
// Save previous section if exists // Save previous section if exists
if (currentSection) { if (currentSection) {
const sectionContent = currentSectionContent.join('\n'); const sectionContent = currentSectionContent.join("\n");
sectionsWithMetadata.push(extractSectionMetadata(sectionContent)); sectionsWithMetadata.push(extractSectionMetadata(sectionContent));
} }
// Start new section // Start new section
currentSection = line; currentSection = line;
currentSectionContent = [line]; currentSectionContent = [line];
@ -361,42 +386,46 @@ export function parseAsciiDocWithMetadata(content: string): ParsedAsciiDoc {
currentSectionContent.push(line); currentSectionContent.push(line);
} }
} }
// Save the last section // Save the last section
if (currentSection) { if (currentSection) {
const sectionContent = currentSectionContent.join('\n'); const sectionContent = currentSectionContent.join("\n");
sectionsWithMetadata.push(extractSectionMetadata(sectionContent)); sectionsWithMetadata.push(extractSectionMetadata(sectionContent));
} }
return { return {
metadata: docMetadata, metadata: docMetadata,
content: document.getSource(), content: document.getSource(),
sections: sectionsWithMetadata sections: sectionsWithMetadata,
}; };
} }
/** /**
* Converts metadata to Nostr event tags * Converts metadata to Nostr event tags
*/ */
export function metadataToTags(metadata: AsciiDocMetadata | SectionMetadata): [string, string][] { export function metadataToTags(
metadata: AsciiDocMetadata | SectionMetadata,
): [string, string][] {
const tags: [string, string][] = []; const tags: [string, string][] = [];
if (metadata.title) tags.push(['title', metadata.title]); if (metadata.title) tags.push(["title", metadata.title]);
if (metadata.authors?.length) { if (metadata.authors?.length) {
metadata.authors.forEach(author => tags.push(['author', author])); metadata.authors.forEach((author) => tags.push(["author", author]));
}
if (metadata.version) tags.push(["version", metadata.version]);
if (metadata.edition) tags.push(["edition", metadata.edition]);
if (metadata.publicationDate) {
tags.push(["published_on", metadata.publicationDate]);
} }
if (metadata.version) tags.push(['version', metadata.version]); if (metadata.publishedBy) tags.push(["published_by", metadata.publishedBy]);
if (metadata.edition) tags.push(['edition', metadata.edition]); if (metadata.summary) tags.push(["summary", metadata.summary]);
if (metadata.publicationDate) tags.push(['published_on', metadata.publicationDate]); if (metadata.coverImage) tags.push(["image", metadata.coverImage]);
if (metadata.publishedBy) tags.push(['published_by', metadata.publishedBy]); if (metadata.isbn) tags.push(["i", metadata.isbn]);
if (metadata.summary) tags.push(['summary', metadata.summary]); if (metadata.source) tags.push(["source", metadata.source]);
if (metadata.coverImage) tags.push(['image', metadata.coverImage]); if (metadata.type) tags.push(["type", metadata.type]);
if (metadata.isbn) tags.push(['i', metadata.isbn]); if (metadata.autoUpdate) tags.push(["auto-update", metadata.autoUpdate]);
if (metadata.source) tags.push(['source', metadata.source]);
if (metadata.type) tags.push(['type', metadata.type]);
if (metadata.autoUpdate) tags.push(['auto-update', metadata.autoUpdate]);
if (metadata.tags?.length) { if (metadata.tags?.length) {
metadata.tags.forEach(tag => tags.push(['t', tag])); metadata.tags.forEach((tag) => tags.push(["t", tag]));
} }
return tags; return tags;
@ -408,7 +437,7 @@ export function metadataToTags(metadata: AsciiDocMetadata | SectionMetadata): [s
export function removeMetadataFromContent(content: string): string { export function removeMetadataFromContent(content: string): string {
const { content: cleanedContent } = extractDocumentMetadata(content); const { content: cleanedContent } = extractDocumentMetadata(content);
return cleanedContent; return cleanedContent;
} }
/** /**
* Extracts metadata from content that only contains sections (no document header) * Extracts metadata from content that only contains sections (no document header)
@ -424,19 +453,19 @@ export function extractMetadataFromSectionsOnly(content: string): {
content: string; content: string;
title: string; title: string;
}> = []; }> = [];
let currentSection: string | null = null; let currentSection: string | null = null;
let currentSectionContent: string[] = []; let currentSectionContent: string[] = [];
// Parse sections from the content // Parse sections from the content
for (const line of lines) { for (const line of lines) {
if (line.match(/^==\s+/)) { if (line.match(/^==\s+/)) {
// Save previous section if exists // Save previous section if exists
if (currentSection) { if (currentSection) {
const sectionContent = currentSectionContent.join('\n'); const sectionContent = currentSectionContent.join("\n");
sections.push(extractSectionMetadata(sectionContent)); sections.push(extractSectionMetadata(sectionContent));
} }
// Start new section // Start new section
currentSection = line; currentSection = line;
currentSectionContent = [line]; currentSectionContent = [line];
@ -444,20 +473,20 @@ export function extractMetadataFromSectionsOnly(content: string): {
currentSectionContent.push(line); currentSectionContent.push(line);
} }
} }
// Save the last section // Save the last section
if (currentSection) { if (currentSection) {
const sectionContent = currentSectionContent.join('\n'); const sectionContent = currentSectionContent.join("\n");
sections.push(extractSectionMetadata(sectionContent)); sections.push(extractSectionMetadata(sectionContent));
} }
// For section-only content, we don't have document metadata // For section-only content, we don't have document metadata
// Return the first section's title as the document title if available // Return the first section's title as the document title if available
const metadata: AsciiDocMetadata = {}; const metadata: AsciiDocMetadata = {};
if (sections.length > 0 && sections[0].title) { if (sections.length > 0 && sections[0].title) {
metadata.title = sections[0].title; metadata.title = sections[0].title;
} }
return { metadata, content }; return { metadata, content };
} }
@ -470,31 +499,31 @@ export function extractSmartMetadata(content: string): {
} { } {
// Check if content has a document header // Check if content has a document header
const hasDocumentHeader = content.match(/^=\s+/m); const hasDocumentHeader = content.match(/^=\s+/m);
if (hasDocumentHeader) { if (hasDocumentHeader) {
// Check if it's a minimal document header (just title, no other metadata) // Check if it's a minimal document header (just title, no other metadata)
const lines = content.split(/\r?\n/); const lines = content.split(/\r?\n/);
const titleLine = lines.find(line => line.match(/^=\s+/)); const titleLine = lines.find((line) => line.match(/^=\s+/));
const hasOtherMetadata = lines.some(line => const hasOtherMetadata = lines.some((line) =>
line.includes('<') || // author line line.includes("<") || // author line
line.match(/^.+,\s*.+:\s*.+$/) // revision line line.match(/^.+,\s*.+:\s*.+$/) // revision line
); );
if (hasOtherMetadata) { if (hasOtherMetadata) {
// Full document with metadata - use standard extraction // Full document with metadata - use standard extraction
return extractDocumentMetadata(content); return extractDocumentMetadata(content);
} else { } else {
// Minimal document header (just title) - preserve the title line for 30040 events // Minimal document header (just title) - preserve the title line for 30040 events
const title = titleLine?.replace(/^=\s+/, '').trim(); const title = titleLine?.replace(/^=\s+/, "").trim();
const metadata: AsciiDocMetadata = {}; const metadata: AsciiDocMetadata = {};
if (title) { if (title) {
metadata.title = title; metadata.title = title;
} }
// Keep the title line in content for 30040 events // Keep the title line in content for 30040 events
return { metadata, content }; return { metadata, content };
} }
} else { } else {
return extractMetadataFromSectionsOnly(content); return extractMetadataFromSectionsOnly(content);
} }
} }

4
src/lib/utils/community_checker.ts

@ -43,7 +43,7 @@ export async function checkCommunity(pubkey: string): Promise<boolean> {
} }
}; };
}); });
if (result) { if (result) {
return true; return true;
} }
@ -52,7 +52,7 @@ export async function checkCommunity(pubkey: string): Promise<boolean> {
continue; continue;
} }
} }
// If we get here, no relay found the user // If we get here, no relay found the user
communityCache.set(pubkey, false); communityCache.set(pubkey, false);
return false; return false;

59
src/lib/utils/displayLimits.ts

@ -1,7 +1,7 @@
import type { NDKEvent } from '@nostr-dev-kit/ndk'; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { VisualizationConfig } from '$lib/stores/visualizationConfig'; import type { VisualizationConfig } from "$lib/stores/visualizationConfig";
import { isEventId, isCoordinate, parseCoordinate } from './nostr_identifiers'; import { isCoordinate, isEventId, parseCoordinate } from "./nostr_identifiers";
import type { NostrEventId } from './nostr_identifiers'; import type { NostrEventId } from "./nostr_identifiers";
/** /**
* Filters events based on visualization configuration * Filters events based on visualization configuration
@ -9,7 +9,10 @@ import type { NostrEventId } from './nostr_identifiers';
* @param config - Visualization configuration * @param config - Visualization configuration
* @returns Filtered events that should be displayed * @returns Filtered events that should be displayed
*/ */
export function filterByDisplayLimits(events: NDKEvent[], config: VisualizationConfig): NDKEvent[] { export function filterByDisplayLimits(
events: NDKEvent[],
config: VisualizationConfig,
): NDKEvent[] {
const result: NDKEvent[] = []; const result: NDKEvent[] = [];
const kindCounts = new Map<number, number>(); const kindCounts = new Map<number, number>();
@ -18,13 +21,13 @@ export function filterByDisplayLimits(events: NDKEvent[], config: VisualizationC
if (kind === undefined) continue; if (kind === undefined) continue;
// Get the config for this event kind // Get the config for this event kind
const eventConfig = config.eventConfigs.find(ec => ec.kind === kind); const eventConfig = config.eventConfigs.find((ec) => ec.kind === kind);
// Skip if the kind is disabled // Skip if the kind is disabled
if (eventConfig && eventConfig.enabled === false) { if (eventConfig && eventConfig.enabled === false) {
continue; continue;
} }
const limit = eventConfig?.limit; const limit = eventConfig?.limit;
// Special handling for content kinds (30041, 30818) with showAll option // Special handling for content kinds (30041, 30818) with showAll option
@ -58,48 +61,48 @@ export function filterByDisplayLimits(events: NDKEvent[], config: VisualizationC
* @returns Set of missing event identifiers * @returns Set of missing event identifiers
*/ */
export function detectMissingEvents( export function detectMissingEvents(
events: NDKEvent[], events: NDKEvent[],
existingIds: Set<NostrEventId>, existingIds: Set<NostrEventId>,
existingCoordinates?: Map<string, NDKEvent> existingCoordinates?: Map<string, NDKEvent>,
): Set<string> { ): Set<string> {
const missing = new Set<string>(); const missing = new Set<string>();
for (const event of events) { for (const event of events) {
// Check 'e' tags for direct event references (hex IDs) // Check 'e' tags for direct event references (hex IDs)
const eTags = event.getMatchingTags('e'); const eTags = event.getMatchingTags("e");
for (const eTag of eTags) { for (const eTag of eTags) {
if (eTag.length < 2) continue; if (eTag.length < 2) continue;
const eventId = eTag[1]; const eventId = eTag[1];
// Type check: ensure it's a valid hex event ID // Type check: ensure it's a valid hex event ID
if (!isEventId(eventId)) { if (!isEventId(eventId)) {
console.warn('Invalid event ID in e tag:', eventId); console.warn("Invalid event ID in e tag:", eventId);
continue; continue;
} }
if (!existingIds.has(eventId)) { if (!existingIds.has(eventId)) {
missing.add(eventId); missing.add(eventId);
} }
} }
// Check 'a' tags for NIP-33 references (kind:pubkey:d-tag) // Check 'a' tags for NIP-33 references (kind:pubkey:d-tag)
const aTags = event.getMatchingTags('a'); const aTags = event.getMatchingTags("a");
for (const aTag of aTags) { for (const aTag of aTags) {
if (aTag.length < 2) continue; if (aTag.length < 2) continue;
const identifier = aTag[1]; const identifier = aTag[1];
// Type check: ensure it's a valid coordinate // Type check: ensure it's a valid coordinate
if (!isCoordinate(identifier)) { if (!isCoordinate(identifier)) {
console.warn('Invalid coordinate in a tag:', identifier); console.warn("Invalid coordinate in a tag:", identifier);
continue; continue;
} }
// Parse the coordinate // Parse the coordinate
const parsed = parseCoordinate(identifier); const parsed = parseCoordinate(identifier);
if (!parsed) continue; if (!parsed) continue;
// If we have existing coordinates, check if this one exists // If we have existing coordinates, check if this one exists
if (existingCoordinates) { if (existingCoordinates) {
if (!existingCoordinates.has(identifier)) { if (!existingCoordinates.has(identifier)) {
@ -108,7 +111,10 @@ export function detectMissingEvents(
} else { } else {
// Without coordinate map, we can't detect missing NIP-33 events // Without coordinate map, we can't detect missing NIP-33 events
// This is a limitation when we only have hex IDs // This is a limitation when we only have hex IDs
console.debug('Cannot detect missing NIP-33 events without coordinate map:', identifier); console.debug(
"Cannot detect missing NIP-33 events without coordinate map:",
identifier,
);
} }
} }
} }
@ -123,20 +129,19 @@ export function detectMissingEvents(
*/ */
export function buildCoordinateMap(events: NDKEvent[]): Map<string, NDKEvent> { export function buildCoordinateMap(events: NDKEvent[]): Map<string, NDKEvent> {
const coordinateMap = new Map<string, NDKEvent>(); const coordinateMap = new Map<string, NDKEvent>();
for (const event of events) { for (const event of events) {
// Only process replaceable events (kinds 30000-39999) // Only process replaceable events (kinds 30000-39999)
if (event.kind && event.kind >= 30000 && event.kind < 40000) { if (event.kind && event.kind >= 30000 && event.kind < 40000) {
const dTag = event.tagValue('d'); const dTag = event.tagValue("d");
const author = event.pubkey; const author = event.pubkey;
if (dTag && author) { if (dTag && author) {
const coordinate = `${event.kind}:${author}:${dTag}`; const coordinate = `${event.kind}:${author}:${dTag}`;
coordinateMap.set(coordinate, event); coordinateMap.set(coordinate, event);
} }
} }
} }
return coordinateMap; return coordinateMap;
} }

101
src/lib/utils/eventColors.ts

@ -13,11 +13,11 @@ const GOLDEN_RATIO = (1 + Math.sqrt(5)) / 2;
export function getEventKindColor(kind: number): string { export function getEventKindColor(kind: number): string {
// Use golden ratio for better distribution // Use golden ratio for better distribution
const hue = (kind * GOLDEN_RATIO * 360) % 360; const hue = (kind * GOLDEN_RATIO * 360) % 360;
// Use different saturation/lightness for better visibility // Use different saturation/lightness for better visibility
const saturation = 65 + (kind % 20); // 65-85% const saturation = 65 + (kind % 20); // 65-85%
const lightness = 55 + ((kind * 3) % 15); // 55-70% const lightness = 55 + ((kind * 3) % 15); // 55-70%
return `hsl(${Math.round(hue)}, ${saturation}%, ${lightness}%)`; return `hsl(${Math.round(hue)}, ${saturation}%, ${lightness}%)`;
} }
@ -28,55 +28,54 @@ export function getEventKindColor(kind: number): string {
*/ */
export function getEventKindName(kind: number): string { export function getEventKindName(kind: number): string {
const kindNames: Record<number, string> = { const kindNames: Record<number, string> = {
0: 'Metadata', 0: "Metadata",
1: 'Text Note', 1: "Text Note",
2: 'Recommend Relay', 2: "Recommend Relay",
3: 'Contact List', 3: "Contact List",
4: 'Encrypted DM', 4: "Encrypted DM",
5: 'Event Deletion', 5: "Event Deletion",
6: 'Repost', 6: "Repost",
7: 'Reaction', 7: "Reaction",
8: 'Badge Award', 8: "Badge Award",
16: 'Generic Repost', 16: "Generic Repost",
40: 'Channel Creation', 40: "Channel Creation",
41: 'Channel Metadata', 41: "Channel Metadata",
42: 'Channel Message', 42: "Channel Message",
43: 'Channel Hide Message', 43: "Channel Hide Message",
44: 'Channel Mute User', 44: "Channel Mute User",
1984: 'Reporting', 1984: "Reporting",
9734: 'Zap Request', 9734: "Zap Request",
9735: 'Zap', 9735: "Zap",
10000: 'Mute List', 10000: "Mute List",
10001: 'Pin List', 10001: "Pin List",
10002: 'Relay List', 10002: "Relay List",
22242: 'Client Authentication', 22242: "Client Authentication",
24133: 'Nostr Connect', 24133: "Nostr Connect",
27235: 'HTTP Auth', 27235: "HTTP Auth",
30000: 'Categorized People List', 30000: "Categorized People List",
30001: 'Categorized Bookmark List', 30001: "Categorized Bookmark List",
30008: 'Profile Badges', 30008: "Profile Badges",
30009: 'Badge Definition', 30009: "Badge Definition",
30017: 'Create or update a stall', 30017: "Create or update a stall",
30018: 'Create or update a product', 30018: "Create or update a product",
30023: 'Long-form Content', 30023: "Long-form Content",
30024: 'Draft Long-form Content', 30024: "Draft Long-form Content",
30040: 'Publication Index', 30040: "Publication Index",
30041: 'Publication Content', 30041: "Publication Content",
30078: 'Application-specific Data', 30078: "Application-specific Data",
30311: 'Live Event', 30311: "Live Event",
30402: 'Classified Listing', 30402: "Classified Listing",
30403: 'Draft Classified Listing', 30403: "Draft Classified Listing",
30617: 'Repository', 30617: "Repository",
30818: 'Wiki Page', 30818: "Wiki Page",
31922: 'Date-Based Calendar Event', 31922: "Date-Based Calendar Event",
31923: 'Time-Based Calendar Event', 31923: "Time-Based Calendar Event",
31924: 'Calendar', 31924: "Calendar",
31925: 'Calendar Event RSVP', 31925: "Calendar Event RSVP",
31989: 'Handler recommendation', 31989: "Handler recommendation",
31990: 'Handler information', 31990: "Handler information",
34550: 'Community Definition', 34550: "Community Definition",
}; };
return kindNames[kind] || `Kind ${kind}`; return kindNames[kind] || `Kind ${kind}`;
} }

152
src/lib/utils/eventDeduplication.ts

@ -1,69 +1,88 @@
import type { NDKEvent } from '@nostr-dev-kit/ndk'; import type { NDKEvent } from "@nostr-dev-kit/ndk";
/** /**
* Deduplicate content events by keeping only the most recent version * Deduplicate content events by keeping only the most recent version
* @param contentEventSets Array of event sets from different sources * @param contentEventSets Array of event sets from different sources
* @returns Map of coordinate to most recent event * @returns Map of coordinate to most recent event
*/ */
export function deduplicateContentEvents(contentEventSets: Set<NDKEvent>[]): Map<string, NDKEvent> { export function deduplicateContentEvents(
contentEventSets: Set<NDKEvent>[],
): Map<string, NDKEvent> {
const eventsByCoordinate = new Map<string, NDKEvent>(); const eventsByCoordinate = new Map<string, NDKEvent>();
// Track statistics for debugging // Track statistics for debugging
let totalEvents = 0; let totalEvents = 0;
let duplicateCoordinates = 0; let duplicateCoordinates = 0;
const duplicateDetails: Array<{ coordinate: string; count: number; events: string[] }> = []; const duplicateDetails: Array<
{ coordinate: string; count: number; events: string[] }
> = [];
contentEventSets.forEach((eventSet) => { contentEventSets.forEach((eventSet) => {
eventSet.forEach(event => { eventSet.forEach((event) => {
totalEvents++; totalEvents++;
const dTag = event.tagValue("d"); const dTag = event.tagValue("d");
const author = event.pubkey; const author = event.pubkey;
const kind = event.kind; const kind = event.kind;
if (dTag && author && kind) { if (dTag && author && kind) {
const coordinate = `${kind}:${author}:${dTag}`; const coordinate = `${kind}:${author}:${dTag}`;
const existing = eventsByCoordinate.get(coordinate); const existing = eventsByCoordinate.get(coordinate);
if (existing) { if (existing) {
// We found a duplicate coordinate // We found a duplicate coordinate
duplicateCoordinates++; duplicateCoordinates++;
// Track details for the first few duplicates // Track details for the first few duplicates
if (duplicateDetails.length < 5) { if (duplicateDetails.length < 5) {
const existingDetails = duplicateDetails.find(d => d.coordinate === coordinate); const existingDetails = duplicateDetails.find((d) =>
d.coordinate === coordinate
);
if (existingDetails) { if (existingDetails) {
existingDetails.count++; existingDetails.count++;
existingDetails.events.push(`${event.id} (created_at: ${event.created_at})`); existingDetails.events.push(
`${event.id} (created_at: ${event.created_at})`,
);
} else { } else {
duplicateDetails.push({ duplicateDetails.push({
coordinate, coordinate,
count: 2, // existing + current count: 2, // existing + current
events: [ events: [
`${existing.id} (created_at: ${existing.created_at})`, `${existing.id} (created_at: ${existing.created_at})`,
`${event.id} (created_at: ${event.created_at})` `${event.id} (created_at: ${event.created_at})`,
] ],
}); });
} }
} }
} }
// Keep the most recent event (highest created_at) // Keep the most recent event (highest created_at)
if (!existing || (event.created_at !== undefined && existing.created_at !== undefined && event.created_at > existing.created_at)) { if (
!existing ||
(event.created_at !== undefined &&
existing.created_at !== undefined &&
event.created_at > existing.created_at)
) {
eventsByCoordinate.set(coordinate, event); eventsByCoordinate.set(coordinate, event);
} }
} }
}); });
}); });
// Log deduplication results if any duplicates were found // Log deduplication results if any duplicates were found
if (duplicateCoordinates > 0) { if (duplicateCoordinates > 0) {
console.log(`[eventDeduplication] Found ${duplicateCoordinates} duplicate events out of ${totalEvents} total events`); console.log(
console.log(`[eventDeduplication] Reduced to ${eventsByCoordinate.size} unique coordinates`); `[eventDeduplication] Found ${duplicateCoordinates} duplicate events out of ${totalEvents} total events`,
);
console.log(
`[eventDeduplication] Reduced to ${eventsByCoordinate.size} unique coordinates`,
);
console.log(`[eventDeduplication] Duplicate details:`, duplicateDetails); console.log(`[eventDeduplication] Duplicate details:`, duplicateDetails);
} else if (totalEvents > 0) { } else if (totalEvents > 0) {
console.log(`[eventDeduplication] No duplicates found in ${totalEvents} events`); console.log(
`[eventDeduplication] No duplicates found in ${totalEvents} events`,
);
} }
return eventsByCoordinate; return eventsByCoordinate;
} }
@ -77,83 +96,95 @@ export function deduplicateContentEvents(contentEventSets: Set<NDKEvent>[]): Map
export function deduplicateAndCombineEvents( export function deduplicateAndCombineEvents(
nonPublicationEvents: NDKEvent[], nonPublicationEvents: NDKEvent[],
validIndexEvents: Set<NDKEvent>, validIndexEvents: Set<NDKEvent>,
contentEvents: Set<NDKEvent> contentEvents: Set<NDKEvent>,
): NDKEvent[] { ): NDKEvent[] {
// Track statistics for debugging // Track statistics for debugging
const initialCount = nonPublicationEvents.length + validIndexEvents.size + contentEvents.size; const initialCount = nonPublicationEvents.length + validIndexEvents.size +
contentEvents.size;
let replaceableEventsProcessed = 0; let replaceableEventsProcessed = 0;
let duplicateCoordinatesFound = 0; let duplicateCoordinatesFound = 0;
const duplicateDetails: Array<{ coordinate: string; count: number; events: string[] }> = []; const duplicateDetails: Array<
{ coordinate: string; count: number; events: string[] }
> = [];
// First, build coordinate map for replaceable events // First, build coordinate map for replaceable events
const coordinateMap = new Map<string, NDKEvent>(); const coordinateMap = new Map<string, NDKEvent>();
const allEventsToProcess = [ const allEventsToProcess = [
...nonPublicationEvents, // Non-publication events fetched earlier ...nonPublicationEvents, // Non-publication events fetched earlier
...Array.from(validIndexEvents), ...Array.from(validIndexEvents),
...Array.from(contentEvents) ...Array.from(contentEvents),
]; ];
// First pass: identify the most recent version of each replaceable event // First pass: identify the most recent version of each replaceable event
allEventsToProcess.forEach(event => { allEventsToProcess.forEach((event) => {
if (!event.id) return; if (!event.id) return;
// For replaceable events (30000-39999), track by coordinate // For replaceable events (30000-39999), track by coordinate
if (event.kind && event.kind >= 30000 && event.kind < 40000) { if (event.kind && event.kind >= 30000 && event.kind < 40000) {
replaceableEventsProcessed++; replaceableEventsProcessed++;
const dTag = event.tagValue("d"); const dTag = event.tagValue("d");
const author = event.pubkey; const author = event.pubkey;
if (dTag && author) { if (dTag && author) {
const coordinate = `${event.kind}:${author}:${dTag}`; const coordinate = `${event.kind}:${author}:${dTag}`;
const existing = coordinateMap.get(coordinate); const existing = coordinateMap.get(coordinate);
if (existing) { if (existing) {
// We found a duplicate coordinate // We found a duplicate coordinate
duplicateCoordinatesFound++; duplicateCoordinatesFound++;
// Track details for the first few duplicates // Track details for the first few duplicates
if (duplicateDetails.length < 5) { if (duplicateDetails.length < 5) {
const existingDetails = duplicateDetails.find(d => d.coordinate === coordinate); const existingDetails = duplicateDetails.find((d) =>
d.coordinate === coordinate
);
if (existingDetails) { if (existingDetails) {
existingDetails.count++; existingDetails.count++;
existingDetails.events.push(`${event.id} (created_at: ${event.created_at})`); existingDetails.events.push(
`${event.id} (created_at: ${event.created_at})`,
);
} else { } else {
duplicateDetails.push({ duplicateDetails.push({
coordinate, coordinate,
count: 2, // existing + current count: 2, // existing + current
events: [ events: [
`${existing.id} (created_at: ${existing.created_at})`, `${existing.id} (created_at: ${existing.created_at})`,
`${event.id} (created_at: ${event.created_at})` `${event.id} (created_at: ${event.created_at})`,
] ],
}); });
} }
} }
} }
// Keep the most recent version // Keep the most recent version
if (!existing || (event.created_at !== undefined && existing.created_at !== undefined && event.created_at > existing.created_at)) { if (
!existing ||
(event.created_at !== undefined &&
existing.created_at !== undefined &&
event.created_at > existing.created_at)
) {
coordinateMap.set(coordinate, event); coordinateMap.set(coordinate, event);
} }
} }
} }
}); });
// Second pass: build final event map // Second pass: build final event map
const finalEventMap = new Map<string, NDKEvent>(); const finalEventMap = new Map<string, NDKEvent>();
const seenCoordinates = new Set<string>(); const seenCoordinates = new Set<string>();
allEventsToProcess.forEach(event => { allEventsToProcess.forEach((event) => {
if (!event.id) return; if (!event.id) return;
// For replaceable events, only add if it's the chosen version // For replaceable events, only add if it's the chosen version
if (event.kind && event.kind >= 30000 && event.kind < 40000) { if (event.kind && event.kind >= 30000 && event.kind < 40000) {
const dTag = event.tagValue("d"); const dTag = event.tagValue("d");
const author = event.pubkey; const author = event.pubkey;
if (dTag && author) { if (dTag && author) {
const coordinate = `${event.kind}:${author}:${dTag}`; const coordinate = `${event.kind}:${author}:${dTag}`;
const chosenEvent = coordinateMap.get(coordinate); const chosenEvent = coordinateMap.get(coordinate);
// Only add this event if it's the chosen one for this coordinate // Only add this event if it's the chosen one for this coordinate
if (chosenEvent && chosenEvent.id === event.id) { if (chosenEvent && chosenEvent.id === event.id) {
if (!seenCoordinates.has(coordinate)) { if (!seenCoordinates.has(coordinate)) {
@ -164,23 +195,32 @@ export function deduplicateAndCombineEvents(
return; return;
} }
} }
// Non-replaceable events are added directly // Non-replaceable events are added directly
finalEventMap.set(event.id, event); finalEventMap.set(event.id, event);
}); });
const finalCount = finalEventMap.size; const finalCount = finalEventMap.size;
const reduction = initialCount - finalCount; const reduction = initialCount - finalCount;
// Log deduplication results if any duplicates were found // Log deduplication results if any duplicates were found
if (duplicateCoordinatesFound > 0) { if (duplicateCoordinatesFound > 0) {
console.log(`[eventDeduplication] deduplicateAndCombineEvents: Found ${duplicateCoordinatesFound} duplicate coordinates out of ${replaceableEventsProcessed} replaceable events`); console.log(
console.log(`[eventDeduplication] deduplicateAndCombineEvents: Reduced from ${initialCount} to ${finalCount} events (${reduction} removed)`); `[eventDeduplication] deduplicateAndCombineEvents: Found ${duplicateCoordinatesFound} duplicate coordinates out of ${replaceableEventsProcessed} replaceable events`,
console.log(`[eventDeduplication] deduplicateAndCombineEvents: Duplicate details:`, duplicateDetails); );
console.log(
`[eventDeduplication] deduplicateAndCombineEvents: Reduced from ${initialCount} to ${finalCount} events (${reduction} removed)`,
);
console.log(
`[eventDeduplication] deduplicateAndCombineEvents: Duplicate details:`,
duplicateDetails,
);
} else if (replaceableEventsProcessed > 0) { } else if (replaceableEventsProcessed > 0) {
console.log(`[eventDeduplication] deduplicateAndCombineEvents: No duplicates found in ${replaceableEventsProcessed} replaceable events`); console.log(
`[eventDeduplication] deduplicateAndCombineEvents: No duplicates found in ${replaceableEventsProcessed} replaceable events`,
);
} }
return Array.from(finalEventMap.values()); return Array.from(finalEventMap.values());
} }
@ -202,13 +242,13 @@ export function getEventCoordinate(event: NDKEvent): string | null {
if (!isReplaceableEvent(event)) { if (!isReplaceableEvent(event)) {
return null; return null;
} }
const dTag = event.tagValue("d"); const dTag = event.tagValue("d");
const author = event.pubkey; const author = event.pubkey;
if (!dTag || !author) { if (!dTag || !author) {
return null; return null;
} }
return `${event.kind}:${author}:${dTag}`; return `${event.kind}:${author}:${dTag}`;
} }

72
src/lib/utils/event_input_utils.ts

@ -3,12 +3,12 @@ import { get } from "svelte/store";
import { ndkInstance } from "../ndk.ts"; import { ndkInstance } from "../ndk.ts";
import { NDKEvent as NDKEventClass } from "@nostr-dev-kit/ndk"; import { NDKEvent as NDKEventClass } from "@nostr-dev-kit/ndk";
import { EVENT_KINDS } from "./search_constants"; import { EVENT_KINDS } from "./search_constants";
import { import {
extractDocumentMetadata, extractDocumentMetadata,
extractSectionMetadata, extractSectionMetadata,
parseAsciiDocWithMetadata,
metadataToTags, metadataToTags,
removeMetadataFromContent parseAsciiDocWithMetadata,
removeMetadataFromContent,
} from "./asciidoc_metadata"; } from "./asciidoc_metadata";
// ========================= // =========================
@ -92,12 +92,14 @@ export function validate30040EventSet(content: string): {
const lines = content.split(/\r?\n/); const lines = content.split(/\r?\n/);
const { metadata } = extractDocumentMetadata(content); const { metadata } = extractDocumentMetadata(content);
const documentTitle = metadata.title; const documentTitle = metadata.title;
const nonEmptyLines = lines.filter(line => line.trim() !== "").map(line => line.trim()); const nonEmptyLines = lines.filter((line) => line.trim() !== "").map((line) =>
const isIndexCardFormat = documentTitle && line.trim()
nonEmptyLines.length === 2 && );
nonEmptyLines[0].startsWith("=") && const isIndexCardFormat = documentTitle &&
nonEmptyLines.length === 2 &&
nonEmptyLines[0].startsWith("=") &&
nonEmptyLines[1].toLowerCase() === "index card"; nonEmptyLines[1].toLowerCase() === "index card";
if (isIndexCardFormat) { if (isIndexCardFormat) {
return { valid: true }; return { valid: true };
} }
@ -125,18 +127,20 @@ export function validate30040EventSet(content: string): {
if (documentHeaderMatches && documentHeaderMatches.length > 1) { if (documentHeaderMatches && documentHeaderMatches.length > 1) {
return { return {
valid: false, valid: false,
reason: '30040 events must have exactly one document title ("="). Found multiple document headers.', reason:
'30040 events must have exactly one document title ("="). Found multiple document headers.',
}; };
} }
// Parse the content to check sections // Parse the content to check sections
const parsed = parseAsciiDocWithMetadata(content); const parsed = parseAsciiDocWithMetadata(content);
const hasSections = parsed.sections.length > 0; const hasSections = parsed.sections.length > 0;
if (!hasSections) { if (!hasSections) {
return { return {
valid: true, valid: true,
warning: "No section headers (==) found. This will create a 30040 index event and a single 30041 preamble section. Continue?", warning:
"No section headers (==) found. This will create a 30040 index event and a single 30041 preamble section. Continue?",
}; };
} }
@ -147,7 +151,9 @@ export function validate30040EventSet(content: string): {
} }
// Check for empty sections // Check for empty sections
const emptySections = parsed.sections.filter(section => section.content.trim() === ""); const emptySections = parsed.sections.filter((section) =>
section.content.trim() === ""
);
if (emptySections.length > 0) { if (emptySections.length > 0) {
return { return {
valid: true, valid: true,
@ -226,21 +232,23 @@ export function build30040EventSet(
// Check if this is an "index card" format (no sections, just title + "index card") // Check if this is an "index card" format (no sections, just title + "index card")
const lines = content.split(/\r?\n/); const lines = content.split(/\r?\n/);
const documentTitle = parsed.metadata.title; const documentTitle = parsed.metadata.title;
// For index card format, the content should be exactly: title + "index card" // For index card format, the content should be exactly: title + "index card"
const nonEmptyLines = lines.filter(line => line.trim() !== "").map(line => line.trim()); const nonEmptyLines = lines.filter((line) => line.trim() !== "").map((line) =>
const isIndexCardFormat = documentTitle && line.trim()
nonEmptyLines.length === 2 && );
nonEmptyLines[0].startsWith("=") && const isIndexCardFormat = documentTitle &&
nonEmptyLines.length === 2 &&
nonEmptyLines[0].startsWith("=") &&
nonEmptyLines[1].toLowerCase() === "index card"; nonEmptyLines[1].toLowerCase() === "index card";
if (isIndexCardFormat) { if (isIndexCardFormat) {
console.log("Creating index card format (no sections)"); console.log("Creating index card format (no sections)");
const indexDTag = normalizeDTagValue(documentTitle); const indexDTag = normalizeDTagValue(documentTitle);
// Convert document metadata to tags // Convert document metadata to tags
const metadataTags = metadataToTags(parsed.metadata); const metadataTags = metadataToTags(parsed.metadata);
const indexEvent: NDKEvent = new NDKEventClass(ndk, { const indexEvent: NDKEvent = new NDKEventClass(ndk, {
kind: 30040, kind: 30040,
content: "", content: "",
@ -253,7 +261,7 @@ export function build30040EventSet(
pubkey: baseEvent.pubkey, pubkey: baseEvent.pubkey,
created_at: baseEvent.created_at, created_at: baseEvent.created_at,
}); });
console.log("Final index event (index card):", indexEvent); console.log("Final index event (index card):", indexEvent);
console.log("=== build30040EventSet completed (index card) ==="); console.log("=== build30040EventSet completed (index card) ===");
return { indexEvent, sectionEvents: [] }; return { indexEvent, sectionEvents: [] };
@ -266,24 +274,24 @@ export function build30040EventSet(
// Create section events with their metadata // Create section events with their metadata
const sectionEvents: NDKEvent[] = parsed.sections.map((section, i) => { const sectionEvents: NDKEvent[] = parsed.sections.map((section, i) => {
const sectionDTag = `${indexDTag}-${normalizeDTagValue(section.title)}`; const sectionDTag = `${indexDTag}-${normalizeDTagValue(section.title)}`;
console.log(`Creating section ${i}:`, { console.log(`Creating section ${i}:`, {
title: section.title, title: section.title,
dTag: sectionDTag, dTag: sectionDTag,
content: section.content, content: section.content,
metadata: section.metadata metadata: section.metadata,
}); });
// Convert section metadata to tags // Convert section metadata to tags
const sectionMetadataTags = metadataToTags(section.metadata); const sectionMetadataTags = metadataToTags(section.metadata);
return new NDKEventClass(ndk, { return new NDKEventClass(ndk, {
kind: 30041, kind: 30041,
content: section.content, content: section.content,
tags: [ tags: [
...tags, ...tags,
...sectionMetadataTags, ...sectionMetadataTags,
["d", sectionDTag], ["d", sectionDTag],
["title", section.title] ["title", section.title],
], ],
pubkey: baseEvent.pubkey, pubkey: baseEvent.pubkey,
created_at: baseEvent.created_at, created_at: baseEvent.created_at,
@ -291,7 +299,7 @@ export function build30040EventSet(
}); });
// Create proper a tags with format: kind:pubkey:d-tag // Create proper a tags with format: kind:pubkey:d-tag
const aTags = sectionEvents.map(event => { const aTags = sectionEvents.map((event) => {
const dTag = event.tags.find(([k]) => k === "d")?.[1]; const dTag = event.tags.find(([k]) => k === "d")?.[1];
return ["a", `30041:${baseEvent.pubkey}:${dTag}`] as [string, string]; return ["a", `30041:${baseEvent.pubkey}:${dTag}`] as [string, string];
}); });

71
src/lib/utils/event_kind_utils.ts

@ -1,4 +1,4 @@
import type { EventKindConfig } from '$lib/stores/visualizationConfig'; import type { EventKindConfig } from "$lib/stores/visualizationConfig";
/** /**
* Validates an event kind input value. * Validates an event kind input value.
@ -7,29 +7,29 @@ import type { EventKindConfig } from '$lib/stores/visualizationConfig';
* @returns The validated kind number, or null if validation fails. * @returns The validated kind number, or null if validation fails.
*/ */
export function validateEventKind( export function validateEventKind(
value: string | number, value: string | number,
existingKinds: number[] existingKinds: number[],
): { kind: number | null; error: string } { ): { kind: number | null; error: string } {
// Convert to string for consistent handling // Convert to string for consistent handling
const strValue = String(value); const strValue = String(value);
if (strValue === null || strValue === undefined || strValue.trim() === '') { if (strValue === null || strValue === undefined || strValue.trim() === "") {
return { kind: null, error: '' }; return { kind: null, error: "" };
} }
const kind = parseInt(strValue.trim()); const kind = parseInt(strValue.trim());
if (isNaN(kind)) { if (isNaN(kind)) {
return { kind: null, error: 'Must be a number' }; return { kind: null, error: "Must be a number" };
} }
if (kind < 0) { if (kind < 0) {
return { kind: null, error: 'Must be non-negative' }; return { kind: null, error: "Must be non-negative" };
} }
if (existingKinds.includes(kind)) { if (existingKinds.includes(kind)) {
return { kind: null, error: 'Already added' }; return { kind: null, error: "Already added" };
} }
return { kind, error: '' }; return { kind, error: "" };
} }
/** /**
@ -44,20 +44,20 @@ export function handleAddEventKind(
newKind: string, newKind: string,
existingKinds: number[], existingKinds: number[],
addKindFunction: (kind: number) => void, addKindFunction: (kind: number) => void,
resetStateFunction: () => void resetStateFunction: () => void,
): { success: boolean; error: string } { ): { success: boolean; error: string } {
console.log('[handleAddEventKind] called with:', newKind); console.log("[handleAddEventKind] called with:", newKind);
const validation = validateEventKind(newKind, existingKinds); const validation = validateEventKind(newKind, existingKinds);
console.log('[handleAddEventKind] Validation result:', validation); console.log("[handleAddEventKind] Validation result:", validation);
if (validation.kind !== null) { if (validation.kind !== null) {
console.log('[handleAddEventKind] Adding event kind:', validation.kind); console.log("[handleAddEventKind] Adding event kind:", validation.kind);
addKindFunction(validation.kind); addKindFunction(validation.kind);
resetStateFunction(); resetStateFunction();
return { success: true, error: '' }; return { success: true, error: "" };
} else { } else {
console.log('[handleAddEventKind] Validation failed:', validation.error); console.log("[handleAddEventKind] Validation failed:", validation.error);
return { success: false, error: validation.error }; return { success: false, error: validation.error };
} }
} }
@ -71,11 +71,11 @@ export function handleAddEventKind(
export function handleEventKindKeydown( export function handleEventKindKeydown(
e: KeyboardEvent, e: KeyboardEvent,
onEnter: () => void, onEnter: () => void,
onEscape: () => void onEscape: () => void,
): void { ): void {
if (e.key === 'Enter') { if (e.key === "Enter") {
onEnter(); onEnter();
} else if (e.key === 'Escape') { } else if (e.key === "Escape") {
onEscape(); onEscape();
} }
} }
@ -87,12 +87,19 @@ export function handleEventKindKeydown(
*/ */
export function getEventKindDisplayName(kind: number): string { export function getEventKindDisplayName(kind: number): string {
switch (kind) { switch (kind) {
case 30040: return 'Publication Index'; case 30040:
case 30041: return 'Publication Content'; return "Publication Index";
case 30818: return 'Wiki'; case 30041:
case 1: return 'Text Note'; return "Publication Content";
case 0: return 'Metadata'; case 30818:
case 3: return 'Follow List'; return "Wiki";
default: return `Kind ${kind}`; case 1:
return "Text Note";
case 0:
return "Metadata";
case 3:
return "Follow List";
default:
return `Kind ${kind}`;
} }
} }

78
src/lib/utils/event_search.ts

@ -4,7 +4,7 @@ import { nip19 } from "nostr-tools";
import { NDKEvent } from "@nostr-dev-kit/ndk"; import { NDKEvent } from "@nostr-dev-kit/ndk";
import type { Filter } from "./search_types.ts"; import type { Filter } from "./search_types.ts";
import { get } from "svelte/store"; import { get } from "svelte/store";
import { wellKnownUrl, isValidNip05Address } from "./search_utils.ts"; import { isValidNip05Address, wellKnownUrl } from "./search_utils.ts";
import { TIMEOUTS, VALIDATION } from "./search_constants.ts"; import { TIMEOUTS, VALIDATION } from "./search_constants.ts";
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts"; import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
@ -22,31 +22,39 @@ export async function searchEvent(query: string): Promise<NDKEvent | null> {
// This ensures searches can proceed even if some relay types are not available // This ensures searches can proceed even if some relay types are not available
let attempts = 0; let attempts = 0;
const maxAttempts = 5; // Reduced since we'll use fallback relays const maxAttempts = 5; // Reduced since we'll use fallback relays
while (attempts < maxAttempts) { while (attempts < maxAttempts) {
// Check if we have any relays in the pool // Check if we have any relays in the pool
if (ndk.pool.relays.size > 0) { if (ndk.pool.relays.size > 0) {
console.log(`[Search] Found ${ndk.pool.relays.size} relays in NDK pool`); console.log(`[Search] Found ${ndk.pool.relays.size} relays in NDK pool`);
break; break;
} }
// Also check if we have any active relays // Also check if we have any active relays
const inboxRelays = get(activeInboxRelays); const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays); const outboxRelays = get(activeOutboxRelays);
if (inboxRelays.length > 0 || outboxRelays.length > 0) { if (inboxRelays.length > 0 || outboxRelays.length > 0) {
console.log(`[Search] Found active relays - inbox: ${inboxRelays.length}, outbox: ${outboxRelays.length}`); console.log(
`[Search] Found active relays - inbox: ${inboxRelays.length}, outbox: ${outboxRelays.length}`,
);
break; break;
} }
console.log(`[Search] Waiting for relays to be available (attempt ${attempts + 1}/${maxAttempts})`); console.log(
await new Promise(resolve => setTimeout(resolve, 500)); `[Search] Waiting for relays to be available (attempt ${
attempts + 1
}/${maxAttempts})`,
);
await new Promise((resolve) => setTimeout(resolve, 500));
attempts++; attempts++;
} }
// AI-NOTE: 2025-01-24 - Don't fail if no relays are available, let fetchEventWithFallback handle fallbacks // AI-NOTE: 2025-01-24 - Don't fail if no relays are available, let fetchEventWithFallback handle fallbacks
// The fetchEventWithFallback function will use all available relays including fallback relays // The fetchEventWithFallback function will use all available relays including fallback relays
if (ndk.pool.relays.size === 0) { if (ndk.pool.relays.size === 0) {
console.warn("[Search] No relays in pool, but proceeding with search - fallback relays will be used"); console.warn(
"[Search] No relays in pool, but proceeding with search - fallback relays will be used",
);
} }
// Clean the query and normalize to lowercase // Clean the query and normalize to lowercase
@ -89,50 +97,70 @@ export async function searchEvent(query: string): Promise<NDKEvent | null> {
try { try {
const decoded = nip19.decode(cleanedQuery); const decoded = nip19.decode(cleanedQuery);
if (!decoded) throw new Error("Invalid identifier"); if (!decoded) throw new Error("Invalid identifier");
console.log(`[Search] Decoded identifier:`, { console.log(`[Search] Decoded identifier:`, {
type: decoded.type, type: decoded.type,
data: decoded.data, data: decoded.data,
query: cleanedQuery query: cleanedQuery,
}); });
switch (decoded.type) { switch (decoded.type) {
case "nevent": case "nevent":
console.log(`[Search] Processing nevent:`, { console.log(`[Search] Processing nevent:`, {
id: decoded.data.id, id: decoded.data.id,
kind: decoded.data.kind, kind: decoded.data.kind,
relays: decoded.data.relays relays: decoded.data.relays,
}); });
// Use the relays from the nevent if available // Use the relays from the nevent if available
if (decoded.data.relays && decoded.data.relays.length > 0) { if (decoded.data.relays && decoded.data.relays.length > 0) {
console.log(`[Search] Using relays from nevent:`, decoded.data.relays); console.log(
`[Search] Using relays from nevent:`,
decoded.data.relays,
);
// Try to fetch the event using the nevent's relays // Try to fetch the event using the nevent's relays
try { try {
// Create a temporary relay set for this search // Create a temporary relay set for this search
const neventRelaySet = NDKRelaySetFromNDK.fromRelayUrls(decoded.data.relays, ndk); const neventRelaySet = NDKRelaySetFromNDK.fromRelayUrls(
decoded.data.relays,
ndk,
);
if (neventRelaySet.relays.size > 0) { if (neventRelaySet.relays.size > 0) {
console.log(`[Search] Created relay set with ${neventRelaySet.relays.size} relays from nevent`); console.log(
`[Search] Created relay set with ${neventRelaySet.relays.size} relays from nevent`,
);
// Try to fetch the event using the nevent's relays // Try to fetch the event using the nevent's relays
const event = await ndk const event = await ndk
.fetchEvent({ ids: [decoded.data.id] }, undefined, neventRelaySet) .fetchEvent(
{ ids: [decoded.data.id] },
undefined,
neventRelaySet,
)
.withTimeout(TIMEOUTS.EVENT_FETCH); .withTimeout(TIMEOUTS.EVENT_FETCH);
if (event) { if (event) {
console.log(`[Search] Found event using nevent relays:`, event.id); console.log(
`[Search] Found event using nevent relays:`,
event.id,
);
return event; return event;
} else { } else {
console.log(`[Search] Event not found on nevent relays, trying default relays`); console.log(
`[Search] Event not found on nevent relays, trying default relays`,
);
} }
} }
} catch (error) { } catch (error) {
console.warn(`[Search] Error fetching from nevent relays:`, error); console.warn(
`[Search] Error fetching from nevent relays:`,
error,
);
} }
} }
filterOrId = decoded.data.id; filterOrId = decoded.data.id;
break; break;
case "note": case "note":

12
src/lib/utils/image_utils.ts

@ -11,14 +11,16 @@ export function generateDarkPastelColor(seed: string): string {
hash = ((hash << 5) - hash) + char; hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32-bit integer hash = hash & hash; // Convert to 32-bit integer
} }
// Use the hash to generate lighter pastel colors // Use the hash to generate lighter pastel colors
// Keep values in the 120-200 range for better pastel effect // Keep values in the 120-200 range for better pastel effect
const r = Math.abs(hash) % 80 + 120; // 120-200 range const r = Math.abs(hash) % 80 + 120; // 120-200 range
const g = Math.abs(hash >> 8) % 80 + 120; // 120-200 range const g = Math.abs(hash >> 8) % 80 + 120; // 120-200 range
const b = Math.abs(hash >> 16) % 80 + 120; // 120-200 range const b = Math.abs(hash >> 16) % 80 + 120; // 120-200 range
return `#${r.toString(16).padStart(2, '0')}${g.toString(16).padStart(2, '0')}${b.toString(16).padStart(2, '0')}`; return `#${r.toString(16).padStart(2, "0")}${
g.toString(16).padStart(2, "0")
}${b.toString(16).padStart(2, "0")}`;
} }
/** /**
@ -28,4 +30,4 @@ export function generateDarkPastelColor(seed: string): string {
*/ */
export function testColorGeneration(eventId: string): string { export function testColorGeneration(eventId: string): string {
return generateDarkPastelColor(eventId); return generateDarkPastelColor(eventId);
} }

76
src/lib/utils/kind24_utils.ts

@ -18,7 +18,7 @@ import { buildCompleteRelaySet } from "./relay_management";
*/ */
export async function getKind24RelaySet( export async function getKind24RelaySet(
senderPubkey: string, senderPubkey: string,
recipientPubkey: string recipientPubkey: string,
): Promise<string[]> { ): Promise<string[]> {
const ndk = get(ndkInstance); const ndk = get(ndkInstance);
if (!ndk) { if (!ndk) {
@ -27,14 +27,16 @@ export async function getKind24RelaySet(
const senderPrefix = senderPubkey.slice(0, 8); const senderPrefix = senderPubkey.slice(0, 8);
const recipientPrefix = recipientPubkey.slice(0, 8); const recipientPrefix = recipientPubkey.slice(0, 8);
console.log(`[getKind24RelaySet] Getting relays for ${senderPrefix} -> ${recipientPrefix}`); console.log(
`[getKind24RelaySet] Getting relays for ${senderPrefix} -> ${recipientPrefix}`,
);
try { try {
// Fetch both users' complete relay sets using existing utilities // Fetch both users' complete relay sets using existing utilities
const [senderRelaySet, recipientRelaySet] = await Promise.all([ const [senderRelaySet, recipientRelaySet] = await Promise.all([
buildCompleteRelaySet(ndk, ndk.getUser({ pubkey: senderPubkey })), buildCompleteRelaySet(ndk, ndk.getUser({ pubkey: senderPubkey })),
buildCompleteRelaySet(ndk, ndk.getUser({ pubkey: recipientPubkey })) buildCompleteRelaySet(ndk, ndk.getUser({ pubkey: recipientPubkey })),
]); ]);
// Use sender's outbox relays and recipient's inbox relays // Use sender's outbox relays and recipient's inbox relays
@ -42,24 +44,33 @@ export async function getKind24RelaySet(
const recipientInboxRelays = recipientRelaySet.inboxRelays; const recipientInboxRelays = recipientRelaySet.inboxRelays;
// Prioritize common relays for better privacy // Prioritize common relays for better privacy
const commonRelays = senderOutboxRelays.filter(relay => const commonRelays = senderOutboxRelays.filter((relay) =>
recipientInboxRelays.includes(relay) recipientInboxRelays.includes(relay)
); );
const senderOnlyRelays = senderOutboxRelays.filter(relay => const senderOnlyRelays = senderOutboxRelays.filter((relay) =>
!recipientInboxRelays.includes(relay) !recipientInboxRelays.includes(relay)
); );
const recipientOnlyRelays = recipientInboxRelays.filter(relay => const recipientOnlyRelays = recipientInboxRelays.filter((relay) =>
!senderOutboxRelays.includes(relay) !senderOutboxRelays.includes(relay)
); );
// Prioritize: common relays first, then sender outbox, then recipient inbox // Prioritize: common relays first, then sender outbox, then recipient inbox
const finalRelays = [...commonRelays, ...senderOnlyRelays, ...recipientOnlyRelays]; const finalRelays = [
...commonRelays,
console.log(`[getKind24RelaySet] ${senderPrefix}->${recipientPrefix} - Common: ${commonRelays.length}, Sender-only: ${senderOnlyRelays.length}, Recipient-only: ${recipientOnlyRelays.length}, Total: ${finalRelays.length}`); ...senderOnlyRelays,
...recipientOnlyRelays,
];
console.log(
`[getKind24RelaySet] ${senderPrefix}->${recipientPrefix} - Common: ${commonRelays.length}, Sender-only: ${senderOnlyRelays.length}, Recipient-only: ${recipientOnlyRelays.length}, Total: ${finalRelays.length}`,
);
return finalRelays; return finalRelays;
} catch (error) { } catch (error) {
console.error(`[getKind24RelaySet] Error getting relay set for ${senderPrefix}->${recipientPrefix}:`, error); console.error(
`[getKind24RelaySet] Error getting relay set for ${senderPrefix}->${recipientPrefix}:`,
error,
);
throw error; throw error;
} }
} }
@ -74,8 +85,10 @@ export async function getKind24RelaySet(
export async function createKind24Reply( export async function createKind24Reply(
content: string, content: string,
recipientPubkey: string, recipientPubkey: string,
originalEvent?: NDKEvent originalEvent?: NDKEvent,
): Promise<{ success: boolean; eventId?: string; error?: string; relays?: string[] }> { ): Promise<
{ success: boolean; eventId?: string; error?: string; relays?: string[] }
> {
const ndk = get(ndkInstance); const ndk = get(ndkInstance);
if (!ndk?.activeUser) { if (!ndk?.activeUser) {
return { success: false, error: "Not logged in" }; return { success: false, error: "Not logged in" };
@ -87,49 +100,56 @@ export async function createKind24Reply(
try { try {
// Get optimal relay set for this sender-recipient pair // Get optimal relay set for this sender-recipient pair
const targetRelays = await getKind24RelaySet(ndk.activeUser.pubkey, recipientPubkey); const targetRelays = await getKind24RelaySet(
ndk.activeUser.pubkey,
recipientPubkey,
);
if (targetRelays.length === 0) { if (targetRelays.length === 0) {
return { success: false, error: "No relays available for publishing" }; return { success: false, error: "No relays available for publishing" };
} }
// Build tags for the kind 24 event // Build tags for the kind 24 event
const tags: string[][] = [ const tags: string[][] = [
["p", recipientPubkey, targetRelays[0]] // Use first relay as primary ["p", recipientPubkey, targetRelays[0]], // Use first relay as primary
]; ];
// Add q tag if replying to an original event // Add q tag if replying to an original event
if (originalEvent) { if (originalEvent) {
tags.push(["q", originalEvent.id, targetRelays[0] || anonymousRelays[0]]); tags.push(["q", originalEvent.id, targetRelays[0] || anonymousRelays[0]]);
} }
// Create and sign the event // Create and sign the event
const { event: signedEventData } = await createSignedEvent( const { event: signedEventData } = await createSignedEvent(
content, content,
ndk.activeUser.pubkey, ndk.activeUser.pubkey,
24, 24,
tags tags,
); );
// Create NDKEvent and publish // Create NDKEvent and publish
const event = new NDKEvent(ndk, signedEventData); const event = new NDKEvent(ndk, signedEventData);
const relaySet = NDKRelaySet.fromRelayUrls(targetRelays, ndk); const relaySet = NDKRelaySet.fromRelayUrls(targetRelays, ndk);
const publishedToRelays = await event.publish(relaySet); const publishedToRelays = await event.publish(relaySet);
if (publishedToRelays.size > 0) { if (publishedToRelays.size > 0) {
console.log(`[createKind24Reply] Successfully published to ${publishedToRelays.size} relays`); console.log(
`[createKind24Reply] Successfully published to ${publishedToRelays.size} relays`,
);
return { success: true, eventId: event.id, relays: targetRelays }; return { success: true, eventId: event.id, relays: targetRelays };
} else { } else {
console.warn(`[createKind24Reply] Failed to publish to any relays`); console.warn(`[createKind24Reply] Failed to publish to any relays`);
return { success: false, error: "Failed to publish to any relays", relays: targetRelays }; return {
success: false,
error: "Failed to publish to any relays",
relays: targetRelays,
};
} }
} catch (error) { } catch (error) {
console.error("[createKind24Reply] Error creating kind 24 reply:", error); console.error("[createKind24Reply] Error creating kind 24 reply:", error);
return { return {
success: false, success: false,
error: error instanceof Error ? error.message : "Unknown error" error: error instanceof Error ? error.message : "Unknown error",
}; };
} }
} }

58
src/lib/utils/markup/MarkupInfo.md

@ -1,10 +1,14 @@
# Markup Support in Alexandria # Markup Support in Alexandria
Alexandria supports multiple markup formats for different use cases. Below is a summary of the supported tags and features for each parser, as well as the formats used for publications and wikis. Alexandria supports multiple markup formats for different use cases. Below is a
summary of the supported tags and features for each parser, as well as the
formats used for publications and wikis.
## Basic Markup Parser ## Basic Markup Parser
The **basic markup parser** follows the [Nostr best-practice guidelines](https://github.com/nostrability/nostrability/issues/146) and supports: The **basic markup parser** follows the
[Nostr best-practice guidelines](https://github.com/nostrability/nostrability/issues/146)
and supports:
- **Headers:** - **Headers:**
- ATX-style: `# H1` through `###### H6` - ATX-style: `# H1` through `###### H6`
@ -18,7 +22,8 @@ The **basic markup parser** follows the [Nostr best-practice guidelines](https:/
- **Links:** `[text](url)` - **Links:** `[text](url)`
- **Images:** `![alt](url)` - **Images:** `![alt](url)`
- **Hashtags:** `#hashtag` - **Hashtags:** `#hashtag`
- **Nostr identifiers:** npub, nprofile, nevent, naddr, note, with or without `nostr:` prefix (note is deprecated) - **Nostr identifiers:** npub, nprofile, nevent, naddr, note, with or without
`nostr:` prefix (note is deprecated)
- **Emoji shortcodes:** `:smile:` will render as 😄 - **Emoji shortcodes:** `:smile:` will render as 😄
## Advanced Markup Parser ## Advanced Markup Parser
@ -26,17 +31,25 @@ The **basic markup parser** follows the [Nostr best-practice guidelines](https:/
The **advanced markup parser** includes all features of the basic parser, plus: The **advanced markup parser** includes all features of the basic parser, plus:
- **Inline code:** `` `code` `` - **Inline code:** `` `code` ``
- **Syntax highlighting:** for code blocks in many programming languages (from [highlight.js](https://highlightjs.org/)) - **Syntax highlighting:** for code blocks in many programming languages (from
[highlight.js](https://highlightjs.org/))
- **Tables:** Pipe-delimited tables with or without headers - **Tables:** Pipe-delimited tables with or without headers
- **Footnotes:** `[^1]` or `[^Smith]`, which should appear where the footnote shall be placed, and will be displayed as unique, consecutive numbers - **Footnotes:** `[^1]` or `[^Smith]`, which should appear where the footnote
- **Footnote References:** `[^1]: footnote text` or `[^Smith]: Smith, Adam. 1984 "The Wiggle Mysteries`, which will be listed in order, at the bottom of the event, with back-reference links to the footnote, and text footnote labels appended shall be placed, and will be displayed as unique, consecutive numbers
- **Wikilinks:** `[[NIP-54]]` will render as a hyperlink and goes to [NIP-54](./events?d=nip-54) - **Footnote References:** `[^1]: footnote text` or
`[^Smith]: Smith, Adam. 1984 "The Wiggle Mysteries`, which will be listed in
order, at the bottom of the event, with back-reference links to the footnote,
and text footnote labels appended
- **Wikilinks:** `[[NIP-54]]` will render as a hyperlink and goes to
[NIP-54](./events?d=nip-54)
## Publications and Wikis ## Publications and Wikis
**Publications** and **wikis** in Alexandria use **AsciiDoc** as their primary markup language, not Markdown. **Publications** and **wikis** in Alexandria use **AsciiDoc** as their primary
markup language, not Markdown.
AsciiDoc supports a much broader set of formatting, semantic, and structural features, including: AsciiDoc supports a much broader set of formatting, semantic, and structural
features, including:
- Section and document structure - Section and document structure
- Advanced tables, callouts, admonitions - Advanced tables, callouts, admonitions
@ -48,7 +61,8 @@ AsciiDoc supports a much broader set of formatting, semantic, and structural fea
### Advanced Content Types ### Advanced Content Types
Alexandria supports rendering of advanced content types commonly used in academic, technical, and business documents: Alexandria supports rendering of advanced content types commonly used in
academic, technical, and business documents:
#### Math Rendering #### Math Rendering
@ -113,18 +127,26 @@ TikZ diagrams for mathematical illustrations:
### Rendering Features ### Rendering Features
- **Automatic Detection**: Content types are automatically detected based on syntax - **Automatic Detection**: Content types are automatically detected based on
- **Fallback Display**: If rendering fails, the original source code is displayed syntax
- **Fallback Display**: If rendering fails, the original source code is
displayed
- **Source Code**: Click "Show source" to view the original code - **Source Code**: Click "Show source" to view the original code
- **Responsive Design**: All rendered content is responsive and works on mobile devices - **Responsive Design**: All rendered content is responsive and works on mobile
devices
For more information on AsciiDoc, see the [AsciiDoc documentation](https://asciidoc.org/). For more information on AsciiDoc, see the
[AsciiDoc documentation](https://asciidoc.org/).
--- ---
**Note:** **Note:**
- The markdown parsers are primarily used for comments, issues, and other user-generated content. - The markdown parsers are primarily used for comments, issues, and other
- Publications and wikis are rendered using AsciiDoc for maximum expressiveness and compatibility. user-generated content.
- All URLs are sanitized to remove tracking parameters, and YouTube links are presented in a clean, privacy-friendly format. - Publications and wikis are rendered using AsciiDoc for maximum expressiveness
- [Here is a test markup file](/tests/integration/markupTestfile.md) that you can use to test out the parser and see how things should be formatted. and compatibility.
- All URLs are sanitized to remove tracking parameters, and YouTube links are
presented in a clean, privacy-friendly format.
- [Here is a test markup file](/tests/integration/markupTestfile.md) that you
can use to test out the parser and see how things should be formatted.

3
src/lib/utils/markup/advancedAsciidoctorPostProcessor.ts

@ -188,7 +188,8 @@ function processPlantUMLBlocks(html: string): string {
try { try {
const rawContent = decodeHTMLEntities(content); const rawContent = decodeHTMLEntities(content);
const encoded = plantumlEncoder.encode(rawContent); const encoded = plantumlEncoder.encode(rawContent);
const plantUMLUrl = `https://www.plantuml.com/plantuml/svg/${encoded}`; const plantUMLUrl =
`https://www.plantuml.com/plantuml/svg/${encoded}`;
return `<div class="plantuml-block my-4"> return `<div class="plantuml-block my-4">
<img src="${plantUMLUrl}" alt="PlantUML diagram" <img src="${plantUMLUrl}" alt="PlantUML diagram"
class="plantuml-diagram max-w-full h-auto rounded-lg shadow-lg" class="plantuml-diagram max-w-full h-auto rounded-lg shadow-lg"

44
src/lib/utils/markup/advancedMarkupParser.ts

@ -10,8 +10,9 @@ hljs.configure({
// Escapes HTML characters for safe display // Escapes HTML characters for safe display
function escapeHtml(text: string): string { function escapeHtml(text: string): string {
const div = const div = typeof document !== "undefined"
typeof document !== "undefined" ? document.createElement("div") : null; ? document.createElement("div")
: null;
if (div) { if (div) {
div.textContent = text; div.textContent = text;
return div.innerHTML; return div.innerHTML;
@ -100,8 +101,8 @@ function processTables(content: string): string {
}; };
// Check if second row is a delimiter row (only hyphens) // Check if second row is a delimiter row (only hyphens)
const hasHeader = const hasHeader = rows.length > 1 &&
rows.length > 1 && rows[1].trim().match(/^\|[-\s|]+\|$/); rows[1].trim().match(/^\|[-\s|]+\|$/);
// Extract header and body rows // Extract header and body rows
let headerCells: string[] = []; let headerCells: string[] = [];
@ -124,7 +125,8 @@ function processTables(content: string): string {
if (hasHeader) { if (hasHeader) {
html += "<thead>\n<tr>\n"; html += "<thead>\n<tr>\n";
headerCells.forEach((cell) => { headerCells.forEach((cell) => {
html += `<th class="py-2 px-4 text-left border-b-2 border-gray-200 dark:border-gray-700 font-semibold">${cell}</th>\n`; html +=
`<th class="py-2 px-4 text-left border-b-2 border-gray-200 dark:border-gray-700 font-semibold">${cell}</th>\n`;
}); });
html += "</tr>\n</thead>\n"; html += "</tr>\n</thead>\n";
} }
@ -135,7 +137,8 @@ function processTables(content: string): string {
const cells = processCells(row); const cells = processCells(row);
html += "<tr>\n"; html += "<tr>\n";
cells.forEach((cell) => { cells.forEach((cell) => {
html += `<td class="py-2 px-4 text-left border-b border-gray-200 dark:border-gray-700">${cell}</td>\n`; html +=
`<td class="py-2 px-4 text-left border-b border-gray-200 dark:border-gray-700">${cell}</td>\n`;
}); });
html += "</tr>\n"; html += "</tr>\n";
}); });
@ -197,7 +200,9 @@ function processFootnotes(content: string): string {
if (!referenceMap.has(id)) referenceMap.set(id, []); if (!referenceMap.has(id)) referenceMap.set(id, []);
referenceMap.get(id)!.push(refNum); referenceMap.get(id)!.push(refNum);
referenceOrder.push({ id, refNum, label: id }); referenceOrder.push({ id, refNum, label: id });
return `<sup><a href="#fn-${id}" id="fnref-${id}-${referenceMap.get(id)!.length}" class="text-primary-600 hover:underline">[${refNum}]</a></sup>`; return `<sup><a href="#fn-${id}" id="fnref-${id}-${
referenceMap.get(id)!.length
}" class="text-primary-600 hover:underline">[${refNum}]</a></sup>`;
}, },
); );
@ -216,12 +221,15 @@ function processFootnotes(content: string): string {
const backrefs = refs const backrefs = refs
.map( .map(
(num, i) => (num, i) =>
`<a href=\"#fnref-${id}-${i + 1}\" class=\"text-primary-600 hover:underline footnote-backref\">↩${num}</a>`, `<a href=\"#fnref-${id}-${
i + 1
}\" class=\"text-primary-600 hover:underline footnote-backref\">${num}</a>`,
) )
.join(" "); .join(" ");
// If label is not a number, show it after all backrefs // If label is not a number, show it after all backrefs
const labelSuffix = isNaN(Number(label)) ? ` ${label}` : ""; const labelSuffix = isNaN(Number(label)) ? ` ${label}` : "";
processedContent += `<li id=\"fn-${id}\"><span class=\"marker\">${text}</span> ${backrefs}${labelSuffix}</li>\n`; processedContent +=
`<li id=\"fn-${id}\"><span class=\"marker\">${text}</span> ${backrefs}${labelSuffix}</li>\n`;
} }
processedContent += "</ol>"; processedContent += "</ol>";
} }
@ -233,8 +241,6 @@ function processFootnotes(content: string): string {
} }
} }
/** /**
* Process code blocks by finding consecutive code lines and preserving their content * Process code blocks by finding consecutive code lines and preserving their content
*/ */
@ -357,13 +363,17 @@ function restoreCodeBlocks(text: string, blocks: Map<string, string>): string {
language, language,
ignoreIllegals: true, ignoreIllegals: true,
}).value; }).value;
html = `<pre class="code-block"><code class="hljs language-${language}">${highlighted}</code></pre>`; html =
`<pre class="code-block"><code class="hljs language-${language}">${highlighted}</code></pre>`;
} catch (e: unknown) { } catch (e: unknown) {
console.warn("Failed to highlight code block:", e); console.warn("Failed to highlight code block:", e);
html = `<pre class="code-block"><code class="hljs ${language ? `language-${language}` : ""}">${code}</code></pre>`; html = `<pre class="code-block"><code class="hljs ${
language ? `language-${language}` : ""
}">${code}</code></pre>`;
} }
} else { } else {
html = `<pre class="code-block"><code class="hljs">${code}</code></pre>`; html =
`<pre class="code-block"><code class="hljs">${code}</code></pre>`;
} }
result = result.replace(id, html); result = result.replace(id, html);
@ -672,8 +682,6 @@ function isLaTeXContent(content: string): boolean {
return latexPatterns.some((pattern) => pattern.test(trimmed)); return latexPatterns.some((pattern) => pattern.test(trimmed));
} }
/** /**
* Parse markup text with advanced formatting * Parse markup text with advanced formatting
*/ */
@ -711,6 +719,8 @@ export async function parseAdvancedmarkup(text: string): Promise<string> {
return processedText; return processedText;
} catch (e: unknown) { } catch (e: unknown) {
console.error("Error in parseAdvancedmarkup:", e); console.error("Error in parseAdvancedmarkup:", e);
return `<div class="text-red-500">Error processing markup: ${(e as Error)?.message ?? "Unknown error"}</div>`; return `<div class="text-red-500">Error processing markup: ${
(e as Error)?.message ?? "Unknown error"
}</div>`;
} }
} }

24
src/lib/utils/markup/asciidoctorPostProcessor.ts

@ -1,6 +1,9 @@
import { processImageWithReveal, processNostrIdentifiersInText, processWikilinks, processAsciiDocAnchors } from "./markupServices"; import {
processAsciiDocAnchors,
processImageWithReveal,
processNostrIdentifiersInText,
processWikilinks,
} from "./markupServices";
/** /**
* Processes nostr addresses in HTML content, but skips addresses that are * Processes nostr addresses in HTML content, but skips addresses that are
@ -41,8 +44,7 @@ async function processNostrAddresses(html: string): Promise<string> {
const processedMatch = await processNostrIdentifiersInText(fullMatch); const processedMatch = await processNostrIdentifiersInText(fullMatch);
// Replace the match in the HTML // Replace the match in the HTML
processedHtml = processedHtml = processedHtml.slice(0, matchIndex) +
processedHtml.slice(0, matchIndex) +
processedMatch + processedMatch +
processedHtml.slice(matchIndex + fullMatch.length); processedHtml.slice(matchIndex + fullMatch.length);
} }
@ -61,18 +63,18 @@ function processImageBlocks(html: string): string {
// Extract src and alt from img attributes // Extract src and alt from img attributes
const srcMatch = imgAttributes.match(/src="([^"]+)"/); const srcMatch = imgAttributes.match(/src="([^"]+)"/);
const altMatch = imgAttributes.match(/alt="([^"]*)"/); const altMatch = imgAttributes.match(/alt="([^"]*)"/);
const src = srcMatch ? srcMatch[1] : ''; const src = srcMatch ? srcMatch[1] : "";
const alt = altMatch ? altMatch[1] : ''; const alt = altMatch ? altMatch[1] : "";
const titleHtml = title ? `<div class="title">${title}</div>` : ''; const titleHtml = title ? `<div class="title">${title}</div>` : "";
return `<div class="imageblock"> return `<div class="imageblock">
<div class="content"> <div class="content">
${processImageWithReveal(src, alt)} ${processImageWithReveal(src, alt)}
</div> </div>
${titleHtml} ${titleHtml}
</div>`; </div>`;
} },
); );
} }

51
src/lib/utils/markup/basicMarkupParser.ts

@ -1,16 +1,16 @@
import * as emoji from "node-emoji"; import * as emoji from "node-emoji";
import { nip19 } from "nostr-tools"; import { nip19 } from "nostr-tools";
import { import {
processImageWithReveal,
processMediaUrl,
processNostrIdentifiersInText,
processEmojiShortcodes,
processWebSocketUrls,
processHashtags,
processBasicTextFormatting, processBasicTextFormatting,
processBlockquotes, processBlockquotes,
processEmojiShortcodes,
processHashtags,
processImageWithReveal,
processMediaUrl,
processNostrIdentifiersInText,
processWebSocketUrls,
processWikilinks, processWikilinks,
stripTrackingParams stripTrackingParams,
} from "./markupServices"; } from "./markupServices";
/* Regex constants for basic markup parsing */ /* Regex constants for basic markup parsing */
@ -21,8 +21,6 @@ const MARKUP_IMAGE = /!\[([^\]]*)\]\(([^)]+)\)/g;
// AI-NOTE: 2025-01-24 - Added negative lookbehind (?<!\]\() to prevent processing URLs in markdown syntax // AI-NOTE: 2025-01-24 - Added negative lookbehind (?<!\]\() to prevent processing URLs in markdown syntax
const DIRECT_LINK = /(?<!["'=])(?<!\]\()(https?:\/\/[^\s<>"]+)(?!["'])/g; const DIRECT_LINK = /(?<!["'=])(?<!\]\()(https?:\/\/[^\s<>"]+)(?!["'])/g;
// Add this helper function near the top: // Add this helper function near the top:
function replaceAlexandriaNostrLinks(text: string): string { function replaceAlexandriaNostrLinks(text: string): string {
// Regex for Alexandria/localhost URLs // Regex for Alexandria/localhost URLs
@ -82,12 +80,6 @@ function replaceAlexandriaNostrLinks(text: string): string {
return text; return text;
} }
function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string { function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string {
function parseList( function parseList(
start: number, start: number,
@ -96,7 +88,9 @@ function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string {
): [string, number] { ): [string, number] {
let html = ""; let html = "";
let i = start; let i = start;
html += `<${type} class="${type === "ol" ? "list-decimal" : "list-disc"} ml-6 mb-2">`; html += `<${type} class="${
type === "ol" ? "list-decimal" : "list-disc"
} ml-6 mb-2">`;
while (i < lines.length) { while (i < lines.length) {
const line = lines[i]; const line = lines[i];
const match = line.match(/^([ \t]*)([*+-]|\d+\.)[ \t]+(.*)$/); const match = line.match(/^([ \t]*)([*+-]|\d+\.)[ \t]+(.*)$/);
@ -168,7 +162,9 @@ function processBasicFormatting(content: string): string {
processedText = processedText.replace( processedText = processedText.replace(
MARKUP_LINK, MARKUP_LINK,
(_match, text, url) => (_match, text, url) =>
`<a href="${stripTrackingParams(url)}" class="text-primary-600 dark:text-primary-500 hover:underline" target="_blank" rel="noopener noreferrer">${text}</a>`, `<a href="${
stripTrackingParams(url)
}" class="text-primary-600 dark:text-primary-500 hover:underline" target="_blank" rel="noopener noreferrer">${text}</a>`,
); );
// Process WebSocket URLs using shared services // Process WebSocket URLs using shared services
@ -181,7 +177,7 @@ function processBasicFormatting(content: string): string {
// Process text formatting using shared services // Process text formatting using shared services
processedText = processBasicTextFormatting(processedText); processedText = processBasicTextFormatting(processedText);
// Process hashtags using shared services // Process hashtags using shared services
processedText = processHashtags(processedText); processedText = processHashtags(processedText);
@ -220,12 +216,6 @@ function processBasicFormatting(content: string): string {
return processedText; return processedText;
} }
export async function parseBasicmarkup(text: string): Promise<string> { export async function parseBasicmarkup(text: string): Promise<string> {
if (!text) return ""; if (!text) return "";
@ -249,9 +239,10 @@ export async function parseBasicmarkup(text: string): Promise<string> {
// AI-NOTE: 2025-01-24 - Added img tag to skip wrapping to prevent image rendering issues // AI-NOTE: 2025-01-24 - Added img tag to skip wrapping to prevent image rendering issues
// Skip wrapping if para already contains block-level elements, math blocks, or images // Skip wrapping if para already contains block-level elements, math blocks, or images
if ( if (
/(<div[^>]*class=["'][^"']*math-block[^"']*["'])|<(div|h[1-6]|blockquote|table|pre|ul|ol|hr|img)/i.test( /(<div[^>]*class=["'][^"']*math-block[^"']*["'])|<(div|h[1-6]|blockquote|table|pre|ul|ol|hr|img)/i
para, .test(
) para,
)
) { ) {
return para; return para;
} }
@ -268,6 +259,8 @@ export async function parseBasicmarkup(text: string): Promise<string> {
return processedText; return processedText;
} catch (e: unknown) { } catch (e: unknown) {
console.error("Error in parseBasicmarkup:", e); console.error("Error in parseBasicmarkup:", e);
return `<div class="text-red-500">Error processing markup: ${(e as Error)?.message ?? "Unknown error"}</div>`; return `<div class="text-red-500">Error processing markup: ${
(e as Error)?.message ?? "Unknown error"
}</div>`;
} }
} }

56
src/lib/utils/markup/embeddedMarkupParser.ts

@ -1,18 +1,18 @@
import * as emoji from "node-emoji"; import * as emoji from "node-emoji";
import { nip19 } from "nostr-tools"; import { nip19 } from "nostr-tools";
import { import {
processImageWithReveal,
processMediaUrl,
processNostrIdentifiersInText,
processEmojiShortcodes,
processWebSocketUrls,
processHashtags,
processBasicTextFormatting, processBasicTextFormatting,
processBlockquotes, processBlockquotes,
processWikilinks, processEmojiShortcodes,
processHashtags,
processImageWithReveal,
processMediaUrl,
processNostrIdentifiersInText,
processNostrIdentifiersWithEmbeddedEvents, processNostrIdentifiersWithEmbeddedEvents,
stripTrackingParams processWebSocketUrls,
} from "./markupServices"; processWikilinks,
stripTrackingParams,
} from "./markupServices.ts";
/* Regex constants for basic markup parsing */ /* Regex constants for basic markup parsing */
@ -89,7 +89,9 @@ function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string {
): [string, number] { ): [string, number] {
let html = ""; let html = "";
let i = start; let i = start;
html += `<${type} class="${type === "ol" ? "list-decimal" : "list-disc"} ml-6 mb-2">`; html += `<${type} class="${
type === "ol" ? "list-decimal" : "list-disc"
} ml-6 mb-2">`;
while (i < lines.length) { while (i < lines.length) {
const line = lines[i]; const line = lines[i];
const match = line.match(/^([ \t]*)([*+-]|\d+\.)[ \t]+(.*)$/); const match = line.match(/^([ \t]*)([*+-]|\d+\.)[ \t]+(.*)$/);
@ -161,7 +163,9 @@ function processBasicFormatting(content: string): string {
processedText = processedText.replace( processedText = processedText.replace(
MARKUP_LINK, MARKUP_LINK,
(_match, text, url) => (_match, text, url) =>
`<a href="${stripTrackingParams(url)}" class="text-primary-600 dark:text-primary-500 hover:underline" target="_blank" rel="noopener noreferrer">${text}</a>`, `<a href="${
stripTrackingParams(url)
}" class="text-primary-600 dark:text-primary-500 hover:underline" target="_blank" rel="noopener noreferrer">${text}</a>`,
); );
// Process WebSocket URLs using shared services // Process WebSocket URLs using shared services
@ -174,7 +178,7 @@ function processBasicFormatting(content: string): string {
// Process text formatting using shared services // Process text formatting using shared services
processedText = processBasicTextFormatting(processedText); processedText = processBasicTextFormatting(processedText);
// Process hashtags using shared services // Process hashtags using shared services
processedText = processHashtags(processedText); processedText = processHashtags(processedText);
@ -218,7 +222,10 @@ function processBasicFormatting(content: string): string {
* AI-NOTE: 2025-01-24 - Enhanced markup parser that supports nested Nostr event embedding * AI-NOTE: 2025-01-24 - Enhanced markup parser that supports nested Nostr event embedding
* Up to 3 levels of nesting are supported, after which events are shown as links * Up to 3 levels of nesting are supported, after which events are shown as links
*/ */
export async function parseEmbeddedMarkup(text: string, nestingLevel: number = 0): Promise<string> { export async function parseEmbeddedMarkup(
text: string,
nestingLevel: number = 0,
): Promise<string> {
if (!text) return ""; if (!text) return "";
try { try {
@ -233,29 +240,30 @@ export async function parseEmbeddedMarkup(text: string, nestingLevel: number = 0
// Process paragraphs - split by double newlines and wrap in p tags // Process paragraphs - split by double newlines and wrap in p tags
// Skip wrapping if content already contains block-level elements // Skip wrapping if content already contains block-level elements
const blockLevelEls =
/(<div[^>]*class=["'][^"']*math-block[^"']*["'])|<(div|h[1-6]|blockquote|table|pre|ul|ol|hr|img)/i;
processedText = processedText processedText = processedText
.split(/\n\n+/) .split(/\n\n+/)
.map((para) => para.trim()) .map((para) => para.trim())
.filter((para) => para.length > 0) .filter((para) => para.length > 0)
.map((para) => { .map((para) => {
// AI-NOTE: 2025-01-24 - Added img tag to skip wrapping to prevent image rendering issues
// Skip wrapping if para already contains block-level elements, math blocks, or images // Skip wrapping if para already contains block-level elements, math blocks, or images
if ( if (blockLevelEls.test(para)) {
/(<div[^>]*class=["'][^"']*math-block[^"']*["'])|<(div|h[1-6]|blockquote|table|pre|ul|ol|hr|img)/i.test(
para,
)
) {
return para; return para;
} }
return `<p class="my-1">${para}</p>`; return `<p class="my-1">${para}</p>`;
}) })
.join("\n"); .join("\n");
// Process profile identifiers (npub, nprofile) first using the regular processor // Process profile identifiers (npub, nprofile) first using the regular processor
processedText = await processNostrIdentifiersInText(processedText); processedText = await processNostrIdentifiersInText(processedText);
// Then process event identifiers with embedded events (only event-related identifiers) // Then process event identifiers with embedded events (only event-related identifiers)
processedText = processNostrIdentifiersWithEmbeddedEvents(processedText, nestingLevel); processedText = processNostrIdentifiersWithEmbeddedEvents(
processedText,
nestingLevel,
);
// Replace wikilinks // Replace wikilinks
processedText = processWikilinks(processedText); processedText = processWikilinks(processedText);
@ -263,6 +271,8 @@ export async function parseEmbeddedMarkup(text: string, nestingLevel: number = 0
return processedText; return processedText;
} catch (e: unknown) { } catch (e: unknown) {
console.error("Error in parseEmbeddedMarkup:", e); console.error("Error in parseEmbeddedMarkup:", e);
return `<div class="text-red-500">Error processing markup: ${(e as Error)?.message ?? "Unknown error"}</div>`; return `<div class="text-red-500">Error processing markup: ${
(e as Error)?.message ?? "Unknown error"
}</div>`;
} }
} }

147
src/lib/utils/markup/markupServices.ts

@ -1,18 +1,25 @@
import { processNostrIdentifiers, NOSTR_PROFILE_REGEX } from "../nostrUtils.ts"; import {
createProfileLink,
getUserMetadata,
NOSTR_PROFILE_REGEX,
} from "../nostrUtils.ts";
import * as emoji from "node-emoji"; import * as emoji from "node-emoji";
// Media URL patterns // Media URL patterns
const IMAGE_EXTENSIONS = /\.(jpg|jpeg|gif|png|webp|svg)$/i; const IMAGE_EXTENSIONS = /\.(jpg|jpeg|gif|png|webp|svg)$/i;
const VIDEO_URL_REGEX = /https?:\/\/[^\s<]+\.(?:mp4|webm|mov|avi)(?:[^\s<]*)?/i; const VIDEO_URL_REGEX = /https?:\/\/[^\s<]+\.(?:mp4|webm|mov|avi)(?:[^\s<]*)?/i;
const AUDIO_URL_REGEX = /https?:\/\/[^\s<]+\.(?:mp3|wav|ogg|m4a)(?:[^\s<]*)?/i; const AUDIO_URL_REGEX = /https?:\/\/[^\s<]+\.(?:mp3|wav|ogg|m4a)(?:[^\s<]*)?/i;
const YOUTUBE_URL_REGEX = /https?:\/\/(?:www\.)?(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/|youtube-nocookie\.com\/embed\/)([a-zA-Z0-9_-]{11})(?:[^\s<]*)?/; const YOUTUBE_URL_REGEX =
/https?:\/\/(?:www\.)?(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/|youtube-nocookie\.com\/embed\/)([a-zA-Z0-9_-]{11})(?:[^\s<]*)?/;
/** /**
* Shared service for processing images with expand functionality * Shared service for processing images with expand functionality
*/ */
export function processImageWithReveal(src: string, alt: string = "Image"): string { export function processImageWithReveal(
src: string,
alt: string = "Image",
): string {
if (!src || !IMAGE_EXTENSIONS.test(src.split("?")[0])) { if (!src || !IMAGE_EXTENSIONS.test(src.split("?")[0])) {
return `<img src="${src}" alt="${alt}">`; return `<img src="${src}" alt="${alt}">`;
} }
@ -43,26 +50,32 @@ export function processImageWithReveal(src: string, alt: string = "Image"): stri
*/ */
export function processMediaUrl(url: string, alt?: string): string { export function processMediaUrl(url: string, alt?: string): string {
const clean = stripTrackingParams(url); const clean = stripTrackingParams(url);
if (YOUTUBE_URL_REGEX.test(clean)) { if (YOUTUBE_URL_REGEX.test(clean)) {
const videoId = extractYouTubeVideoId(clean); const videoId = extractYouTubeVideoId(clean);
if (videoId) { if (videoId) {
return `<iframe class="w-full aspect-video rounded-lg shadow-lg my-2" src="https://www.youtube-nocookie.com/embed/${videoId}" title="${alt || "YouTube video"}" frameborder="0" allow="fullscreen" sandbox="allow-scripts allow-same-origin allow-presentation"></iframe>`; return `<iframe class="w-full aspect-video rounded-lg shadow-lg my-2" src="https://www.youtube-nocookie.com/embed/${videoId}" title="${
alt || "YouTube video"
}" frameborder="0" allow="fullscreen" sandbox="allow-scripts allow-same-origin allow-presentation"></iframe>`;
} }
} }
if (VIDEO_URL_REGEX.test(clean)) { if (VIDEO_URL_REGEX.test(clean)) {
return `<video controls class="max-w-full rounded-lg shadow-lg my-2" preload="none" playsinline><source src="${clean}">${alt || "Video"}</video>`; return `<video controls class="max-w-full rounded-lg shadow-lg my-2" preload="none" playsinline><source src="${clean}">${
alt || "Video"
}</video>`;
} }
if (AUDIO_URL_REGEX.test(clean)) { if (AUDIO_URL_REGEX.test(clean)) {
return `<audio controls class="w-full my-2" preload="none"><source src="${clean}">${alt || "Audio"}</audio>`; return `<audio controls class="w-full my-2" preload="none"><source src="${clean}">${
alt || "Audio"
}</audio>`;
} }
if (IMAGE_EXTENSIONS.test(clean.split("?")[0])) { if (IMAGE_EXTENSIONS.test(clean.split("?")[0])) {
return processImageWithReveal(clean, alt || "Embedded media"); return processImageWithReveal(clean, alt || "Embedded media");
} }
// Default to clickable link // Default to clickable link
return `<a href="${clean}" target="_blank" rel="noopener noreferrer" class="text-blue-500 hover:text-blue-600 dark:text-blue-400 dark:hover:text-blue-300">${clean}</a>`; return `<a href="${clean}" target="_blank" rel="noopener noreferrer" class="text-blue-500 hover:text-blue-600 dark:text-blue-400 dark:hover:text-blue-300">${clean}</a>`;
} }
@ -70,40 +83,45 @@ export function processMediaUrl(url: string, alt?: string): string {
/** /**
* Shared service for processing nostr identifiers * Shared service for processing nostr identifiers
*/ */
export async function processNostrIdentifiersInText(text: string): Promise<string> { export async function processNostrIdentifiersInText(
text: string,
): Promise<string> {
let processedText = text; let processedText = text;
// Find all profile-related nostr addresses (only npub and nprofile) // Find all profile-related nostr addresses (only npub and nprofile)
const matches = Array.from(processedText.matchAll(NOSTR_PROFILE_REGEX)); const matches = Array.from(processedText.matchAll(NOSTR_PROFILE_REGEX));
// Process them in reverse order to avoid index shifting issues // Process them in reverse order to avoid index shifting issues
for (let i = matches.length - 1; i >= 0; i--) { for (let i = matches.length - 1; i >= 0; i--) {
const match = matches[i]; const match = matches[i];
const [fullMatch] = match; const [fullMatch] = match;
const matchIndex = match.index ?? 0; const matchIndex = match.index ?? 0;
// Skip if part of a URL // Skip if part of a URL
const before = processedText.slice(Math.max(0, matchIndex - 12), matchIndex); const before = processedText.slice(
Math.max(0, matchIndex - 12),
matchIndex,
);
if (/https?:\/\/$|www\.$/i.test(before)) { if (/https?:\/\/$|www\.$/i.test(before)) {
continue; continue;
} }
// Process the nostr identifier directly // Process the nostr identifier directly
let identifier = fullMatch; let identifier = fullMatch;
if (!identifier.startsWith("nostr:")) { if (!identifier.startsWith("nostr:")) {
identifier = "nostr:" + identifier; identifier = "nostr:" + identifier;
} }
// Get user metadata and create link // Get user metadata and create link
const { getUserMetadata, createProfileLink } = await import("../nostrUtils.ts");
const metadata = await getUserMetadata(identifier); const metadata = await getUserMetadata(identifier);
const displayText = metadata.displayName || metadata.name; const displayText = metadata.displayName || metadata.name;
const link = createProfileLink(identifier, displayText); const link = createProfileLink(identifier, displayText);
// Replace the match in the text // Replace the match in the text
processedText = processedText.slice(0, matchIndex) + link + processedText.slice(matchIndex + fullMatch.length); processedText = processedText.slice(0, matchIndex) + link +
processedText.slice(matchIndex + fullMatch.length);
} }
return processedText; return processedText;
} }
@ -112,37 +130,45 @@ export async function processNostrIdentifiersInText(text: string): Promise<strin
* Replaces nostr: links with embedded event placeholders * Replaces nostr: links with embedded event placeholders
* Only processes event-related identifiers (nevent, naddr, note), not profile identifiers (npub, nprofile) * Only processes event-related identifiers (nevent, naddr, note), not profile identifiers (npub, nprofile)
*/ */
export function processNostrIdentifiersWithEmbeddedEvents(text: string, nestingLevel: number = 0): string { export function processNostrIdentifiersWithEmbeddedEvents(
text: string,
nestingLevel: number = 0,
): string {
const eventPattern = /nostr:(note|nevent|naddr)[a-zA-Z0-9]{20,}/g; const eventPattern = /nostr:(note|nevent|naddr)[a-zA-Z0-9]{20,}/g;
let processedText = text; let processedText = text;
// Maximum nesting level allowed // Maximum nesting level allowed
const MAX_NESTING_LEVEL = 3; const MAX_NESTING_LEVEL = 3;
// Find all event-related nostr addresses // Find all event-related nostr addresses
const matches = Array.from(processedText.matchAll(eventPattern)); const matches = Array.from(processedText.matchAll(eventPattern));
// Process them in reverse order to avoid index shifting issues // Process them in reverse order to avoid index shifting issues
for (let i = matches.length - 1; i >= 0; i--) { for (let i = matches.length - 1; i >= 0; i--) {
const match = matches[i]; const match = matches[i];
const [fullMatch] = match; const [fullMatch] = match;
const matchIndex = match.index ?? 0; const matchIndex = match.index ?? 0;
let replacement: string; let replacement: string;
if (nestingLevel >= MAX_NESTING_LEVEL) { if (nestingLevel >= MAX_NESTING_LEVEL) {
// At max nesting level, just show the link // At max nesting level, just show the link
replacement = `<a href="/events?id=${fullMatch}" class="text-primary-600 dark:text-primary-500 hover:underline break-all">${fullMatch}</a>`; replacement =
`<a href="/events?id=${fullMatch}" class="text-primary-600 dark:text-primary-500 hover:underline break-all">${fullMatch}</a>`;
} else { } else {
// Create a placeholder for embedded event // Create a placeholder for embedded event
const componentId = `embedded-event-${Math.random().toString(36).substr(2, 9)}`; const componentId = `embedded-event-${
replacement = `<div class="embedded-event-placeholder" data-nostr-id="${fullMatch}" data-nesting-level="${nestingLevel}" id="${componentId}"></div>`; Math.random().toString(36).substr(2, 9)
}`;
replacement =
`<div class="embedded-event-placeholder" data-nostr-id="${fullMatch}" data-nesting-level="${nestingLevel}" id="${componentId}"></div>`;
} }
// Replace the match in the text // Replace the match in the text
processedText = processedText.slice(0, matchIndex) + replacement + processedText.slice(matchIndex + fullMatch.length); processedText = processedText.slice(0, matchIndex) + replacement +
processedText.slice(matchIndex + fullMatch.length);
} }
return processedText; return processedText;
} }
@ -169,7 +195,10 @@ export function processWebSocketUrls(text: string): string {
*/ */
export function processHashtags(text: string): string { export function processHashtags(text: string): string {
const hashtagRegex = /(?<![^\s])#([a-zA-Z0-9_]+)(?!\w)/g; const hashtagRegex = /(?<![^\s])#([a-zA-Z0-9_]+)(?!\w)/g;
return text.replace(hashtagRegex, '<button class="text-primary-600 dark:text-primary-500 hover:underline cursor-pointer" onclick="window.location.href=\'/events?t=$1\'">#$1</button>'); return text.replace(
hashtagRegex,
'<button class="text-primary-600 dark:text-primary-500 hover:underline cursor-pointer" onclick="window.location.href=\'/events?t=$1\'">#$1</button>',
);
} }
/** /**
@ -177,20 +206,26 @@ export function processHashtags(text: string): string {
*/ */
export function processBasicTextFormatting(text: string): string { export function processBasicTextFormatting(text: string): string {
// Bold: **text** or *text* // Bold: **text** or *text*
text = text.replace(/(\*\*|[*])((?:[^*\n]|\*(?!\*))+)\1/g, "<strong>$2</strong>"); text = text.replace(
/(\*\*|[*])((?:[^*\n]|\*(?!\*))+)\1/g,
"<strong>$2</strong>",
);
// Italic: _text_ or __text__ // Italic: _text_ or __text__
text = text.replace(/\b(_[^_\n]+_|\b__[^_\n]+__)\b/g, (match) => { text = text.replace(/\b(_[^_\n]+_|\b__[^_\n]+__)\b/g, (match) => {
const text = match.replace(/^_+|_+$/g, ""); const text = match.replace(/^_+|_+$/g, "");
return `<em>${text}</em>`; return `<em>${text}</em>`;
}); });
// Strikethrough: ~~text~~ or ~text~ // Strikethrough: ~~text~~ or ~text~
text = text.replace(/~~([^~\n]+)~~|~([^~\n]+)~/g, (_match, doubleText, singleText) => { text = text.replace(
const text = doubleText || singleText; /~~([^~\n]+)~~|~([^~\n]+)~/g,
return `<del class="line-through">${text}</del>`; (_match, doubleText, singleText) => {
}); const text = doubleText || singleText;
return `<del class="line-through">${text}</del>`;
},
);
return text; return text;
} }
@ -203,7 +238,9 @@ export function processBlockquotes(text: string): string {
const lines = match.split("\n").map((line) => { const lines = match.split("\n").map((line) => {
return line.replace(/^[ \t]*>[ \t]?/, "").trim(); return line.replace(/^[ \t]*>[ \t]?/, "").trim();
}); });
return `<blockquote class="pl-4 border-l-4 border-gray-300 dark:border-gray-600 my-4">${lines.join("\n")}</blockquote>`; return `<blockquote class="pl-4 border-l-4 border-gray-300 dark:border-gray-600 my-4">${
lines.join("\n")
}</blockquote>`;
}); });
} }
@ -212,8 +249,16 @@ export function stripTrackingParams(url: string): string {
try { try {
const urlObj = new URL(url); const urlObj = new URL(url);
// Remove common tracking parameters // Remove common tracking parameters
const trackingParams = ['utm_source', 'utm_medium', 'utm_campaign', 'utm_term', 'utm_content', 'fbclid', 'gclid']; const trackingParams = [
trackingParams.forEach(param => urlObj.searchParams.delete(param)); "utm_source",
"utm_medium",
"utm_campaign",
"utm_term",
"utm_content",
"fbclid",
"gclid",
];
trackingParams.forEach((param) => urlObj.searchParams.delete(param));
return urlObj.toString(); return urlObj.toString();
} catch { } catch {
return url; return url;
@ -221,7 +266,9 @@ export function stripTrackingParams(url: string): string {
} }
function extractYouTubeVideoId(url: string): string | null { function extractYouTubeVideoId(url: string): string | null {
const match = url.match(/(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/|youtube-nocookie\.com\/embed\/)([a-zA-Z0-9_-]{11})/); const match = url.match(
/(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/|youtube-nocookie\.com\/embed\/)([a-zA-Z0-9_-]{11})/,
);
return match ? match[1] : null; return match ? match[1] : null;
} }
@ -263,4 +310,4 @@ export function processAsciiDocAnchors(text: string): string {
const url = `/events?d=${normalized}`; const url = `/events?d=${normalized}`;
return `<a class="wikilink text-primary-600 dark:text-primary-500 hover:underline" data-dtag="${normalized}" data-url="${url}" href="${url}">${id}</a>`; return `<a class="wikilink text-primary-600 dark:text-primary-500 hover:underline" data-dtag="${normalized}" data-url="${url}" href="${url}">${id}</a>`;
}); });
} }

4
src/lib/utils/markup/tikzRenderer.ts

@ -44,7 +44,9 @@ function createBasicSVG(tikzCode: string): string {
</text> </text>
<foreignObject x="10" y="60" width="${width - 20}" height="${height - 70}"> <foreignObject x="10" y="60" width="${width - 20}" height="${height - 70}">
<div xmlns="http://www.w3.org/1999/xhtml" style="font-family: monospace; font-size: 10px; color: #666; overflow: hidden;"> <div xmlns="http://www.w3.org/1999/xhtml" style="font-family: monospace; font-size: 10px; color: #666; overflow: hidden;">
<pre style="margin: 0; white-space: pre-wrap; word-break: break-all;">${escapeHtml(tikzCode)}</pre> <pre style="margin: 0; white-space: pre-wrap; word-break: break-all;">${
escapeHtml(tikzCode)
}</pre>
</div> </div>
</foreignObject> </foreignObject>
</svg>`; </svg>`;

2
src/lib/utils/mime.ts

@ -104,7 +104,7 @@ export function getMimeTags(kind: number): [string, string][] {
MTag = ["M", `article/long-form/${replaceability}`]; MTag = ["M", `article/long-form/${replaceability}`];
break; break;
// Add more cases as needed... // Add more cases as needed...
} }
return [mTag, MTag]; return [mTag, MTag];

106
src/lib/utils/network_detection.ts

@ -4,18 +4,18 @@ import { deduplicateRelayUrls } from "./relay_management.ts";
* Network conditions for relay selection * Network conditions for relay selection
*/ */
export enum NetworkCondition { export enum NetworkCondition {
ONLINE = 'online', ONLINE = "online",
SLOW = 'slow', SLOW = "slow",
OFFLINE = 'offline' OFFLINE = "offline",
} }
/** /**
* Network connectivity test endpoints * Network connectivity test endpoints
*/ */
const NETWORK_ENDPOINTS = [ const NETWORK_ENDPOINTS = [
'https://www.google.com/favicon.ico', "https://www.google.com/favicon.ico",
'https://httpbin.org/status/200', "https://httpbin.org/status/200",
'https://api.github.com/zen' "https://api.github.com/zen",
]; ];
/** /**
@ -27,20 +27,23 @@ export async function isNetworkOnline(): Promise<boolean> {
try { try {
// Use a simple fetch without HEAD method to avoid CORS issues // Use a simple fetch without HEAD method to avoid CORS issues
await fetch(endpoint, { await fetch(endpoint, {
method: 'GET', method: "GET",
cache: 'no-cache', cache: "no-cache",
signal: AbortSignal.timeout(3000), signal: AbortSignal.timeout(3000),
mode: 'no-cors' // Use no-cors mode to avoid CORS issues mode: "no-cors", // Use no-cors mode to avoid CORS issues
}); });
// With no-cors mode, we can't check response.ok, so we assume success if no error // With no-cors mode, we can't check response.ok, so we assume success if no error
return true; return true;
} catch (error) { } catch (error) {
console.debug(`[network_detection.ts] Failed to reach ${endpoint}:`, error); console.debug(
`[network_detection.ts] Failed to reach ${endpoint}:`,
error,
);
continue; continue;
} }
} }
console.debug('[network_detection.ts] All network endpoints failed'); console.debug("[network_detection.ts] All network endpoints failed");
return false; return false;
} }
@ -50,25 +53,30 @@ export async function isNetworkOnline(): Promise<boolean> {
*/ */
export async function testNetworkSpeed(): Promise<number> { export async function testNetworkSpeed(): Promise<number> {
const startTime = performance.now(); const startTime = performance.now();
for (const endpoint of NETWORK_ENDPOINTS) { for (const endpoint of NETWORK_ENDPOINTS) {
try { try {
await fetch(endpoint, { await fetch(endpoint, {
method: 'GET', method: "GET",
cache: 'no-cache', cache: "no-cache",
signal: AbortSignal.timeout(5000), signal: AbortSignal.timeout(5000),
mode: 'no-cors' // Use no-cors mode to avoid CORS issues mode: "no-cors", // Use no-cors mode to avoid CORS issues
}); });
const endTime = performance.now(); const endTime = performance.now();
return endTime - startTime; return endTime - startTime;
} catch (error) { } catch (error) {
console.debug(`[network_detection.ts] Speed test failed for ${endpoint}:`, error); console.debug(
`[network_detection.ts] Speed test failed for ${endpoint}:`,
error,
);
continue; continue;
} }
} }
console.debug('[network_detection.ts] Network speed test failed for all endpoints'); console.debug(
"[network_detection.ts] Network speed test failed for all endpoints",
);
return Infinity; // Very slow if it fails return Infinity; // Very slow if it fails
} }
@ -78,21 +86,25 @@ export async function testNetworkSpeed(): Promise<number> {
*/ */
export async function detectNetworkCondition(): Promise<NetworkCondition> { export async function detectNetworkCondition(): Promise<NetworkCondition> {
const isOnline = await isNetworkOnline(); const isOnline = await isNetworkOnline();
if (!isOnline) { if (!isOnline) {
console.debug('[network_detection.ts] Network condition: OFFLINE'); console.debug("[network_detection.ts] Network condition: OFFLINE");
return NetworkCondition.OFFLINE; return NetworkCondition.OFFLINE;
} }
const speed = await testNetworkSpeed(); const speed = await testNetworkSpeed();
// Consider network slow if response time > 2000ms // Consider network slow if response time > 2000ms
if (speed > 2000) { if (speed > 2000) {
console.debug(`[network_detection.ts] Network condition: SLOW (${speed.toFixed(0)}ms)`); console.debug(
`[network_detection.ts] Network condition: SLOW (${speed.toFixed(0)}ms)`,
);
return NetworkCondition.SLOW; return NetworkCondition.SLOW;
} }
console.debug(`[network_detection.ts] Network condition: ONLINE (${speed.toFixed(0)}ms)`); console.debug(
`[network_detection.ts] Network condition: ONLINE (${speed.toFixed(0)}ms)`,
);
return NetworkCondition.ONLINE; return NetworkCondition.ONLINE;
} }
@ -108,39 +120,49 @@ export function getRelaySetForNetworkCondition(
networkCondition: NetworkCondition, networkCondition: NetworkCondition,
discoveredLocalRelays: string[], discoveredLocalRelays: string[],
lowbandwidthRelays: string[], lowbandwidthRelays: string[],
fullRelaySet: { inboxRelays: string[]; outboxRelays: string[] } fullRelaySet: { inboxRelays: string[]; outboxRelays: string[] },
): { inboxRelays: string[]; outboxRelays: string[] } { ): { inboxRelays: string[]; outboxRelays: string[] } {
switch (networkCondition) { switch (networkCondition) {
case NetworkCondition.OFFLINE: case NetworkCondition.OFFLINE:
// When offline, use local relays if available, otherwise rely on cache // When offline, use local relays if available, otherwise rely on cache
// This will be improved when IndexedDB local relay is implemented // This will be improved when IndexedDB local relay is implemented
if (discoveredLocalRelays.length > 0) { if (discoveredLocalRelays.length > 0) {
console.debug('[network_detection.ts] Using local relays (offline)'); console.debug("[network_detection.ts] Using local relays (offline)");
return { return {
inboxRelays: discoveredLocalRelays, inboxRelays: discoveredLocalRelays,
outboxRelays: discoveredLocalRelays outboxRelays: discoveredLocalRelays,
}; };
} else { } else {
console.debug('[network_detection.ts] No local relays available, will rely on cache (offline)'); console.debug(
"[network_detection.ts] No local relays available, will rely on cache (offline)",
);
return { return {
inboxRelays: [], inboxRelays: [],
outboxRelays: [] outboxRelays: [],
}; };
} }
case NetworkCondition.SLOW: { case NetworkCondition.SLOW: {
// Local relays + low bandwidth relays when slow (deduplicated) // Local relays + low bandwidth relays when slow (deduplicated)
console.debug('[network_detection.ts] Using local + low bandwidth relays (slow network)'); console.debug(
const slowInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]); "[network_detection.ts] Using local + low bandwidth relays (slow network)",
const slowOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]); );
const slowInboxRelays = deduplicateRelayUrls([
...discoveredLocalRelays,
...lowbandwidthRelays,
]);
const slowOutboxRelays = deduplicateRelayUrls([
...discoveredLocalRelays,
...lowbandwidthRelays,
]);
return { return {
inboxRelays: slowInboxRelays, inboxRelays: slowInboxRelays,
outboxRelays: slowOutboxRelays outboxRelays: slowOutboxRelays,
}; };
} }
case NetworkCondition.ONLINE: case NetworkCondition.ONLINE:
default: default:
// Full relay set when online // Full relay set when online
console.debug('[network_detection.ts] Using full relay set (online)'); console.debug("[network_detection.ts] Using full relay set (online)");
return fullRelaySet; return fullRelaySet;
} }
} }
@ -161,14 +183,16 @@ export function startNetworkMonitoring(
const checkNetwork = async () => { const checkNetwork = async () => {
try { try {
const currentCondition = await detectNetworkCondition(); const currentCondition = await detectNetworkCondition();
if (currentCondition !== lastCondition) { if (currentCondition !== lastCondition) {
console.debug(`[network_detection.ts] Network condition changed: ${lastCondition} -> ${currentCondition}`); console.debug(
`[network_detection.ts] Network condition changed: ${lastCondition} -> ${currentCondition}`,
);
lastCondition = currentCondition; lastCondition = currentCondition;
onNetworkChange(currentCondition); onNetworkChange(currentCondition);
} }
} catch (error) { } catch (error) {
console.warn('[network_detection.ts] Network monitoring error:', error); console.warn("[network_detection.ts] Network monitoring error:", error);
} }
}; };
@ -185,4 +209,4 @@ export function startNetworkMonitoring(
intervalId = null; intervalId = null;
} }
}; };
} }

41
src/lib/utils/nostrEventService.ts

@ -1,11 +1,11 @@
import { nip19 } from "nostr-tools"; import { nip19 } from "nostr-tools";
import { getEventHash, signEvent, prefixNostrAddresses } from "./nostrUtils.ts"; import { getEventHash, prefixNostrAddresses, signEvent } from "./nostrUtils.ts";
import { get } from "svelte/store"; import { get } from "svelte/store";
import { goto } from "$app/navigation"; import { goto } from "$app/navigation";
import { EVENT_KINDS, TIME_CONSTANTS } from "./search_constants.ts"; import { EVENT_KINDS, TIME_CONSTANTS } from "./search_constants.ts";
import { EXPIRATION_DURATION } from "../consts.ts"; import { EXPIRATION_DURATION } from "../consts.ts";
import { ndkInstance } from "../ndk.ts"; import { ndkInstance } from "../ndk.ts";
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; import { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk";
export interface RootEventInfo { export interface RootEventInfo {
rootId: string; rootId: string;
@ -96,21 +96,21 @@ export function extractRootEventInfo(parent: NDKEvent): RootEventInfo {
rootInfo.rootId = rootE[1]; rootInfo.rootId = rootE[1];
rootInfo.rootRelay = getRelayString(rootE[2]); rootInfo.rootRelay = getRelayString(rootE[2]);
rootInfo.rootPubkey = getPubkeyString(rootE[3] || rootInfo.rootPubkey); rootInfo.rootPubkey = getPubkeyString(rootE[3] || rootInfo.rootPubkey);
rootInfo.rootKind = rootInfo.rootKind = Number(getTagValue(parent.tags, "K")) ||
Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind; rootInfo.rootKind;
} else if (rootA) { } else if (rootA) {
rootInfo.rootAddress = rootA[1]; rootInfo.rootAddress = rootA[1];
rootInfo.rootRelay = getRelayString(rootA[2]); rootInfo.rootRelay = getRelayString(rootA[2]);
rootInfo.rootPubkey = getPubkeyString( rootInfo.rootPubkey = getPubkeyString(
getTagValue(parent.tags, "P") || rootInfo.rootPubkey, getTagValue(parent.tags, "P") || rootInfo.rootPubkey,
); );
rootInfo.rootKind = rootInfo.rootKind = Number(getTagValue(parent.tags, "K")) ||
Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind; rootInfo.rootKind;
} else if (rootI) { } else if (rootI) {
rootInfo.rootIValue = rootI[1]; rootInfo.rootIValue = rootI[1];
rootInfo.rootIRelay = getRelayString(rootI[2]); rootInfo.rootIRelay = getRelayString(rootI[2]);
rootInfo.rootKind = rootInfo.rootKind = Number(getTagValue(parent.tags, "K")) ||
Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind; rootInfo.rootKind;
} }
return rootInfo; return rootInfo;
@ -224,7 +224,8 @@ export function buildReplyTags(
if (isParentReplaceable) { if (isParentReplaceable) {
const dTag = getTagValue(parent.tags || [], "d"); const dTag = getTagValue(parent.tags || [], "d");
if (dTag) { if (dTag) {
const parentAddress = `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`; const parentAddress =
`${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`;
addTags(tags, createTag("a", parentAddress, "", "root")); addTags(tags, createTag("a", parentAddress, "", "root"));
} }
} }
@ -233,7 +234,8 @@ export function buildReplyTags(
if (isParentReplaceable) { if (isParentReplaceable) {
const dTag = getTagValue(parent.tags || [], "d"); const dTag = getTagValue(parent.tags || [], "d");
if (dTag) { if (dTag) {
const parentAddress = `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`; const parentAddress =
`${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`;
if (isReplyToComment) { if (isReplyToComment) {
// Root scope (uppercase) - use the original article // Root scope (uppercase) - use the original article
@ -317,14 +319,16 @@ export async function createSignedEvent(
pubkey: string, pubkey: string,
kind: number, kind: number,
tags: string[][], tags: string[][],
// deno-lint-ignore no-explicit-any // deno-lint-ignore no-explicit-any
): Promise<{ id: string; sig: string; event: any }> { ): Promise<{ id: string; sig: string; event: any }> {
const prefixedContent = prefixNostrAddresses(content); const prefixedContent = prefixNostrAddresses(content);
// Add expiration tag for kind 24 events (NIP-40) // Add expiration tag for kind 24 events (NIP-40)
const finalTags = [...tags]; const finalTags = [...tags];
if (kind === 24) { if (kind === 24) {
const expirationTimestamp = Math.floor(Date.now() / TIME_CONSTANTS.UNIX_TIMESTAMP_FACTOR) + EXPIRATION_DURATION; const expirationTimestamp =
Math.floor(Date.now() / TIME_CONSTANTS.UNIX_TIMESTAMP_FACTOR) +
EXPIRATION_DURATION;
finalTags.push(["expiration", String(expirationTimestamp)]); finalTags.push(["expiration", String(expirationTimestamp)]);
} }
@ -344,7 +348,10 @@ export async function createSignedEvent(
}; };
let sig, id; let sig, id;
if (typeof window !== "undefined" && globalThis.nostr && globalThis.nostr.signEvent) { if (
typeof window !== "undefined" && globalThis.nostr &&
globalThis.nostr.signEvent
) {
const signed = await globalThis.nostr.signEvent(eventToSign); const signed = await globalThis.nostr.signEvent(eventToSign);
sig = signed.sig as string; sig = signed.sig as string;
id = "id" in signed ? (signed.id as string) : getEventHash(eventToSign); id = "id" in signed ? (signed.id as string) : getEventHash(eventToSign);
@ -387,7 +394,7 @@ export async function publishEvent(
try { try {
// If event is a plain object, create an NDKEvent from it // If event is a plain object, create an NDKEvent from it
let ndkEvent: NDKEvent; let ndkEvent: NDKEvent;
if (event.publish && typeof event.publish === 'function') { if (event.publish && typeof event.publish === "function") {
// It's already an NDKEvent // It's already an NDKEvent
ndkEvent = event; ndkEvent = event;
} else { } else {
@ -397,15 +404,15 @@ export async function publishEvent(
// Publish with timeout // Publish with timeout
await ndkEvent.publish(relaySet).withTimeout(5000); await ndkEvent.publish(relaySet).withTimeout(5000);
// For now, assume all relays were successful // For now, assume all relays were successful
// In a more sophisticated implementation, you'd track individual relay responses // In a more sophisticated implementation, you'd track individual relay responses
successfulRelays.push(...relayUrls); successfulRelays.push(...relayUrls);
console.debug("[nostrEventService] Published event successfully:", { console.debug("[nostrEventService] Published event successfully:", {
eventId: ndkEvent.id, eventId: ndkEvent.id,
relayCount: relayUrls.length, relayCount: relayUrls.length,
successfulRelays successfulRelays,
}); });
} catch (error) { } catch (error) {
console.error("[nostrEventService] Failed to publish event:", error); console.error("[nostrEventService] Failed to publish event:", error);

124
src/lib/utils/nostrUtils.ts

@ -5,7 +5,12 @@ import { npubCache } from "./npubCache.ts";
import NDK, { NDKEvent, NDKRelaySet, NDKUser } from "@nostr-dev-kit/ndk"; import NDK, { NDKEvent, NDKRelaySet, NDKUser } from "@nostr-dev-kit/ndk";
import type { NDKKind, NostrEvent } from "@nostr-dev-kit/ndk"; import type { NDKKind, NostrEvent } from "@nostr-dev-kit/ndk";
import type { Filter } from "./search_types.ts"; import type { Filter } from "./search_types.ts";
import { communityRelays, secondaryRelays, searchRelays, anonymousRelays } from "../consts.ts"; import {
anonymousRelays,
communityRelays,
searchRelays,
secondaryRelays,
} from "../consts.ts";
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts"; import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
import { NDKRelaySet as NDKRelaySetFromNDK } from "@nostr-dev-kit/ndk"; import { NDKRelaySet as NDKRelaySetFromNDK } from "@nostr-dev-kit/ndk";
import { sha256 } from "@noble/hashes/sha2.js"; import { sha256 } from "@noble/hashes/sha2.js";
@ -55,7 +60,7 @@ function escapeHtml(text: string): string {
* Escape regex special characters * Escape regex special characters
*/ */
function escapeRegExp(string: string): string { function escapeRegExp(string: string): string {
return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
} }
/** /**
@ -68,7 +73,12 @@ export async function getUserMetadata(
// Remove nostr: prefix if present // Remove nostr: prefix if present
const cleanId = identifier.replace(/^nostr:/, ""); const cleanId = identifier.replace(/^nostr:/, "");
console.log("getUserMetadata called with identifier:", identifier, "force:", force); console.log(
"getUserMetadata called with identifier:",
identifier,
"force:",
force,
);
if (!force && npubCache.has(cleanId)) { if (!force && npubCache.has(cleanId)) {
const cached = npubCache.get(cleanId)!; const cached = npubCache.get(cleanId)!;
@ -100,7 +110,10 @@ export async function getUserMetadata(
} else if (decoded.type === "nprofile") { } else if (decoded.type === "nprofile") {
pubkey = decoded.data.pubkey; pubkey = decoded.data.pubkey;
} else { } else {
console.warn("getUserMetadata: Unsupported identifier type:", decoded.type); console.warn(
"getUserMetadata: Unsupported identifier type:",
decoded.type,
);
npubCache.set(cleanId, fallback); npubCache.set(cleanId, fallback);
return fallback; return fallback;
} }
@ -111,13 +124,12 @@ export async function getUserMetadata(
kinds: [0], kinds: [0],
authors: [pubkey], authors: [pubkey],
}); });
console.log("getUserMetadata: Profile event found:", profileEvent); console.log("getUserMetadata: Profile event found:", profileEvent);
const profile = const profile = profileEvent && profileEvent.content
profileEvent && profileEvent.content ? JSON.parse(profileEvent.content)
? JSON.parse(profileEvent.content) : null;
: null;
console.log("getUserMetadata: Parsed profile:", profile); console.log("getUserMetadata: Parsed profile:", profile);
@ -199,7 +211,7 @@ export async function createProfileLinkWithVerification(
}; };
const allRelays = [ const allRelays = [
...searchRelays, // Include search relays for profile searches ...searchRelays, // Include search relays for profile searches
...communityRelays, ...communityRelays,
...userRelays, ...userRelays,
...secondaryRelays, ...secondaryRelays,
@ -223,8 +235,7 @@ export async function createProfileLinkWithVerification(
const defaultText = `${cleanId.slice(0, 8)}...${cleanId.slice(-4)}`; const defaultText = `${cleanId.slice(0, 8)}...${cleanId.slice(-4)}`;
const escapedText = escapeHtml(displayText || defaultText); const escapedText = escapeHtml(displayText || defaultText);
const displayIdentifier = const displayIdentifier = profile?.displayName ??
profile?.displayName ??
profile?.display_name ?? profile?.display_name ??
profile?.name ?? profile?.name ??
escapedText; escapedText;
@ -287,7 +298,10 @@ export async function processNostrIdentifiers(
const displayText = metadata.displayName || metadata.name; const displayText = metadata.displayName || metadata.name;
const link = createProfileLink(identifier, displayText); const link = createProfileLink(identifier, displayText);
// Replace all occurrences of this exact match // Replace all occurrences of this exact match
processedContent = processedContent.replace(new RegExp(escapeRegExp(fullMatch), 'g'), link); processedContent = processedContent.replace(
new RegExp(escapeRegExp(fullMatch), "g"),
link,
);
} }
// Process notes (nevent, note, naddr) // Process notes (nevent, note, naddr)
@ -304,7 +318,10 @@ export async function processNostrIdentifiers(
} }
const link = createNoteLink(identifier); const link = createNoteLink(identifier);
// Replace all occurrences of this exact match // Replace all occurrences of this exact match
processedContent = processedContent.replace(new RegExp(escapeRegExp(fullMatch), 'g'), link); processedContent = processedContent.replace(
new RegExp(escapeRegExp(fullMatch), "g"),
link,
);
} }
return processedContent; return processedContent;
@ -409,7 +426,7 @@ export function withTimeout<T>(
return Promise.race([ return Promise.race([
promise, promise,
new Promise<T>((_, reject) => new Promise<T>((_, reject) =>
setTimeout(() => reject(new Error("Timeout")), timeoutMs), setTimeout(() => reject(new Error("Timeout")), timeoutMs)
), ),
]); ]);
} }
@ -420,7 +437,7 @@ export function withTimeout<T>(
return Promise.race([ return Promise.race([
promise, promise,
new Promise<T>((_, reject) => new Promise<T>((_, reject) =>
setTimeout(() => reject(new Error("Timeout")), timeoutMs), setTimeout(() => reject(new Error("Timeout")), timeoutMs)
), ),
]); ]);
} }
@ -455,40 +472,54 @@ export async function fetchEventWithFallback(
): Promise<NDKEvent | null> { ): Promise<NDKEvent | null> {
// AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive event discovery // AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive event discovery
// This ensures we don't miss events that might be on any available relay // This ensures we don't miss events that might be on any available relay
// Get all relays from NDK pool first (most comprehensive) // Get all relays from NDK pool first (most comprehensive)
const poolRelays = Array.from(ndk.pool.relays.values()).map((r: any) => r.url); const poolRelays = Array.from(ndk.pool.relays.values()).map((r: any) =>
r.url
);
const inboxRelays = get(activeInboxRelays); const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays); const outboxRelays = get(activeOutboxRelays);
// Combine all available relays, prioritizing pool relays // Combine all available relays, prioritizing pool relays
let allRelays = [...new Set([...poolRelays, ...inboxRelays, ...outboxRelays])]; let allRelays = [
...new Set([...poolRelays, ...inboxRelays, ...outboxRelays]),
];
console.log("fetchEventWithFallback: Using pool relays:", poolRelays); console.log("fetchEventWithFallback: Using pool relays:", poolRelays);
console.log("fetchEventWithFallback: Using inbox relays:", inboxRelays); console.log("fetchEventWithFallback: Using inbox relays:", inboxRelays);
console.log("fetchEventWithFallback: Using outbox relays:", outboxRelays); console.log("fetchEventWithFallback: Using outbox relays:", outboxRelays);
console.log("fetchEventWithFallback: Total unique relays:", allRelays.length); console.log("fetchEventWithFallback: Total unique relays:", allRelays.length);
// Check if we have any relays available // Check if we have any relays available
if (allRelays.length === 0) { if (allRelays.length === 0) {
console.warn("fetchEventWithFallback: No relays available for event fetch, using fallback relays"); console.warn(
"fetchEventWithFallback: No relays available for event fetch, using fallback relays",
);
// Use fallback relays when no relays are available // Use fallback relays when no relays are available
allRelays = [...secondaryRelays, ...searchRelays, ...anonymousRelays]; allRelays = [...secondaryRelays, ...searchRelays, ...anonymousRelays];
console.log("fetchEventWithFallback: Using fallback relays:", allRelays); console.log("fetchEventWithFallback: Using fallback relays:", allRelays);
} }
// Create relay set from all available relays // Create relay set from all available relays
const relaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk); const relaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
try { try {
if (relaySet.relays.size === 0) { if (relaySet.relays.size === 0) {
console.warn("fetchEventWithFallback: No relays in relay set for event fetch"); console.warn(
"fetchEventWithFallback: No relays in relay set for event fetch",
);
return null; return null;
} }
console.log("fetchEventWithFallback: Relay set size:", relaySet.relays.size); console.log(
"fetchEventWithFallback: Relay set size:",
relaySet.relays.size,
);
console.log("fetchEventWithFallback: Filter:", filterOrId); console.log("fetchEventWithFallback: Filter:", filterOrId);
console.log("fetchEventWithFallback: Relay URLs:", Array.from(relaySet.relays).map((r) => r.url)); console.log(
"fetchEventWithFallback: Relay URLs:",
Array.from(relaySet.relays).map((r) => r.url),
);
let found: NDKEvent | null = null; let found: NDKEvent | null = null;
@ -500,8 +531,9 @@ export async function fetchEventWithFallback(
.fetchEvent({ ids: [filterOrId] }, undefined, relaySet) .fetchEvent({ ids: [filterOrId] }, undefined, relaySet)
.withTimeout(timeoutMs); .withTimeout(timeoutMs);
} else { } else {
const filter = const filter = typeof filterOrId === "string"
typeof filterOrId === "string" ? { ids: [filterOrId] } : filterOrId; ? { ids: [filterOrId] }
: filterOrId;
const results = await ndk const results = await ndk
.fetchEvents(filter, undefined, relaySet) .fetchEvents(filter, undefined, relaySet)
.withTimeout(timeoutMs); .withTimeout(timeoutMs);
@ -512,7 +544,9 @@ export async function fetchEventWithFallback(
if (!found) { if (!found) {
const timeoutSeconds = timeoutMs / 1000; const timeoutSeconds = timeoutMs / 1000;
const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(", "); const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(
", ",
);
console.warn( console.warn(
`fetchEventWithFallback: Event not found after ${timeoutSeconds}s timeout. Tried inbox relays: ${relayUrls}. Some relays may be offline or slow.`, `fetchEventWithFallback: Event not found after ${timeoutSeconds}s timeout. Tried inbox relays: ${relayUrls}. Some relays may be offline or slow.`,
); );
@ -523,14 +557,19 @@ export async function fetchEventWithFallback(
// Always wrap as NDKEvent // Always wrap as NDKEvent
return found instanceof NDKEvent ? found : new NDKEvent(ndk, found); return found instanceof NDKEvent ? found : new NDKEvent(ndk, found);
} catch (err) { } catch (err) {
if (err instanceof Error && err.message === 'Timeout') { if (err instanceof Error && err.message === "Timeout") {
const timeoutSeconds = timeoutMs / 1000; const timeoutSeconds = timeoutMs / 1000;
const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(", "); const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(
", ",
);
console.warn( console.warn(
`fetchEventWithFallback: Event fetch timed out after ${timeoutSeconds}s. Tried inbox relays: ${relayUrls}. Some relays may be offline or slow.`, `fetchEventWithFallback: Event fetch timed out after ${timeoutSeconds}s. Tried inbox relays: ${relayUrls}. Some relays may be offline or slow.`,
); );
} else { } else {
console.error("fetchEventWithFallback: Error in fetchEventWithFallback:", err); console.error(
"fetchEventWithFallback: Error in fetchEventWithFallback:",
err,
);
} }
return null; return null;
} }
@ -545,20 +584,22 @@ export function toNpub(pubkey: string | undefined): string | null {
try { try {
// If it's already an npub, return it // If it's already an npub, return it
if (pubkey.startsWith("npub")) return pubkey; if (pubkey.startsWith("npub")) return pubkey;
// If it's a hex pubkey, convert to npub // If it's a hex pubkey, convert to npub
if (new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, "i").test(pubkey)) { if (new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, "i").test(pubkey)) {
return nip19.npubEncode(pubkey); return nip19.npubEncode(pubkey);
} }
// If it's an nprofile, decode and extract npub // If it's an nprofile, decode and extract npub
if (pubkey.startsWith("nprofile")) { if (pubkey.startsWith("nprofile")) {
const decoded = nip19.decode(pubkey); const decoded = nip19.decode(pubkey);
if (decoded.type === 'nprofile') { if (decoded.type === "nprofile") {
return decoded.data.pubkey ? nip19.npubEncode(decoded.data.pubkey) : null; return decoded.data.pubkey
? nip19.npubEncode(decoded.data.pubkey)
: null;
} }
} }
return null; return null;
} catch { } catch {
return null; return null;
@ -573,7 +614,10 @@ export function createRelaySetFromUrls(relayUrls: string[], ndk: NDK) {
return NDKRelaySetFromNDK.fromRelayUrls(relayUrls, ndk); return NDKRelaySetFromNDK.fromRelayUrls(relayUrls, ndk);
} }
export function createNDKEvent(ndk: NDK, rawEvent: NDKEvent | NostrEvent | undefined) { export function createNDKEvent(
ndk: NDK,
rawEvent: NDKEvent | NostrEvent | undefined,
) {
return new NDKEvent(ndk, rawEvent); return new NDKEvent(ndk, rawEvent);
} }

40
src/lib/utils/nostr_identifiers.ts

@ -1,4 +1,4 @@
import { VALIDATION } from './search_constants'; import { VALIDATION } from "./search_constants";
/** /**
* Nostr identifier types * Nostr identifier types
@ -22,7 +22,7 @@ export interface ParsedCoordinate {
* @returns True if it's a valid hex event ID * @returns True if it's a valid hex event ID
*/ */
export function isEventId(id: string): id is NostrEventId { export function isEventId(id: string): id is NostrEventId {
return new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, 'i').test(id); return new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, "i").test(id);
} }
/** /**
@ -30,22 +30,24 @@ export function isEventId(id: string): id is NostrEventId {
* @param coordinate The string to check * @param coordinate The string to check
* @returns True if it's a valid coordinate * @returns True if it's a valid coordinate
*/ */
export function isCoordinate(coordinate: string): coordinate is NostrCoordinate { export function isCoordinate(
const parts = coordinate.split(':'); coordinate: string,
): coordinate is NostrCoordinate {
const parts = coordinate.split(":");
if (parts.length < 3) return false; if (parts.length < 3) return false;
const [kindStr, pubkey, ...dTagParts] = parts; const [kindStr, pubkey, ...dTagParts] = parts;
// Check if kind is a valid number // Check if kind is a valid number
const kind = parseInt(kindStr, 10); const kind = parseInt(kindStr, 10);
if (isNaN(kind) || kind < 0) return false; if (isNaN(kind) || kind < 0) return false;
// Check if pubkey is a valid hex string // Check if pubkey is a valid hex string
if (!isEventId(pubkey)) return false; if (!isEventId(pubkey)) return false;
// Check if d-tag exists (can contain colons) // Check if d-tag exists (can contain colons)
if (dTagParts.length === 0) return false; if (dTagParts.length === 0) return false;
return true; return true;
} }
@ -56,14 +58,14 @@ export function isCoordinate(coordinate: string): coordinate is NostrCoordinate
*/ */
export function parseCoordinate(coordinate: string): ParsedCoordinate | null { export function parseCoordinate(coordinate: string): ParsedCoordinate | null {
if (!isCoordinate(coordinate)) return null; if (!isCoordinate(coordinate)) return null;
const parts = coordinate.split(':'); const parts = coordinate.split(":");
const [kindStr, pubkey, ...dTagParts] = parts; const [kindStr, pubkey, ...dTagParts] = parts;
return { return {
kind: parseInt(kindStr, 10), kind: parseInt(kindStr, 10),
pubkey, pubkey,
dTag: dTagParts.join(':') // Rejoin in case d-tag contains colons dTag: dTagParts.join(":"), // Rejoin in case d-tag contains colons
}; };
} }
@ -74,7 +76,11 @@ export function parseCoordinate(coordinate: string): ParsedCoordinate | null {
* @param dTag The d-tag value * @param dTag The d-tag value
* @returns The coordinate string * @returns The coordinate string
*/ */
export function createCoordinate(kind: number, pubkey: string, dTag: string): NostrCoordinate { export function createCoordinate(
kind: number,
pubkey: string,
dTag: string,
): NostrCoordinate {
return `${kind}:${pubkey}:${dTag}`; return `${kind}:${pubkey}:${dTag}`;
} }
@ -83,6 +89,8 @@ export function createCoordinate(kind: number, pubkey: string, dTag: string): No
* @param identifier The string to check * @param identifier The string to check
* @returns True if it's a valid Nostr identifier * @returns True if it's a valid Nostr identifier
*/ */
export function isNostrIdentifier(identifier: string): identifier is NostrIdentifier { export function isNostrIdentifier(
identifier: string,
): identifier is NostrIdentifier {
return isEventId(identifier) || isCoordinate(identifier); return isEventId(identifier) || isCoordinate(identifier);
} }

306
src/lib/utils/notification_utils.ts

@ -1,306 +0,0 @@
import type { NDKEvent } from "$lib/utils/nostrUtils";
import { getUserMetadata, NDKRelaySetFromNDK, toNpub } from "$lib/utils/nostrUtils";
import { get } from "svelte/store";
import { ndkInstance } from "$lib/ndk";
import { searchRelays } from "$lib/consts";
import { userStore, type UserState } from "$lib/stores/userStore";
import { buildCompleteRelaySet } from "$lib/utils/relay_management";
import { neventEncode } from "$lib/utils";
import { nip19 } from "nostr-tools";
import type NDK from "@nostr-dev-kit/ndk";
import { parseEmbeddedMarkup } from "./markup/embeddedMarkupParser";
// AI-NOTE: Notification-specific utility functions that don't exist elsewhere
/**
 * Shortens `content` to at most `maxLength` characters, appending "..."
 * when anything was cut. Content already within the limit is returned
 * unchanged.
 */
export function truncateContent(content: string, maxLength: number = 300): string {
  return content.length > maxLength
    ? content.slice(0, maxLength) + "..."
    : content;
}
/**
 * Truncates already-rendered HTML to roughly `maxLength` characters.
 *
 * Two concerns beyond plain slicing:
 * - Quote boxes (divs wired to the 'jump-to-message' event) are lifted out
 *   before measuring and re-inserted afterwards so they are never cut.
 * - When the cut would land inside an HTML tag, the partial tag is dropped
 *   instead of emitted.
 *
 * Content already within the limit is returned untouched.
 */
export function truncateRenderedContent(renderedHtml: string, maxLength: number = 300): string {
  if (renderedHtml.length <= maxLength) return renderedHtml;

  return renderedHtml.includes('jump-to-message')
    ? truncateKeepingQuoteBoxes(renderedHtml, maxLength)
    : truncateMarkup(renderedHtml, maxLength);
}

// Pulls quote boxes out via a placeholder, truncates the remaining text,
// then splices the boxes back in so each one survives truncation intact.
function truncateKeepingQuoteBoxes(renderedHtml: string, maxLength: number): string {
  const quoteBoxPattern = /<div class="block w-fit my-2 px-3 py-2 bg-gray-200[^>]*onclick="window\.dispatchEvent\(new CustomEvent\('jump-to-message'[^>]*>[^<]*<\/div>/g;
  const quoteBoxes = renderedHtml.match(quoteBoxPattern) || [];
  let textOnly = renderedHtml.replace(quoteBoxPattern, '|||QUOTEBOX|||');

  if (textOnly.length > maxLength) {
    // Budget what remains after accounting for the boxes themselves, but
    // always keep at least 50 characters of surrounding text.
    const availableLength = maxLength - quoteBoxes.join('').length;
    const keep = availableLength > 50 ? availableLength : 50;
    textOnly = textOnly.slice(0, keep) + "...";
  }

  // String-typed replace() swaps one placeholder per call, in order.
  let result = textOnly;
  for (const box of quoteBoxes) {
    result = result.replace('|||QUOTEBOX|||', box);
  }
  return result;
}

// Truncates HTML-ish text, backing up over a tag that the cut left open.
function truncateMarkup(renderedHtml: string, maxLength: number): string {
  if (!renderedHtml.includes('<')) {
    return renderedHtml.slice(0, maxLength) + "...";
  }
  const truncated = renderedHtml.slice(0, maxLength);
  const lastTagStart = truncated.lastIndexOf('<');
  const lastTagEnd = truncated.lastIndexOf('>');
  // An unmatched '<' after the final '>' means we sliced mid-tag.
  return lastTagStart > lastTagEnd
    ? renderedHtml.slice(0, lastTagStart) + "..."
    : truncated + "...";
}
/**
 * Renders raw note content (including embedded nostr references) to HTML
 * via the embedded-markup parser. Empty or falsy input yields "".
 */
export async function parseContent(content: string): Promise<string> {
  return content ? parseEmbeddedMarkup(content, 0) : "";
}
/**
 * Renders the content of a repost event as an embedded-event card.
 *
 * Repost events carry the original event serialized as JSON in `content`;
 * this parses that JSON, runs the inner content through the embedded-markup
 * parser, and wraps the result in a styled HTML card showing kind, author,
 * and date. If the content is not valid JSON, the raw content is rendered
 * through the embedded-markup parser instead.
 *
 * @param content Raw `content` field of the repost event.
 * @returns HTML string ("" for empty input).
 *
 * NOTE(review): `originalKind`, `shortAuthor`, and `originalEvent.id` are
 * interpolated into HTML without escaping. They come from a Nostr event,
 * i.e. untrusted input — presumably safe for the numeric/hex shapes
 * expected here, but upstream sanitization should be confirmed.
 */
export async function parseRepostContent(content: string): Promise<string> {
  if (!content) return "";

  try {
    // Try to parse the content as JSON (repost events contain the original event as JSON)
    const originalEvent = JSON.parse(content);

    // Extract the original event's fields, with defaults for missing values
    const originalContent = originalEvent.content || "";
    const originalAuthor = originalEvent.pubkey || "";
    const originalCreatedAt = originalEvent.created_at || 0;
    const originalKind = originalEvent.kind || 1;

    // Parse the original content with embedded markup support
    const parsedOriginalContent = await parseEmbeddedMarkup(originalContent, 0);

    // Create an embedded event display with proper structure
    // (created_at is a Unix timestamp in seconds, hence * 1000)
    const formattedDate = originalCreatedAt ? new Date(originalCreatedAt * 1000).toLocaleDateString() : "Unknown date";
    const shortAuthor = originalAuthor ? `${originalAuthor.slice(0, 8)}...${originalAuthor.slice(-4)}` : "Unknown";

    return `
      <div class="embedded-repost bg-gray-50 dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4 my-2">
        <!-- Event header -->
        <div class="flex items-center justify-between mb-3 min-w-0">
          <div class="flex items-center space-x-2 min-w-0">
            <span class="text-xs text-gray-500 dark:text-gray-400 font-mono flex-shrink-0">
              Kind ${originalKind}
            </span>
            <span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0">
              (repost)
            </span>
            <span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0"></span>
            <span class="text-xs text-gray-600 dark:text-gray-400 flex-shrink-0">Author:</span>
            <span class="text-xs text-gray-700 dark:text-gray-300 font-mono">
              ${shortAuthor}
            </span>
            <span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0"></span>
            <span class="text-xs text-gray-500 dark:text-gray-400">
              ${formattedDate}
            </span>
          </div>
          <button
            class="text-xs text-primary-600 dark:text-primary-500 hover:underline flex-shrink-0"
            onclick="window.location.href='/events?id=${originalEvent.id || 'unknown'}'"
          >
            View full event
          </button>
        </div>

        <!-- Reposted content -->
        <div class="text-sm text-gray-800 dark:text-gray-200 leading-relaxed">
          ${parsedOriginalContent}
        </div>
      </div>
    `;
  } catch (error) {
    // If JSON parsing fails, fall back to embedded markup
    console.warn("Failed to parse repost content as JSON, falling back to embedded markup:", error);
    return await parseEmbeddedMarkup(content, 0);
  }
}
/**
 * Builds the HTML for a message's quoted ("q"-tagged) event, if any.
 *
 * Resolution order: look the quoted event up in `publicMessages`, then fall
 * back to fetching it from the user's inbox/outbox relays plus the search
 * relays. When found, returns a clickable quote box that dispatches a
 * 'jump-to-message' CustomEvent; when not found, returns a link (via nevent
 * encoding) or a plain placeholder box. Returns "" when the message has no
 * "q" tag.
 *
 * @param message Event whose first "q" tag is resolved.
 * @param publicMessages Locally available events searched before any relay fetch.
 * @returns HTML string for the quote box, or "".
 */
export async function renderQuotedContent(message: NDKEvent, publicMessages: NDKEvent[]): Promise<string> {
  const qTags = message.getMatchingTags("q");
  if (qTags.length === 0) return "";

  // Only the first "q" tag is rendered; additional quotes are ignored.
  const qTag = qTags[0];
  const eventId = qTag[1];

  if (eventId) {
    // Validate eventId format (should be 64 character hex string)
    const isValidEventId = /^[a-fA-F0-9]{64}$/.test(eventId);

    // First try to find in local messages
    let quotedMessage = publicMessages.find(msg => msg.id === eventId);

    // If not found locally, fetch from relays
    if (!quotedMessage) {
      try {
        const ndk: NDK | undefined = get(ndkInstance);
        if (ndk) {
          const userStoreValue: UserState = get(userStore);
          // Signed-in users get their personal relay set; anonymous users fall back to defaults.
          const user = userStoreValue.signedIn && userStoreValue.pubkey ? ndk.getUser({ pubkey: userStoreValue.pubkey }) : null;
          const relaySet = await buildCompleteRelaySet(ndk, user);
          const allRelays = [...relaySet.inboxRelays, ...relaySet.outboxRelays, ...searchRelays];
          if (allRelays.length > 0) {
            const ndkRelaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
            const fetchedEvent = await ndk.fetchEvent({ ids: [eventId], limit: 1 }, undefined, ndkRelaySet);
            // fetchEvent yields null on miss; normalize to undefined for the checks below.
            quotedMessage = fetchedEvent || undefined;
          }
        }
      } catch (error) {
        // Best-effort: a failed fetch degrades to the not-found fallbacks below.
        console.warn(`[renderQuotedContent] Failed to fetch quoted event ${eventId}:`, error);
      }
    }

    if (quotedMessage) {
      // Cap the preview at 200 chars before rendering embedded markup.
      const quotedContent = quotedMessage.content ? quotedMessage.content.slice(0, 200) : "No content";
      const parsedContent = await parseEmbeddedMarkup(quotedContent, 0);
      return `<div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded cursor-pointer hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors text-sm text-gray-600 dark:text-gray-300" onclick="window.dispatchEvent(new CustomEvent('jump-to-message', { detail: '${eventId}' }))">${parsedContent}</div>`;
    } else {
      // Fallback to nevent link - only if eventId is valid
      if (isValidEventId) {
        try {
          const nevent = nip19.neventEncode({ id: eventId });
          return `<div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded cursor-pointer hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors text-sm text-gray-600 dark:text-gray-300" onclick="window.location.href='/events?id=${nevent}'">Quoted message not found. Click to view event ${eventId.slice(0, 8)}...</div>`;
        } catch (error) {
          console.warn(`[renderQuotedContent] Failed to encode nevent for ${eventId}:`, error);
          // Fall back to just showing the event ID without a link
          return `<div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded text-sm text-gray-600 dark:text-gray-300">Quoted message not found. Event ID: ${eventId.slice(0, 8)}...</div>`;
        }
      } else {
        // Invalid event ID format
        return `<div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded text-sm text-gray-600 dark:text-gray-300">Invalid quoted message reference</div>`;
      }
    }
  }
  return "";
}
/**
 * Maps an event's kind to the human-readable notification label shown in
 * the UI. Unrecognized kinds render as "Kind <n>".
 */
export function getNotificationType(event: NDKEvent): string {
  const labelsByKind: Record<number, string> = {
    1: "Reply",
    1111: "Custom Reply",
    9802: "Highlight",
    6: "Repost",
    16: "Generic Repost",
    24: "Public Message",
  };
  return labelsByKind[event.kind] ?? `Kind ${event.kind}`;
}
/**
 * Fetches kind-0 profile metadata for the unique authors of the given events.
 *
 * Resolution order per author: local metadata cache first, then each
 * dedicated search relay in turn, then the user's full inbox/outbox relay
 * set as a last resort. All authors are resolved concurrently; every
 * failure is logged and skipped so one bad relay or malformed profile
 * never fails the whole batch.
 *
 * @param events Events whose `pubkey` authors should be resolved.
 * @returns Map from hex pubkey to the profile fields the UI needs
 *          (name, displayName, picture). Authors with no resolvable
 *          profile are simply absent from the map.
 */
export async function fetchAuthorProfiles(events: NDKEvent[]): Promise<Map<string, { name?: string; displayName?: string; picture?: string }>> {
  const authorProfiles = new Map<string, { name?: string; displayName?: string; picture?: string }>();
  const uniquePubkeys = new Set<string>();

  // Deduplicate authors so each profile is fetched at most once.
  events.forEach(event => {
    if (event.pubkey) uniquePubkeys.add(event.pubkey);
  });

  const profilePromises = Array.from(uniquePubkeys).map(async (pubkey) => {
    try {
      const npub = toNpub(pubkey);
      if (!npub) return;

      // Try cache first (second arg false — presumably "don't force refresh"; confirm against getUserMetadata)
      let profile = await getUserMetadata(npub, false);
      if (profile && (profile.name || profile.displayName || profile.picture)) {
        authorProfiles.set(pubkey, profile);
        return;
      }

      // Try search relays, one at a time, stopping at the first hit
      for (const relay of searchRelays) {
        try {
          const ndk: NDK | undefined = get(ndkInstance);
          if (!ndk) break;
          const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], ndk);
          const profileEvent = await ndk.fetchEvent(
            { kinds: [0], authors: [pubkey] },
            undefined,
            relaySet
          );
          if (profileEvent) {
            // Kind-0 content is a JSON profile document; tolerate both
            // display_name and displayName, picture and image spellings.
            const profileData = JSON.parse(profileEvent.content);
            authorProfiles.set(pubkey, {
              name: profileData.name,
              displayName: profileData.display_name || profileData.displayName,
              picture: profileData.picture || profileData.image
            });
            return;
          }
        } catch (error) {
          console.warn(`[fetchAuthorProfiles] Failed to fetch profile from ${relay}:`, error);
        }
      }

      // Try all available relays as fallback
      try {
        const ndk: NDK | undefined = get(ndkInstance);
        if (!ndk) return;
        const userStoreValue: UserState = get(userStore);
        // Signed-in users get their personal relay set; anonymous users fall back to defaults.
        const user = userStoreValue.signedIn && userStoreValue.pubkey ? ndk.getUser({ pubkey: userStoreValue.pubkey }) : null;
        const relaySet = await buildCompleteRelaySet(ndk, user);
        const allRelays = [...relaySet.inboxRelays, ...relaySet.outboxRelays];
        if (allRelays.length > 0) {
          const ndkRelaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
          const profileEvent = await ndk.fetchEvent(
            { kinds: [0], authors: [pubkey] },
            undefined,
            ndkRelaySet
          );
          if (profileEvent) {
            const profileData = JSON.parse(profileEvent.content);
            authorProfiles.set(pubkey, {
              name: profileData.name,
              displayName: profileData.display_name || profileData.displayName,
              picture: profileData.picture || profileData.image
            });
          }
        }
      } catch (error) {
        console.warn(`[fetchAuthorProfiles] Failed to fetch profile from all relays:`, error);
      }
    } catch (error) {
      console.warn(`[fetchAuthorProfiles] Error processing profile for ${pubkey}:`, error);
    }
  });

  await Promise.all(profilePromises);
  return authorProfiles;
}

22
src/lib/utils/npubCache.ts

@ -4,7 +4,7 @@ export type NpubMetadata = NostrProfile;
class NpubCache { class NpubCache {
private cache: Record<string, NpubMetadata> = {}; private cache: Record<string, NpubMetadata> = {};
private readonly storageKey = 'alexandria_npub_cache'; private readonly storageKey = "alexandria_npub_cache";
private readonly maxAge = 24 * 60 * 60 * 1000; // 24 hours in milliseconds private readonly maxAge = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
constructor() { constructor() {
@ -13,12 +13,15 @@ class NpubCache {
private loadFromStorage(): void { private loadFromStorage(): void {
try { try {
if (typeof window !== 'undefined') { if (typeof window !== "undefined") {
const stored = localStorage.getItem(this.storageKey); const stored = localStorage.getItem(this.storageKey);
if (stored) { if (stored) {
const data = JSON.parse(stored) as Record<string, { profile: NpubMetadata; timestamp: number }>; const data = JSON.parse(stored) as Record<
string,
{ profile: NpubMetadata; timestamp: number }
>;
const now = Date.now(); const now = Date.now();
// Filter out expired entries // Filter out expired entries
for (const [key, entry] of Object.entries(data)) { for (const [key, entry] of Object.entries(data)) {
if (entry.timestamp && (now - entry.timestamp) < this.maxAge) { if (entry.timestamp && (now - entry.timestamp) < this.maxAge) {
@ -28,21 +31,24 @@ class NpubCache {
} }
} }
} catch (error) { } catch (error) {
console.warn('Failed to load npub cache from storage:', error); console.warn("Failed to load npub cache from storage:", error);
} }
} }
private saveToStorage(): void { private saveToStorage(): void {
try { try {
if (typeof window !== 'undefined') { if (typeof window !== "undefined") {
const data: Record<string, { profile: NpubMetadata; timestamp: number }> = {}; const data: Record<
string,
{ profile: NpubMetadata; timestamp: number }
> = {};
for (const [key, profile] of Object.entries(this.cache)) { for (const [key, profile] of Object.entries(this.cache)) {
data[key] = { profile, timestamp: Date.now() }; data[key] = { profile, timestamp: Date.now() };
} }
localStorage.setItem(this.storageKey, JSON.stringify(data)); localStorage.setItem(this.storageKey, JSON.stringify(data));
} }
} catch (error) { } catch (error) {
console.warn('Failed to save npub cache to storage:', error); console.warn("Failed to save npub cache to storage:", error);
} }
} }

69
src/lib/utils/profileCache.ts

@ -24,7 +24,7 @@ async function fetchProfile(pubkey: string): Promise<ProfileData | null> {
const profileEvents = await ndk.fetchEvents({ const profileEvents = await ndk.fetchEvents({
kinds: [0], kinds: [0],
authors: [pubkey], authors: [pubkey],
limit: 1 limit: 1,
}); });
if (profileEvents.size === 0) { if (profileEvents.size === 0) {
@ -33,7 +33,7 @@ async function fetchProfile(pubkey: string): Promise<ProfileData | null> {
// Get the most recent profile event // Get the most recent profile event
const profileEvent = Array.from(profileEvents)[0]; const profileEvent = Array.from(profileEvents)[0];
try { try {
const content = JSON.parse(profileEvent.content); const content = JSON.parse(profileEvent.content);
return content as ProfileData; return content as ProfileData;
@ -77,14 +77,14 @@ export async function getDisplayName(pubkey: string): Promise<string> {
* @returns Array of profile events * @returns Array of profile events
*/ */
export async function batchFetchProfiles( export async function batchFetchProfiles(
pubkeys: string[], pubkeys: string[],
onProgress?: (fetched: number, total: number) => void onProgress?: (fetched: number, total: number) => void,
): Promise<NDKEvent[]> { ): Promise<NDKEvent[]> {
const allProfileEvents: NDKEvent[] = []; const allProfileEvents: NDKEvent[] = [];
// Filter out already cached pubkeys // Filter out already cached pubkeys
const uncachedPubkeys = pubkeys.filter(pk => !profileCache.has(pk)); const uncachedPubkeys = pubkeys.filter((pk) => !profileCache.has(pk));
if (uncachedPubkeys.length === 0) { if (uncachedPubkeys.length === 0) {
if (onProgress) onProgress(pubkeys.length, pubkeys.length); if (onProgress) onProgress(pubkeys.length, pubkeys.length);
return allProfileEvents; return allProfileEvents;
@ -92,21 +92,24 @@ export async function batchFetchProfiles(
try { try {
const ndk = get(ndkInstance); const ndk = get(ndkInstance);
// Report initial progress // Report initial progress
const cachedCount = pubkeys.length - uncachedPubkeys.length; const cachedCount = pubkeys.length - uncachedPubkeys.length;
if (onProgress) onProgress(cachedCount, pubkeys.length); if (onProgress) onProgress(cachedCount, pubkeys.length);
// Batch fetch in chunks to avoid overwhelming relays // Batch fetch in chunks to avoid overwhelming relays
const CHUNK_SIZE = 50; const CHUNK_SIZE = 50;
let fetchedCount = cachedCount; let fetchedCount = cachedCount;
for (let i = 0; i < uncachedPubkeys.length; i += CHUNK_SIZE) { for (let i = 0; i < uncachedPubkeys.length; i += CHUNK_SIZE) {
const chunk = uncachedPubkeys.slice(i, Math.min(i + CHUNK_SIZE, uncachedPubkeys.length)); const chunk = uncachedPubkeys.slice(
i,
Math.min(i + CHUNK_SIZE, uncachedPubkeys.length),
);
const profileEvents = await ndk.fetchEvents({ const profileEvents = await ndk.fetchEvents({
kinds: [0], kinds: [0],
authors: chunk authors: chunk,
}); });
// Process each profile event // Process each profile event
@ -120,19 +123,19 @@ export async function batchFetchProfiles(
console.error("Failed to parse profile content:", e); console.error("Failed to parse profile content:", e);
} }
}); });
// Update progress // Update progress
if (onProgress) { if (onProgress) {
onProgress(fetchedCount, pubkeys.length); onProgress(fetchedCount, pubkeys.length);
} }
} }
// Final progress update // Final progress update
if (onProgress) onProgress(pubkeys.length, pubkeys.length); if (onProgress) onProgress(pubkeys.length, pubkeys.length);
} catch (e) { } catch (e) {
console.error("Failed to batch fetch profiles:", e); console.error("Failed to batch fetch profiles:", e);
} }
return allProfileEvents; return allProfileEvents;
} }
@ -173,29 +176,29 @@ export function clearProfileCache(): void {
*/ */
export function extractPubkeysFromEvents(events: NDKEvent[]): Set<string> { export function extractPubkeysFromEvents(events: NDKEvent[]): Set<string> {
const pubkeys = new Set<string>(); const pubkeys = new Set<string>();
events.forEach(event => { events.forEach((event) => {
// Add author pubkey // Add author pubkey
if (event.pubkey) { if (event.pubkey) {
pubkeys.add(event.pubkey); pubkeys.add(event.pubkey);
} }
// Add pubkeys from p tags // Add pubkeys from p tags
const pTags = event.getMatchingTags("p"); const pTags = event.getMatchingTags("p");
pTags.forEach(tag => { pTags.forEach((tag) => {
if (tag[1]) { if (tag[1]) {
pubkeys.add(tag[1]); pubkeys.add(tag[1]);
} }
}); });
// Extract pubkeys from content (nostr:npub1... format) // Extract pubkeys from content (nostr:npub1... format)
const npubPattern = /nostr:npub1[a-z0-9]{58}/g; const npubPattern = /nostr:npub1[a-z0-9]{58}/g;
const matches = event.content?.match(npubPattern) || []; const matches = event.content?.match(npubPattern) || [];
matches.forEach(match => { matches.forEach((match) => {
try { try {
const npub = match.replace('nostr:', ''); const npub = match.replace("nostr:", "");
const decoded = nip19.decode(npub); const decoded = nip19.decode(npub);
if (decoded.type === 'npub') { if (decoded.type === "npub") {
pubkeys.add(decoded.data as string); pubkeys.add(decoded.data as string);
} }
} catch (e) { } catch (e) {
@ -203,7 +206,7 @@ export function extractPubkeysFromEvents(events: NDKEvent[]): Set<string> {
} }
}); });
}); });
return pubkeys; return pubkeys;
} }
@ -214,17 +217,17 @@ export function extractPubkeysFromEvents(events: NDKEvent[]): Set<string> {
*/ */
export function replaceContentPubkeys(content: string): string { export function replaceContentPubkeys(content: string): string {
if (!content) return content; if (!content) return content;
// Replace nostr:npub1... references // Replace nostr:npub1... references
const npubPattern = /nostr:npub[a-z0-9]{58}/g; const npubPattern = /nostr:npub[a-z0-9]{58}/g;
let result = content; let result = content;
const matches = content.match(npubPattern) || []; const matches = content.match(npubPattern) || [];
matches.forEach(match => { matches.forEach((match) => {
try { try {
const npub = match.replace('nostr:', ''); const npub = match.replace("nostr:", "");
const decoded = nip19.decode(npub); const decoded = nip19.decode(npub);
if (decoded.type === 'npub') { if (decoded.type === "npub") {
const pubkey = decoded.data as string; const pubkey = decoded.data as string;
const displayName = getDisplayNameSync(pubkey); const displayName = getDisplayNameSync(pubkey);
result = result.replace(match, `@${displayName}`); result = result.replace(match, `@${displayName}`);
@ -233,7 +236,7 @@ export function replaceContentPubkeys(content: string): string {
// Invalid npub, leave as is // Invalid npub, leave as is
} }
}); });
return result; return result;
} }
@ -245,8 +248,8 @@ export function replaceContentPubkeys(content: string): string {
export function replacePubkeysWithDisplayNames(text: string): string { export function replacePubkeysWithDisplayNames(text: string): string {
// Match hex pubkeys (64 characters) // Match hex pubkeys (64 characters)
const pubkeyRegex = /\b[0-9a-fA-F]{64}\b/g; const pubkeyRegex = /\b[0-9a-fA-F]{64}\b/g;
return text.replace(pubkeyRegex, (match) => { return text.replace(pubkeyRegex, (match) => {
return getDisplayNameSync(match); return getDisplayNameSync(match);
}); });
} }

32
src/lib/utils/profile_search.ts

@ -1,15 +1,15 @@
import { ndkInstance, activeInboxRelays } from "../ndk.ts"; import { activeInboxRelays, ndkInstance } from "../ndk.ts";
import { getUserMetadata, getNpubFromNip05 } from "./nostrUtils.ts"; import { getNpubFromNip05, getUserMetadata } from "./nostrUtils.ts";
import NDK, { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; import NDK, { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk";
import { searchCache } from "./searchCache.ts"; import { searchCache } from "./searchCache.ts";
import { searchRelays, communityRelays, secondaryRelays } from "../consts.ts"; import { communityRelays, searchRelays, secondaryRelays } from "../consts.ts";
import { get } from "svelte/store"; import { get } from "svelte/store";
import type { NostrProfile, ProfileSearchResult } from "./search_types.ts"; import type { NostrProfile, ProfileSearchResult } from "./search_types.ts";
import { import {
createProfileFromEvent,
fieldMatches, fieldMatches,
nip05Matches, nip05Matches,
normalizeSearchTerm, normalizeSearchTerm,
createProfileFromEvent,
} from "./search_utils.ts"; } from "./search_utils.ts";
/** /**
@ -267,12 +267,12 @@ async function quickRelaySearch(
// Use search relays (optimized for profiles) + user's inbox relays + community relays // Use search relays (optimized for profiles) + user's inbox relays + community relays
const userInboxRelays = get(activeInboxRelays); const userInboxRelays = get(activeInboxRelays);
const quickRelayUrls = [ const quickRelayUrls = [
...searchRelays, // Dedicated profile search relays ...searchRelays, // Dedicated profile search relays
...userInboxRelays, // User's personal inbox relays ...userInboxRelays, // User's personal inbox relays
...communityRelays, // Community relays ...communityRelays, // Community relays
...secondaryRelays // Secondary relays as fallback ...secondaryRelays, // Secondary relays as fallback
]; ];
// Deduplicate relay URLs // Deduplicate relay URLs
const uniqueRelayUrls = [...new Set(quickRelayUrls)]; const uniqueRelayUrls = [...new Set(quickRelayUrls)];
console.log("Using relays for profile search:", uniqueRelayUrls); console.log("Using relays for profile search:", uniqueRelayUrls);
@ -312,8 +312,8 @@ async function quickRelaySearch(
try { try {
if (!event.content) return; if (!event.content) return;
const profileData = JSON.parse(event.content); const profileData = JSON.parse(event.content);
const displayName = const displayName = profileData.displayName ||
profileData.displayName || profileData.display_name || ""; profileData.display_name || "";
const display_name = profileData.display_name || ""; const display_name = profileData.display_name || "";
const name = profileData.name || ""; const name = profileData.name || "";
const nip05 = profileData.nip05 || ""; const nip05 = profileData.nip05 || "";
@ -363,7 +363,9 @@ async function quickRelaySearch(
sub.on("eose", () => { sub.on("eose", () => {
console.log( console.log(
`Relay ${index + 1} (${uniqueRelayUrls[index]}) search completed, processed ${eventCount} events, found ${foundInRelay.length} matches`, `Relay ${index + 1} (${
uniqueRelayUrls[index]
}) search completed, processed ${eventCount} events, found ${foundInRelay.length} matches`,
); );
resolve(foundInRelay); resolve(foundInRelay);
}); });
@ -371,7 +373,9 @@ async function quickRelaySearch(
// Short timeout for quick search // Short timeout for quick search
setTimeout(() => { setTimeout(() => {
console.log( console.log(
`Relay ${index + 1} (${uniqueRelayUrls[index]}) search timed out after 1.5s, processed ${eventCount} events, found ${foundInRelay.length} matches`, `Relay ${index + 1} (${
uniqueRelayUrls[index]
}) search timed out after 1.5s, processed ${eventCount} events, found ${foundInRelay.length} matches`,
); );
sub.stop(); sub.stop();
resolve(foundInRelay); resolve(foundInRelay);

7
src/lib/utils/relayDiagnostics.ts

@ -42,9 +42,8 @@ export async function testRelay(url: string): Promise<RelayDiagnostic> {
responseTime: Date.now() - startTime, responseTime: Date.now() - startTime,
}); });
} }
} };
}); });
} }
/** /**
@ -93,7 +92,9 @@ export function logRelayDiagnostics(diagnostics: RelayDiagnostic[]): void {
console.log(`✅ Working relays (${working.length}):`); console.log(`✅ Working relays (${working.length}):`);
working.forEach((d) => { working.forEach((d) => {
console.log( console.log(
` - ${d.url}${d.requiresAuth ? " (requires auth)" : ""}${d.responseTime ? ` (${d.responseTime}ms)` : ""}`, ` - ${d.url}${d.requiresAuth ? " (requires auth)" : ""}${
d.responseTime ? ` (${d.responseTime}ms)` : ""
}`,
); );
}); });

84
src/lib/utils/relay_info_service.ts

@ -6,7 +6,7 @@
function simplifyUrl(url: string): string { function simplifyUrl(url: string): string {
try { try {
const urlObj = new URL(url); const urlObj = new URL(url);
return urlObj.hostname + (urlObj.port ? `:${urlObj.port}` : ''); return urlObj.hostname + (urlObj.port ? `:${urlObj.port}` : "");
} catch { } catch {
// If URL parsing fails, return the original string // If URL parsing fails, return the original string
return url; return url;
@ -42,18 +42,23 @@ export interface RelayInfoWithMetadata extends RelayInfo {
* @param url The relay URL to fetch info for * @param url The relay URL to fetch info for
* @returns Promise resolving to relay info or undefined if failed * @returns Promise resolving to relay info or undefined if failed
*/ */
export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata | undefined> { export async function fetchRelayInfo(
url: string,
): Promise<RelayInfoWithMetadata | undefined> {
try { try {
// Convert WebSocket URL to HTTP URL for NIP-11 // Convert WebSocket URL to HTTP URL for NIP-11
const httpUrl = url.replace('ws://', 'http://').replace('wss://', 'https://'); const httpUrl = url.replace("ws://", "http://").replace(
"wss://",
"https://",
);
const response = await fetch(httpUrl, { const response = await fetch(httpUrl, {
headers: { headers: {
'Accept': 'application/nostr+json', "Accept": "application/nostr+json",
'User-Agent': 'Alexandria/1.0' "User-Agent": "Alexandria/1.0",
}, },
// Add timeout to prevent hanging // Add timeout to prevent hanging
signal: AbortSignal.timeout(5000) signal: AbortSignal.timeout(5000),
}); });
if (!response.ok) { if (!response.ok) {
@ -62,18 +67,18 @@ export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata
url, url,
shortUrl: simplifyUrl(url), shortUrl: simplifyUrl(url),
hasNip11: false, hasNip11: false,
triedNip11: true triedNip11: true,
}; };
} }
const relayInfo = await response.json() as RelayInfo; const relayInfo = await response.json() as RelayInfo;
return { return {
...relayInfo, ...relayInfo,
url, url,
shortUrl: simplifyUrl(url), shortUrl: simplifyUrl(url),
hasNip11: Object.keys(relayInfo).length > 0, hasNip11: Object.keys(relayInfo).length > 0,
triedNip11: true triedNip11: true,
}; };
} catch (error) { } catch (error) {
console.warn(`[RelayInfo] Failed to fetch info for ${url}:`, error); console.warn(`[RelayInfo] Failed to fetch info for ${url}:`, error);
@ -81,7 +86,7 @@ export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata
url, url,
shortUrl: simplifyUrl(url), shortUrl: simplifyUrl(url),
hasNip11: false, hasNip11: false,
triedNip11: true triedNip11: true,
}; };
} }
} }
@ -91,16 +96,18 @@ export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata
* @param urls Array of relay URLs to fetch info for * @param urls Array of relay URLs to fetch info for
* @returns Promise resolving to array of relay info objects * @returns Promise resolving to array of relay info objects
*/ */
export async function fetchRelayInfos(urls: string[]): Promise<RelayInfoWithMetadata[]> { export async function fetchRelayInfos(
urls: string[],
): Promise<RelayInfoWithMetadata[]> {
if (urls.length === 0) { if (urls.length === 0) {
return []; return [];
} }
const promises = urls.map(url => fetchRelayInfo(url)); const promises = urls.map((url) => fetchRelayInfo(url));
const results = await Promise.allSettled(promises); const results = await Promise.allSettled(promises);
return results return results
.map(result => result.status === 'fulfilled' ? result.value : undefined) .map((result) => result.status === "fulfilled" ? result.value : undefined)
.filter((info): info is RelayInfoWithMetadata => info !== undefined); .filter((info): info is RelayInfoWithMetadata => info !== undefined);
} }
@ -110,34 +117,42 @@ export async function fetchRelayInfos(urls: string[]): Promise<RelayInfoWithMeta
* @param relayInfo Optional relay info * @param relayInfo Optional relay info
* @returns String describing the relay type * @returns String describing the relay type
*/ */
export function getRelayTypeLabel(relayUrl: string, relayInfo?: RelayInfoWithMetadata): string { export function getRelayTypeLabel(
relayUrl: string,
relayInfo?: RelayInfoWithMetadata,
): string {
// Check if it's a local relay // Check if it's a local relay
if (relayUrl.includes('localhost') || relayUrl.includes('127.0.0.1')) { if (relayUrl.includes("localhost") || relayUrl.includes("127.0.0.1")) {
return 'Local'; return "Local";
} }
// Check if it's a community relay // Check if it's a community relay
if (relayUrl.includes('nostr.band') || relayUrl.includes('noswhere.com') || if (
relayUrl.includes('damus.io') || relayUrl.includes('nostr.wine')) { relayUrl.includes("nostr.band") || relayUrl.includes("noswhere.com") ||
return 'Community'; relayUrl.includes("damus.io") || relayUrl.includes("nostr.wine")
) {
return "Community";
} }
// Check if it's a user's relay (likely inbox/outbox) // Check if it's a user's relay (likely inbox/outbox)
if (relayUrl.includes('relay.nsec.app') || relayUrl.includes('relay.snort.social')) { if (
return 'User'; relayUrl.includes("relay.nsec.app") ||
relayUrl.includes("relay.snort.social")
) {
return "User";
} }
// Use relay name if available // Use relay name if available
if (relayInfo?.name) { if (relayInfo?.name) {
return relayInfo.name; return relayInfo.name;
} }
// Fallback to domain // Fallback to domain
try { try {
const domain = new URL(relayUrl).hostname; const domain = new URL(relayUrl).hostname;
return domain.replace('www.', ''); return domain.replace("www.", "");
} catch { } catch {
return 'Unknown'; return "Unknown";
} }
} }
@ -147,11 +162,14 @@ export function getRelayTypeLabel(relayUrl: string, relayInfo?: RelayInfoWithMet
* @param relayUrl Relay URL as fallback * @param relayUrl Relay URL as fallback
* @returns Icon URL or undefined * @returns Icon URL or undefined
*/ */
export function getRelayIcon(relayInfo?: RelayInfoWithMetadata, relayUrl?: string): string | undefined { export function getRelayIcon(
relayInfo?: RelayInfoWithMetadata,
relayUrl?: string,
): string | undefined {
if (relayInfo?.icon) { if (relayInfo?.icon) {
return relayInfo.icon; return relayInfo.icon;
} }
// Generate favicon URL from relay URL // Generate favicon URL from relay URL
if (relayUrl) { if (relayUrl) {
try { try {
@ -161,6 +179,6 @@ export function getRelayIcon(relayInfo?: RelayInfoWithMetadata, relayUrl?: strin
// Invalid URL, return undefined // Invalid URL, return undefined
} }
} }
return undefined; return undefined;
} }

380
src/lib/utils/relay_management.ts

@ -1,5 +1,11 @@
import NDK, { NDKKind, NDKRelay, NDKUser } from "@nostr-dev-kit/ndk"; import NDK, { NDKKind, NDKRelay, NDKUser } from "@nostr-dev-kit/ndk";
import { searchRelays, secondaryRelays, anonymousRelays, lowbandwidthRelays, localRelays } from "../consts.ts"; import {
anonymousRelays,
localRelays,
lowbandwidthRelays,
searchRelays,
secondaryRelays,
} from "../consts.ts";
import { getRelaySetForNetworkCondition } from "./network_detection.ts"; import { getRelaySetForNetworkCondition } from "./network_detection.ts";
import { networkCondition } from "../stores/networkStore.ts"; import { networkCondition } from "../stores/networkStore.ts";
import { get } from "svelte/store"; import { get } from "svelte/store";
@ -11,15 +17,15 @@ import { get } from "svelte/store";
*/ */
export function normalizeRelayUrl(url: string): string { export function normalizeRelayUrl(url: string): string {
let normalized = url.toLowerCase().trim(); let normalized = url.toLowerCase().trim();
// Ensure protocol is present // Ensure protocol is present
if (!normalized.startsWith('ws://') && !normalized.startsWith('wss://')) { if (!normalized.startsWith("ws://") && !normalized.startsWith("wss://")) {
normalized = 'wss://' + normalized; normalized = "wss://" + normalized;
} }
// Remove trailing slash // Remove trailing slash
normalized = normalized.replace(/\/$/, ''); normalized = normalized.replace(/\/$/, "");
return normalized; return normalized;
} }
@ -58,7 +64,7 @@ export function testLocalRelayConnection(
actualUrl?: string; actualUrl?: string;
}> { }> {
// Only test connections on client-side // Only test connections on client-side
if (typeof window === 'undefined') { if (typeof window === "undefined") {
return Promise.resolve({ return Promise.resolve({
connected: false, connected: false,
requiresAuth: false, requiresAuth: false,
@ -66,7 +72,7 @@ export function testLocalRelayConnection(
actualUrl: relayUrl, actualUrl: relayUrl,
}); });
} }
return new Promise((resolve) => { return new Promise((resolve) => {
try { try {
// Ensure the URL is using ws:// protocol for local relays // Ensure the URL is using ws:// protocol for local relays
@ -193,7 +199,7 @@ export function testRemoteRelayConnection(
actualUrl?: string; actualUrl?: string;
}> { }> {
// Only test connections on client-side // Only test connections on client-side
if (typeof window === 'undefined') { if (typeof window === "undefined") {
return Promise.resolve({ return Promise.resolve({
connected: false, connected: false,
requiresAuth: false, requiresAuth: false,
@ -201,12 +207,14 @@ export function testRemoteRelayConnection(
actualUrl: relayUrl, actualUrl: relayUrl,
}); });
} }
return new Promise((resolve) => { return new Promise((resolve) => {
// Ensure the URL is using wss:// protocol for remote relays // Ensure the URL is using wss:// protocol for remote relays
const secureUrl = relayUrl.replace(/^ws:\/\//, "wss://"); const secureUrl = relayUrl.replace(/^ws:\/\//, "wss://");
console.debug(`[relay_management.ts] Testing remote relay connection: ${secureUrl}`); console.debug(
`[relay_management.ts] Testing remote relay connection: ${secureUrl}`,
);
// Use the existing NDK instance instead of creating a new one // Use the existing NDK instance instead of creating a new one
const relay = new NDKRelay(secureUrl, undefined, ndk); const relay = new NDKRelay(secureUrl, undefined, ndk);
@ -216,7 +224,9 @@ export function testRemoteRelayConnection(
let actualUrl: string | undefined; let actualUrl: string | undefined;
const timeout = setTimeout(() => { const timeout = setTimeout(() => {
console.debug(`[relay_management.ts] Relay ${secureUrl} connection timeout`); console.debug(
`[relay_management.ts] Relay ${secureUrl} connection timeout`,
);
relay.disconnect(); relay.disconnect();
resolve({ resolve({
connected: false, connected: false,
@ -227,7 +237,9 @@ export function testRemoteRelayConnection(
}, 3000); }, 3000);
relay.on("connect", () => { relay.on("connect", () => {
console.debug(`[relay_management.ts] Relay ${secureUrl} connected successfully`); console.debug(
`[relay_management.ts] Relay ${secureUrl} connected successfully`,
);
connected = true; connected = true;
actualUrl = secureUrl; actualUrl = secureUrl;
clearTimeout(timeout); clearTimeout(timeout);
@ -248,7 +260,9 @@ export function testRemoteRelayConnection(
relay.on("disconnect", () => { relay.on("disconnect", () => {
if (!connected) { if (!connected) {
console.debug(`[relay_management.ts] Relay ${secureUrl} disconnected without connecting`); console.debug(
`[relay_management.ts] Relay ${secureUrl} disconnected without connecting`,
);
error = "Connection failed"; error = "Connection failed";
clearTimeout(timeout); clearTimeout(timeout);
resolve({ resolve({
@ -280,14 +294,12 @@ export function testRelayConnection(
actualUrl?: string; actualUrl?: string;
}> { }> {
// Determine if this is a local or remote relay // Determine if this is a local or remote relay
if (relayUrl.includes('localhost') || relayUrl.includes('127.0.0.1')) { if (relayUrl.includes("localhost") || relayUrl.includes("127.0.0.1")) {
return testLocalRelayConnection(relayUrl, ndk); return testLocalRelayConnection(relayUrl, ndk);
} else { } else {
return testRemoteRelayConnection(relayUrl, ndk); return testRemoteRelayConnection(relayUrl, ndk);
} }
} }
/** /**
* Tests connection to local relays * Tests connection to local relays
@ -295,14 +307,17 @@ export function testRelayConnection(
* @param ndk NDK instance * @param ndk NDK instance
* @returns Promise that resolves to array of working local relay URLs * @returns Promise that resolves to array of working local relay URLs
*/ */
async function testLocalRelays(localRelayUrls: string[], ndk: NDK): Promise<string[]> { async function testLocalRelays(
localRelayUrls: string[],
ndk: NDK,
): Promise<string[]> {
try { try {
const workingRelays: string[] = []; const workingRelays: string[] = [];
if (localRelayUrls.length === 0) { if (localRelayUrls.length === 0) {
return workingRelays; return workingRelays;
} }
// Test local relays quietly, without logging failures // Test local relays quietly, without logging failures
await Promise.all( await Promise.all(
localRelayUrls.map(async (url) => { localRelayUrls.map(async (url) => {
@ -310,17 +325,21 @@ async function testLocalRelays(localRelayUrls: string[], ndk: NDK): Promise<stri
const result = await testLocalRelayConnection(url, ndk); const result = await testLocalRelayConnection(url, ndk);
if (result.connected) { if (result.connected) {
workingRelays.push(url); workingRelays.push(url);
console.debug(`[relay_management.ts] Local relay connected: ${url}`); console.debug(
`[relay_management.ts] Local relay connected: ${url}`,
);
} }
// Don't log failures - local relays are optional // Don't log failures - local relays are optional
} catch { } catch {
// Silently ignore local relay failures - they're optional // Silently ignore local relay failures - they're optional
} }
}) }),
); );
if (workingRelays.length > 0) { if (workingRelays.length > 0) {
console.info(`[relay_management.ts] Found ${workingRelays.length} working local relays`); console.info(
`[relay_management.ts] Found ${workingRelays.length} working local relays`,
);
} }
return workingRelays; return workingRelays;
} catch { } catch {
@ -339,17 +358,17 @@ export async function discoverLocalRelays(ndk: NDK): Promise<string[]> {
try { try {
// If no local relays are configured, return empty array // If no local relays are configured, return empty array
if (localRelays.length === 0) { if (localRelays.length === 0) {
console.debug('[relay_management.ts] No local relays configured'); console.debug("[relay_management.ts] No local relays configured");
return []; return [];
} }
// Convert wss:// URLs from consts to ws:// for local testing // Convert wss:// URLs from consts to ws:// for local testing
const localRelayUrls = localRelays.map((url: string) => const localRelayUrls = localRelays.map((url: string) =>
url.replace(/^wss:\/\//, 'ws://') url.replace(/^wss:\/\//, "ws://")
); );
const workingRelays = await testLocalRelays(localRelayUrls, ndk); const workingRelays = await testLocalRelays(localRelayUrls, ndk);
// If no local relays are working, return empty array // If no local relays are working, return empty array
// The network detection logic will provide fallback relays // The network detection logic will provide fallback relays
return workingRelays; return workingRelays;
@ -365,7 +384,10 @@ export async function discoverLocalRelays(ndk: NDK): Promise<string[]> {
* @param user User to fetch local relays for * @param user User to fetch local relays for
* @returns Promise that resolves to array of local relay URLs * @returns Promise that resolves to array of local relay URLs
*/ */
export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<string[]> { export async function getUserLocalRelays(
ndk: NDK,
user: NDKUser,
): Promise<string[]> {
try { try {
const localRelayEvent = await ndk.fetchEvent( const localRelayEvent = await ndk.fetchEvent(
{ {
@ -376,7 +398,7 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<strin
groupable: false, groupable: false,
skipVerification: false, skipVerification: false,
skipValidation: false, skipValidation: false,
} },
); );
if (!localRelayEvent) { if (!localRelayEvent) {
@ -385,14 +407,17 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<strin
const localRelays: string[] = []; const localRelays: string[] = [];
localRelayEvent.tags.forEach((tag) => { localRelayEvent.tags.forEach((tag) => {
if (tag[0] === 'r' && tag[1]) { if (tag[0] === "r" && tag[1]) {
localRelays.push(tag[1]); localRelays.push(tag[1]);
} }
}); });
return localRelays; return localRelays;
} catch (error) { } catch (error) {
console.info('[relay_management.ts] Error fetching user local relays:', error); console.info(
"[relay_management.ts] Error fetching user local relays:",
error,
);
return []; return [];
} }
} }
@ -403,7 +428,10 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<strin
* @param user User to fetch blocked relays for * @param user User to fetch blocked relays for
* @returns Promise that resolves to array of blocked relay URLs * @returns Promise that resolves to array of blocked relay URLs
*/ */
export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<string[]> { export async function getUserBlockedRelays(
ndk: NDK,
user: NDKUser,
): Promise<string[]> {
try { try {
const blockedRelayEvent = await ndk.fetchEvent( const blockedRelayEvent = await ndk.fetchEvent(
{ {
@ -414,7 +442,7 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<str
groupable: false, groupable: false,
skipVerification: false, skipVerification: false,
skipValidation: false, skipValidation: false,
} },
); );
if (!blockedRelayEvent) { if (!blockedRelayEvent) {
@ -423,14 +451,17 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<str
const blockedRelays: string[] = []; const blockedRelays: string[] = [];
blockedRelayEvent.tags.forEach((tag) => { blockedRelayEvent.tags.forEach((tag) => {
if (tag[0] === 'r' && tag[1]) { if (tag[0] === "r" && tag[1]) {
blockedRelays.push(tag[1]); blockedRelays.push(tag[1]);
} }
}); });
return blockedRelays; return blockedRelays;
} catch (error) { } catch (error) {
console.info('[relay_management.ts] Error fetching user blocked relays:', error); console.info(
"[relay_management.ts] Error fetching user blocked relays:",
error,
);
return []; return [];
} }
} }
@ -441,9 +472,15 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<str
* @param user User to fetch outbox relays for * @param user User to fetch outbox relays for
* @returns Promise that resolves to array of outbox relay URLs * @returns Promise that resolves to array of outbox relay URLs
*/ */
export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<string[]> { export async function getUserOutboxRelays(
ndk: NDK,
user: NDKUser,
): Promise<string[]> {
try { try {
console.debug('[relay_management.ts] Fetching outbox relays for user:', user.pubkey); console.debug(
"[relay_management.ts] Fetching outbox relays for user:",
user.pubkey,
);
const relayList = await ndk.fetchEvent( const relayList = await ndk.fetchEvent(
{ {
kinds: [10002], kinds: [10002],
@ -453,36 +490,47 @@ export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<stri
groupable: false, groupable: false,
skipVerification: false, skipVerification: false,
skipValidation: false, skipValidation: false,
} },
); );
if (!relayList) { if (!relayList) {
console.debug('[relay_management.ts] No relay list found for user'); console.debug("[relay_management.ts] No relay list found for user");
return []; return [];
} }
console.debug('[relay_management.ts] Found relay list event:', relayList.id); console.debug(
console.debug('[relay_management.ts] Relay list tags:', relayList.tags); "[relay_management.ts] Found relay list event:",
relayList.id,
);
console.debug("[relay_management.ts] Relay list tags:", relayList.tags);
const outboxRelays: string[] = []; const outboxRelays: string[] = [];
relayList.tags.forEach((tag) => { relayList.tags.forEach((tag) => {
console.debug('[relay_management.ts] Processing tag:', tag); console.debug("[relay_management.ts] Processing tag:", tag);
if (tag[0] === 'w' && tag[1]) { if (tag[0] === "w" && tag[1]) {
outboxRelays.push(tag[1]); outboxRelays.push(tag[1]);
console.debug('[relay_management.ts] Added outbox relay:', tag[1]); console.debug("[relay_management.ts] Added outbox relay:", tag[1]);
} else if (tag[0] === 'r' && tag[1]) { } else if (tag[0] === "r" && tag[1]) {
// Some relay lists use 'r' for both inbox and outbox // Some relay lists use 'r' for both inbox and outbox
outboxRelays.push(tag[1]); outboxRelays.push(tag[1]);
console.debug('[relay_management.ts] Added relay (r tag):', tag[1]); console.debug("[relay_management.ts] Added relay (r tag):", tag[1]);
} else { } else {
console.debug('[relay_management.ts] Skipping tag:', tag[0], 'value:', tag[1]); console.debug(
"[relay_management.ts] Skipping tag:",
tag[0],
"value:",
tag[1],
);
} }
}); });
console.debug('[relay_management.ts] Final outbox relays:', outboxRelays); console.debug("[relay_management.ts] Final outbox relays:", outboxRelays);
return outboxRelays; return outboxRelays;
} catch (error) { } catch (error) {
console.info('[relay_management.ts] Error fetching user outbox relays:', error); console.info(
"[relay_management.ts] Error fetching user outbox relays:",
error,
);
return []; return [];
} }
} }
@ -494,45 +542,65 @@ export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<stri
export async function getExtensionRelays(): Promise<string[]> { export async function getExtensionRelays(): Promise<string[]> {
try { try {
// Check if we're in a browser environment with extension support // Check if we're in a browser environment with extension support
if (typeof window === 'undefined' || !globalThis.nostr) { if (typeof window === "undefined" || !globalThis.nostr) {
console.debug('[relay_management.ts] No globalThis.nostr available'); console.debug("[relay_management.ts] No globalThis.nostr available");
return []; return [];
} }
console.debug('[relay_management.ts] Extension available, checking for getRelays()'); console.debug(
"[relay_management.ts] Extension available, checking for getRelays()",
);
const extensionRelays: string[] = []; const extensionRelays: string[] = [];
// Try to get relays from the extension's API // Try to get relays from the extension's API
// Different extensions may expose their relay config differently // Different extensions may expose their relay config differently
if (globalThis.nostr.getRelays) { if (globalThis.nostr.getRelays) {
console.debug('[relay_management.ts] getRelays() method found, calling it...'); console.debug(
"[relay_management.ts] getRelays() method found, calling it...",
);
try { try {
const relays = await globalThis.nostr.getRelays(); const relays = await globalThis.nostr.getRelays();
console.debug('[relay_management.ts] getRelays() returned:', relays); console.debug("[relay_management.ts] getRelays() returned:", relays);
if (relays && typeof relays === 'object') { if (relays && typeof relays === "object") {
// Convert relay object to array of URLs // Convert relay object to array of URLs
const relayUrls = Object.keys(relays); const relayUrls = Object.keys(relays);
extensionRelays.push(...relayUrls); extensionRelays.push(...relayUrls);
console.debug('[relay_management.ts] Got relays from extension:', relayUrls); console.debug(
"[relay_management.ts] Got relays from extension:",
relayUrls,
);
} }
} catch (error) { } catch (error) {
console.debug('[relay_management.ts] Extension getRelays() failed:', error); console.debug(
"[relay_management.ts] Extension getRelays() failed:",
error,
);
} }
} else { } else {
console.debug('[relay_management.ts] getRelays() method not found on globalThis.nostr'); console.debug(
"[relay_management.ts] getRelays() method not found on globalThis.nostr",
);
} }
// If getRelays() didn't work, try alternative methods // If getRelays() didn't work, try alternative methods
if (extensionRelays.length === 0) { if (extensionRelays.length === 0) {
// Some extensions might expose relays through other methods // Some extensions might expose relays through other methods
// This is a fallback for extensions that don't expose getRelays() // This is a fallback for extensions that don't expose getRelays()
console.debug('[relay_management.ts] Extension does not expose relay configuration'); console.debug(
"[relay_management.ts] Extension does not expose relay configuration",
);
} }
console.debug('[relay_management.ts] Final extension relays:', extensionRelays); console.debug(
"[relay_management.ts] Final extension relays:",
extensionRelays,
);
return extensionRelays; return extensionRelays;
} catch (error) { } catch (error) {
console.debug('[relay_management.ts] Error getting extension relays:', error); console.debug(
"[relay_management.ts] Error getting extension relays:",
error,
);
return []; return [];
} }
} }
@ -547,36 +615,59 @@ async function testRelaySet(relayUrls: string[], ndk: NDK): Promise<string[]> {
const workingRelays: string[] = []; const workingRelays: string[] = [];
const maxConcurrent = 2; // Reduce to 2 relays at a time to avoid overwhelming them const maxConcurrent = 2; // Reduce to 2 relays at a time to avoid overwhelming them
console.debug(`[relay_management.ts] Testing ${relayUrls.length} relays in batches of ${maxConcurrent}`); console.debug(
`[relay_management.ts] Testing ${relayUrls.length} relays in batches of ${maxConcurrent}`,
);
console.debug(`[relay_management.ts] Relay URLs to test:`, relayUrls); console.debug(`[relay_management.ts] Relay URLs to test:`, relayUrls);
for (let i = 0; i < relayUrls.length; i += maxConcurrent) { for (let i = 0; i < relayUrls.length; i += maxConcurrent) {
const batch = relayUrls.slice(i, i + maxConcurrent); const batch = relayUrls.slice(i, i + maxConcurrent);
console.debug(`[relay_management.ts] Testing batch ${Math.floor(i/maxConcurrent) + 1}:`, batch); console.debug(
`[relay_management.ts] Testing batch ${
Math.floor(i / maxConcurrent) + 1
}:`,
batch,
);
const batchPromises = batch.map(async (url) => { const batchPromises = batch.map(async (url) => {
try { try {
console.debug(`[relay_management.ts] Testing relay: ${url}`); console.debug(`[relay_management.ts] Testing relay: ${url}`);
const result = await testRelayConnection(url, ndk); const result = await testRelayConnection(url, ndk);
console.debug(`[relay_management.ts] Relay ${url} test result:`, result); console.debug(
`[relay_management.ts] Relay ${url} test result:`,
result,
);
return result.connected ? url : null; return result.connected ? url : null;
} catch (error) { } catch (error) {
console.debug(`[relay_management.ts] Failed to test relay ${url}:`, error); console.debug(
`[relay_management.ts] Failed to test relay ${url}:`,
error,
);
return null; return null;
} }
}); });
const batchResults = await Promise.allSettled(batchPromises); const batchResults = await Promise.allSettled(batchPromises);
const batchWorkingRelays = batchResults const batchWorkingRelays = batchResults
.filter((result): result is PromiseFulfilledResult<string | null> => result.status === 'fulfilled') .filter((result): result is PromiseFulfilledResult<string | null> =>
.map(result => result.value) result.status === "fulfilled"
)
.map((result) => result.value)
.filter((url): url is string => url !== null); .filter((url): url is string => url !== null);
console.debug(`[relay_management.ts] Batch ${Math.floor(i/maxConcurrent) + 1} working relays:`, batchWorkingRelays); console.debug(
`[relay_management.ts] Batch ${
Math.floor(i / maxConcurrent) + 1
} working relays:`,
batchWorkingRelays,
);
workingRelays.push(...batchWorkingRelays); workingRelays.push(...batchWorkingRelays);
} }
console.debug(`[relay_management.ts] Total working relays after testing:`, workingRelays); console.debug(
`[relay_management.ts] Total working relays after testing:`,
workingRelays,
);
return workingRelays; return workingRelays;
} }
@ -588,13 +679,19 @@ async function testRelaySet(relayUrls: string[], ndk: NDK): Promise<string[]> {
*/ */
export async function buildCompleteRelaySet( export async function buildCompleteRelaySet(
ndk: NDK, ndk: NDK,
user: NDKUser | null user: NDKUser | null,
): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> { ): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> {
console.debug('[relay_management.ts] buildCompleteRelaySet: Starting with user:', user?.pubkey || 'null'); console.debug(
"[relay_management.ts] buildCompleteRelaySet: Starting with user:",
user?.pubkey || "null",
);
// Discover local relays first // Discover local relays first
const discoveredLocalRelays = await discoverLocalRelays(ndk); const discoveredLocalRelays = await discoverLocalRelays(ndk);
console.debug('[relay_management.ts] buildCompleteRelaySet: Discovered local relays:', discoveredLocalRelays); console.debug(
"[relay_management.ts] buildCompleteRelaySet: Discovered local relays:",
discoveredLocalRelays,
);
// Get user-specific relays if available // Get user-specific relays if available
let userOutboxRelays: string[] = []; let userOutboxRelays: string[] = [];
@ -603,42 +700,75 @@ export async function buildCompleteRelaySet(
let extensionRelays: string[] = []; let extensionRelays: string[] = [];
if (user) { if (user) {
console.debug('[relay_management.ts] buildCompleteRelaySet: Fetching user-specific relays for:', user.pubkey); console.debug(
"[relay_management.ts] buildCompleteRelaySet: Fetching user-specific relays for:",
user.pubkey,
);
try { try {
userOutboxRelays = await getUserOutboxRelays(ndk, user); userOutboxRelays = await getUserOutboxRelays(ndk, user);
console.debug('[relay_management.ts] buildCompleteRelaySet: User outbox relays:', userOutboxRelays); console.debug(
"[relay_management.ts] buildCompleteRelaySet: User outbox relays:",
userOutboxRelays,
);
} catch (error) { } catch (error) {
console.debug('[relay_management.ts] Error fetching user outbox relays:', error); console.debug(
"[relay_management.ts] Error fetching user outbox relays:",
error,
);
} }
try { try {
userLocalRelays = await getUserLocalRelays(ndk, user); userLocalRelays = await getUserLocalRelays(ndk, user);
console.debug('[relay_management.ts] buildCompleteRelaySet: User local relays:', userLocalRelays); console.debug(
"[relay_management.ts] buildCompleteRelaySet: User local relays:",
userLocalRelays,
);
} catch (error) { } catch (error) {
console.debug('[relay_management.ts] Error fetching user local relays:', error); console.debug(
"[relay_management.ts] Error fetching user local relays:",
error,
);
} }
try { try {
blockedRelays = await getUserBlockedRelays(ndk, user); blockedRelays = await getUserBlockedRelays(ndk, user);
console.debug('[relay_management.ts] buildCompleteRelaySet: User blocked relays:', blockedRelays); console.debug(
"[relay_management.ts] buildCompleteRelaySet: User blocked relays:",
blockedRelays,
);
} catch { } catch {
// Silently ignore blocked relay fetch errors // Silently ignore blocked relay fetch errors
} }
try { try {
extensionRelays = await getExtensionRelays(); extensionRelays = await getExtensionRelays();
console.debug('[relay_management.ts] Extension relays gathered:', extensionRelays); console.debug(
"[relay_management.ts] Extension relays gathered:",
extensionRelays,
);
} catch (error) { } catch (error) {
console.debug('[relay_management.ts] Error fetching extension relays:', error); console.debug(
"[relay_management.ts] Error fetching extension relays:",
error,
);
} }
} else { } else {
console.debug('[relay_management.ts] buildCompleteRelaySet: No user provided, skipping user-specific relays'); console.debug(
"[relay_management.ts] buildCompleteRelaySet: No user provided, skipping user-specific relays",
);
} }
// Build initial relay sets and deduplicate // Build initial relay sets and deduplicate
const finalInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userLocalRelays]); const finalInboxRelays = deduplicateRelayUrls([
const finalOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userOutboxRelays, ...extensionRelays]); ...discoveredLocalRelays,
...userLocalRelays,
]);
const finalOutboxRelays = deduplicateRelayUrls([
...discoveredLocalRelays,
...userOutboxRelays,
...extensionRelays,
]);
// Test relays and filter out non-working ones // Test relays and filter out non-working ones
let testedInboxRelays: string[] = []; let testedInboxRelays: string[] = [];
@ -654,21 +784,27 @@ export async function buildCompleteRelaySet(
// If no relays passed testing, use remote relays without testing // If no relays passed testing, use remote relays without testing
if (testedInboxRelays.length === 0 && testedOutboxRelays.length === 0) { if (testedInboxRelays.length === 0 && testedOutboxRelays.length === 0) {
const remoteRelays = deduplicateRelayUrls([...secondaryRelays, ...searchRelays]); const remoteRelays = deduplicateRelayUrls([
...secondaryRelays,
...searchRelays,
]);
return { return {
inboxRelays: remoteRelays, inboxRelays: remoteRelays,
outboxRelays: remoteRelays outboxRelays: remoteRelays,
}; };
} }
// Always include some remote relays as fallback, even when local relays are working // Always include some remote relays as fallback, even when local relays are working
const fallbackRelays = deduplicateRelayUrls([...anonymousRelays, ...secondaryRelays]); const fallbackRelays = deduplicateRelayUrls([
...anonymousRelays,
...secondaryRelays,
]);
// Use tested relays and add fallback relays // Use tested relays and add fallback relays
const inboxRelays = testedInboxRelays.length > 0 const inboxRelays = testedInboxRelays.length > 0
? deduplicateRelayUrls([...testedInboxRelays, ...fallbackRelays]) ? deduplicateRelayUrls([...testedInboxRelays, ...fallbackRelays])
: deduplicateRelayUrls(fallbackRelays); : deduplicateRelayUrls(fallbackRelays);
const outboxRelays = testedOutboxRelays.length > 0 const outboxRelays = testedOutboxRelays.length > 0
? deduplicateRelayUrls([...testedOutboxRelays, ...fallbackRelays]) ? deduplicateRelayUrls([...testedOutboxRelays, ...fallbackRelays])
: deduplicateRelayUrls(fallbackRelays); : deduplicateRelayUrls(fallbackRelays);
@ -678,27 +814,51 @@ export async function buildCompleteRelaySet(
currentNetworkCondition, currentNetworkCondition,
discoveredLocalRelays, discoveredLocalRelays,
lowbandwidthRelays, lowbandwidthRelays,
{ inboxRelays, outboxRelays } { inboxRelays, outboxRelays },
); );
// Filter out blocked relays and deduplicate final sets // Filter out blocked relays and deduplicate final sets
const finalRelaySet = { const finalRelaySet = {
inboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.inboxRelays.filter((r: string) => !blockedRelays.includes(r))), inboxRelays: deduplicateRelayUrls(
outboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.outboxRelays.filter((r: string) => !blockedRelays.includes(r))) networkOptimizedRelaySet.inboxRelays.filter((r: string) =>
!blockedRelays.includes(r)
),
),
outboxRelays: deduplicateRelayUrls(
networkOptimizedRelaySet.outboxRelays.filter((r: string) =>
!blockedRelays.includes(r)
),
),
}; };
// Ensure we always have at least some relays // Ensure we always have at least some relays
if (finalRelaySet.inboxRelays.length === 0 && finalRelaySet.outboxRelays.length === 0) { if (
console.warn('[relay_management.ts] No relays available, using anonymous relays as final fallback'); finalRelaySet.inboxRelays.length === 0 &&
finalRelaySet.outboxRelays.length === 0
) {
console.warn(
"[relay_management.ts] No relays available, using anonymous relays as final fallback",
);
return { return {
inboxRelays: deduplicateRelayUrls(anonymousRelays), inboxRelays: deduplicateRelayUrls(anonymousRelays),
outboxRelays: deduplicateRelayUrls(anonymousRelays) outboxRelays: deduplicateRelayUrls(anonymousRelays),
}; };
} }
console.debug('[relay_management.ts] buildCompleteRelaySet: Final relay sets - inbox:', finalRelaySet.inboxRelays.length, 'outbox:', finalRelaySet.outboxRelays.length); console.debug(
console.debug('[relay_management.ts] buildCompleteRelaySet: Final inbox relays:', finalRelaySet.inboxRelays); "[relay_management.ts] buildCompleteRelaySet: Final relay sets - inbox:",
console.debug('[relay_management.ts] buildCompleteRelaySet: Final outbox relays:', finalRelaySet.outboxRelays); finalRelaySet.inboxRelays.length,
"outbox:",
finalRelaySet.outboxRelays.length,
);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: Final inbox relays:",
finalRelaySet.inboxRelays,
);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: Final outbox relays:",
finalRelaySet.outboxRelays,
);
return finalRelaySet; return finalRelaySet;
} }

16
src/lib/utils/search_result_formatter.ts

@ -6,17 +6,19 @@ export class SearchResultFormatter {
/** /**
* Formats a result message based on search count and type * Formats a result message based on search count and type
*/ */
formatResultMessage(searchResultCount: number | null, searchResultType: string | null): string { formatResultMessage(
searchResultCount: number | null,
searchResultType: string | null,
): string {
if (searchResultCount === 0) { if (searchResultCount === 0) {
return "Search completed. No results found."; return "Search completed. No results found.";
} }
const typeLabel = const typeLabel = searchResultType === "n"
searchResultType === "n" ? "profile"
? "profile" : searchResultType === "nip05"
: searchResultType === "nip05" ? "NIP-05 address"
? "NIP-05 address" : "event";
: "event";
const countLabel = searchResultType === "n" ? "profiles" : "events"; const countLabel = searchResultType === "n" ? "profiles" : "events";
return searchResultCount === 1 return searchResultCount === 1

14
src/lib/utils/search_utility.ts

@ -13,13 +13,13 @@ export { searchBySubscription } from "./subscription_search";
export { searchEvent, searchNip05 } from "./event_search"; export { searchEvent, searchNip05 } from "./event_search";
export { checkCommunity } from "./community_checker"; export { checkCommunity } from "./community_checker";
export { export {
wellKnownUrl,
lnurlpWellKnownUrl,
isValidNip05Address,
normalizeSearchTerm,
fieldMatches,
nip05Matches,
COMMON_DOMAINS, COMMON_DOMAINS,
isEmojiReaction,
createProfileFromEvent, createProfileFromEvent,
fieldMatches,
isEmojiReaction,
isValidNip05Address,
lnurlpWellKnownUrl,
nip05Matches,
normalizeSearchTerm,
wellKnownUrl,
} from "./search_utils"; } from "./search_utils";

272
src/lib/utils/subscription_search.ts

@ -2,28 +2,28 @@
import { ndkInstance } from "../ndk.ts"; import { ndkInstance } from "../ndk.ts";
import { getMatchingTags, getNpubFromNip05 } from "./nostrUtils.ts"; import { getMatchingTags, getNpubFromNip05 } from "./nostrUtils.ts";
import { nip19 } from "./nostrUtils.ts"; import { nip19 } from "./nostrUtils.ts";
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk"; import { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk";
import { searchCache } from "./searchCache.ts"; import { searchCache } from "./searchCache.ts";
import { communityRelays, searchRelays } from "../consts.ts"; import { communityRelays, searchRelays } from "../consts.ts";
import { get } from "svelte/store"; import { get } from "svelte/store";
import type { import type {
SearchCallbacks,
SearchFilter,
SearchResult, SearchResult,
SearchSubscriptionType, SearchSubscriptionType,
SearchFilter,
SearchCallbacks,
} from "./search_types.ts"; } from "./search_types.ts";
import { import {
fieldMatches,
nip05Matches,
COMMON_DOMAINS, COMMON_DOMAINS,
fieldMatches,
isEmojiReaction, isEmojiReaction,
nip05Matches,
} from "./search_utils.ts"; } from "./search_utils.ts";
import { TIMEOUTS, SEARCH_LIMITS } from "./search_constants.ts"; import { SEARCH_LIMITS, TIMEOUTS } from "./search_constants.ts";
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts"; import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
// Helper function to normalize URLs for comparison // Helper function to normalize URLs for comparison
const normalizeUrl = (url: string): string => { const normalizeUrl = (url: string): string => {
return url.replace(/\/$/, ''); // Remove trailing slash return url.replace(/\/$/, ""); // Remove trailing slash
}; };
/** /**
@ -62,7 +62,9 @@ export async function searchBySubscription(
// AI-NOTE: 2025-01-24 - For profile searches, return cached results immediately // AI-NOTE: 2025-01-24 - For profile searches, return cached results immediately
// The EventSearch component now handles cache checking before calling this function // The EventSearch component now handles cache checking before calling this function
if (searchType === "n") { if (searchType === "n") {
console.log("subscription_search: Returning cached profile result immediately"); console.log(
"subscription_search: Returning cached profile result immediately",
);
return cachedResult; return cachedResult;
} else { } else {
return cachedResult; return cachedResult;
@ -147,8 +149,10 @@ export async function searchBySubscription(
// AI-NOTE: 2025-01-08 - For profile searches, return immediately when found // AI-NOTE: 2025-01-08 - For profile searches, return immediately when found
// but still start background search for second-order results // but still start background search for second-order results
if (searchType === "n") { if (searchType === "n") {
console.log("subscription_search: Profile found, returning immediately but starting background second-order search"); console.log(
"subscription_search: Profile found, returning immediately but starting background second-order search",
);
// Start Phase 2 in background for second-order results // Start Phase 2 in background for second-order results
searchOtherRelaysInBackground( searchOtherRelaysInBackground(
searchType, searchType,
@ -157,9 +161,11 @@ export async function searchBySubscription(
callbacks, callbacks,
cleanup, cleanup,
); );
const elapsed = Date.now() - startTime; const elapsed = Date.now() - startTime;
console.log(`subscription_search: Profile search completed in ${elapsed}ms`); console.log(
`subscription_search: Profile search completed in ${elapsed}ms`,
);
return immediateResult; return immediateResult;
} }
@ -177,7 +183,7 @@ export async function searchBySubscription(
console.log( console.log(
"subscription_search: No results from primary relay", "subscription_search: No results from primary relay",
); );
// AI-NOTE: 2025-01-08 - For profile searches, if no results found in search relays, // AI-NOTE: 2025-01-08 - For profile searches, if no results found in search relays,
// try all relays as fallback // try all relays as fallback
if (searchType === "n") { if (searchType === "n") {
@ -185,20 +191,23 @@ export async function searchBySubscription(
"subscription_search: No profile found in search relays, trying all relays", "subscription_search: No profile found in search relays, trying all relays",
); );
// Try with all relays as fallback // Try with all relays as fallback
const allRelaySet = new NDKRelaySet(new Set(Array.from(ndk.pool.relays.values())) as any, ndk); const allRelaySet = new NDKRelaySet(
new Set(Array.from(ndk.pool.relays.values())) as any,
ndk,
);
try { try {
const fallbackEvents = await ndk.fetchEvents( const fallbackEvents = await ndk.fetchEvents(
searchFilter.filter, searchFilter.filter,
{ closeOnEose: true }, { closeOnEose: true },
allRelaySet, allRelaySet,
); );
console.log( console.log(
"subscription_search: Fallback search returned", "subscription_search: Fallback search returned",
fallbackEvents.size, fallbackEvents.size,
"events", "events",
); );
processPrimaryRelayResults( processPrimaryRelayResults(
fallbackEvents, fallbackEvents,
searchType, searchType,
@ -208,7 +217,7 @@ export async function searchBySubscription(
abortSignal, abortSignal,
cleanup, cleanup,
); );
if (hasResults(searchState, searchType)) { if (hasResults(searchState, searchType)) {
console.log( console.log(
"subscription_search: Found profile in fallback search, returning immediately", "subscription_search: Found profile in fallback search, returning immediately",
@ -220,21 +229,31 @@ export async function searchBySubscription(
); );
searchCache.set(searchType, normalizedSearchTerm, fallbackResult); searchCache.set(searchType, normalizedSearchTerm, fallbackResult);
const elapsed = Date.now() - startTime; const elapsed = Date.now() - startTime;
console.log(`subscription_search: Profile search completed in ${elapsed}ms (fallback)`); console.log(
`subscription_search: Profile search completed in ${elapsed}ms (fallback)`,
);
return fallbackResult; return fallbackResult;
} }
} catch (fallbackError) { } catch (fallbackError) {
console.error("subscription_search: Fallback search failed:", fallbackError); console.error(
"subscription_search: Fallback search failed:",
fallbackError,
);
} }
console.log( console.log(
"subscription_search: Profile not found in any relays, returning empty result", "subscription_search: Profile not found in any relays, returning empty result",
); );
const emptyResult = createEmptySearchResult(searchType, normalizedSearchTerm); const emptyResult = createEmptySearchResult(
searchType,
normalizedSearchTerm,
);
// AI-NOTE: 2025-01-08 - Don't cache empty profile results as they may be due to search issues // AI-NOTE: 2025-01-08 - Don't cache empty profile results as they may be due to search issues
// rather than the profile not existing // rather than the profile not existing
const elapsed = Date.now() - startTime; const elapsed = Date.now() - startTime;
console.log(`subscription_search: Profile search completed in ${elapsed}ms (not found)`); console.log(
`subscription_search: Profile search completed in ${elapsed}ms (not found)`,
);
return emptyResult; return emptyResult;
} else { } else {
console.log( console.log(
@ -262,13 +281,15 @@ export async function searchBySubscription(
callbacks, callbacks,
cleanup, cleanup,
); );
// AI-NOTE: 2025-01-08 - Log performance for non-profile searches // AI-NOTE: 2025-01-08 - Log performance for non-profile searches
if (searchType !== "n") { if (searchType !== "n") {
const elapsed = Date.now() - startTime; const elapsed = Date.now() - startTime;
console.log(`subscription_search: ${searchType} search completed in ${elapsed}ms`); console.log(
`subscription_search: ${searchType} search completed in ${elapsed}ms`,
);
} }
return result; return result;
} }
@ -324,7 +345,10 @@ async function createSearchFilter(
switch (searchType) { switch (searchType) {
case "d": { case "d": {
const dFilter = { const dFilter = {
filter: { "#d": [normalizedSearchTerm], limit: SEARCH_LIMITS.GENERAL_CONTENT }, filter: {
"#d": [normalizedSearchTerm],
limit: SEARCH_LIMITS.GENERAL_CONTENT,
},
subscriptionType: "d-tag", subscriptionType: "d-tag",
}; };
console.log("subscription_search: Created d-tag filter:", dFilter); console.log("subscription_search: Created d-tag filter:", dFilter);
@ -332,7 +356,10 @@ async function createSearchFilter(
} }
case "t": { case "t": {
const tFilter = { const tFilter = {
filter: { "#t": [normalizedSearchTerm], limit: SEARCH_LIMITS.GENERAL_CONTENT }, filter: {
"#t": [normalizedSearchTerm],
limit: SEARCH_LIMITS.GENERAL_CONTENT,
},
subscriptionType: "t-tag", subscriptionType: "t-tag",
}; };
console.log("subscription_search: Created t-tag filter:", tFilter); console.log("subscription_search: Created t-tag filter:", tFilter);
@ -412,11 +439,14 @@ function createPrimaryRelaySet(
): NDKRelaySet { ): NDKRelaySet {
// Debug: Log all relays in NDK pool // Debug: Log all relays in NDK pool
const poolRelays = Array.from(ndk.pool.relays.values()); const poolRelays = Array.from(ndk.pool.relays.values());
console.debug('subscription_search: NDK pool relays:', poolRelays.map((r: any) => r.url)); console.debug(
"subscription_search: NDK pool relays:",
poolRelays.map((r: any) => r.url),
);
// AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive search coverage // AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive search coverage
// This ensures searches don't fail due to missing relays and provides maximum event discovery // This ensures searches don't fail due to missing relays and provides maximum event discovery
if (searchType === "n") { if (searchType === "n") {
// For profile searches, prioritize search relays for speed but include all relays // For profile searches, prioritize search relays for speed but include all relays
const searchRelaySet = poolRelays.filter( const searchRelaySet = poolRelays.filter(
@ -426,29 +456,43 @@ function createPrimaryRelaySet(
normalizeUrl(relay.url) === normalizeUrl(searchRelay), normalizeUrl(relay.url) === normalizeUrl(searchRelay),
), ),
); );
if (searchRelaySet.length > 0) { if (searchRelaySet.length > 0) {
console.debug('subscription_search: Profile search - using search relays for speed:', searchRelaySet.map((r: any) => r.url)); console.debug(
"subscription_search: Profile search - using search relays for speed:",
searchRelaySet.map((r: any) => r.url),
);
// Still include all relays for comprehensive coverage // Still include all relays for comprehensive coverage
console.debug('subscription_search: Profile search - also including all relays for comprehensive coverage'); console.debug(
"subscription_search: Profile search - also including all relays for comprehensive coverage",
);
return new NDKRelaySet(new Set(poolRelays) as any, ndk); return new NDKRelaySet(new Set(poolRelays) as any, ndk);
} else { } else {
// Use all relays if search relays not available // Use all relays if search relays not available
console.debug('subscription_search: Profile search - using all relays:', poolRelays.map((r: any) => r.url)); console.debug(
"subscription_search: Profile search - using all relays:",
poolRelays.map((r: any) => r.url),
);
return new NDKRelaySet(new Set(poolRelays) as any, ndk); return new NDKRelaySet(new Set(poolRelays) as any, ndk);
} }
} else { } else {
// For all other searches, use ALL available relays for maximum coverage // For all other searches, use ALL available relays for maximum coverage
const activeRelays = [...get(activeInboxRelays), ...get(activeOutboxRelays)]; const activeRelays = [
console.debug('subscription_search: Active relay stores:', { ...get(activeInboxRelays),
...get(activeOutboxRelays),
];
console.debug("subscription_search: Active relay stores:", {
inboxRelays: get(activeInboxRelays), inboxRelays: get(activeInboxRelays),
outboxRelays: get(activeOutboxRelays), outboxRelays: get(activeOutboxRelays),
activeRelays activeRelays,
}); });
// AI-NOTE: 2025-01-24 - Use all pool relays instead of filtering to active relays only // AI-NOTE: 2025-01-24 - Use all pool relays instead of filtering to active relays only
// This ensures we don't miss events that might be on other relays // This ensures we don't miss events that might be on other relays
console.debug('subscription_search: Using ALL pool relays for comprehensive search coverage:', poolRelays.map((r: any) => r.url)); console.debug(
"subscription_search: Using ALL pool relays for comprehensive search coverage:",
poolRelays.map((r: any) => r.url),
);
return new NDKRelaySet(new Set(poolRelays) as any, ndk); return new NDKRelaySet(new Set(poolRelays) as any, ndk);
} }
} }
@ -620,12 +664,11 @@ function createSearchResult(
normalizedSearchTerm: string, normalizedSearchTerm: string,
): SearchResult { ): SearchResult {
return { return {
events: events: searchType === "n"
searchType === "n" ? searchState.foundProfiles
? searchState.foundProfiles : searchType === "t"
: searchType === "t" ? searchState.tTagEvents
? searchState.tTagEvents : searchState.firstOrderEvents,
: searchState.firstOrderEvents,
secondOrder: [], secondOrder: [],
tTagEvents: [], tTagEvents: [],
eventIds: searchState.eventIds, eventIds: searchState.eventIds,
@ -653,9 +696,11 @@ function searchOtherRelaysInBackground(
new Set(Array.from(ndk.pool.relays.values())), new Set(Array.from(ndk.pool.relays.values())),
ndk, ndk,
); );
console.debug('subscription_search: Background search using ALL relays:', console.debug(
Array.from(ndk.pool.relays.values()).map((r: any) => r.url)); "subscription_search: Background search using ALL relays:",
Array.from(ndk.pool.relays.values()).map((r: any) => r.url),
);
// Subscribe to events from other relays // Subscribe to events from other relays
const sub = ndk.subscribe( const sub = ndk.subscribe(
@ -758,7 +803,10 @@ function processProfileEoseResults(
) { ) {
const targetPubkey = dedupedProfiles[0]?.pubkey; const targetPubkey = dedupedProfiles[0]?.pubkey;
if (targetPubkey) { if (targetPubkey) {
console.log("subscription_search: Triggering second-order search for npub-specific profile:", targetPubkey); console.log(
"subscription_search: Triggering second-order search for npub-specific profile:",
targetPubkey,
);
performSecondOrderSearchInBackground( performSecondOrderSearchInBackground(
"n", "n",
dedupedProfiles, dedupedProfiles,
@ -768,13 +816,18 @@ function processProfileEoseResults(
callbacks, callbacks,
); );
} else { } else {
console.log("subscription_search: No targetPubkey found for second-order search"); console.log(
"subscription_search: No targetPubkey found for second-order search",
);
} }
} else if (searchFilter.subscriptionType === "profile") { } else if (searchFilter.subscriptionType === "profile") {
// For general profile searches, perform second-order search for each found profile // For general profile searches, perform second-order search for each found profile
for (const profile of dedupedProfiles) { for (const profile of dedupedProfiles) {
if (profile.pubkey) { if (profile.pubkey) {
console.log("subscription_search: Triggering second-order search for general profile:", profile.pubkey); console.log(
"subscription_search: Triggering second-order search for general profile:",
profile.pubkey,
);
performSecondOrderSearchInBackground( performSecondOrderSearchInBackground(
"n", "n",
dedupedProfiles, dedupedProfiles,
@ -786,7 +839,10 @@ function processProfileEoseResults(
} }
} }
} else { } else {
console.log("subscription_search: No second-order search triggered for subscription type:", searchFilter.subscriptionType); console.log(
"subscription_search: No second-order search triggered for subscription type:",
searchFilter.subscriptionType,
);
} }
return { return {
@ -896,7 +952,12 @@ async function performSecondOrderSearchInBackground(
callbacks?: SearchCallbacks, callbacks?: SearchCallbacks,
) { ) {
try { try {
console.log("subscription_search: Starting second-order search for", searchType, "with targetPubkey:", targetPubkey); console.log(
"subscription_search: Starting second-order search for",
searchType,
"with targetPubkey:",
targetPubkey,
);
const ndk = get(ndkInstance); const ndk = get(ndkInstance);
let allSecondOrderEvents: NDKEvent[] = []; let allSecondOrderEvents: NDKEvent[] = [];
@ -910,20 +971,30 @@ async function performSecondOrderSearchInBackground(
const searchPromise = (async () => { const searchPromise = (async () => {
if (searchType === "n" && targetPubkey) { if (searchType === "n" && targetPubkey) {
console.log("subscription_search: Searching for events mentioning pubkey:", targetPubkey); console.log(
"subscription_search: Searching for events mentioning pubkey:",
targetPubkey,
);
// AI-NOTE: 2025-01-24 - Use only active relays for second-order profile search to prevent hanging // AI-NOTE: 2025-01-24 - Use only active relays for second-order profile search to prevent hanging
const activeRelays = [...get(activeInboxRelays), ...get(activeOutboxRelays)]; const activeRelays = [
...get(activeInboxRelays),
...get(activeOutboxRelays),
];
const availableRelays = activeRelays const availableRelays = activeRelays
.map(url => ndk.pool.relays.get(url)) .map((url) => ndk.pool.relays.get(url))
.filter((relay): relay is any => relay !== undefined); .filter((relay): relay is any => relay !== undefined);
const relaySet = new NDKRelaySet( const relaySet = new NDKRelaySet(
new Set(availableRelays), new Set(availableRelays),
ndk ndk,
);
console.log(
"subscription_search: Using",
activeRelays.length,
"active relays for second-order search",
); );
console.log("subscription_search: Using", activeRelays.length, "active relays for second-order search");
// Search for events that mention this pubkey via p-tags // Search for events that mention this pubkey via p-tags
const pTagFilter = { "#p": [targetPubkey], limit: 50 }; // AI-NOTE: 2025-01-24 - Limit results to prevent hanging const pTagFilter = { "#p": [targetPubkey], limit: 50 }; // AI-NOTE: 2025-01-24 - Limit results to prevent hanging
const pTagEvents = await ndk.fetchEvents( const pTagEvents = await ndk.fetchEvents(
@ -931,8 +1002,13 @@ async function performSecondOrderSearchInBackground(
{ closeOnEose: true }, { closeOnEose: true },
relaySet, relaySet,
); );
console.log("subscription_search: Found", pTagEvents.size, "events with p-tag for", targetPubkey); console.log(
"subscription_search: Found",
pTagEvents.size,
"events with p-tag for",
targetPubkey,
);
// AI-NOTE: 2025-01-24 - Also search for events written by this pubkey with limit // AI-NOTE: 2025-01-24 - Also search for events written by this pubkey with limit
const authorFilter = { authors: [targetPubkey], limit: 50 }; // AI-NOTE: 2025-01-24 - Limit results to prevent hanging const authorFilter = { authors: [targetPubkey], limit: 50 }; // AI-NOTE: 2025-01-24 - Limit results to prevent hanging
const authorEvents = await ndk.fetchEvents( const authorEvents = await ndk.fetchEvents(
@ -940,14 +1016,27 @@ async function performSecondOrderSearchInBackground(
{ closeOnEose: true }, { closeOnEose: true },
relaySet, relaySet,
); );
console.log("subscription_search: Found", authorEvents.size, "events written by", targetPubkey); console.log(
"subscription_search: Found",
authorEvents.size,
"events written by",
targetPubkey,
);
// Filter out unwanted events from both sets // Filter out unwanted events from both sets
const filteredPTagEvents = filterUnwantedEvents(Array.from(pTagEvents)); const filteredPTagEvents = filterUnwantedEvents(Array.from(pTagEvents));
const filteredAuthorEvents = filterUnwantedEvents(Array.from(authorEvents)); const filteredAuthorEvents = filterUnwantedEvents(
Array.from(authorEvents),
console.log("subscription_search: After filtering unwanted events:", filteredPTagEvents.length, "p-tag events,", filteredAuthorEvents.length, "author events"); );
console.log(
"subscription_search: After filtering unwanted events:",
filteredPTagEvents.length,
"p-tag events,",
filteredAuthorEvents.length,
"author events",
);
// Combine both sets of events // Combine both sets of events
allSecondOrderEvents = [...filteredPTagEvents, ...filteredAuthorEvents]; allSecondOrderEvents = [...filteredPTagEvents, ...filteredAuthorEvents];
} else if (searchType === "d") { } else if (searchType === "d") {
@ -959,17 +1048,23 @@ async function performSecondOrderSearchInBackground(
const [eTagEvents, aTagEvents] = await Promise.all([ const [eTagEvents, aTagEvents] = await Promise.all([
eventIds.size > 0 eventIds.size > 0
? ndk.fetchEvents( ? ndk.fetchEvents(
{ "#e": Array.from(eventIds), limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS }, {
{ closeOnEose: true }, "#e": Array.from(eventIds),
relaySet, limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS,
) },
{ closeOnEose: true },
relaySet,
)
: Promise.resolve([]), : Promise.resolve([]),
addresses.size > 0 addresses.size > 0
? ndk.fetchEvents( ? ndk.fetchEvents(
{ "#a": Array.from(addresses), limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS }, {
{ closeOnEose: true }, "#a": Array.from(addresses),
relaySet, limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS,
) },
{ closeOnEose: true },
relaySet,
)
: Promise.resolve([]), : Promise.resolve([]),
]); ]);
// Filter out unwanted events // Filter out unwanted events
@ -1003,17 +1098,20 @@ async function performSecondOrderSearchInBackground(
.sort((a, b) => (b.created_at || 0) - (a.created_at || 0)) .sort((a, b) => (b.created_at || 0) - (a.created_at || 0))
.slice(0, SEARCH_LIMITS.SECOND_ORDER_RESULTS); .slice(0, SEARCH_LIMITS.SECOND_ORDER_RESULTS);
console.log("subscription_search: Second-order search completed with", sortedSecondOrder.length, "results"); console.log(
"subscription_search: Second-order search completed with",
sortedSecondOrder.length,
"results",
);
// Update the search results with second-order events // Update the search results with second-order events
const result: SearchResult = { const result: SearchResult = {
events: firstOrderEvents, events: firstOrderEvents,
secondOrder: sortedSecondOrder, secondOrder: sortedSecondOrder,
tTagEvents: [], tTagEvents: [],
eventIds: eventIds: searchType === "n"
searchType === "n" ? new Set(firstOrderEvents.map((p) => p.id))
? new Set(firstOrderEvents.map((p) => p.id)) : eventIds,
: eventIds,
addresses: searchType === "n" ? new Set() : addresses, addresses: searchType === "n" ? new Set() : addresses,
searchType: searchType, searchType: searchType,
searchTerm: "", // This will be set by the caller searchTerm: "", // This will be set by the caller
@ -1021,10 +1119,16 @@ async function performSecondOrderSearchInBackground(
// Notify UI of updated results // Notify UI of updated results
if (callbacks?.onSecondOrderUpdate) { if (callbacks?.onSecondOrderUpdate) {
console.log("subscription_search: Calling onSecondOrderUpdate callback with", sortedSecondOrder.length, "second-order events"); console.log(
"subscription_search: Calling onSecondOrderUpdate callback with",
sortedSecondOrder.length,
"second-order events",
);
callbacks.onSecondOrderUpdate(result); callbacks.onSecondOrderUpdate(result);
} else { } else {
console.log("subscription_search: No onSecondOrderUpdate callback available"); console.log(
"subscription_search: No onSecondOrderUpdate callback available",
);
} }
})(); })();

117
src/lib/utils/tag_event_fetch.ts

@ -1,7 +1,7 @@
import type { NDKEvent } from "@nostr-dev-kit/ndk"; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import { ndkInstance } from "../ndk"; import { ndkInstance } from "../ndk";
import { get } from "svelte/store"; import { get } from "svelte/store";
import { extractPubkeysFromEvents, batchFetchProfiles } from "./profileCache"; import { batchFetchProfiles, extractPubkeysFromEvents } from "./profileCache";
// Constants for publication event kinds // Constants for publication event kinds
const INDEX_EVENT_KIND = 30040; const INDEX_EVENT_KIND = 30040;
@ -17,12 +17,12 @@ export interface TagExpansionResult {
/** /**
* Fetches publications and their content events from relays based on tags * Fetches publications and their content events from relays based on tags
* *
* This function handles the relay-based fetching portion of tag expansion: * This function handles the relay-based fetching portion of tag expansion:
* 1. Fetches publication index events that have any of the specified tags * 1. Fetches publication index events that have any of the specified tags
* 2. Extracts content event references from those publications * 2. Extracts content event references from those publications
* 3. Fetches the referenced content events * 3. Fetches the referenced content events
* *
* @param tags Array of tags to search for in publications * @param tags Array of tags to search for in publications
* @param existingEventIds Set of existing event IDs to avoid duplicates * @param existingEventIds Set of existing event IDs to avoid duplicates
* @param baseEvents Array of base events to check for existing content * @param baseEvents Array of base events to check for existing content
@ -33,44 +33,46 @@ export async function fetchTaggedEventsFromRelays(
tags: string[], tags: string[],
existingEventIds: Set<string>, existingEventIds: Set<string>,
baseEvents: NDKEvent[], baseEvents: NDKEvent[],
debug?: (...args: any[]) => void debug?: (...args: any[]) => void,
): Promise<TagExpansionResult> { ): Promise<TagExpansionResult> {
const log = debug || console.debug; const log = debug || console.debug;
log("Fetching from relays for tags:", tags); log("Fetching from relays for tags:", tags);
// Fetch publications that have any of the specified tags // Fetch publications that have any of the specified tags
const ndk = get(ndkInstance); const ndk = get(ndkInstance);
const taggedPublications = await ndk.fetchEvents({ const taggedPublications = await ndk.fetchEvents({
kinds: [INDEX_EVENT_KIND], kinds: [INDEX_EVENT_KIND],
"#t": tags, // Match any of these tags "#t": tags, // Match any of these tags
limit: 30 // Reasonable default limit limit: 30, // Reasonable default limit
}); });
log("Found tagged publications from relays:", taggedPublications.size); log("Found tagged publications from relays:", taggedPublications.size);
// Filter to avoid duplicates // Filter to avoid duplicates
const newPublications = Array.from(taggedPublications).filter( const newPublications = Array.from(taggedPublications).filter(
(event: NDKEvent) => !existingEventIds.has(event.id) (event: NDKEvent) => !existingEventIds.has(event.id),
); );
// Extract content event d-tags from new publications // Extract content event d-tags from new publications
const contentEventDTags = new Set<string>(); const contentEventDTags = new Set<string>();
const existingContentDTags = new Set( const existingContentDTags = new Set(
baseEvents baseEvents
.filter(e => e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind)) .filter((e) =>
.map(e => e.tagValue("d")) e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind)
.filter(d => d !== undefined) )
.map((e) => e.tagValue("d"))
.filter((d) => d !== undefined),
); );
newPublications.forEach((event: NDKEvent) => { newPublications.forEach((event: NDKEvent) => {
const aTags = event.getMatchingTags("a"); const aTags = event.getMatchingTags("a");
aTags.forEach((tag: string[]) => { aTags.forEach((tag: string[]) => {
// Parse the 'a' tag identifier: kind:pubkey:d-tag // Parse the 'a' tag identifier: kind:pubkey:d-tag
if (tag[1]) { if (tag[1]) {
const parts = tag[1].split(':'); const parts = tag[1].split(":");
if (parts.length >= 3) { if (parts.length >= 3) {
const dTag = parts.slice(2).join(':'); // Handle d-tags with colons const dTag = parts.slice(2).join(":"); // Handle d-tags with colons
if (!existingContentDTags.has(dTag)) { if (!existingContentDTags.has(dTag)) {
contentEventDTags.add(dTag); contentEventDTags.add(dTag);
} }
@ -78,7 +80,7 @@ export async function fetchTaggedEventsFromRelays(
} }
}); });
}); });
// Fetch the content events // Fetch the content events
let newContentEvents: NDKEvent[] = []; let newContentEvents: NDKEvent[] = [];
if (contentEventDTags.size > 0) { if (contentEventDTags.size > 0) {
@ -88,21 +90,21 @@ export async function fetchTaggedEventsFromRelays(
}); });
newContentEvents = Array.from(contentEventsSet); newContentEvents = Array.from(contentEventsSet);
} }
return { return {
publications: newPublications, publications: newPublications,
contentEvents: newContentEvents contentEvents: newContentEvents,
}; };
} }
/** /**
* Searches through already fetched events for publications with specified tags * Searches through already fetched events for publications with specified tags
* *
* This function handles the local search portion of tag expansion: * This function handles the local search portion of tag expansion:
* 1. Searches through existing events for publications with matching tags * 1. Searches through existing events for publications with matching tags
* 2. Extracts content event references from those publications * 2. Extracts content event references from those publications
* 3. Finds the referenced content events in existing events * 3. Finds the referenced content events in existing events
* *
* @param allEvents Array of all fetched events to search through * @param allEvents Array of all fetched events to search through
* @param tags Array of tags to search for in publications * @param tags Array of tags to search for in publications
* @param existingEventIds Set of existing event IDs to avoid duplicates * @param existingEventIds Set of existing event IDs to avoid duplicates
@ -115,42 +117,44 @@ export function findTaggedEventsInFetched(
tags: string[], tags: string[],
existingEventIds: Set<string>, existingEventIds: Set<string>,
baseEvents: NDKEvent[], baseEvents: NDKEvent[],
debug?: (...args: any[]) => void debug?: (...args: any[]) => void,
): TagExpansionResult { ): TagExpansionResult {
const log = debug || console.debug; const log = debug || console.debug;
log("Searching through already fetched events for tags:", tags); log("Searching through already fetched events for tags:", tags);
// Find publications in allEvents that have the specified tags // Find publications in allEvents that have the specified tags
const taggedPublications = allEvents.filter(event => { const taggedPublications = allEvents.filter((event) => {
if (event.kind !== INDEX_EVENT_KIND) return false; if (event.kind !== INDEX_EVENT_KIND) return false;
if (existingEventIds.has(event.id)) return false; // Skip base events if (existingEventIds.has(event.id)) return false; // Skip base events
// Check if event has any of the specified tags // Check if event has any of the specified tags
const eventTags = event.getMatchingTags("t").map(tag => tag[1]); const eventTags = event.getMatchingTags("t").map((tag) => tag[1]);
return tags.some(tag => eventTags.includes(tag)); return tags.some((tag) => eventTags.includes(tag));
}); });
const newPublications = taggedPublications; const newPublications = taggedPublications;
log("Found", newPublications.length, "publications in fetched events"); log("Found", newPublications.length, "publications in fetched events");
// For content events, also search in allEvents // For content events, also search in allEvents
const existingContentDTags = new Set( const existingContentDTags = new Set(
baseEvents baseEvents
.filter(e => e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind)) .filter((e) =>
.map(e => e.tagValue("d")) e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind)
.filter(d => d !== undefined) )
.map((e) => e.tagValue("d"))
.filter((d) => d !== undefined),
); );
const contentEventDTags = new Set<string>(); const contentEventDTags = new Set<string>();
newPublications.forEach((event: NDKEvent) => { newPublications.forEach((event: NDKEvent) => {
const aTags = event.getMatchingTags("a"); const aTags = event.getMatchingTags("a");
aTags.forEach((tag: string[]) => { aTags.forEach((tag: string[]) => {
// Parse the 'a' tag identifier: kind:pubkey:d-tag // Parse the 'a' tag identifier: kind:pubkey:d-tag
if (tag[1]) { if (tag[1]) {
const parts = tag[1].split(':'); const parts = tag[1].split(":");
if (parts.length >= 3) { if (parts.length >= 3) {
const dTag = parts.slice(2).join(':'); // Handle d-tags with colons const dTag = parts.slice(2).join(":"); // Handle d-tags with colons
if (!existingContentDTags.has(dTag)) { if (!existingContentDTags.has(dTag)) {
contentEventDTags.add(dTag); contentEventDTags.add(dTag);
} }
@ -158,23 +162,23 @@ export function findTaggedEventsInFetched(
} }
}); });
}); });
// Find content events in allEvents // Find content events in allEvents
const newContentEvents = allEvents.filter(event => { const newContentEvents = allEvents.filter((event) => {
if (!CONTENT_EVENT_KINDS.includes(event.kind || 0)) return false; if (!CONTENT_EVENT_KINDS.includes(event.kind || 0)) return false;
const dTag = event.tagValue("d"); const dTag = event.tagValue("d");
return dTag !== undefined && contentEventDTags.has(dTag); return dTag !== undefined && contentEventDTags.has(dTag);
}); });
return { return {
publications: newPublications, publications: newPublications,
contentEvents: newContentEvents contentEvents: newContentEvents,
}; };
} }
/** /**
* Fetches profiles for new events and updates progress * Fetches profiles for new events and updates progress
* *
* @param newPublications Array of new publication events * @param newPublications Array of new publication events
* @param newContentEvents Array of new content events * @param newContentEvents Array of new content events
* @param onProgressUpdate Callback to update progress state * @param onProgressUpdate Callback to update progress state
@ -184,23 +188,32 @@ export function findTaggedEventsInFetched(
export async function fetchProfilesForNewEvents( export async function fetchProfilesForNewEvents(
newPublications: NDKEvent[], newPublications: NDKEvent[],
newContentEvents: NDKEvent[], newContentEvents: NDKEvent[],
onProgressUpdate: (progress: { current: number; total: number } | null) => void, onProgressUpdate: (
debug?: (...args: any[]) => void progress: { current: number; total: number } | null,
) => void,
debug?: (...args: any[]) => void,
): Promise<void> { ): Promise<void> {
const log = debug || console.debug; const log = debug || console.debug;
// Extract pubkeys from new events // Extract pubkeys from new events
const newPubkeys = extractPubkeysFromEvents([...newPublications, ...newContentEvents]); const newPubkeys = extractPubkeysFromEvents([
...newPublications,
...newContentEvents,
]);
if (newPubkeys.size > 0) { if (newPubkeys.size > 0) {
log("Fetching profiles for", newPubkeys.size, "new pubkeys from tag expansion"); log(
"Fetching profiles for",
newPubkeys.size,
"new pubkeys from tag expansion",
);
onProgressUpdate({ current: 0, total: newPubkeys.size }); onProgressUpdate({ current: 0, total: newPubkeys.size });
await batchFetchProfiles(Array.from(newPubkeys), (fetched, total) => { await batchFetchProfiles(Array.from(newPubkeys), (fetched, total) => {
onProgressUpdate({ current: fetched, total }); onProgressUpdate({ current: fetched, total });
}); });
onProgressUpdate(null); onProgressUpdate(null);
} }
} }

91
src/lib/utils/websocket_utils.ts

@ -18,7 +18,7 @@ export interface NostrFilter {
ids?: string[]; ids?: string[];
authors?: string[]; authors?: string[];
kinds?: number[]; kinds?: number[];
[tag: `#${string}`]: string[] | undefined; [tag: `#${string}`]: string[] | undefined;
since?: number; since?: number;
until?: number; until?: number;
limit?: number; limit?: number;
@ -28,14 +28,16 @@ type ResolveCallback<T> = (value: T | PromiseLike<T>) => void;
type RejectCallback = (reason?: any) => void; type RejectCallback = (reason?: any) => void;
type EventHandler = (ev: Event) => void; type EventHandler = (ev: Event) => void;
type MessageEventHandler = (ev: MessageEvent) => void; type MessageEventHandler = (ev: MessageEvent) => void;
type EventHandlerReject = (reject: RejectCallback) => EventHandler; type EventHandlerReject = (reject: RejectCallback) => EventHandler;
type EventHandlerResolve<T> = (resolve: ResolveCallback<T>) => (reject: RejectCallback) => MessageEventHandler; type EventHandlerResolve<T> = (
resolve: ResolveCallback<T>,
) => (reject: RejectCallback) => MessageEventHandler;
function handleMessage( function handleMessage(
ev: MessageEvent, ev: MessageEvent,
subId: string, subId: string,
resolve: (event: NostrEvent) => void, resolve: (event: NostrEvent) => void,
reject: (reason: any) => void reject: (reason: any) => void,
) { ) {
const data = JSON.parse(ev.data); const data = JSON.parse(ev.data);
@ -64,43 +66,48 @@ function handleMessage(
function handleError( function handleError(
ev: Event, ev: Event,
reject: (reason: any) => void reject: (reason: any) => void,
) { ) {
reject(ev); reject(ev);
} }
export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent | null> { export async function fetchNostrEvent(
filter: NostrFilter,
): Promise<NostrEvent | null> {
// AI-NOTE: Updated to use active relay stores instead of hardcoded relay URL // AI-NOTE: Updated to use active relay stores instead of hardcoded relay URL
// This ensures the function uses the user's configured relays and can find events // This ensures the function uses the user's configured relays and can find events
// across multiple relays rather than being limited to a single hardcoded relay. // across multiple relays rather than being limited to a single hardcoded relay.
// Get available relays from the active relay stores // Get available relays from the active relay stores
const inboxRelays = get(activeInboxRelays); const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays); const outboxRelays = get(activeOutboxRelays);
// Combine all available relays, prioritizing inbox relays // Combine all available relays, prioritizing inbox relays
let availableRelays = [...inboxRelays, ...outboxRelays]; let availableRelays = [...inboxRelays, ...outboxRelays];
// AI-NOTE: Use fallback relays when stores are empty (e.g., during SSR) // AI-NOTE: Use fallback relays when stores are empty (e.g., during SSR)
// This ensures publications can still load even when relay stores haven't been populated // This ensures publications can still load even when relay stores haven't been populated
if (availableRelays.length === 0) { if (availableRelays.length === 0) {
// Import fallback relays from constants // Import fallback relays from constants
const { searchRelays, secondaryRelays } = await import("../consts.ts"); const { searchRelays, secondaryRelays } = await import("../consts.ts");
availableRelays = [...searchRelays, ...secondaryRelays]; availableRelays = [...searchRelays, ...secondaryRelays];
if (availableRelays.length === 0) { if (availableRelays.length === 0) {
availableRelays = ["wss://thecitadel.nostr1.com"]; availableRelays = ["wss://thecitadel.nostr1.com"];
} }
} }
// AI-NOTE: 2025-01-24 - Enhanced relay strategy for better event discovery // AI-NOTE: 2025-01-24 - Enhanced relay strategy for better event discovery
// Always include search relays in the relay set for comprehensive event discovery // Always include search relays in the relay set for comprehensive event discovery
const { searchRelays, secondaryRelays } = await import("../consts.ts"); const { searchRelays, secondaryRelays } = await import("../consts.ts");
const allRelays = [...availableRelays, ...searchRelays, ...secondaryRelays]; const allRelays = [...availableRelays, ...searchRelays, ...secondaryRelays];
const uniqueRelays = [...new Set(allRelays)]; // Remove duplicates const uniqueRelays = [...new Set(allRelays)]; // Remove duplicates
console.debug(`[fetchNostrEvent] Trying ${uniqueRelays.length} relays for event discovery:`, uniqueRelays); console.debug(
`[fetchNostrEvent] Trying ${uniqueRelays.length} relays for event discovery:`,
uniqueRelays,
);
// Try all available relays in parallel and return the first result // Try all available relays in parallel and return the first result
const relayPromises = uniqueRelays.map(async (relay) => { const relayPromises = uniqueRelays.map(async (relay) => {
try { try {
@ -110,16 +117,15 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent |
// AI-NOTE: Currying is used here to abstract the internal handler logic away from the WebSocket // AI-NOTE: Currying is used here to abstract the internal handler logic away from the WebSocket
// handling logic. The message and error handlers themselves can be refactored without affecting // handling logic. The message and error handlers themselves can be refactored without affecting
// the WebSocket handling logic. // the WebSocket handling logic.
const curriedMessageHandler: (subId: string) => (resolve: ResolveCallback<NostrEvent>) => (reject: RejectCallback) => MessageEventHandler = const curriedMessageHandler: (
(subId) => subId: string,
(resolve) => ) => (
(reject) => resolve: ResolveCallback<NostrEvent>,
(ev: MessageEvent) => ) => (reject: RejectCallback) => MessageEventHandler =
handleMessage(ev, subId, resolve, reject); (subId) => (resolve) => (reject) => (ev: MessageEvent) =>
const curriedErrorHandler: EventHandlerReject = handleMessage(ev, subId, resolve, reject);
(reject) => const curriedErrorHandler: EventHandlerReject = (reject) => (ev: Event) =>
(ev: Event) => handleError(ev, reject);
handleError(ev, reject);
// AI-NOTE: These variables store references to partially-applied handlers so that the `finally` // AI-NOTE: These variables store references to partially-applied handlers so that the `finally`
// block receives the correct references to clean up the listeners. // block receives the correct references to clean up the listeners.
@ -133,20 +139,20 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent |
ws.addEventListener("message", messageHandler); ws.addEventListener("message", messageHandler);
ws.addEventListener("error", errorHandler); ws.addEventListener("error", errorHandler);
}) })
.withTimeout(2000) .withTimeout(2000)
.finally(() => { .finally(() => {
ws.removeEventListener("message", messageHandler); ws.removeEventListener("message", messageHandler);
ws.removeEventListener("error", errorHandler); ws.removeEventListener("error", errorHandler);
WebSocketPool.instance.release(ws); WebSocketPool.instance.release(ws);
}); });
ws.send(JSON.stringify(["REQ", subId, filter])); ws.send(JSON.stringify(["REQ", subId, filter]));
const result = await res; const result = await res;
if (result) { if (result) {
return result; return result;
} }
return null; return null;
} catch (err) { } catch (err) {
return null; return null;
@ -155,14 +161,14 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent |
// Wait for all relay results and find the first successful one // Wait for all relay results and find the first successful one
const results = await Promise.allSettled(relayPromises); const results = await Promise.allSettled(relayPromises);
// Find the first successful result // Find the first successful result
for (const result of results) { for (const result of results) {
if (result.status === 'fulfilled' && result.value) { if (result.status === "fulfilled" && result.value) {
return result.value; return result.value;
} }
} }
return null; return null;
} }
@ -191,7 +197,10 @@ export async function fetchEventByDTag(dTag: string): Promise<NostrEvent> {
try { try {
const event = await fetchNostrEvent({ "#d": [dTag], limit: 1 }); const event = await fetchNostrEvent({ "#d": [dTag], limit: 1 });
if (!event) { if (!event) {
error(404, `Event not found for d-tag: ${dTag}. href="/events?d=${dTag}"`); error(
404,
`Event not found for d-tag: ${dTag}. href="/events?d=${dTag}"`,
);
} }
return event; return event;
} catch (err) { } catch (err) {
@ -215,7 +224,10 @@ export async function fetchEventByNaddr(naddr: string): Promise<NostrEvent> {
}; };
const event = await fetchNostrEvent(filter); const event = await fetchNostrEvent(filter);
if (!event) { if (!event) {
error(404, `Event not found for naddr: ${naddr}. href="/events?id=${naddr}"`); error(
404,
`Event not found for naddr: ${naddr}. href="/events?id=${naddr}"`,
);
} }
return event; return event;
} catch (err) { } catch (err) {
@ -234,7 +246,10 @@ export async function fetchEventByNevent(nevent: string): Promise<NostrEvent> {
const decoded = neventDecode(nevent); const decoded = neventDecode(nevent);
const event = await fetchNostrEvent({ ids: [decoded.id], limit: 1 }); const event = await fetchNostrEvent({ ids: [decoded.id], limit: 1 });
if (!event) { if (!event) {
error(404, `Event not found for nevent: ${nevent}. href="/events?id=${nevent}"`); error(
404,
`Event not found for nevent: ${nevent}. href="/events?id=${nevent}"`,
);
} }
return event; return event;
} catch (err) { } catch (err) {

141
src/routes/+layout.ts

@ -1,141 +0,0 @@
import { getPersistedLogin, initNdk, ndkInstance } from "../lib/ndk.ts";
import {
loginWithExtension,
loginWithAmber,
loginWithNpub,
} from "../lib/stores/userStore.ts";
import { loginMethodStorageKey } from "../lib/stores/userStore.ts";
import Pharos, { pharosInstance } from "../lib/parser.ts";
import type { LayoutLoad } from "./$types";
import { get } from "svelte/store";
import { browser } from "$app/environment";
// AI-NOTE: SSR enabled for better SEO and OpenGraph support
export const ssr = true;
/**
* Attempts to restore the user's authentication session from localStorage.
* Handles extension, Amber (NIP-46), and npub login methods.
* Only runs on client-side.
*/
function restoreAuthSession() {
// Only run on client-side
if (!browser) return;
try {
const pubkey = getPersistedLogin();
const loginMethod = localStorage.getItem(loginMethodStorageKey);
const logoutFlag = localStorage.getItem("alexandria/logout/flag");
console.log("Layout load - persisted pubkey:", pubkey);
console.log("Layout load - persisted login method:", loginMethod);
console.log("Layout load - logout flag:", logoutFlag);
console.log("All localStorage keys:", Object.keys(localStorage));
if (pubkey && loginMethod && !logoutFlag) {
if (loginMethod === "extension") {
console.log("Restoring extension login...");
loginWithExtension();
} else if (loginMethod === "amber") {
// Attempt to restore Amber (NIP-46) session from localStorage
const relay = "wss://relay.nsec.app";
const localNsec = localStorage.getItem("amber/nsec");
if (localNsec) {
import("@nostr-dev-kit/ndk").then(
async ({ NDKNip46Signer }) => {
const ndk = get(ndkInstance);
try {
// deno-lint-ignore no-explicit-any
const amberSigner = (NDKNip46Signer as any).nostrconnect(
ndk,
relay,
localNsec,
{
name: "Alexandria",
perms: "sign_event:1;sign_event:4",
},
);
// Try to reconnect (blockUntilReady will resolve if Amber is running and session is valid)
await amberSigner.blockUntilReady();
const user = await amberSigner.user();
await loginWithAmber(amberSigner, user);
console.log("Amber session restored.");
} catch {
// If reconnection fails, automatically fallback to npub-only mode
console.warn(
"Amber session could not be restored. Falling back to npub-only mode.",
);
try {
// Set the flag first, before login
localStorage.setItem("alexandria/amber/fallback", "1");
console.log("Set fallback flag in localStorage");
// Small delay to ensure flag is set
await new Promise((resolve) => setTimeout(resolve, 100));
await loginWithNpub(pubkey);
console.log("Successfully fell back to npub-only mode.");
} catch (fallbackErr) {
console.error(
"Failed to fallback to npub-only mode:",
fallbackErr,
);
}
}
},
);
} else {
// No session data, automatically fallback to npub-only mode
console.log(
"No Amber session data found. Falling back to npub-only mode.",
);
// Set the flag first, before login
localStorage.setItem("alexandria/amber/fallback", "1");
console.log("Set fallback flag in localStorage");
// Small delay to ensure flag is set
setTimeout(async () => {
try {
await loginWithNpub(pubkey);
console.log("Successfully fell back to npub-only mode.");
} catch (fallbackErr) {
console.error(
"Failed to fallback to npub-only mode:",
fallbackErr,
);
}
}, 100);
}
} else if (loginMethod === "npub") {
console.log("Restoring npub login...");
loginWithNpub(pubkey);
}
} else if (logoutFlag) {
console.log("Skipping auto-login due to logout flag");
localStorage.removeItem("alexandria/logout/flag");
}
} catch (e) {
console.warn(
`Failed to restore login: ${e}\n\nContinuing with anonymous session.`,
);
}
}
export const load: LayoutLoad = () => {
// Initialize NDK with new relay management system
const ndk = initNdk();
ndkInstance.set(ndk);
// Only restore auth session on client-side
if (browser) {
restoreAuthSession();
}
const parser = new Pharos(ndk);
pharosInstance.set(parser);
return {
ndk,
parser,
};
};

69
src/routes/events/+page.svelte

@ -1,6 +1,5 @@
<script lang="ts"> <script lang="ts">
import { Heading, P } from "flowbite-svelte"; import { Heading, P } from "flowbite-svelte";
import { onMount } from "svelte";
import { page } from "$app/stores"; import { page } from "$app/stores";
import { goto } from "$app/navigation"; import { goto } from "$app/navigation";
import type { NDKEvent } from "$lib/utils/nostrUtils"; import type { NDKEvent } from "$lib/utils/nostrUtils";
@ -8,19 +7,18 @@
import EventDetails from "$lib/components/EventDetails.svelte"; import EventDetails from "$lib/components/EventDetails.svelte";
import RelayActions from "$lib/components/RelayActions.svelte"; import RelayActions from "$lib/components/RelayActions.svelte";
import CommentBox from "$lib/components/CommentBox.svelte"; import CommentBox from "$lib/components/CommentBox.svelte";
import CommentViewer from "$lib/components/CommentViewer.svelte"; import CommentViewer from "$lib/components/CommentViewer.svelte";
import { userStore } from "$lib/stores/userStore";
import { userBadge } from "$lib/snippets/UserSnippets.svelte"; import { userBadge } from "$lib/snippets/UserSnippets.svelte";
import { getMatchingTags, toNpub, getUserMetadata } from "$lib/utils/nostrUtils"; import { getMatchingTags, toNpub, getUserMetadata } from "$lib/utils/nostrUtils";
import EventInput from "$lib/components/EventInput.svelte"; import EventInput from "$lib/components/EventInput.svelte";
import { userPubkey, isLoggedIn } from "$lib/stores/authStore.Svelte"; import { userPubkey, isLoggedIn } from "$lib/stores/authStore.Svelte";
import CopyToClipboard from "$lib/components/util/CopyToClipboard.svelte"; import CopyToClipboard from "$lib/components/util/CopyToClipboard.svelte";
import { neventEncode, naddrEncode } from "$lib/utils"; import { neventEncode, naddrEncode } from "$lib/utils";
import { activeInboxRelays, activeOutboxRelays, logCurrentRelayConfiguration } from "$lib/ndk"; import { activeInboxRelays } from "$lib/ndk";
import { getEventType } from "$lib/utils/mime"; import { getEventType } from "$lib/utils/mime";
import ViewPublicationLink from "$lib/components/util/ViewPublicationLink.svelte"; import ViewPublicationLink from "$lib/components/util/ViewPublicationLink.svelte";
import { checkCommunity } from "$lib/utils/search_utility"; import { checkCommunity } from "$lib/utils/search_utility";
import { parseRepostContent, parseContent } from "$lib/utils/notification_utils"; import EmbeddedEvent from "$lib/components/EmbeddedEvent.svelte";
let loading = $state(false); let loading = $state(false);
let error = $state<string | null>(null); let error = $state<string | null>(null);
@ -44,7 +42,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
lud16?: string; lud16?: string;
nip05?: string; nip05?: string;
} | null>(null); } | null>(null);
let user = $state($userStore);
let userRelayPreference = $state(false); let userRelayPreference = $state(false);
let showSidePanel = $state(false); let showSidePanel = $state(false);
let searchInProgress = $state(false); let searchInProgress = $state(false);
@ -52,23 +49,11 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
let communityStatus = $state<Record<string, boolean>>({}); let communityStatus = $state<Record<string, boolean>>({});
let searchResultsCollapsed = $state(false); let searchResultsCollapsed = $state(false);
userStore.subscribe((val) => (user = val));
function handleEventFound(newEvent: NDKEvent) { function handleEventFound(newEvent: NDKEvent) {
event = newEvent; event = newEvent;
showSidePanel = true; showSidePanel = true;
// AI-NOTE: 2025-01-24 - Preserve search results to allow navigation through them // AI-NOTE: 2025-01-24 - Preserve search results to allow navigation through them
// Don't clear search results when showing a single event - this allows users to browse through results // Don't clear search results when showing a single event - this allows users to browse through results
// searchResults = [];
// secondOrderResults = [];
// tTagResults = [];
// originalEventIds = new Set();
// originalAddresses = new Set();
// searchType = null;
// searchTerm = null;
// searchInProgress = false;
// secondOrderSearchMessage = null;
if (newEvent.kind === 0) { if (newEvent.kind === 0) {
try { try {
profile = JSON.parse(newEvent.content); profile = JSON.parse(newEvent.content);
@ -209,10 +194,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
// AI-NOTE: 2025-01-24 - Cache profiles for all search results // AI-NOTE: 2025-01-24 - Cache profiles for all search results
cacheProfilesForEvents([...results, ...secondOrder, ...tTagEvents]); cacheProfilesForEvents([...results, ...secondOrder, ...tTagEvents]);
// Don't clear the current event - let the user continue viewing it
// event = null;
// profile = null;
} }
// AI-NOTE: 2025-01-24 - Function to cache profiles for multiple events // AI-NOTE: 2025-01-24 - Function to cache profiles for multiple events
@ -330,10 +311,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
return neventEncode(event, $activeInboxRelays); return neventEncode(event, $activeInboxRelays);
} }
function getNaddrUrl(event: NDKEvent): string {
return naddrEncode(event, $activeInboxRelays);
}
function isAddressableEvent(event: NDKEvent): boolean { function isAddressableEvent(event: NDKEvent): boolean {
return getEventType(event.kind || 0) === "addressable"; return getEventType(event.kind || 0) === "addressable";
} }
@ -397,26 +374,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
communityStatus = { ...communityStatus, ...newCommunityStatus }; communityStatus = { ...communityStatus, ...newCommunityStatus };
} }
// AI-NOTE: Refactored to avoid blocking $effect with logging operations
// Reactive effect to log relay configuration when stores change - non-blocking approach
$effect.pre(() => {
const inboxRelays = $activeInboxRelays;
const outboxRelays = $activeOutboxRelays;
// Only log if we have relays (not empty arrays)
if (inboxRelays.length > 0 || outboxRelays.length > 0) {
// Defer logging to avoid blocking the reactive system
requestAnimationFrame(() => {
console.log('🔌 Events Page - Relay Configuration Updated:');
console.log('📥 Inbox Relays:', inboxRelays);
console.log('📤 Outbox Relays:', outboxRelays);
console.log(`📊 Total: ${inboxRelays.length} inbox, ${outboxRelays.length} outbox`);
});
}
});
</script> </script>
<div class="w-full flex justify-center"> <div class="w-full flex justify-center">
@ -617,11 +574,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
<div <div
class="text-sm text-gray-800 dark:text-gray-200 mt-1 line-clamp-2 break-words" class="text-sm text-gray-800 dark:text-gray-200 mt-1 line-clamp-2 break-words"
> >
{#await ((result.kind === 6 || result.kind === 16) ? parseRepostContent(result.content) : parseContent(result.content)) then parsedContent} <EmbeddedEvent nostrIdentifier={result.id} nestingLevel={0} />
{@html parsedContent.slice(0, 200)}{parsedContent.length > 200 ? "..." : ""}
{:catch}
{result.content.slice(0, 200)}{result.content.length > 200 ? "..." : ""}
{/await}
</div> </div>
{/if} {/if}
{/if} {/if}
@ -784,11 +737,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
<div <div
class="text-sm text-gray-800 dark:text-gray-200 mt-1 line-clamp-2 break-words" class="text-sm text-gray-800 dark:text-gray-200 mt-1 line-clamp-2 break-words"
> >
{#await ((result.kind === 6 || result.kind === 16) ? parseRepostContent(result.content) : parseContent(result.content)) then parsedContent} <EmbeddedEvent nostrIdentifier={result.id} nestingLevel={0} />
{@html parsedContent.slice(0, 200)}{parsedContent.length > 200 ? "..." : ""}
{:catch}
{result.content.slice(0, 200)}{result.content.length > 200 ? "..." : ""}
{/await}
</div> </div>
{/if} {/if}
{/if} {/if}
@ -938,11 +887,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
<div <div
class="text-sm text-gray-800 dark:text-gray-200 mt-1 line-clamp-2 break-words" class="text-sm text-gray-800 dark:text-gray-200 mt-1 line-clamp-2 break-words"
> >
{#await ((result.kind === 6 || result.kind === 16) ? parseRepostContent(result.content) : parseContent(result.content)) then parsedContent} <EmbeddedEvent nostrIdentifier={result.id} nestingLevel={0} />
{@html parsedContent.slice(0, 200)}{parsedContent.length > 200 ? "..." : ""}
{:catch}
{result.content.slice(0, 200)}{result.content.length > 200 ? "..." : ""}
{/await}
</div> </div>
{/if} {/if}
{/if} {/if}
@ -997,7 +942,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
{/if} {/if}
<div class="min-w-0 overflow-hidden"> <div class="min-w-0 overflow-hidden">
<EventDetails {event} {profile} {searchValue} /> <EventDetails {event} {profile} />
</div> </div>
<div class="min-w-0 overflow-hidden"> <div class="min-w-0 overflow-hidden">
<RelayActions {event} /> <RelayActions {event} />

5
src/routes/proxy+layout.ts

@ -1,5 +0,0 @@
import type { LayoutLoad } from "./$types";
export const load: LayoutLoad = async () => {
return {};
};

4
src/routes/publication/+page.server.ts

@ -5,7 +5,7 @@ import type { PageServerLoad } from "./$types";
const ROUTES = { const ROUTES = {
PUBLICATION_BASE: "/publication", PUBLICATION_BASE: "/publication",
NADDR: "/publication/naddr", NADDR: "/publication/naddr",
NEVENT: "/publication/nevent", NEVENT: "/publication/nevent",
ID: "/publication/id", ID: "/publication/id",
D_TAG: "/publication/d", D_TAG: "/publication/d",
START: "/start", START: "/start",
@ -38,4 +38,4 @@ export const load: PageServerLoad = ({ url }) => {
// If no query parameters, redirect to the start page // If no query parameters, redirect to the start page
redirect(301, ROUTES.START); redirect(301, ROUTES.START);
}; };

15
src/routes/publication/[type]/[identifier]/+layout.server.ts

@ -3,7 +3,10 @@ import type { LayoutServerLoad } from "./$types";
import type { NostrEvent } from "../../../../lib/utils/websocket_utils.ts"; import type { NostrEvent } from "../../../../lib/utils/websocket_utils.ts";
// AI-NOTE: Server-side event fetching for SEO metadata // AI-NOTE: Server-side event fetching for SEO metadata
async function fetchEventServerSide(type: string, identifier: string): Promise<NostrEvent | null> { async function fetchEventServerSide(
type: string,
identifier: string,
): Promise<NostrEvent | null> {
// For now, return null to indicate server-side fetch not implemented // For now, return null to indicate server-side fetch not implemented
// This will fall back to client-side fetching // This will fall back to client-side fetching
return null; return null;
@ -16,10 +19,12 @@ export const load: LayoutServerLoad = async ({ params, url }) => {
const indexEvent = await fetchEventServerSide(type, identifier); const indexEvent = await fetchEventServerSide(type, identifier);
// Extract metadata for meta tags (use fallbacks if no event found) // Extract metadata for meta tags (use fallbacks if no event found)
const title = indexEvent?.tags.find((tag) => tag[0] === "title")?.[1] || "Alexandria Publication"; const title = indexEvent?.tags.find((tag) => tag[0] === "title")?.[1] ||
const summary = indexEvent?.tags.find((tag) => tag[0] === "summary")?.[1] || "Alexandria Publication";
const summary = indexEvent?.tags.find((tag) => tag[0] === "summary")?.[1] ||
"Alexandria is a digital library, utilizing Nostr events for curated publications and wiki pages."; "Alexandria is a digital library, utilizing Nostr events for curated publications and wiki pages.";
const image = indexEvent?.tags.find((tag) => tag[0] === "image")?.[1] || "/screenshots/old_books.jpg"; const image = indexEvent?.tags.find((tag) => tag[0] === "image")?.[1] ||
"/screenshots/old_books.jpg";
const currentUrl = `${url.origin}${url.pathname}`; const currentUrl = `${url.origin}${url.pathname}`;
return { return {
@ -31,4 +36,4 @@ export const load: LayoutServerLoad = async ({ params, url }) => {
currentUrl, currentUrl,
}, },
}; };
}; };

52
src/routes/publication/[type]/[identifier]/+page.ts

@ -1,30 +1,40 @@
import { error } from "@sveltejs/kit"; import { error } from "@sveltejs/kit";
import type { PageLoad } from "./$types"; import type { PageLoad } from "./$types";
import { fetchEventByDTag, fetchEventById, fetchEventByNaddr, fetchEventByNevent } from "../../../../lib/utils/websocket_utils.ts"; import {
fetchEventByDTag,
fetchEventById,
fetchEventByNaddr,
fetchEventByNevent,
} from "../../../../lib/utils/websocket_utils.ts";
import type { NostrEvent } from "../../../../lib/utils/websocket_utils.ts"; import type { NostrEvent } from "../../../../lib/utils/websocket_utils.ts";
export const load: PageLoad = async ({ params, parent }: { params: { type: string; identifier: string }; parent: any }) => { export const load: PageLoad = async (
{ params, parent }: {
params: { type: string; identifier: string };
parent: any;
},
) => {
const { type, identifier } = params; const { type, identifier } = params;
// Get layout data (no server-side data since SSR is disabled) // Get layout data (no server-side data since SSR is disabled)
const layoutData = await parent(); const layoutData = await parent();
// AI-NOTE: Always fetch client-side since server-side fetch returns null for now // AI-NOTE: Always fetch client-side since server-side fetch returns null for now
let indexEvent: NostrEvent | null = null; let indexEvent: NostrEvent | null = null;
try { try {
// Handle different identifier types // Handle different identifier types
switch (type) { switch (type) {
case 'id': case "id":
indexEvent = await fetchEventById(identifier); indexEvent = await fetchEventById(identifier);
break; break;
case 'd': case "d":
indexEvent = await fetchEventByDTag(identifier); indexEvent = await fetchEventByDTag(identifier);
break; break;
case 'naddr': case "naddr":
indexEvent = await fetchEventByNaddr(identifier); indexEvent = await fetchEventByNaddr(identifier);
break; break;
case 'nevent': case "nevent":
indexEvent = await fetchEventByNevent(identifier); indexEvent = await fetchEventByNevent(identifier);
break; break;
default: default:
@ -33,32 +43,36 @@ export const load: PageLoad = async ({ params, parent }: { params: { type: strin
} catch (err) { } catch (err) {
throw err; throw err;
} }
if (!indexEvent) { if (!indexEvent) {
// AI-NOTE: Handle case where no relays are available during preloading // AI-NOTE: Handle case where no relays are available during preloading
// This prevents 404 errors when relay stores haven't been populated yet // This prevents 404 errors when relay stores haven't been populated yet
// Create appropriate search link based on type // Create appropriate search link based on type
let searchParam = ''; let searchParam = "";
switch (type) { switch (type) {
case 'id': case "id":
searchParam = `id=${identifier}`; searchParam = `id=${identifier}`;
break; break;
case 'd': case "d":
searchParam = `d=${identifier}`; searchParam = `d=${identifier}`;
break; break;
case 'naddr': case "naddr":
case 'nevent': case "nevent":
searchParam = `id=${identifier}`; searchParam = `id=${identifier}`;
break; break;
default: default:
searchParam = `q=${identifier}`; searchParam = `q=${identifier}`;
} }
error(404, `Event not found for ${type}: ${identifier}. href="/events?${searchParam}"`); error(
404,
`Event not found for ${type}: ${identifier}. href="/events?${searchParam}"`,
);
} }
const publicationType = indexEvent.tags.find((tag) => tag[0] === "type")?.[1] ?? ""; const publicationType =
indexEvent.tags.find((tag) => tag[0] === "type")?.[1] ?? "";
// AI-NOTE: Use proper NDK instance from layout or create one with relays // AI-NOTE: Use proper NDK instance from layout or create one with relays
let ndk = layoutData?.ndk; let ndk = layoutData?.ndk;
@ -75,6 +89,6 @@ export const load: PageLoad = async ({ params, parent }: { params: { type: strin
indexEvent, indexEvent,
ndk, // Use minimal NDK instance ndk, // Use minimal NDK instance
}; };
return result; return result;
}; };

10
src/routes/visualize/+page.ts

@ -1,9 +1,9 @@
import type { PageLoad } from './$types'; import type { PageLoad } from "./$types";
export const load: PageLoad = async ({ url }) => { export const load: PageLoad = async ({ url }) => {
const eventId = url.searchParams.get('event'); const eventId = url.searchParams.get("event");
return { return {
eventId eventId,
}; };
}; };

8
src/styles/notifications.css

@ -151,7 +151,13 @@
/* Transition utilities */ /* Transition utilities */
.transition-colors { .transition-colors {
transition: color 0.15s ease-in-out, background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, text-decoration-color 0.15s ease-in-out, fill 0.15s ease-in-out, stroke 0.15s ease-in-out; transition:
color 0.15s ease-in-out,
background-color 0.15s ease-in-out,
border-color 0.15s ease-in-out,
text-decoration-color 0.15s ease-in-out,
fill 0.15s ease-in-out,
stroke 0.15s ease-in-out;
} }
.transition-all { .transition-all {

20
src/styles/publications.css

@ -100,7 +100,8 @@
/* blockquote; prose and poetry quotes */ /* blockquote; prose and poetry quotes */
.publication-leather .quoteblock, .publication-leather .quoteblock,
.publication-leather .verseblock { .publication-leather .verseblock {
@apply p-4 my-4 border-s-4 rounded border-primary-300 bg-primary-50 dark:border-primary-500 dark:bg-primary-700; @apply p-4 my-4 border-s-4 rounded border-primary-300 bg-primary-50
dark:border-primary-500 dark:bg-primary-700;
} }
.publication-leather .verseblock pre.content { .publication-leather .verseblock pre.content {
@ -154,7 +155,8 @@
} }
.publication-leather .admonitionblock.tip { .publication-leather .admonitionblock.tip {
@apply rounded overflow-hidden border border-success-100 dark:border-success-800; @apply rounded overflow-hidden border border-success-100
dark:border-success-800;
} }
.publication-leather .admonitionblock.tip .icon, .publication-leather .admonitionblock.tip .icon,
@ -172,7 +174,8 @@
} }
.publication-leather .admonitionblock.important { .publication-leather .admonitionblock.important {
@apply rounded overflow-hidden border border-primary-200 dark:border-primary-700; @apply rounded overflow-hidden border border-primary-200
dark:border-primary-700;
} }
.publication-leather .admonitionblock.important .icon, .publication-leather .admonitionblock.important .icon,
@ -181,7 +184,8 @@
} }
.publication-leather .admonitionblock.caution { .publication-leather .admonitionblock.caution {
@apply rounded overflow-hidden border border-warning-200 dark:border-warning-700; @apply rounded overflow-hidden border border-warning-200
dark:border-warning-700;
} }
.publication-leather .admonitionblock.caution .icon, .publication-leather .admonitionblock.caution .icon,
@ -190,7 +194,8 @@
} }
.publication-leather .admonitionblock.warning { .publication-leather .admonitionblock.warning {
@apply rounded overflow-hidden border border-danger-200 dark:border-danger-800; @apply rounded overflow-hidden border border-danger-200
dark:border-danger-800;
} }
.publication-leather .admonitionblock.warning .icon, .publication-leather .admonitionblock.warning .icon,
@ -201,7 +206,7 @@
/* listingblock, literalblock */ /* listingblock, literalblock */
.publication-leather .listingblock, .publication-leather .listingblock,
.publication-leather .literalblock { .publication-leather .literalblock {
@apply p-4 rounded bg-highlight dark:bg-primary-700; @apply p-4 rounded bg-highlight dark:bg-primary-700;
} }
.publication-leather .sidebarblock .title, .publication-leather .sidebarblock .title,
@ -254,7 +259,8 @@
@screen lg { @screen lg {
@media (hover: hover) { @media (hover: hover) {
.blog .discreet .card-leather:not(:hover) { .blog .discreet .card-leather:not(:hover) {
@apply bg-primary-50 dark:bg-primary-1000 opacity-75 transition duration-500 ease-in-out; @apply bg-primary-50 dark:bg-primary-1000 opacity-75 transition
duration-500 ease-in-out;
} }
.blog .discreet .group { .blog .discreet .group {
@apply bg-transparent; @apply bg-transparent;

6
src/styles/scrollbar.css

@ -1,7 +1,8 @@
@layer components { @layer components {
/* Global scrollbar styles */ /* Global scrollbar styles */
* { * {
scrollbar-color: rgba(87, 66, 41, 0.8) transparent; /* Transparent track, default scrollbar thumb */ scrollbar-color: rgba(87, 66, 41, 0.8)
transparent; /* Transparent track, default scrollbar thumb */
} }
/* Webkit Browsers (Chrome, Safari, Edge) */ /* Webkit Browsers (Chrome, Safari, Edge) */
@ -14,7 +15,8 @@
} }
*::-webkit-scrollbar-thumb { *::-webkit-scrollbar-thumb {
@apply bg-primary-500 dark:bg-primary-600 hover:bg-primary-600 dark:hover:bg-primary-800; @apply bg-primary-500 dark:bg-primary-600 hover:bg-primary-600
dark:hover:bg-primary-800;
border-radius: 6px; /* Rounded scrollbar */ border-radius: 6px; /* Rounded scrollbar */
} }
} }

28
src/styles/visualize.css

@ -30,7 +30,8 @@
} }
.legend-letter { .legend-letter {
@apply absolute inset-0 flex items-center justify-center text-black text-xs font-bold; @apply absolute inset-0 flex items-center justify-center text-black text-xs
font-bold;
} }
.legend-text { .legend-text {
@ -39,7 +40,8 @@
/* Network visualization styles - specific to visualization */ /* Network visualization styles - specific to visualization */
.network-container { .network-container {
@apply flex flex-col w-full h-[calc(100vh-138px)] min-h-[400px] max-h-[900px]; @apply flex flex-col w-full h-[calc(100vh-138px)] min-h-[400px]
max-h-[900px];
} }
.network-svg-container { .network-svg-container {
@ -48,11 +50,15 @@
.network-svg { .network-svg {
@apply w-full sm:h-[100%] border; @apply w-full sm:h-[100%] border;
@apply border border-primary-200 has-[:hover]:border-primary-700 dark:bg-primary-1000 dark:border-primary-800 dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500 rounded; @apply border border-primary-200 has-[:hover]:border-primary-700
dark:bg-primary-1000 dark:border-primary-800
dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500
rounded;
} }
.network-error { .network-error {
@apply w-full p-4 bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200 rounded-lg mb-4; @apply w-full p-4 bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200
rounded-lg mb-4;
} }
.network-error-title { .network-error-title {
@ -78,8 +84,9 @@
/* Tooltip styles - specific to visualization tooltips */ /* Tooltip styles - specific to visualization tooltips */
.tooltip-close-btn { .tooltip-close-btn {
@apply absolute top-2 right-2 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700 dark:hover:bg-gray-600 @apply absolute top-2 right-2 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700
rounded-full p-1 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200; dark:hover:bg-gray-600 rounded-full p-1 text-gray-500 hover:text-gray-700
dark:text-gray-400 dark:hover:text-gray-200;
} }
.tooltip-content { .tooltip-content {
@ -91,7 +98,8 @@
} }
.tooltip-title-link { .tooltip-title-link {
@apply text-gray-800 hover:text-blue-600 dark:text-gray-200 dark:hover:text-blue-400; @apply text-gray-800 hover:text-blue-600 dark:text-gray-200
dark:hover:text-blue-400;
} }
.tooltip-metadata { .tooltip-metadata {
@ -99,11 +107,13 @@
} }
.tooltip-summary { .tooltip-summary {
@apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto max-h-40; @apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto
max-h-40;
} }
.tooltip-content-preview { .tooltip-content-preview {
@apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto max-h-40; @apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto
max-h-40;
} }
.tooltip-help-text { .tooltip-help-text {

85
test_data/LaTeXtestfile.md

@ -1,12 +1,24 @@
# This is a testfile for writing mathematic formulas in NostrMarkup # This is a testfile for writing mathematic formulas in NostrMarkup
This document covers the rendering of formulas in TeX/LaTeX and AsciiMath notation, or some combination of those within the same page. It is meant to be rendered by clients utilizing MathJax. This document covers the rendering of formulas in TeX/LaTeX and AsciiMath
notation, or some combination of those within the same page. It is meant to be
If you want the entire document to be rendered as mathematics, place the entire thing in a backtick-codeblock, but know that this makes the document slower to load, it is harder to format the prose, and the result is less legible. It also doesn't increase portability, as it's easy to export markup as LaTeX files, or as PDFs, with the formulas rendered. rendered by clients utilizing MathJax.
The general idea, is that anything placed within `single backticks` is inline code, and inline-code will all be scanned for typical mathematics statements and rendered with best-effort. (For more precise rendering, use Asciidoc.) We will not render text that is not marked as inline code, as mathematical formulas, as that is prose. If you want the entire document to be rendered as mathematics, place the entire
thing in a backtick-codeblock, but know that this makes the document slower to
If you want the TeX to be blended into the surrounding text, wrap the text within single `$`. Otherwise, use double `$$` symbols, for display math, and it will appear on its own line. load, it is harder to format the prose, and the result is less legible. It also
doesn't increase portability, as it's easy to export markup as LaTeX files, or
as PDFs, with the formulas rendered.
The general idea, is that anything placed within `single backticks` is inline
code, and inline-code will all be scanned for typical mathematics statements and
rendered with best-effort. (For more precise rendering, use Asciidoc.) We will
not render text that is not marked as inline code, as mathematical formulas, as
that is prose.
If you want the TeX to be blended into the surrounding text, wrap the text
within single `$`. Otherwise, use double `$$` symbols, for display math, and it
will appear on its own line.
## TeX Examples ## TeX Examples
@ -16,36 +28,25 @@ Same equation, in the display mode: `$$\sqrt{x}$$`
Something more complex, inline: `$\mathbb{N} = \{ a \in \mathbb{Z} : a > 0 \}$` Something more complex, inline: `$\mathbb{N} = \{ a \in \mathbb{Z} : a > 0 \}$`
Something complex, in display mode: `$$P \left( A=2 \, \middle| \, \dfrac{A^2}{B}>4 \right)$$` Something complex, in display mode:
`$$P \left( A=2 \, \middle| \, \dfrac{A^2}{B}>4 \right)$$`
Another example of `$$\prod_{i=1}^{n} x_i - 1$$` inline formulas. Another example of `$$\prod_{i=1}^{n} x_i - 1$$` inline formulas.
Function example: Function example: `$$ f(x)= \begin{cases} 1/d_{ij} & \quad \text{when
`$$ $d_{ij} \leq 160$}\\ 0 & \quad \text{otherwise} \end{cases}
f(x)=
\begin{cases}
1/d_{ij} & \quad \text{when $d_{ij} \leq 160$}\\
0 & \quad \text{otherwise}
\end{cases}
$$ $$ `
`
And a matrix: And a matrix: ` $$
`
$$
M = M = \begin{bmatrix} \frac{5}{6} & \frac{1}{6} & 0 \\[0.3em] \frac{5}{6} & 0 &
\begin{bmatrix} \frac{1}{6} \\[0.3em] 0 & \frac{5}{6} & \frac{1}{6} \end{bmatrix}
\frac{5}{6} & \frac{1}{6} & 0 \\[0.3em]
\frac{5}{6} & 0 & \frac{1}{6} \\[0.3em]
0 & \frac{5}{6} & \frac{1}{6}
\end{bmatrix}
$$ $$ `
`
LaTeX ypesetting won't be rendered. Use NostrMarkup delimeter tables for this sort of thing. LaTeX ypesetting won't be rendered. Use NostrMarkup delimeter tables for this
sort of thing.
`\\begin{tabular}{|c|c|c|l|r|} `\\begin{tabular}{|c|c|c|l|r|}
\\hline \\hline
@ -69,13 +70,17 @@ We also recognize common LaTeX statements:
Greek letters are a snap: `$\Psi$`, `$\psi$`, `$\Phi$`, `$\phi$`. Greek letters are a snap: `$\Psi$`, `$\psi$`, `$\Phi$`, `$\phi$`.
Equations within text are easy--- A well known Maxwell thermodynamic relation is `$\left.{\partial T \over \partial P}\right|_{s} = \left.{\partial v \over \partial s}\right|_{P}$`. Equations within text are easy--- A well known Maxwell thermodynamic relation is
`$\left.{\partial T \over \partial P}\right|_{s} = \left.{\partial v \over \partial s}\right|_{P}$`.
You can also set aside equations like so: `\begin{eqnarray} du &=& T\ ds -P\ dv, \qquad \mbox{first law.}\label{fl}\\ ds &\ge& {\delta q \over T}.\qquad \qquad \mbox{second law.} \label{sl} \end {eqnarray}` You can also set aside equations like so:
`\begin{eqnarray} du &=& T\ ds -P\ dv, \qquad \mbox{first law.}\label{fl}\\ ds &\ge& {\delta q \over T}.\qquad \qquad \mbox{second law.} \label{sl} \end {eqnarray}`
## And some good ole Asciimath ## And some good ole Asciimath
Asciimath doesn't use `$` or `$$` delimiters, but we are using it to make mathy stuff easier to find. If you want it inline, include it inline. If you want it on a separate line, put a hard-return before and after. Asciimath doesn't use `$` or `$$` delimiters, but we are using it to make mathy
stuff easier to find. If you want it inline, include it inline. If you want it
on a separate line, put a hard-return before and after.
Inline text example here `$E=mc^2$` and another `$1/(x+1)$`; very simple. Inline text example here `$E=mc^2$` and another `$1/(x+1)$`; very simple.
@ -109,19 +114,23 @@ Using the quadratic formula, the roots of `$x^2-6x+4=0$` are
Advanced alignment and matrices looks like this: Advanced alignment and matrices looks like this:
A `$3xx3$` matrix, `$$((1,2,3),(4,5,6),(7,8,9))$$` and a `$2xx1$` matrix, or vector, `$$((1),(0))$$`. A `$3xx3$` matrix, `$$((1,2,3),(4,5,6),(7,8,9))$$` and a `$2xx1$` matrix, or
vector, `$$((1),(0))$$`.
The outer brackets determine the delimiters e.g. `$|(a,b),(c,d)|=ad-bc$`. The outer brackets determine the delimiters e.g. `$|(a,b),(c,d)|=ad-bc$`.
A general `$m xx n$` matrix `$$((a_(11), cdots , a_(1n)),(vdots, ddots, vdots),(a_(m1), cdots , a_(mn)))$$` A general `$m xx n$` matrix
`$$((a_(11), cdots , a_(1n)),(vdots, ddots, vdots),(a_(m1), cdots , a_(mn)))$$`
## Mixed Examples ## Mixed Examples
Here are some examples mixing LaTeX and AsciiMath: Here are some examples mixing LaTeX and AsciiMath:
- LaTeX inline: `$\frac{1}{2}$` vs AsciiMath inline: `$1/2$` - LaTeX inline: `$\frac{1}{2}$` vs AsciiMath inline: `$1/2$`
- LaTeX display: `$$\sum_{i=1}^n x_i$$` vs AsciiMath display: `$$sum_(i=1)^n x_i$$` - LaTeX display: `$$\sum_{i=1}^n x_i$$` vs AsciiMath display:
- LaTeX matrix: `$$\begin{pmatrix} a & b \\ c & d \end{pmatrix}$$` vs AsciiMath matrix: `$$((a,b),(c,d))$$` `$$sum_(i=1)^n x_i$$`
- LaTeX matrix: `$$\begin{pmatrix} a & b \\ c & d \end{pmatrix}$$` vs AsciiMath
matrix: `$$((a,b),(c,d))$$`
## Edge Cases ## Edge Cases
@ -134,9 +143,9 @@ Here are some examples mixing LaTeX and AsciiMath:
- CSS with dollar signs: `color: $primary-color` - CSS with dollar signs: `color: $primary-color`
This document should demonstrate that: This document should demonstrate that:
1. LaTeX is processed within inline code blocks with proper delimiters 1. LaTeX is processed within inline code blocks with proper delimiters
2. AsciiMath is processed within inline code blocks with proper delimiters 2. AsciiMath is processed within inline code blocks with proper delimiters
3. Regular code blocks remain unchanged 3. Regular code blocks remain unchanged
4. Mixed content is handled correctly 4. Mixed content is handled correctly
5. Edge cases are handled gracefully 5. Edge cases are handled gracefully $$
$$

26
tests/e2e/my_notes_layout.pw.spec.ts

@ -1,4 +1,4 @@
import { test, expect, type Page } from '@playwright/test'; import { expect, type Page, test } from "@playwright/test";
// Utility to check for horizontal scroll bar // Utility to check for horizontal scroll bar
async function hasHorizontalScroll(page: Page, selector: string) { async function hasHorizontalScroll(page: Page, selector: string) {
@ -9,16 +9,16 @@ async function hasHorizontalScroll(page: Page, selector: string) {
}, selector); }, selector);
} }
test.describe('My Notes Layout', () => { test.describe("My Notes Layout", () => {
test.beforeEach(async ({ page }) => { test.beforeEach(async ({ page }) => {
await page.goto('/my-notes'); await page.goto("/my-notes");
await page.waitForSelector('h1:text("My Notes")'); await page.waitForSelector('h1:text("My Notes")');
}); });
test('no horizontal scroll bar for all tag type and tag filter combinations', async ({ page }) => { test("no horizontal scroll bar for all tag type and tag filter combinations", async ({ page }) => {
// Helper to check scroll for current state // Helper to check scroll for current state
async function assertNoScroll() { async function assertNoScroll() {
const hasScroll = await hasHorizontalScroll(page, 'main, body, html'); const hasScroll = await hasHorizontalScroll(page, "main, body, html");
expect(hasScroll).toBeFalsy(); expect(hasScroll).toBeFalsy();
} }
@ -26,9 +26,11 @@ test.describe('My Notes Layout', () => {
await assertNoScroll(); await assertNoScroll();
// Get all tag type buttons // Get all tag type buttons
const tagTypeButtons = await page.locator('aside button').all(); const tagTypeButtons = await page.locator("aside button").all();
// Only consider tag type buttons (first N) // Only consider tag type buttons (first N)
const tagTypeCount = await page.locator('aside > div.flex.flex-wrap.gap-2.mb-6 > button').count(); const tagTypeCount = await page.locator(
"aside > div.flex.flex-wrap.gap-2.mb-6 > button",
).count();
// For each single tag type // For each single tag type
for (let i = 0; i < tagTypeCount; i++) { for (let i = 0; i < tagTypeCount; i++) {
// Click tag type button // Click tag type button
@ -36,7 +38,9 @@ test.describe('My Notes Layout', () => {
await page.waitForTimeout(100); // Wait for UI update await page.waitForTimeout(100); // Wait for UI update
await assertNoScroll(); await assertNoScroll();
// Get tag filter buttons (after tag type buttons) // Get tag filter buttons (after tag type buttons)
const tagFilterButtons = await page.locator('aside > div.flex.flex-wrap.gap-2.mb-4 > button').all(); const tagFilterButtons = await page.locator(
"aside > div.flex.flex-wrap.gap-2.mb-4 > button",
).all();
// Try all single tag filter selections // Try all single tag filter selections
for (let j = 0; j < tagFilterButtons.length; j++) { for (let j = 0; j < tagFilterButtons.length; j++) {
await tagFilterButtons[j].click(); await tagFilterButtons[j].click();
@ -72,7 +76,9 @@ test.describe('My Notes Layout', () => {
await page.waitForTimeout(100); await page.waitForTimeout(100);
await assertNoScroll(); await assertNoScroll();
// Get tag filter buttons for this combination // Get tag filter buttons for this combination
const tagFilterButtons = await page.locator('aside > div.flex.flex-wrap.gap-2.mb-4 > button').all(); const tagFilterButtons = await page.locator(
"aside > div.flex.flex-wrap.gap-2.mb-4 > button",
).all();
// Try all single tag filter selections // Try all single tag filter selections
for (let k = 0; k < tagFilterButtons.length; k++) { for (let k = 0; k < tagFilterButtons.length; k++) {
await tagFilterButtons[k].click(); await tagFilterButtons[k].click();
@ -100,4 +106,4 @@ test.describe('My Notes Layout', () => {
} }
} }
}); });
}); });

275
tests/unit/ZettelEditor.test.ts

@ -1,37 +1,45 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { AsciiDocMetadata } from "../../src/lib/utils/asciidoc_metadata"; import type { AsciiDocMetadata } from "../../src/lib/utils/asciidoc_metadata";
// Mock all Svelte components and dependencies // Mock all Svelte components and dependencies
vi.mock("flowbite-svelte", () => ({ vi.mock("flowbite-svelte", () => ({
Textarea: vi.fn().mockImplementation((props) => { Textarea: vi.fn().mockImplementation((props) => {
return { return {
$$render: () => `<textarea data-testid="textarea" class="${props.class || ''}" rows="${props.rows || 12}" ${props.disabled ? 'disabled' : ''} placeholder="${props.placeholder || ''}"></textarea>`, $$render: () =>
$$bind: { value: props.bind, oninput: props.oninput } `<textarea data-testid="textarea" class="${props.class || ""}" rows="${
props.rows || 12
}" ${props.disabled ? "disabled" : ""} placeholder="${
props.placeholder || ""
}"></textarea>`,
$$bind: { value: props.bind, oninput: props.oninput },
}; };
}), }),
Button: vi.fn().mockImplementation((props) => { Button: vi.fn().mockImplementation((props) => {
return { return {
$$render: () => `<button data-testid="preview-button" class="${props.class || ''}" ${props.disabled ? 'disabled' : ''} onclick="${props.onclick || ''}">${props.children || ''}</button>`, $$render: () =>
$$bind: { onclick: props.onclick } `<button data-testid="preview-button" class="${props.class || ""}" ${
props.disabled ? "disabled" : ""
} onclick="${props.onclick || ""}">${props.children || ""}</button>`,
$$bind: { onclick: props.onclick },
}; };
}) }),
})); }));
vi.mock("flowbite-svelte-icons", () => ({ vi.mock("flowbite-svelte-icons", () => ({
EyeOutline: vi.fn().mockImplementation(() => ({ EyeOutline: vi.fn().mockImplementation(() => ({
$$render: () => `<svg data-testid="eye-icon"></svg>` $$render: () => `<svg data-testid="eye-icon"></svg>`,
})) })),
})); }));
vi.mock("asciidoctor", () => ({ vi.mock("asciidoctor", () => ({
default: vi.fn(() => ({ default: vi.fn(() => ({
convert: vi.fn((content, options) => { convert: vi.fn((content, options) => {
// Mock AsciiDoctor conversion - return simple HTML // Mock AsciiDoctor conversion - return simple HTML
return content.replace(/^==\s+(.+)$/gm, '<h2>$1</h2>') return content.replace(/^==\s+(.+)$/gm, "<h2>$1</h2>")
.replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>') .replace(/\*\*(.+?)\*\*/g, "<strong>$1</strong>")
.replace(/\*(.+?)\*/g, '<em>$1</em>'); .replace(/\*(.+?)\*/g, "<em>$1</em>");
}) }),
})) })),
})); }));
// Mock sessionStorage // Mock sessionStorage
@ -41,21 +49,21 @@ const mockSessionStorage = {
removeItem: vi.fn(), removeItem: vi.fn(),
clear: vi.fn(), clear: vi.fn(),
}; };
Object.defineProperty(global, 'sessionStorage', { Object.defineProperty(global, "sessionStorage", {
value: mockSessionStorage, value: mockSessionStorage,
writable: true writable: true,
}); });
// Mock window object for DOM manipulation // Mock window object for DOM manipulation
Object.defineProperty(global, 'window', { Object.defineProperty(global, "window", {
value: { value: {
sessionStorage: mockSessionStorage, sessionStorage: mockSessionStorage,
document: { document: {
querySelector: vi.fn(), querySelector: vi.fn(),
createElement: vi.fn(), createElement: vi.fn(),
} },
}, },
writable: true writable: true,
}); });
// Mock DOM methods // Mock DOM methods
@ -64,14 +72,14 @@ const mockCreateElement = vi.fn();
const mockAddEventListener = vi.fn(); const mockAddEventListener = vi.fn();
const mockRemoveEventListener = vi.fn(); const mockRemoveEventListener = vi.fn();
Object.defineProperty(global, 'document', { Object.defineProperty(global, "document", {
value: { value: {
querySelector: mockQuerySelector, querySelector: mockQuerySelector,
createElement: mockCreateElement, createElement: mockCreateElement,
addEventListener: mockAddEventListener, addEventListener: mockAddEventListener,
removeEventListener: mockRemoveEventListener, removeEventListener: mockRemoveEventListener,
}, },
writable: true writable: true,
}); });
describe("ZettelEditor Component Logic", () => { describe("ZettelEditor Component Logic", () => {
@ -90,8 +98,9 @@ describe("ZettelEditor Component Logic", () => {
describe("Publication Format Detection Logic", () => { describe("Publication Format Detection Logic", () => {
it("should detect document header format", () => { it("should detect document header format", () => {
const contentWithDocumentHeader = "= Document Title\n\n== Section 1\nContent"; const contentWithDocumentHeader =
"= Document Title\n\n== Section 1\nContent";
// Test the regex pattern used in the component // Test the regex pattern used in the component
const hasDocumentHeader = contentWithDocumentHeader.match(/^=\s+/m); const hasDocumentHeader = contentWithDocumentHeader.match(/^=\s+/m);
expect(hasDocumentHeader).toBeTruthy(); expect(hasDocumentHeader).toBeTruthy();
@ -99,12 +108,12 @@ describe("ZettelEditor Component Logic", () => {
it("should detect index card format", () => { it("should detect index card format", () => {
const contentWithIndexCard = "index card\n\n== Section 1\nContent"; const contentWithIndexCard = "index card\n\n== Section 1\nContent";
// Test the logic used in the component // Test the logic used in the component
const lines = contentWithIndexCard.split(/\r?\n/); const lines = contentWithIndexCard.split(/\r?\n/);
let hasIndexCard = false; let hasIndexCard = false;
for (const line of lines) { for (const line of lines) {
if (line.trim().toLowerCase() === 'index card') { if (line.trim().toLowerCase() === "index card") {
hasIndexCard = true; hasIndexCard = true;
break; break;
} }
@ -113,8 +122,9 @@ describe("ZettelEditor Component Logic", () => {
}); });
it("should not detect publication format for normal section content", () => { it("should not detect publication format for normal section content", () => {
const normalContent = "== Section 1\nContent\n\n== Section 2\nMore content"; const normalContent =
"== Section 1\nContent\n\n== Section 2\nMore content";
// Test the logic used in the component // Test the logic used in the component
const lines = normalContent.split(/\r?\n/); const lines = normalContent.split(/\r?\n/);
let hasPublicationHeader = false; let hasPublicationHeader = false;
@ -123,7 +133,7 @@ describe("ZettelEditor Component Logic", () => {
hasPublicationHeader = true; hasPublicationHeader = true;
break; break;
} }
if (line.trim().toLowerCase() === 'index card') { if (line.trim().toLowerCase() === "index card") {
hasPublicationHeader = true; hasPublicationHeader = true;
break; break;
} }
@ -135,26 +145,30 @@ describe("ZettelEditor Component Logic", () => {
describe("Content Parsing Logic", () => { describe("Content Parsing Logic", () => {
it("should parse sections with document header", () => { it("should parse sections with document header", () => {
const content = "== Section 1\n:author: Test Author\n\nContent 1"; const content = "== Section 1\n:author: Test Author\n\nContent 1";
// Test the parsing logic // Test the parsing logic
const hasDocumentHeader = content.match(/^=\s+/m); const hasDocumentHeader = content.match(/^=\s+/m);
expect(hasDocumentHeader).toBeFalsy(); // This content doesn't have a document header expect(hasDocumentHeader).toBeFalsy(); // This content doesn't have a document header
// Test section splitting logic // Test section splitting logic
const sectionStrings = content.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); const sectionStrings = content.split(/(?=^==\s+)/gm).filter((
section: string,
) => section.trim());
expect(sectionStrings).toHaveLength(1); expect(sectionStrings).toHaveLength(1);
expect(sectionStrings[0]).toContain("== Section 1"); expect(sectionStrings[0]).toContain("== Section 1");
}); });
it("should parse sections without document header", () => { it("should parse sections without document header", () => {
const content = "== Section 1\nContent 1"; const content = "== Section 1\nContent 1";
// Test the parsing logic // Test the parsing logic
const hasDocumentHeader = content.match(/^=\s+/m); const hasDocumentHeader = content.match(/^=\s+/m);
expect(hasDocumentHeader).toBeFalsy(); expect(hasDocumentHeader).toBeFalsy();
// Test section splitting logic // Test section splitting logic
const sectionStrings = content.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); const sectionStrings = content.split(/(?=^==\s+)/gm).filter((
section: string,
) => section.trim());
expect(sectionStrings).toHaveLength(1); expect(sectionStrings).toHaveLength(1);
expect(sectionStrings[0]).toContain("== Section 1"); expect(sectionStrings[0]).toContain("== Section 1");
}); });
@ -168,49 +182,70 @@ describe("ZettelEditor Component Logic", () => {
describe("Content Conversion Logic", () => { describe("Content Conversion Logic", () => {
it("should convert document title to section title", () => { it("should convert document title to section title", () => {
const contentWithDocumentHeader = "= Document Title\n\n== Section 1\nContent"; const contentWithDocumentHeader =
"= Document Title\n\n== Section 1\nContent";
// Test the conversion logic // Test the conversion logic
let convertedContent = contentWithDocumentHeader.replace(/^=\s+(.+)$/gm, '== $1'); let convertedContent = contentWithDocumentHeader.replace(
convertedContent = convertedContent.replace(/^index card$/gim, ''); /^=\s+(.+)$/gm,
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, '\n\n'); "== $1",
);
convertedContent = convertedContent.replace(/^index card$/gim, "");
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, "\n\n");
expect(finalContent).toBe("== Document Title\n\n== Section 1\nContent"); expect(finalContent).toBe("== Document Title\n\n== Section 1\nContent");
}); });
it("should remove index card line", () => { it("should remove index card line", () => {
const contentWithIndexCard = "index card\n\n== Section 1\nContent"; const contentWithIndexCard = "index card\n\n== Section 1\nContent";
// Test the conversion logic // Test the conversion logic
let convertedContent = contentWithIndexCard.replace(/^=\s+(.+)$/gm, '== $1'); let convertedContent = contentWithIndexCard.replace(
convertedContent = convertedContent.replace(/^index card$/gim, ''); /^=\s+(.+)$/gm,
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, '\n\n'); "== $1",
);
convertedContent = convertedContent.replace(/^index card$/gim, "");
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, "\n\n");
expect(finalContent).toBe("\n\n== Section 1\nContent"); expect(finalContent).toBe("\n\n== Section 1\nContent");
}); });
it("should clean up double newlines", () => { it("should clean up double newlines", () => {
const contentWithExtraNewlines = "= Document Title\n\n\n== Section 1\nContent"; const contentWithExtraNewlines =
"= Document Title\n\n\n== Section 1\nContent";
// Test the conversion logic // Test the conversion logic
let convertedContent = contentWithExtraNewlines.replace(/^=\s+(.+)$/gm, '== $1'); let convertedContent = contentWithExtraNewlines.replace(
convertedContent = convertedContent.replace(/^index card$/gim, ''); /^=\s+(.+)$/gm,
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, '\n\n'); "== $1",
);
convertedContent = convertedContent.replace(/^index card$/gim, "");
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, "\n\n");
expect(finalContent).toBe("== Document Title\n\n== Section 1\nContent"); expect(finalContent).toBe("== Document Title\n\n== Section 1\nContent");
}); });
}); });
describe("SessionStorage Integration", () => { describe("SessionStorage Integration", () => {
it("should store content in sessionStorage when switching to publication editor", () => { it("should store content in sessionStorage when switching to publication editor", () => {
const contentWithDocumentHeader = "= Document Title\n\n== Section 1\nContent"; const contentWithDocumentHeader =
"= Document Title\n\n== Section 1\nContent";
// Test the sessionStorage logic // Test the sessionStorage logic
mockSessionStorage.setItem('zettelEditorContent', contentWithDocumentHeader); mockSessionStorage.setItem(
mockSessionStorage.setItem('zettelEditorSource', 'publication-format'); "zettelEditorContent",
contentWithDocumentHeader,
expect(mockSessionStorage.setItem).toHaveBeenCalledWith('zettelEditorContent', contentWithDocumentHeader); );
expect(mockSessionStorage.setItem).toHaveBeenCalledWith('zettelEditorSource', 'publication-format'); mockSessionStorage.setItem("zettelEditorSource", "publication-format");
expect(mockSessionStorage.setItem).toHaveBeenCalledWith(
"zettelEditorContent",
contentWithDocumentHeader,
);
expect(mockSessionStorage.setItem).toHaveBeenCalledWith(
"zettelEditorSource",
"publication-format",
);
}); });
}); });
@ -219,7 +254,7 @@ describe("ZettelEditor Component Logic", () => {
const sections = [{ title: "Section 1", content: "Content 1", tags: [] }]; const sections = [{ title: "Section 1", content: "Content 1", tags: [] }];
const eventCount = sections.length; const eventCount = sections.length;
const eventText = `${eventCount} event${eventCount !== 1 ? "s" : ""}`; const eventText = `${eventCount} event${eventCount !== 1 ? "s" : ""}`;
expect(eventCount).toBe(1); expect(eventCount).toBe(1);
expect(eventText).toBe("1 event"); expect(eventText).toBe("1 event");
}); });
@ -227,11 +262,11 @@ describe("ZettelEditor Component Logic", () => {
it("should calculate correct event count for multiple sections", () => { it("should calculate correct event count for multiple sections", () => {
const sections = [ const sections = [
{ title: "Section 1", content: "Content 1", tags: [] }, { title: "Section 1", content: "Content 1", tags: [] },
{ title: "Section 2", content: "Content 2", tags: [] } { title: "Section 2", content: "Content 2", tags: [] },
]; ];
const eventCount = sections.length; const eventCount = sections.length;
const eventText = `${eventCount} event${eventCount !== 1 ? "s" : ""}`; const eventText = `${eventCount} event${eventCount !== 1 ? "s" : ""}`;
expect(eventCount).toBe(2); expect(eventCount).toBe(2);
expect(eventText).toBe("2 events"); expect(eventText).toBe("2 events");
}); });
@ -240,11 +275,17 @@ describe("ZettelEditor Component Logic", () => {
describe("Tag Processing Logic", () => { describe("Tag Processing Logic", () => {
it("should process tags correctly", () => { it("should process tags correctly", () => {
// Mock the metadataToTags function // Mock the metadataToTags function
const mockMetadataToTags = vi.fn().mockReturnValue([["author", "Test Author"]]); const mockMetadataToTags = vi.fn().mockReturnValue([[
"author",
const mockMetadata = { title: "Section 1", author: "Test Author" } as AsciiDocMetadata; "Test Author",
]]);
const mockMetadata = {
title: "Section 1",
author: "Test Author",
} as AsciiDocMetadata;
const tags = mockMetadataToTags(mockMetadata); const tags = mockMetadataToTags(mockMetadata);
expect(tags).toEqual([["author", "Test Author"]]); expect(tags).toEqual([["author", "Test Author"]]);
expect(mockMetadataToTags).toHaveBeenCalledWith(mockMetadata); expect(mockMetadataToTags).toHaveBeenCalledWith(mockMetadata);
}); });
@ -252,10 +293,10 @@ describe("ZettelEditor Component Logic", () => {
it("should handle empty tags", () => { it("should handle empty tags", () => {
// Mock the metadataToTags function // Mock the metadataToTags function
const mockMetadataToTags = vi.fn().mockReturnValue([]); const mockMetadataToTags = vi.fn().mockReturnValue([]);
const mockMetadata = { title: "Section 1" } as AsciiDocMetadata; const mockMetadata = { title: "Section 1" } as AsciiDocMetadata;
const tags = mockMetadataToTags(mockMetadata); const tags = mockMetadataToTags(mockMetadata);
expect(tags).toEqual([]); expect(tags).toEqual([]);
}); });
}); });
@ -264,11 +305,11 @@ describe("ZettelEditor Component Logic", () => {
it("should process AsciiDoc content correctly", () => { it("should process AsciiDoc content correctly", () => {
// Mock the asciidoctor conversion // Mock the asciidoctor conversion
const mockConvert = vi.fn((content, options) => { const mockConvert = vi.fn((content, options) => {
return content.replace(/^==\s+(.+)$/gm, '<h2>$1</h2>') return content.replace(/^==\s+(.+)$/gm, "<h2>$1</h2>")
.replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>') .replace(/\*\*(.+?)\*\*/g, "<strong>$1</strong>")
.replace(/\*(.+?)\*/g, '<em>$1</em>'); .replace(/\*(.+?)\*/g, "<em>$1</em>");
}); });
const content = "== Test Section\n\nThis is **bold** and *italic* text."; const content = "== Test Section\n\nThis is **bold** and *italic* text.";
const processedContent = mockConvert(content, { const processedContent = mockConvert(content, {
standalone: false, standalone: false,
@ -278,10 +319,10 @@ describe("ZettelEditor Component Logic", () => {
sectids: true, sectids: true,
}, },
}); });
expect(processedContent).toContain('<h2>Test Section</h2>'); expect(processedContent).toContain("<h2>Test Section</h2>");
expect(processedContent).toContain('<strong>bold</strong>'); expect(processedContent).toContain("<strong>bold</strong>");
expect(processedContent).toContain('<em>italic</em>'); expect(processedContent).toContain("<em>italic</em>");
}); });
}); });
@ -291,9 +332,9 @@ describe("ZettelEditor Component Logic", () => {
const mockParseFunction = vi.fn().mockImplementation(() => { const mockParseFunction = vi.fn().mockImplementation(() => {
throw new Error("Parsing error"); throw new Error("Parsing error");
}); });
const content = "== Section 1\nContent 1"; const content = "== Section 1\nContent 1";
// Should not throw error when called // Should not throw error when called
expect(() => { expect(() => {
try { try {
@ -321,12 +362,12 @@ describe("ZettelEditor Component Logic", () => {
onContentChange: vi.fn(), onContentChange: vi.fn(),
onPreviewToggle: vi.fn(), onPreviewToggle: vi.fn(),
}; };
expect(expectedProps).toHaveProperty('content'); expect(expectedProps).toHaveProperty("content");
expect(expectedProps).toHaveProperty('placeholder'); expect(expectedProps).toHaveProperty("placeholder");
expect(expectedProps).toHaveProperty('showPreview'); expect(expectedProps).toHaveProperty("showPreview");
expect(expectedProps).toHaveProperty('onContentChange'); expect(expectedProps).toHaveProperty("onContentChange");
expect(expectedProps).toHaveProperty('onPreviewToggle'); expect(expectedProps).toHaveProperty("onPreviewToggle");
}); });
}); });
@ -334,12 +375,12 @@ describe("ZettelEditor Component Logic", () => {
it("should integrate with ZettelParser utilities", () => { it("should integrate with ZettelParser utilities", () => {
// Mock the parseAsciiDocSections function // Mock the parseAsciiDocSections function
const mockParseAsciiDocSections = vi.fn().mockReturnValue([ const mockParseAsciiDocSections = vi.fn().mockReturnValue([
{ title: "Section 1", content: "Content 1", tags: [] } { title: "Section 1", content: "Content 1", tags: [] },
]); ]);
const content = "== Section 1\nContent 1"; const content = "== Section 1\nContent 1";
const sections = mockParseAsciiDocSections(content, 2); const sections = mockParseAsciiDocSections(content, 2);
expect(sections).toHaveLength(1); expect(sections).toHaveLength(1);
expect(sections[0].title).toBe("Section 1"); expect(sections[0].title).toBe("Section 1");
}); });
@ -348,21 +389,21 @@ describe("ZettelEditor Component Logic", () => {
// Mock the utility functions // Mock the utility functions
const mockExtractDocumentMetadata = vi.fn().mockReturnValue({ const mockExtractDocumentMetadata = vi.fn().mockReturnValue({
metadata: { title: "Document Title" } as AsciiDocMetadata, metadata: { title: "Document Title" } as AsciiDocMetadata,
content: "Document content" content: "Document content",
}); });
const mockExtractSectionMetadata = vi.fn().mockReturnValue({ const mockExtractSectionMetadata = vi.fn().mockReturnValue({
metadata: { title: "Section Title" } as AsciiDocMetadata, metadata: { title: "Section Title" } as AsciiDocMetadata,
content: "Section content", content: "Section content",
title: "Section Title" title: "Section Title",
}); });
const documentContent = "= Document Title\nDocument content"; const documentContent = "= Document Title\nDocument content";
const sectionContent = "== Section Title\nSection content"; const sectionContent = "== Section Title\nSection content";
const documentResult = mockExtractDocumentMetadata(documentContent); const documentResult = mockExtractDocumentMetadata(documentContent);
const sectionResult = mockExtractSectionMetadata(sectionContent); const sectionResult = mockExtractSectionMetadata(sectionContent);
expect(documentResult.metadata.title).toBe("Document Title"); expect(documentResult.metadata.title).toBe("Document Title");
expect(sectionResult.title).toBe("Section Title"); expect(sectionResult.title).toBe("Section Title");
}); });
@ -370,27 +411,35 @@ describe("ZettelEditor Component Logic", () => {
describe("Content Validation", () => { describe("Content Validation", () => {
it("should validate content structure", () => { it("should validate content structure", () => {
const validContent = "== Section 1\nContent here\n\n== Section 2\nMore content"; const validContent =
"== Section 1\nContent here\n\n== Section 2\nMore content";
const invalidContent = "Just some text without sections"; const invalidContent = "Just some text without sections";
// Test section detection // Test section detection
const validSections = validContent.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); const validSections = validContent.split(/(?=^==\s+)/gm).filter((
const invalidSections = invalidContent.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); section: string,
) => section.trim());
const invalidSections = invalidContent.split(/(?=^==\s+)/gm).filter((
section: string,
) => section.trim());
expect(validSections.length).toBeGreaterThan(0); expect(validSections.length).toBeGreaterThan(0);
// The invalid content will have one section (the entire content) since it doesn't start with == // The invalid content will have one section (the entire content) since it doesn't start with ==
expect(invalidSections.length).toBe(1); expect(invalidSections.length).toBe(1);
}); });
it("should handle mixed content types", () => { it("should handle mixed content types", () => {
const mixedContent = "= Document Title\n\n== Section 1\nContent\n\n== Section 2\nMore content"; const mixedContent =
"= Document Title\n\n== Section 1\nContent\n\n== Section 2\nMore content";
// Test document header detection // Test document header detection
const hasDocumentHeader = mixedContent.match(/^=\s+/m); const hasDocumentHeader = mixedContent.match(/^=\s+/m);
expect(hasDocumentHeader).toBeTruthy(); expect(hasDocumentHeader).toBeTruthy();
// Test section extraction // Test section extraction
const sections = mixedContent.split(/(?=^==\s+)/gm).filter((section: string) => section.trim()); const sections = mixedContent.split(/(?=^==\s+)/gm).filter((
section: string,
) => section.trim());
expect(sections.length).toBeGreaterThan(0); expect(sections.length).toBeGreaterThan(0);
}); });
}); });
@ -398,13 +447,13 @@ describe("ZettelEditor Component Logic", () => {
describe("String Manipulation", () => { describe("String Manipulation", () => {
it("should handle string replacements correctly", () => { it("should handle string replacements correctly", () => {
const originalContent = "= Title\n\n== Section\nContent"; const originalContent = "= Title\n\n== Section\nContent";
// Test various string manipulations // Test various string manipulations
const convertedContent = originalContent const convertedContent = originalContent
.replace(/^=\s+(.+)$/gm, '== $1') .replace(/^=\s+(.+)$/gm, "== $1")
.replace(/^index card$/gim, '') .replace(/^index card$/gim, "")
.replace(/\n\s*\n\s*\n/g, '\n\n'); .replace(/\n\s*\n\s*\n/g, "\n\n");
expect(convertedContent).toBe("== Title\n\n== Section\nContent"); expect(convertedContent).toBe("== Title\n\n== Section\nContent");
}); });
@ -414,16 +463,16 @@ describe("ZettelEditor Component Logic", () => {
"index card\n\n== Section\nContent", // Index card "index card\n\n== Section\nContent", // Index card
"= Title\nindex card\n== Section\nContent", // Both "= Title\nindex card\n== Section\nContent", // Both
]; ];
edgeCases.forEach(content => { edgeCases.forEach((content) => {
const converted = content const converted = content
.replace(/^=\s+(.+)$/gm, '== $1') .replace(/^=\s+(.+)$/gm, "== $1")
.replace(/^index card$/gim, '') .replace(/^index card$/gim, "")
.replace(/\n\s*\n\s*\n/g, '\n\n'); .replace(/\n\s*\n\s*\n/g, "\n\n");
expect(converted).toBeDefined(); expect(converted).toBeDefined();
expect(typeof converted).toBe('string'); expect(typeof converted).toBe("string");
}); });
}); });
}); });
}); });

337
tests/unit/eventInput30040.test.ts

@ -1,6 +1,12 @@
import { describe, it, expect, vi, beforeEach } from "vitest"; import { beforeEach, describe, expect, it, vi } from "vitest";
import { build30040EventSet, validate30040EventSet } from "../../src/lib/utils/event_input_utils"; import {
import { extractDocumentMetadata, parseAsciiDocWithMetadata } from "../../src/lib/utils/asciidoc_metadata"; build30040EventSet,
validate30040EventSet,
} from "../../src/lib/utils/event_input_utils";
import {
extractDocumentMetadata,
parseAsciiDocWithMetadata,
} from "../../src/lib/utils/asciidoc_metadata";
// Mock NDK and other dependencies // Mock NDK and other dependencies
vi.mock("@nostr-dev-kit/ndk", () => ({ vi.mock("@nostr-dev-kit/ndk", () => ({
@ -60,16 +66,29 @@ This is the content of the second section.`;
const tags: [string, string][] = [["type", "article"]]; const tags: [string, string][] = [["type", "article"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event // Test index event
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe(""); expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "test-document-with-preamble"]); expect(indexEvent.tags).toContainEqual([
expect(indexEvent.tags).toContainEqual(["title", "Test Document with Preamble"]); "d",
"test-document-with-preamble",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Test Document with Preamble",
]);
expect(indexEvent.tags).toContainEqual(["author", "John Doe"]); expect(indexEvent.tags).toContainEqual(["author", "John Doe"]);
expect(indexEvent.tags).toContainEqual(["version", "1.0"]); expect(indexEvent.tags).toContainEqual(["version", "1.0"]);
expect(indexEvent.tags).toContainEqual(["summary", "This is a test document with preamble"]); expect(indexEvent.tags).toContainEqual([
"summary",
"This is a test document with preamble",
]);
expect(indexEvent.tags).toContainEqual(["t", "test"]); expect(indexEvent.tags).toContainEqual(["t", "test"]);
expect(indexEvent.tags).toContainEqual(["t", "preamble"]); expect(indexEvent.tags).toContainEqual(["t", "preamble"]);
expect(indexEvent.tags).toContainEqual(["t", "asciidoc"]); expect(indexEvent.tags).toContainEqual(["t", "asciidoc"]);
@ -80,22 +99,47 @@ This is the content of the second section.`;
// First section // First section
expect(sectionEvents[0].kind).toBe(30041); expect(sectionEvents[0].kind).toBe(30041);
expect(sectionEvents[0].content).toBe("This is the content of the first section."); expect(sectionEvents[0].content).toBe(
expect(sectionEvents[0].tags).toContainEqual(["d", "test-document-with-preamble-first-section"]); "This is the content of the first section.",
);
expect(sectionEvents[0].tags).toContainEqual([
"d",
"test-document-with-preamble-first-section",
]);
expect(sectionEvents[0].tags).toContainEqual(["title", "First Section"]); expect(sectionEvents[0].tags).toContainEqual(["title", "First Section"]);
expect(sectionEvents[0].tags).toContainEqual(["author", "Section Author"]); expect(sectionEvents[0].tags).toContainEqual([
expect(sectionEvents[0].tags).toContainEqual(["summary", "This is the first section"]); "author",
"Section Author",
]);
expect(sectionEvents[0].tags).toContainEqual([
"summary",
"This is the first section",
]);
// Second section // Second section
expect(sectionEvents[1].kind).toBe(30041); expect(sectionEvents[1].kind).toBe(30041);
expect(sectionEvents[1].content).toBe("This is the content of the second section."); expect(sectionEvents[1].content).toBe(
expect(sectionEvents[1].tags).toContainEqual(["d", "test-document-with-preamble-second-section"]); "This is the content of the second section.",
);
expect(sectionEvents[1].tags).toContainEqual([
"d",
"test-document-with-preamble-second-section",
]);
expect(sectionEvents[1].tags).toContainEqual(["title", "Second Section"]); expect(sectionEvents[1].tags).toContainEqual(["title", "Second Section"]);
expect(sectionEvents[1].tags).toContainEqual(["summary", "This is the second section"]); expect(sectionEvents[1].tags).toContainEqual([
"summary",
"This is the second section",
]);
// Test a-tags in index event // Test a-tags in index event
expect(indexEvent.tags).toContainEqual(["a", "30041:test-pubkey:test-document-with-preamble-first-section"]); expect(indexEvent.tags).toContainEqual([
expect(indexEvent.tags).toContainEqual(["a", "30041:test-pubkey:test-document-with-preamble-second-section"]); "a",
"30041:test-pubkey:test-document-with-preamble-first-section",
]);
expect(indexEvent.tags).toContainEqual([
"a",
"30041:test-pubkey:test-document-with-preamble-second-section",
]);
}); });
}); });
@ -118,32 +162,64 @@ This is the content of the second section.`;
const tags: [string, string][] = [["type", "article"]]; const tags: [string, string][] = [["type", "article"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event // Test index event
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe(""); expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "test-document-without-preamble"]); expect(indexEvent.tags).toContainEqual([
expect(indexEvent.tags).toContainEqual(["title", "Test Document without Preamble"]); "d",
expect(indexEvent.tags).toContainEqual(["summary", "This is a test document without preamble"]); "test-document-without-preamble",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Test Document without Preamble",
]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is a test document without preamble",
]);
// Test section events // Test section events
expect(sectionEvents).toHaveLength(2); expect(sectionEvents).toHaveLength(2);
// First section // First section
expect(sectionEvents[0].kind).toBe(30041); expect(sectionEvents[0].kind).toBe(30041);
expect(sectionEvents[0].content).toBe("This is the content of the first section."); expect(sectionEvents[0].content).toBe(
expect(sectionEvents[0].tags).toContainEqual(["d", "test-document-without-preamble-first-section"]); "This is the content of the first section.",
);
expect(sectionEvents[0].tags).toContainEqual([
"d",
"test-document-without-preamble-first-section",
]);
expect(sectionEvents[0].tags).toContainEqual(["title", "First Section"]); expect(sectionEvents[0].tags).toContainEqual(["title", "First Section"]);
expect(sectionEvents[0].tags).toContainEqual(["author", "Section Author"]); expect(sectionEvents[0].tags).toContainEqual([
expect(sectionEvents[0].tags).toContainEqual(["summary", "This is the first section"]); "author",
"Section Author",
]);
expect(sectionEvents[0].tags).toContainEqual([
"summary",
"This is the first section",
]);
// Second section // Second section
expect(sectionEvents[1].kind).toBe(30041); expect(sectionEvents[1].kind).toBe(30041);
expect(sectionEvents[1].content).toBe("This is the content of the second section."); expect(sectionEvents[1].content).toBe(
expect(sectionEvents[1].tags).toContainEqual(["d", "test-document-without-preamble-second-section"]); "This is the content of the second section.",
);
expect(sectionEvents[1].tags).toContainEqual([
"d",
"test-document-without-preamble-second-section",
]);
expect(sectionEvents[1].tags).toContainEqual(["title", "Second Section"]); expect(sectionEvents[1].tags).toContainEqual(["title", "Second Section"]);
expect(sectionEvents[1].tags).toContainEqual(["summary", "This is the second section"]); expect(sectionEvents[1].tags).toContainEqual([
"summary",
"This is the second section",
]);
}); });
}); });
@ -163,14 +239,27 @@ This is the preamble content.
const tags: [string, string][] = [["type", "skeleton"]]; const tags: [string, string][] = [["type", "skeleton"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event // Test index event
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe(""); expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "skeleton-document-with-preamble"]); expect(indexEvent.tags).toContainEqual([
expect(indexEvent.tags).toContainEqual(["title", "Skeleton Document with Preamble"]); "d",
expect(indexEvent.tags).toContainEqual(["summary", "This is a skeleton document with preamble"]); "skeleton-document-with-preamble",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Skeleton Document with Preamble",
]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is a skeleton document with preamble",
]);
// Test section events // Test section events
expect(sectionEvents).toHaveLength(3); expect(sectionEvents).toHaveLength(3);
@ -179,8 +268,14 @@ This is the preamble content.
sectionEvents.forEach((section, index) => { sectionEvents.forEach((section, index) => {
expect(section.kind).toBe(30041); expect(section.kind).toBe(30041);
expect(section.content).toBe(""); expect(section.content).toBe("");
expect(section.tags).toContainEqual(["d", `skeleton-document-with-preamble-empty-section-${index + 1}`]); expect(section.tags).toContainEqual([
expect(section.tags).toContainEqual(["title", `Empty Section ${index + 1}`]); "d",
`skeleton-document-with-preamble-empty-section-${index + 1}`,
]);
expect(section.tags).toContainEqual([
"title",
`Empty Section ${index + 1}`,
]);
}); });
}); });
}); });
@ -199,14 +294,27 @@ This is the preamble content.
const tags: [string, string][] = [["type", "skeleton"]]; const tags: [string, string][] = [["type", "skeleton"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event // Test index event
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe(""); expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "skeleton-document-without-preamble"]); expect(indexEvent.tags).toContainEqual([
expect(indexEvent.tags).toContainEqual(["title", "Skeleton Document without Preamble"]); "d",
expect(indexEvent.tags).toContainEqual(["summary", "This is a skeleton document without preamble"]); "skeleton-document-without-preamble",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Skeleton Document without Preamble",
]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is a skeleton document without preamble",
]);
// Test section events // Test section events
expect(sectionEvents).toHaveLength(3); expect(sectionEvents).toHaveLength(3);
@ -215,8 +323,14 @@ This is the preamble content.
sectionEvents.forEach((section, index) => { sectionEvents.forEach((section, index) => {
expect(section.kind).toBe(30041); expect(section.kind).toBe(30041);
expect(section.content).toBe(""); expect(section.content).toBe("");
expect(section.tags).toContainEqual(["d", `skeleton-document-without-preamble-empty-section-${index + 1}`]); expect(section.tags).toContainEqual([
expect(section.tags).toContainEqual(["title", `Empty Section ${index + 1}`]); "d",
`skeleton-document-without-preamble-empty-section-${index + 1}`,
]);
expect(section.tags).toContainEqual([
"title",
`Empty Section ${index + 1}`,
]);
}); });
}); });
}); });
@ -228,7 +342,11 @@ index card`;
const tags: [string, string][] = [["type", "index-card"]]; const tags: [string, string][] = [["type", "index-card"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event // Test index event
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
@ -249,14 +367,27 @@ index card`;
const tags: [string, string][] = [["type", "index-card"]]; const tags: [string, string][] = [["type", "index-card"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event // Test index event
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe(""); expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "test-index-card-with-metadata"]); expect(indexEvent.tags).toContainEqual([
expect(indexEvent.tags).toContainEqual(["title", "Test Index Card with Metadata"]); "d",
expect(indexEvent.tags).toContainEqual(["summary", "This is an index card with metadata"]); "test-index-card-with-metadata",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Test Index Card with Metadata",
]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is an index card with metadata",
]);
expect(indexEvent.tags).toContainEqual(["t", "index"]); expect(indexEvent.tags).toContainEqual(["t", "index"]);
expect(indexEvent.tags).toContainEqual(["t", "card"]); expect(indexEvent.tags).toContainEqual(["t", "card"]);
expect(indexEvent.tags).toContainEqual(["t", "metadata"]); expect(indexEvent.tags).toContainEqual(["t", "metadata"]);
@ -303,23 +434,45 @@ This is the section content.`;
const tags: [string, string][] = [["type", "complex"]]; const tags: [string, string][] = [["type", "complex"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event metadata // Test index event metadata
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
expect(indexEvent.tags).toContainEqual(["d", "complex-metadata-document"]); expect(indexEvent.tags).toContainEqual([
expect(indexEvent.tags).toContainEqual(["title", "Complex Metadata Document"]); "d",
"complex-metadata-document",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Complex Metadata Document",
]);
expect(indexEvent.tags).toContainEqual(["author", "Jane Smith"]); // Should use header line author expect(indexEvent.tags).toContainEqual(["author", "Jane Smith"]); // Should use header line author
expect(indexEvent.tags).toContainEqual(["author", "Override Author"]); // Additional author from attribute expect(indexEvent.tags).toContainEqual(["author", "Override Author"]); // Additional author from attribute
expect(indexEvent.tags).toContainEqual(["author", "Third Author"]); // Additional author from attribute expect(indexEvent.tags).toContainEqual(["author", "Third Author"]); // Additional author from attribute
expect(indexEvent.tags).toContainEqual(["version", "2.0"]); // Should use revision line version expect(indexEvent.tags).toContainEqual(["version", "2.0"]); // Should use revision line version
expect(indexEvent.tags).toContainEqual(["summary", "This is a complex document with all metadata types Alternative description field"]); expect(indexEvent.tags).toContainEqual([
"summary",
"This is a complex document with all metadata types Alternative description field",
]);
expect(indexEvent.tags).toContainEqual(["published_on", "2024-03-01"]); expect(indexEvent.tags).toContainEqual(["published_on", "2024-03-01"]);
expect(indexEvent.tags).toContainEqual(["published_by", "Alexandria Complex"]); expect(indexEvent.tags).toContainEqual([
"published_by",
"Alexandria Complex",
]);
expect(indexEvent.tags).toContainEqual(["type", "book"]); expect(indexEvent.tags).toContainEqual(["type", "book"]);
expect(indexEvent.tags).toContainEqual(["image", "https://example.com/cover.jpg"]); expect(indexEvent.tags).toContainEqual([
"image",
"https://example.com/cover.jpg",
]);
expect(indexEvent.tags).toContainEqual(["i", "978-0-123456-78-9"]); expect(indexEvent.tags).toContainEqual(["i", "978-0-123456-78-9"]);
expect(indexEvent.tags).toContainEqual(["source", "https://github.com/alexandria/complex"]); expect(indexEvent.tags).toContainEqual([
"source",
"https://github.com/alexandria/complex",
]);
expect(indexEvent.tags).toContainEqual(["auto-update", "yes"]); expect(indexEvent.tags).toContainEqual(["auto-update", "yes"]);
expect(indexEvent.tags).toContainEqual(["t", "complex"]); expect(indexEvent.tags).toContainEqual(["t", "complex"]);
expect(indexEvent.tags).toContainEqual(["t", "metadata"]); expect(indexEvent.tags).toContainEqual(["t", "metadata"]);
@ -332,13 +485,31 @@ This is the section content.`;
expect(sectionEvents).toHaveLength(1); expect(sectionEvents).toHaveLength(1);
expect(sectionEvents[0].kind).toBe(30041); expect(sectionEvents[0].kind).toBe(30041);
expect(sectionEvents[0].content).toBe("This is the section content."); expect(sectionEvents[0].content).toBe("This is the section content.");
expect(sectionEvents[0].tags).toContainEqual(["d", "complex-metadata-document-section-with-complex-metadata"]); expect(sectionEvents[0].tags).toContainEqual([
expect(sectionEvents[0].tags).toContainEqual(["title", "Section with Complex Metadata"]); "d",
expect(sectionEvents[0].tags).toContainEqual(["author", "Section Author"]); "complex-metadata-document-section-with-complex-metadata",
expect(sectionEvents[0].tags).toContainEqual(["author", "Section Co-Author"]); ]);
expect(sectionEvents[0].tags).toContainEqual(["summary", "This section has complex metadata Alternative description for section"]); expect(sectionEvents[0].tags).toContainEqual([
"title",
"Section with Complex Metadata",
]);
expect(sectionEvents[0].tags).toContainEqual([
"author",
"Section Author",
]);
expect(sectionEvents[0].tags).toContainEqual([
"author",
"Section Co-Author",
]);
expect(sectionEvents[0].tags).toContainEqual([
"summary",
"This section has complex metadata Alternative description for section",
]);
expect(sectionEvents[0].tags).toContainEqual(["type", "chapter"]); expect(sectionEvents[0].tags).toContainEqual(["type", "chapter"]);
expect(sectionEvents[0].tags).toContainEqual(["image", "https://example.com/section-image.jpg"]); expect(sectionEvents[0].tags).toContainEqual([
"image",
"https://example.com/section-image.jpg",
]);
expect(sectionEvents[0].tags).toContainEqual(["t", "section"]); expect(sectionEvents[0].tags).toContainEqual(["t", "section"]);
expect(sectionEvents[0].tags).toContainEqual(["t", "complex"]); expect(sectionEvents[0].tags).toContainEqual(["t", "complex"]);
expect(sectionEvents[0].tags).toContainEqual(["t", "metadata"]); expect(sectionEvents[0].tags).toContainEqual(["t", "metadata"]);
@ -387,7 +558,9 @@ index card`;
const validation = validate30040EventSet(content); const validation = validate30040EventSet(content);
expect(validation.valid).toBe(false); expect(validation.valid).toBe(false);
expect(validation.reason).toContain("30040 events must have a document title"); expect(validation.reason).toContain(
"30040 events must have a document title",
);
}); });
}); });
@ -400,11 +573,21 @@ This is just preamble content.`;
const tags: [string, string][] = []; const tags: [string, string][] = [];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
expect(indexEvent.tags).toContainEqual(["d", "document-with-no-sections"]); expect(indexEvent.tags).toContainEqual([
expect(indexEvent.tags).toContainEqual(["title", "Document with No Sections"]); "d",
"document-with-no-sections",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Document with No Sections",
]);
expect(sectionEvents).toHaveLength(0); expect(sectionEvents).toHaveLength(0);
}); });
@ -418,16 +601,27 @@ Content here.`;
const tags: [string, string][] = []; const tags: [string, string][] = [];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
expect(indexEvent.tags).toContainEqual(["d", "document-with-special-characters-test-more"]); expect(indexEvent.tags).toContainEqual([
expect(indexEvent.tags).toContainEqual(["title", "Document with Special Characters: Test & More!"]); "d",
"document-with-special-characters-test-more",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Document with Special Characters: Test & More!",
]);
expect(sectionEvents).toHaveLength(1); expect(sectionEvents).toHaveLength(1);
}); });
it("should handle document with very long title", () => { it("should handle document with very long title", () => {
const content = `= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality const content =
`= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality
:summary: This document has a very long title :summary: This document has a very long title
== Section 1 == Section 1
@ -436,11 +630,18 @@ Content here.`;
const tags: [string, string][] = []; const tags: [string, string][] = [];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent); const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
expect(indexEvent.kind).toBe(30040); expect(indexEvent.kind).toBe(30040);
expect(indexEvent.tags).toContainEqual(["title", "This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality"]); expect(indexEvent.tags).toContainEqual([
"title",
"This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality",
]);
expect(sectionEvents).toHaveLength(1); expect(sectionEvents).toHaveLength(1);
}); });
}); });
}); });

2
tests/unit/latexRendering.test.ts

@ -1,4 +1,4 @@
import { describe, it, expect } from "vitest"; import { describe, expect, it } from "vitest";
import { parseAdvancedmarkup } from "../../src/lib/utils/markup/advancedMarkupParser"; import { parseAdvancedmarkup } from "../../src/lib/utils/markup/advancedMarkupParser";
import { readFileSync } from "fs"; import { readFileSync } from "fs";
import { join } from "path"; import { join } from "path";

124
tests/unit/metadataExtraction.test.ts

@ -1,10 +1,10 @@
import { describe, it, expect } from "vitest"; import { describe, expect, it } from "vitest";
import { import {
extractDocumentMetadata, extractDocumentMetadata,
extractSectionMetadata, extractSectionMetadata,
parseAsciiDocWithMetadata, extractSmartMetadata,
metadataToTags, metadataToTags,
extractSmartMetadata parseAsciiDocWithMetadata,
} from "../../src/lib/utils/asciidoc_metadata.ts"; } from "../../src/lib/utils/asciidoc_metadata.ts";
describe("AsciiDoc Metadata Extraction", () => { describe("AsciiDoc Metadata Extraction", () => {
@ -39,13 +39,15 @@ This is the content of the second section.`;
it("extractDocumentMetadata should extract document metadata correctly", () => { it("extractDocumentMetadata should extract document metadata correctly", () => {
const { metadata, content } = extractDocumentMetadata(testContent); const { metadata, content } = extractDocumentMetadata(testContent);
expect(metadata.title).toBe("Test Document with Metadata"); expect(metadata.title).toBe("Test Document with Metadata");
expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]);
expect(metadata.version).toBe("1.0"); expect(metadata.version).toBe("1.0");
expect(metadata.publicationDate).toBe("2024-01-15"); expect(metadata.publicationDate).toBe("2024-01-15");
expect(metadata.publishedBy).toBe("Alexandria Test"); expect(metadata.publishedBy).toBe("Alexandria Test");
expect(metadata.summary).toBe("This is a test document for metadata extraction"); expect(metadata.summary).toBe(
"This is a test document for metadata extraction",
);
expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]); expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]);
expect(metadata.type).toBe("article"); expect(metadata.type).toBe("article");
expect(metadata.tags).toEqual(["test", "metadata", "asciidoc"]); expect(metadata.tags).toEqual(["test", "metadata", "asciidoc"]);
@ -53,7 +55,7 @@ This is the content of the second section.`;
expect(metadata.isbn).toBe("978-0-123456-78-9"); expect(metadata.isbn).toBe("978-0-123456-78-9");
expect(metadata.source).toBe("https://github.com/alexandria/test"); expect(metadata.source).toBe("https://github.com/alexandria/test");
expect(metadata.autoUpdate).toBe("yes"); expect(metadata.autoUpdate).toBe("yes");
// Content should not include the header metadata // Content should not include the header metadata
expect(content).toContain("This is the preamble content"); expect(content).toContain("This is the preamble content");
expect(content).toContain("== First Section"); expect(content).toContain("== First Section");
@ -70,7 +72,7 @@ This is the content of the second section.`;
This is the content of the first section.`; This is the content of the first section.`;
const { metadata, content, title } = extractSectionMetadata(sectionContent); const { metadata, content, title } = extractSectionMetadata(sectionContent);
expect(title).toBe("First Section"); expect(title).toBe("First Section");
expect(metadata.authors).toEqual(["Section Author"]); expect(metadata.authors).toEqual(["Section Author"]);
expect(metadata.summary).toBe("This is the first section"); expect(metadata.summary).toBe("This is the first section");
@ -86,7 +88,7 @@ Stella
Some context text`; Some context text`;
const { metadata, content, title } = extractSectionMetadata(sectionContent); const { metadata, content, title } = extractSectionMetadata(sectionContent);
expect(title).toBe("Section Header1"); expect(title).toBe("Section Header1");
expect(metadata.authors).toEqual(["Stella"]); expect(metadata.authors).toEqual(["Stella"]);
expect(metadata.summary).toBe("Some summary"); expect(metadata.summary).toBe("Some summary");
@ -102,7 +104,7 @@ Stella
Some context text`; Some context text`;
const { metadata, content, title } = extractSectionMetadata(sectionContent); const { metadata, content, title } = extractSectionMetadata(sectionContent);
expect(title).toBe("Section Header1"); expect(title).toBe("Section Header1");
expect(metadata.authors).toEqual(["Stella", "John Doe"]); expect(metadata.authors).toEqual(["Stella", "John Doe"]);
expect(metadata.summary).toBe("Some summary"); expect(metadata.summary).toBe("Some summary");
@ -118,22 +120,26 @@ This is not an author line
Some context text`; Some context text`;
const { metadata, content, title } = extractSectionMetadata(sectionContent); const { metadata, content, title } = extractSectionMetadata(sectionContent);
expect(title).toBe("Section Header1"); expect(title).toBe("Section Header1");
expect(metadata.authors).toEqual(["Stella"]); expect(metadata.authors).toEqual(["Stella"]);
expect(metadata.summary).toBe("Some summary"); expect(metadata.summary).toBe("Some summary");
expect(content.trim()).toBe("This is not an author line\nSome context text"); expect(content.trim()).toBe(
"This is not an author line\nSome context text",
);
}); });
it("parseAsciiDocWithMetadata should parse complete document", () => { it("parseAsciiDocWithMetadata should parse complete document", () => {
const parsed = parseAsciiDocWithMetadata(testContent); const parsed = parseAsciiDocWithMetadata(testContent);
expect(parsed.metadata.title).toBe("Test Document with Metadata"); expect(parsed.metadata.title).toBe("Test Document with Metadata");
expect(parsed.sections).toHaveLength(2); expect(parsed.sections).toHaveLength(2);
expect(parsed.sections[0].title).toBe("First Section"); expect(parsed.sections[0].title).toBe("First Section");
expect(parsed.sections[1].title).toBe("Second Section"); expect(parsed.sections[1].title).toBe("Second Section");
expect(parsed.sections[0].metadata.authors).toEqual(["Section Author"]); expect(parsed.sections[0].metadata.authors).toEqual(["Section Author"]);
expect(parsed.sections[1].metadata.summary).toBe("This is the second section"); expect(parsed.sections[1].metadata.summary).toBe(
"This is the second section",
);
}); });
it("metadataToTags should convert metadata to Nostr tags", () => { it("metadataToTags should convert metadata to Nostr tags", () => {
@ -142,11 +148,11 @@ Some context text`;
authors: ["Author 1", "Author 2"], authors: ["Author 1", "Author 2"],
version: "1.0", version: "1.0",
summary: "Test summary", summary: "Test summary",
tags: ["tag1", "tag2"] tags: ["tag1", "tag2"],
}; };
const tags = metadataToTags(metadata); const tags = metadataToTags(metadata);
expect(tags).toContainEqual(["title", "Test Title"]); expect(tags).toContainEqual(["title", "Test Title"]);
expect(tags).toContainEqual(["author", "Author 1"]); expect(tags).toContainEqual(["author", "Author 1"]);
expect(tags).toContainEqual(["author", "Author 2"]); expect(tags).toContainEqual(["author", "Author 2"]);
@ -161,16 +167,16 @@ Some context text`;
index card`; index card`;
const { metadata, content } = extractDocumentMetadata(indexCardContent); const { metadata, content } = extractDocumentMetadata(indexCardContent);
expect(metadata.title).toBe("Test Index Card"); expect(metadata.title).toBe("Test Index Card");
expect(content.trim()).toBe("index card"); expect(content.trim()).toBe("index card");
}); });
it("should handle empty content gracefully", () => { it("should handle empty content gracefully", () => {
const emptyContent = ""; const emptyContent = "";
const { metadata, content } = extractDocumentMetadata(emptyContent); const { metadata, content } = extractDocumentMetadata(emptyContent);
expect(metadata.title).toBeUndefined(); expect(metadata.title).toBeUndefined();
expect(content).toBe(""); expect(content).toBe("");
}); });
@ -182,7 +188,7 @@ index card`;
Some content here.`; Some content here.`;
const { metadata } = extractDocumentMetadata(contentWithKeywords); const { metadata } = extractDocumentMetadata(contentWithKeywords);
expect(metadata.tags).toEqual(["keyword1", "keyword2", "keyword3"]); expect(metadata.tags).toEqual(["keyword1", "keyword2", "keyword3"]);
}); });
@ -194,7 +200,7 @@ Some content here.`;
Some content here.`; Some content here.`;
const { metadata } = extractDocumentMetadata(contentWithBoth); const { metadata } = extractDocumentMetadata(contentWithBoth);
// Both tags and keywords are valid, both should be accumulated // Both tags and keywords are valid, both should be accumulated
expect(metadata.tags).toEqual(["tag1", "tag2", "keyword1", "keyword2"]); expect(metadata.tags).toEqual(["tag1", "tag2", "keyword1", "keyword2"]);
}); });
@ -206,7 +212,7 @@ Some content here.`;
Content here.`; Content here.`;
const { metadata } = extractDocumentMetadata(contentWithTags); const { metadata } = extractDocumentMetadata(contentWithTags);
expect(metadata.tags).toEqual(["tag1", "tag2", "tag3"]); expect(metadata.tags).toEqual(["tag1", "tag2", "tag3"]);
}); });
@ -221,15 +227,19 @@ Content here.`;
Content here.`; Content here.`;
const { metadata: summaryMetadata } = extractDocumentMetadata(contentWithSummary); const { metadata: summaryMetadata } = extractDocumentMetadata(
const { metadata: descriptionMetadata } = extractDocumentMetadata(contentWithDescription); contentWithSummary,
);
const { metadata: descriptionMetadata } = extractDocumentMetadata(
contentWithDescription,
);
expect(summaryMetadata.summary).toBe("This is a summary"); expect(summaryMetadata.summary).toBe("This is a summary");
expect(descriptionMetadata.summary).toBe("This is a description"); expect(descriptionMetadata.summary).toBe("This is a description");
}); });
describe('Smart metadata extraction', () => { describe("Smart metadata extraction", () => {
it('should handle section-only content correctly', () => { it("should handle section-only content correctly", () => {
const sectionOnlyContent = `== First Section const sectionOnlyContent = `== First Section
:author: Section Author :author: Section Author
:description: This is the first section :description: This is the first section
@ -244,20 +254,20 @@ This is the content of the first section.
This is the content of the second section.`; This is the content of the second section.`;
const { metadata, content } = extractSmartMetadata(sectionOnlyContent); const { metadata, content } = extractSmartMetadata(sectionOnlyContent);
// Should extract title from first section // Should extract title from first section
expect(metadata.title).toBe('First Section'); expect(metadata.title).toBe("First Section");
// Should not have document-level metadata since there's no document header // Should not have document-level metadata since there's no document header
expect(metadata.authors).toBeUndefined(); expect(metadata.authors).toBeUndefined();
expect(metadata.version).toBeUndefined(); expect(metadata.version).toBeUndefined();
expect(metadata.publicationDate).toBeUndefined(); expect(metadata.publicationDate).toBeUndefined();
// Content should be preserved // Content should be preserved
expect(content).toBe(sectionOnlyContent); expect(content).toBe(sectionOnlyContent);
}); });
it('should handle minimal document header (just title) correctly', () => { it("should handle minimal document header (just title) correctly", () => {
const minimalDocumentHeader = `= Test Document const minimalDocumentHeader = `= Test Document
== First Section == First Section
@ -273,22 +283,22 @@ This is the content of the first section.
This is the content of the second section.`; This is the content of the second section.`;
const { metadata, content } = extractSmartMetadata(minimalDocumentHeader); const { metadata, content } = extractSmartMetadata(minimalDocumentHeader);
// Should extract title from document header // Should extract title from document header
expect(metadata.title).toBe('Test Document'); expect(metadata.title).toBe("Test Document");
// Should not have document-level metadata since there's no other metadata // Should not have document-level metadata since there's no other metadata
expect(metadata.authors).toBeUndefined(); expect(metadata.authors).toBeUndefined();
// Note: version might be set from section attributes like :type: chapter // Note: version might be set from section attributes like :type: chapter
expect(metadata.publicationDate).toBeUndefined(); expect(metadata.publicationDate).toBeUndefined();
// Content should preserve the title line for 30040 events // Content should preserve the title line for 30040 events
expect(content).toContain('= Test Document'); expect(content).toContain("= Test Document");
expect(content).toContain('== First Section'); expect(content).toContain("== First Section");
expect(content).toContain('== Second Section'); expect(content).toContain("== Second Section");
}); });
it('should handle document with full header correctly', () => { it("should handle document with full header correctly", () => {
const documentWithHeader = `= Test Document const documentWithHeader = `= Test Document
John Doe <john@example.com> John Doe <john@example.com>
1.0, 2024-01-15: Alexandria Test 1.0, 2024-01-15: Alexandria Test
@ -302,21 +312,21 @@ John Doe <john@example.com>
This is the content.`; This is the content.`;
const { metadata, content } = extractSmartMetadata(documentWithHeader); const { metadata, content } = extractSmartMetadata(documentWithHeader);
// Should extract document-level metadata // Should extract document-level metadata
expect(metadata.title).toBe('Test Document'); expect(metadata.title).toBe("Test Document");
expect(metadata.authors).toEqual(['John Doe', 'Jane Smith']); expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]);
expect(metadata.version).toBe('1.0'); expect(metadata.version).toBe("1.0");
expect(metadata.publishedBy).toBe('Alexandria Test'); expect(metadata.publishedBy).toBe("Alexandria Test");
expect(metadata.publicationDate).toBe('2024-01-15'); expect(metadata.publicationDate).toBe("2024-01-15");
expect(metadata.summary).toBe('This is a test document'); expect(metadata.summary).toBe("This is a test document");
// Content should be cleaned // Content should be cleaned
expect(content).not.toContain('= Test Document'); expect(content).not.toContain("= Test Document");
expect(content).not.toContain('John Doe <john@example.com>'); expect(content).not.toContain("John Doe <john@example.com>");
expect(content).not.toContain('1.0, 2024-01-15: Alexandria Test'); expect(content).not.toContain("1.0, 2024-01-15: Alexandria Test");
expect(content).not.toContain(':summary: This is a test document'); expect(content).not.toContain(":summary: This is a test document");
expect(content).not.toContain(':author: Jane Smith'); expect(content).not.toContain(":author: Jane Smith");
}); });
}); });
}); });

132
tests/unit/nostr_identifiers.test.ts

@ -1,106 +1,112 @@
import { describe, it, expect } from 'vitest'; import { describe, expect, it } from "vitest";
import { import {
isEventId,
isCoordinate,
parseCoordinate,
createCoordinate, createCoordinate,
isNostrIdentifier isCoordinate,
} from '../../src/lib/utils/nostr_identifiers'; isEventId,
isNostrIdentifier,
parseCoordinate,
} from "../../src/lib/utils/nostr_identifiers";
describe('Nostr Identifier Validation', () => { describe("Nostr Identifier Validation", () => {
describe('isEventId', () => { describe("isEventId", () => {
it('should validate correct hex event IDs', () => { it("should validate correct hex event IDs", () => {
const validId = 'a'.repeat(64); const validId = "a".repeat(64);
expect(isEventId(validId)).toBe(true); expect(isEventId(validId)).toBe(true);
const validIdWithMixedCase = 'A'.repeat(32) + 'f'.repeat(32); const validIdWithMixedCase = "A".repeat(32) + "f".repeat(32);
expect(isEventId(validIdWithMixedCase)).toBe(true); expect(isEventId(validIdWithMixedCase)).toBe(true);
}); });
it('should reject invalid event IDs', () => { it("should reject invalid event IDs", () => {
expect(isEventId('')).toBe(false); expect(isEventId("")).toBe(false);
expect(isEventId('abc')).toBe(false); expect(isEventId("abc")).toBe(false);
expect(isEventId('a'.repeat(63))).toBe(false); // too short expect(isEventId("a".repeat(63))).toBe(false); // too short
expect(isEventId('a'.repeat(65))).toBe(false); // too long expect(isEventId("a".repeat(65))).toBe(false); // too long
expect(isEventId('g'.repeat(64))).toBe(false); // invalid hex char expect(isEventId("g".repeat(64))).toBe(false); // invalid hex char
}); });
}); });
describe('isCoordinate', () => { describe("isCoordinate", () => {
it('should validate correct coordinates', () => { it("should validate correct coordinates", () => {
const validCoordinate = `30040:${'a'.repeat(64)}:chapter-1`; const validCoordinate = `30040:${"a".repeat(64)}:chapter-1`;
expect(isCoordinate(validCoordinate)).toBe(true); expect(isCoordinate(validCoordinate)).toBe(true);
const coordinateWithColonsInDTag = `30041:${'b'.repeat(64)}:chapter:with:colons`; const coordinateWithColonsInDTag = `30041:${
"b".repeat(64)
}:chapter:with:colons`;
expect(isCoordinate(coordinateWithColonsInDTag)).toBe(true); expect(isCoordinate(coordinateWithColonsInDTag)).toBe(true);
}); });
it('should reject invalid coordinates', () => { it("should reject invalid coordinates", () => {
expect(isCoordinate('')).toBe(false); expect(isCoordinate("")).toBe(false);
expect(isCoordinate('abc')).toBe(false); expect(isCoordinate("abc")).toBe(false);
expect(isCoordinate('30040:abc:chapter-1')).toBe(false); // invalid pubkey expect(isCoordinate("30040:abc:chapter-1")).toBe(false); // invalid pubkey
expect(isCoordinate('30040:abc')).toBe(false); // missing d-tag expect(isCoordinate("30040:abc")).toBe(false); // missing d-tag
expect(isCoordinate('abc:def:ghi')).toBe(false); // invalid kind expect(isCoordinate("abc:def:ghi")).toBe(false); // invalid kind
expect(isCoordinate('-1:abc:def')).toBe(false); // negative kind expect(isCoordinate("-1:abc:def")).toBe(false); // negative kind
}); });
}); });
describe('parseCoordinate', () => { describe("parseCoordinate", () => {
it('should parse valid coordinates correctly', () => { it("should parse valid coordinates correctly", () => {
const coordinate = `30040:${'a'.repeat(64)}:chapter-1`; const coordinate = `30040:${"a".repeat(64)}:chapter-1`;
const parsed = parseCoordinate(coordinate); const parsed = parseCoordinate(coordinate);
expect(parsed).toEqual({ expect(parsed).toEqual({
kind: 30040, kind: 30040,
pubkey: 'a'.repeat(64), pubkey: "a".repeat(64),
dTag: 'chapter-1' dTag: "chapter-1",
}); });
}); });
it('should handle d-tags with colons', () => { it("should handle d-tags with colons", () => {
const coordinate = `30041:${'b'.repeat(64)}:chapter:with:colons`; const coordinate = `30041:${"b".repeat(64)}:chapter:with:colons`;
const parsed = parseCoordinate(coordinate); const parsed = parseCoordinate(coordinate);
expect(parsed).toEqual({ expect(parsed).toEqual({
kind: 30041, kind: 30041,
pubkey: 'b'.repeat(64), pubkey: "b".repeat(64),
dTag: 'chapter:with:colons' dTag: "chapter:with:colons",
}); });
}); });
it('should return null for invalid coordinates', () => { it("should return null for invalid coordinates", () => {
expect(parseCoordinate('')).toBeNull(); expect(parseCoordinate("")).toBeNull();
expect(parseCoordinate('abc')).toBeNull(); expect(parseCoordinate("abc")).toBeNull();
expect(parseCoordinate('30040:abc:chapter-1')).toBeNull(); expect(parseCoordinate("30040:abc:chapter-1")).toBeNull();
}); });
}); });
describe('createCoordinate', () => { describe("createCoordinate", () => {
it('should create valid coordinates', () => { it("should create valid coordinates", () => {
const coordinate = createCoordinate(30040, 'a'.repeat(64), 'chapter-1'); const coordinate = createCoordinate(30040, "a".repeat(64), "chapter-1");
expect(coordinate).toBe(`30040:${'a'.repeat(64)}:chapter-1`); expect(coordinate).toBe(`30040:${"a".repeat(64)}:chapter-1`);
}); });
it('should handle d-tags with colons', () => { it("should handle d-tags with colons", () => {
const coordinate = createCoordinate(30041, 'b'.repeat(64), 'chapter:with:colons'); const coordinate = createCoordinate(
expect(coordinate).toBe(`30041:${'b'.repeat(64)}:chapter:with:colons`); 30041,
"b".repeat(64),
"chapter:with:colons",
);
expect(coordinate).toBe(`30041:${"b".repeat(64)}:chapter:with:colons`);
}); });
}); });
describe('isNostrIdentifier', () => { describe("isNostrIdentifier", () => {
it('should accept valid event IDs', () => { it("should accept valid event IDs", () => {
expect(isNostrIdentifier('a'.repeat(64))).toBe(true); expect(isNostrIdentifier("a".repeat(64))).toBe(true);
}); });
it('should accept valid coordinates', () => { it("should accept valid coordinates", () => {
const coordinate = `30040:${'a'.repeat(64)}:chapter-1`; const coordinate = `30040:${"a".repeat(64)}:chapter-1`;
expect(isNostrIdentifier(coordinate)).toBe(true); expect(isNostrIdentifier(coordinate)).toBe(true);
}); });
it('should reject invalid identifiers', () => { it("should reject invalid identifiers", () => {
expect(isNostrIdentifier('')).toBe(false); expect(isNostrIdentifier("")).toBe(false);
expect(isNostrIdentifier('abc')).toBe(false); expect(isNostrIdentifier("abc")).toBe(false);
expect(isNostrIdentifier('30040:abc:chapter-1')).toBe(false); expect(isNostrIdentifier("30040:abc:chapter-1")).toBe(false);
}); });
}); });
}); });

742
tests/unit/relayDeduplication.test.ts

@ -1,11 +1,11 @@
import { describe, it, expect, vi, beforeEach } from 'vitest'; import { beforeEach, describe, expect, it, vi } from "vitest";
import type { NDKEvent } from '@nostr-dev-kit/ndk'; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import { import {
deduplicateContentEvents,
deduplicateAndCombineEvents, deduplicateAndCombineEvents,
deduplicateContentEvents,
getEventCoordinate,
isReplaceableEvent, isReplaceableEvent,
getEventCoordinate } from "../../src/lib/utils/eventDeduplication";
} from '../../src/lib/utils/eventDeduplication';
// Mock NDKEvent for testing // Mock NDKEvent for testing
class MockNDKEvent { class MockNDKEvent {
@ -16,162 +16,264 @@ class MockNDKEvent {
content: string; content: string;
tags: string[][]; tags: string[][];
constructor(id: string, kind: number, pubkey: string, created_at: number, dTag: string, content: string = '') { constructor(
id: string,
kind: number,
pubkey: string,
created_at: number,
dTag: string,
content: string = "",
) {
this.id = id; this.id = id;
this.kind = kind; this.kind = kind;
this.pubkey = pubkey; this.pubkey = pubkey;
this.created_at = created_at; this.created_at = created_at;
this.content = content; this.content = content;
this.tags = [['d', dTag]]; this.tags = [["d", dTag]];
} }
tagValue(tagName: string): string | undefined { tagValue(tagName: string): string | undefined {
const tag = this.tags.find(t => t[0] === tagName); const tag = this.tags.find((t) => t[0] === tagName);
return tag ? tag[1] : undefined; return tag ? tag[1] : undefined;
} }
} }
describe('Relay Deduplication Behavior Tests', () => { describe("Relay Deduplication Behavior Tests", () => {
let mockEvents: MockNDKEvent[]; let mockEvents: MockNDKEvent[];
beforeEach(() => { beforeEach(() => {
// Create test events with different timestamps // Create test events with different timestamps
mockEvents = [ mockEvents = [
// Older version of a publication content event // Older version of a publication content event
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old content'), new MockNDKEvent(
"event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Old content",
),
// Newer version of the same publication content event // Newer version of the same publication content event
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'Updated content'), new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"Updated content",
),
// Different publication content event // Different publication content event
new MockNDKEvent('event3', 30041, 'pubkey1', 1500, 'chapter-2', 'Different content'), new MockNDKEvent(
"event3",
30041,
"pubkey1",
1500,
"chapter-2",
"Different content",
),
// Publication index event (should not be deduplicated) // Publication index event (should not be deduplicated)
new MockNDKEvent('event4', 30040, 'pubkey1', 1200, 'book-1', 'Index content'), new MockNDKEvent(
"event4",
30040,
"pubkey1",
1200,
"book-1",
"Index content",
),
// Regular text note (should not be deduplicated) // Regular text note (should not be deduplicated)
new MockNDKEvent('event5', 1, 'pubkey1', 1300, '', 'Regular note'), new MockNDKEvent("event5", 1, "pubkey1", 1300, "", "Regular note"),
]; ];
}); });
describe('Addressable Event Deduplication', () => { describe("Addressable Event Deduplication", () => {
it('should keep only the most recent version of addressable events by coordinate', () => { it("should keep only the most recent version of addressable events by coordinate", () => {
// Test the deduplication logic for content events // Test the deduplication logic for content events
const eventSets = [new Set(mockEvents.filter(e => e.kind === 30041) as NDKEvent[])]; const eventSets = [
new Set(mockEvents.filter((e) => e.kind === 30041) as NDKEvent[]),
];
const result = deduplicateContentEvents(eventSets); const result = deduplicateContentEvents(eventSets);
// Should have 2 unique coordinates: chapter-1 and chapter-2 // Should have 2 unique coordinates: chapter-1 and chapter-2
expect(result.size).toBe(2); expect(result.size).toBe(2);
// Should keep the newer version of chapter-1 // Should keep the newer version of chapter-1
const chapter1Event = result.get('30041:pubkey1:chapter-1'); const chapter1Event = result.get("30041:pubkey1:chapter-1");
expect(chapter1Event?.id).toBe('event2'); expect(chapter1Event?.id).toBe("event2");
expect(chapter1Event?.content).toBe('Updated content'); expect(chapter1Event?.content).toBe("Updated content");
// Should keep chapter-2 // Should keep chapter-2
const chapter2Event = result.get('30041:pubkey1:chapter-2'); const chapter2Event = result.get("30041:pubkey1:chapter-2");
expect(chapter2Event?.id).toBe('event3'); expect(chapter2Event?.id).toBe("event3");
}); });
it('should handle events with missing d-tags gracefully', () => { it("should handle events with missing d-tags gracefully", () => {
const eventWithoutDTag = new MockNDKEvent('event6', 30041, 'pubkey1', 1400, '', 'No d-tag'); const eventWithoutDTag = new MockNDKEvent(
"event6",
30041,
"pubkey1",
1400,
"",
"No d-tag",
);
eventWithoutDTag.tags = []; // Remove d-tag eventWithoutDTag.tags = []; // Remove d-tag
const eventSets = [new Set([eventWithoutDTag] as NDKEvent[])]; const eventSets = [new Set([eventWithoutDTag] as NDKEvent[])];
const result = deduplicateContentEvents(eventSets); const result = deduplicateContentEvents(eventSets);
// Should not include events without d-tags // Should not include events without d-tags
expect(result.size).toBe(0); expect(result.size).toBe(0);
}); });
it('should handle events with missing timestamps', () => { it("should handle events with missing timestamps", () => {
const eventWithoutTimestamp = new MockNDKEvent('event7', 30041, 'pubkey1', 0, 'chapter-3', 'No timestamp'); const eventWithoutTimestamp = new MockNDKEvent(
const eventWithTimestamp = new MockNDKEvent('event8', 30041, 'pubkey1', 1500, 'chapter-3', 'With timestamp'); "event7",
30041,
const eventSets = [new Set([eventWithoutTimestamp, eventWithTimestamp] as NDKEvent[])]; "pubkey1",
0,
"chapter-3",
"No timestamp",
);
const eventWithTimestamp = new MockNDKEvent(
"event8",
30041,
"pubkey1",
1500,
"chapter-3",
"With timestamp",
);
const eventSets = [
new Set([eventWithoutTimestamp, eventWithTimestamp] as NDKEvent[]),
];
const result = deduplicateContentEvents(eventSets); const result = deduplicateContentEvents(eventSets);
// Should prefer the event with timestamp // Should prefer the event with timestamp
const chapter3Event = result.get('30041:pubkey1:chapter-3'); const chapter3Event = result.get("30041:pubkey1:chapter-3");
expect(chapter3Event?.id).toBe('event8'); expect(chapter3Event?.id).toBe("event8");
}); });
}); });
describe('Mixed Event Type Deduplication', () => { describe("Mixed Event Type Deduplication", () => {
it('should only deduplicate addressable events (kinds 30000-39999)', () => { it("should only deduplicate addressable events (kinds 30000-39999)", () => {
const result = deduplicateAndCombineEvents( const result = deduplicateAndCombineEvents(
[mockEvents[4]] as NDKEvent[], // Regular text note [mockEvents[4]] as NDKEvent[], // Regular text note
new Set([mockEvents[3]] as NDKEvent[]), // Publication index new Set([mockEvents[3]] as NDKEvent[]), // Publication index
new Set([mockEvents[0], mockEvents[1], mockEvents[2]] as NDKEvent[]) // Content events new Set([mockEvents[0], mockEvents[1], mockEvents[2]] as NDKEvent[]), // Content events
); );
// Should have 4 events total: // Should have 4 events total:
// - 1 regular text note (not deduplicated) // - 1 regular text note (not deduplicated)
// - 1 publication index (not deduplicated) // - 1 publication index (not deduplicated)
// - 2 unique content events (deduplicated from 3) // - 2 unique content events (deduplicated from 3)
expect(result.length).toBe(4); expect(result.length).toBe(4);
// Verify the content events were deduplicated // Verify the content events were deduplicated
const contentEvents = result.filter(e => e.kind === 30041); const contentEvents = result.filter((e) => e.kind === 30041);
expect(contentEvents.length).toBe(2); expect(contentEvents.length).toBe(2);
// Verify the newer version was kept // Verify the newer version was kept
const newerEvent = contentEvents.find(e => e.id === 'event2'); const newerEvent = contentEvents.find((e) => e.id === "event2");
expect(newerEvent).toBeDefined(); expect(newerEvent).toBeDefined();
}); });
it('should handle non-addressable events correctly', () => { it("should handle non-addressable events correctly", () => {
const regularEvents = [ const regularEvents = [
new MockNDKEvent('note1', 1, 'pubkey1', 1000, '', 'Note 1'), new MockNDKEvent("note1", 1, "pubkey1", 1000, "", "Note 1"),
new MockNDKEvent('note2', 1, 'pubkey1', 2000, '', 'Note 2'), new MockNDKEvent("note2", 1, "pubkey1", 2000, "", "Note 2"),
new MockNDKEvent('profile1', 0, 'pubkey1', 1500, '', 'Profile 1'), new MockNDKEvent("profile1", 0, "pubkey1", 1500, "", "Profile 1"),
]; ];
const result = deduplicateAndCombineEvents( const result = deduplicateAndCombineEvents(
regularEvents as NDKEvent[], regularEvents as NDKEvent[],
new Set(), new Set(),
new Set() new Set(),
); );
// All regular events should be included (no deduplication) // All regular events should be included (no deduplication)
expect(result.length).toBe(3); expect(result.length).toBe(3);
}); });
}); });
describe('Coordinate System Validation', () => { describe("Coordinate System Validation", () => {
it('should correctly identify event coordinates', () => { it("should correctly identify event coordinates", () => {
const event = new MockNDKEvent('test', 30041, 'pubkey123', 1000, 'test-chapter'); const event = new MockNDKEvent(
"test",
30041,
"pubkey123",
1000,
"test-chapter",
);
const coordinate = getEventCoordinate(event as NDKEvent); const coordinate = getEventCoordinate(event as NDKEvent);
expect(coordinate).toBe('30041:pubkey123:test-chapter'); expect(coordinate).toBe("30041:pubkey123:test-chapter");
}); });
it('should handle d-tags with colons correctly', () => { it("should handle d-tags with colons correctly", () => {
const event = new MockNDKEvent('test', 30041, 'pubkey123', 1000, 'chapter:with:colons'); const event = new MockNDKEvent(
"test",
30041,
"pubkey123",
1000,
"chapter:with:colons",
);
const coordinate = getEventCoordinate(event as NDKEvent); const coordinate = getEventCoordinate(event as NDKEvent);
expect(coordinate).toBe('30041:pubkey123:chapter:with:colons'); expect(coordinate).toBe("30041:pubkey123:chapter:with:colons");
}); });
it('should return null for non-replaceable events', () => { it("should return null for non-replaceable events", () => {
const event = new MockNDKEvent('test', 1, 'pubkey123', 1000, ''); const event = new MockNDKEvent("test", 1, "pubkey123", 1000, "");
const coordinate = getEventCoordinate(event as NDKEvent); const coordinate = getEventCoordinate(event as NDKEvent);
expect(coordinate).toBeNull(); expect(coordinate).toBeNull();
}); });
}); });
describe('Replaceable Event Detection', () => { describe("Replaceable Event Detection", () => {
it('should correctly identify replaceable events', () => { it("should correctly identify replaceable events", () => {
const addressableEvent = new MockNDKEvent('test', 30041, 'pubkey123', 1000, 'test'); const addressableEvent = new MockNDKEvent(
const regularEvent = new MockNDKEvent('test', 1, 'pubkey123', 1000, ''); "test",
30041,
"pubkey123",
1000,
"test",
);
const regularEvent = new MockNDKEvent("test", 1, "pubkey123", 1000, "");
expect(isReplaceableEvent(addressableEvent as NDKEvent)).toBe(true); expect(isReplaceableEvent(addressableEvent as NDKEvent)).toBe(true);
expect(isReplaceableEvent(regularEvent as NDKEvent)).toBe(false); expect(isReplaceableEvent(regularEvent as NDKEvent)).toBe(false);
}); });
it('should handle edge cases of replaceable event ranges', () => { it("should handle edge cases of replaceable event ranges", () => {
const event29999 = new MockNDKEvent('test', 29999, 'pubkey123', 1000, 'test'); const event29999 = new MockNDKEvent(
const event30000 = new MockNDKEvent('test', 30000, 'pubkey123', 1000, 'test'); "test",
const event39999 = new MockNDKEvent('test', 39999, 'pubkey123', 1000, 'test'); 29999,
const event40000 = new MockNDKEvent('test', 40000, 'pubkey123', 1000, 'test'); "pubkey123",
1000,
"test",
);
const event30000 = new MockNDKEvent(
"test",
30000,
"pubkey123",
1000,
"test",
);
const event39999 = new MockNDKEvent(
"test",
39999,
"pubkey123",
1000,
"test",
);
const event40000 = new MockNDKEvent(
"test",
40000,
"pubkey123",
1000,
"test",
);
expect(isReplaceableEvent(event29999 as NDKEvent)).toBe(false); expect(isReplaceableEvent(event29999 as NDKEvent)).toBe(false);
expect(isReplaceableEvent(event30000 as NDKEvent)).toBe(true); expect(isReplaceableEvent(event30000 as NDKEvent)).toBe(true);
expect(isReplaceableEvent(event39999 as NDKEvent)).toBe(true); expect(isReplaceableEvent(event39999 as NDKEvent)).toBe(true);
@ -179,279 +281,429 @@ describe('Relay Deduplication Behavior Tests', () => {
}); });
}); });
describe('Edge Cases', () => { describe("Edge Cases", () => {
it('should handle empty event sets', () => { it("should handle empty event sets", () => {
const result = deduplicateContentEvents([]); const result = deduplicateContentEvents([]);
expect(result.size).toBe(0); expect(result.size).toBe(0);
}); });
it('should handle events with null/undefined values', () => { it("should handle events with null/undefined values", () => {
const invalidEvent = { const invalidEvent = {
id: undefined, id: undefined,
kind: 30041, kind: 30041,
pubkey: 'pubkey1', pubkey: "pubkey1",
created_at: 1000, created_at: 1000,
tagValue: () => undefined, // Return undefined for d-tag tagValue: () => undefined, // Return undefined for d-tag
} as unknown as NDKEvent; } as unknown as NDKEvent;
const eventSets = [new Set([invalidEvent])]; const eventSets = [new Set([invalidEvent])];
const result = deduplicateContentEvents(eventSets); const result = deduplicateContentEvents(eventSets);
// Should handle gracefully without crashing // Should handle gracefully without crashing
expect(result.size).toBe(0); expect(result.size).toBe(0);
}); });
it('should handle events from different authors with same d-tag', () => { it("should handle events from different authors with same d-tag", () => {
const event1 = new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'same-chapter', 'Author 1'); const event1 = new MockNDKEvent(
const event2 = new MockNDKEvent('event2', 30041, 'pubkey2', 1000, 'same-chapter', 'Author 2'); "event1",
30041,
"pubkey1",
1000,
"same-chapter",
"Author 1",
);
const event2 = new MockNDKEvent(
"event2",
30041,
"pubkey2",
1000,
"same-chapter",
"Author 2",
);
const eventSets = [new Set([event1, event2] as NDKEvent[])]; const eventSets = [new Set([event1, event2] as NDKEvent[])];
const result = deduplicateContentEvents(eventSets); const result = deduplicateContentEvents(eventSets);
// Should have 2 events (different coordinates due to different authors) // Should have 2 events (different coordinates due to different authors)
expect(result.size).toBe(2); expect(result.size).toBe(2);
expect(result.has('30041:pubkey1:same-chapter')).toBe(true); expect(result.has("30041:pubkey1:same-chapter")).toBe(true);
expect(result.has('30041:pubkey2:same-chapter')).toBe(true); expect(result.has("30041:pubkey2:same-chapter")).toBe(true);
}); });
}); });
}); });
describe('Relay Behavior Simulation', () => { describe("Relay Behavior Simulation", () => {
it('should simulate what happens when relays return duplicate events', () => { it("should simulate what happens when relays return duplicate events", () => {
// Simulate a relay that returns multiple versions of the same event // Simulate a relay that returns multiple versions of the same event
const relayEvents = [ const relayEvents = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old version'), new MockNDKEvent(
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'New version'), "event1",
new MockNDKEvent('event3', 30041, 'pubkey1', 1500, 'chapter-1', 'Middle version'), 30041,
"pubkey1",
1000,
"chapter-1",
"Old version",
),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"New version",
),
new MockNDKEvent(
"event3",
30041,
"pubkey1",
1500,
"chapter-1",
"Middle version",
),
]; ];
// This simulates what a "bad" relay might return // This simulates what a "bad" relay might return
const eventSets = [new Set(relayEvents as NDKEvent[])]; const eventSets = [new Set(relayEvents as NDKEvent[])];
const result = deduplicateContentEvents(eventSets); const result = deduplicateContentEvents(eventSets);
// Should only keep the newest version // Should only keep the newest version
expect(result.size).toBe(1); expect(result.size).toBe(1);
const keptEvent = result.get('30041:pubkey1:chapter-1'); const keptEvent = result.get("30041:pubkey1:chapter-1");
expect(keptEvent?.id).toBe('event2'); expect(keptEvent?.id).toBe("event2");
expect(keptEvent?.content).toBe('New version'); expect(keptEvent?.content).toBe("New version");
}); });
it('should simulate multiple relays returning different versions', () => { it("should simulate multiple relays returning different versions", () => {
// Simulate multiple relays returning different versions // Simulate multiple relays returning different versions
const relay1Events = [ const relay1Events = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Relay 1 version'), new MockNDKEvent(
"event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Relay 1 version",
),
]; ];
const relay2Events = [ const relay2Events = [
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'Relay 2 version'), new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"Relay 2 version",
),
];
const eventSets = [
new Set(relay1Events as NDKEvent[]),
new Set(relay2Events as NDKEvent[]),
]; ];
const eventSets = [new Set(relay1Events as NDKEvent[]), new Set(relay2Events as NDKEvent[])];
const result = deduplicateContentEvents(eventSets); const result = deduplicateContentEvents(eventSets);
// Should keep the newest version from any relay // Should keep the newest version from any relay
expect(result.size).toBe(1); expect(result.size).toBe(1);
const keptEvent = result.get('30041:pubkey1:chapter-1'); const keptEvent = result.get("30041:pubkey1:chapter-1");
expect(keptEvent?.id).toBe('event2'); expect(keptEvent?.id).toBe("event2");
expect(keptEvent?.content).toBe('Relay 2 version'); expect(keptEvent?.content).toBe("Relay 2 version");
}); });
}); });
describe('Real Relay Deduplication Tests', () => { describe("Real Relay Deduplication Tests", () => {
// These tests actually query real relays to see if they deduplicate // These tests actually query real relays to see if they deduplicate
// Note: These are integration tests and may be flaky due to network conditions // Note: These are integration tests and may be flaky due to network conditions
it('should detect if relays are returning duplicate replaceable events', async () => { it(
// This test queries real relays to see if they return duplicates "should detect if relays are returning duplicate replaceable events",
// We'll use a known author who has published multiple versions of content async () => {
// This test queries real relays to see if they return duplicates
// Known author with multiple publication content events // We'll use a known author who has published multiple versions of content
const testAuthor = 'npub1z4m7gkva6yxgvdyclc7zp0qt69x9zgn8lu8sllg06wx6432h77qs0k97ks';
// Known author with multiple publication content events
// Query for publication content events (kind 30041) from this author const testAuthor =
// We expect relays to return only the most recent version of each d-tag "npub1z4m7gkva6yxgvdyclc7zp0qt69x9zgn8lu8sllg06wx6432h77qs0k97ks";
// This is a placeholder - in a real test, we would: // Query for publication content events (kind 30041) from this author
// 1. Query multiple relays for the same author's 30041 events // We expect relays to return only the most recent version of each d-tag
// 2. Check if any relay returns multiple events with the same d-tag
// 3. Verify that if duplicates exist, our deduplication logic handles them // This is a placeholder - in a real test, we would:
// 1. Query multiple relays for the same author's 30041 events
console.log('Note: This test would require actual relay queries to verify deduplication behavior'); // 2. Check if any relay returns multiple events with the same d-tag
console.log('To run this test properly, we would need to:'); // 3. Verify that if duplicates exist, our deduplication logic handles them
console.log('1. Query real relays for replaceable events');
console.log('2. Check if relays return duplicates'); console.log(
console.log('3. Verify our deduplication logic works on real data'); "Note: This test would require actual relay queries to verify deduplication behavior",
);
// For now, we'll just assert that our logic is ready to handle real data console.log("To run this test properly, we would need to:");
expect(true).toBe(true); console.log("1. Query real relays for replaceable events");
}, 30000); // 30 second timeout for network requests console.log("2. Check if relays return duplicates");
console.log("3. Verify our deduplication logic works on real data");
it('should verify that our deduplication logic works on real relay data', async () => {
// This test would: // For now, we'll just assert that our logic is ready to handle real data
// 1. Fetch real events from relays expect(true).toBe(true);
// 2. Apply our deduplication logic },
// 3. Verify that the results are correct 30000,
); // 30 second timeout for network requests
console.log('Note: This test would require actual relay queries');
console.log('To implement this test, we would need to:'); it(
console.log('1. Set up NDK with real relays'); "should verify that our deduplication logic works on real relay data",
console.log('2. Fetch events for a known author with multiple versions'); async () => {
console.log('3. Apply deduplication and verify results'); // This test would:
// 1. Fetch real events from relays
expect(true).toBe(true); // 2. Apply our deduplication logic
}, 30000); // 3. Verify that the results are correct
console.log("Note: This test would require actual relay queries");
console.log("To implement this test, we would need to:");
console.log("1. Set up NDK with real relays");
console.log("2. Fetch events for a known author with multiple versions");
console.log("3. Apply deduplication and verify results");
expect(true).toBe(true);
},
30000,
);
}); });
describe('Practical Relay Behavior Analysis', () => { describe("Practical Relay Behavior Analysis", () => {
it('should document what we know about relay deduplication behavior', () => { it("should document what we know about relay deduplication behavior", () => {
// This test documents our current understanding of relay behavior // This test documents our current understanding of relay behavior
// based on the code analysis and the comment from onedev // based on the code analysis and the comment from onedev
console.log('\n=== RELAY DEDUPLICATION BEHAVIOR ANALYSIS ==='); console.log("\n=== RELAY DEDUPLICATION BEHAVIOR ANALYSIS ===");
console.log('\nBased on the code analysis and the comment from onedev:'); console.log("\nBased on the code analysis and the comment from onedev:");
console.log('\n1. THEORETICAL BEHAVIOR:'); console.log("\n1. THEORETICAL BEHAVIOR:");
console.log(' - Relays SHOULD handle deduplication for replaceable events'); console.log(
console.log(' - Only the most recent version of each coordinate should be stored'); " - Relays SHOULD handle deduplication for replaceable events",
console.log(' - Client-side deduplication should only be needed for cached/local events'); );
console.log(
console.log('\n2. REALITY CHECK:'); " - Only the most recent version of each coordinate should be stored",
console.log(' - Not all relays implement deduplication correctly'); );
console.log(' - Some relays may return multiple versions of the same event'); console.log(
console.log(' - Network conditions and relay availability can cause inconsistencies'); " - Client-side deduplication should only be needed for cached/local events",
);
console.log('\n3. ALEXANDRIA\'S APPROACH:');
console.log(' - Implements client-side deduplication as a safety net'); console.log("\n2. REALITY CHECK:");
console.log(' - Uses coordinate system (kind:pubkey:d-tag) for addressable events'); console.log(" - Not all relays implement deduplication correctly");
console.log(' - Keeps the most recent version based on created_at timestamp'); console.log(
console.log(' - Only applies to replaceable events (kinds 30000-39999)'); " - Some relays may return multiple versions of the same event",
);
console.log('\n4. WHY KEEP THE DEDUPLICATION:'); console.log(
console.log(' - Defensive programming against imperfect relay implementations'); " - Network conditions and relay availability can cause inconsistencies",
console.log(' - Handles multiple relay sources with different data'); );
console.log(' - Works with cached events that might be outdated');
console.log(' - Ensures consistent user experience regardless of relay behavior'); console.log("\n3. ALEXANDRIA'S APPROACH:");
console.log(" - Implements client-side deduplication as a safety net");
console.log('\n5. TESTING STRATEGY:'); console.log(
console.log(' - Unit tests verify our deduplication logic works correctly'); " - Uses coordinate system (kind:pubkey:d-tag) for addressable events",
console.log(' - Integration tests would verify relay behavior (when network allows)'); );
console.log(' - Monitoring can help determine if relays improve over time'); console.log(
" - Keeps the most recent version based on created_at timestamp",
);
console.log(" - Only applies to replaceable events (kinds 30000-39999)");
console.log("\n4. WHY KEEP THE DEDUPLICATION:");
console.log(
" - Defensive programming against imperfect relay implementations",
);
console.log(" - Handles multiple relay sources with different data");
console.log(" - Works with cached events that might be outdated");
console.log(
" - Ensures consistent user experience regardless of relay behavior",
);
console.log("\n5. TESTING STRATEGY:");
console.log(
" - Unit tests verify our deduplication logic works correctly",
);
console.log(
" - Integration tests would verify relay behavior (when network allows)",
);
console.log(
" - Monitoring can help determine if relays improve over time",
);
// This test documents our understanding rather than asserting specific behavior // This test documents our understanding rather than asserting specific behavior
expect(true).toBe(true); expect(true).toBe(true);
}); });
it('should provide recommendations for when to remove deduplication', () => { it("should provide recommendations for when to remove deduplication", () => {
console.log('\n=== RECOMMENDATIONS FOR REMOVING DEDUPLICATION ==='); console.log("\n=== RECOMMENDATIONS FOR REMOVING DEDUPLICATION ===");
console.log('\nThe deduplication logic should be kept until:'); console.log("\nThe deduplication logic should be kept until:");
console.log('\n1. RELAY STANDARDS:'); console.log("\n1. RELAY STANDARDS:");
console.log(' - NIP-33 (replaceable events) is widely implemented by relays'); console.log(
console.log(' - Relays consistently return only the most recent version'); " - NIP-33 (replaceable events) is widely implemented by relays",
console.log(' - No major relay implementations return duplicates'); );
console.log(" - Relays consistently return only the most recent version");
console.log('\n2. TESTING EVIDENCE:'); console.log(" - No major relay implementations return duplicates");
console.log(' - Real-world testing shows relays don\'t return duplicates');
console.log(' - Multiple relay operators confirm deduplication behavior'); console.log("\n2. TESTING EVIDENCE:");
console.log(' - No user reports of duplicate content issues'); console.log(" - Real-world testing shows relays don't return duplicates");
console.log(" - Multiple relay operators confirm deduplication behavior");
console.log('\n3. MONITORING:'); console.log(" - No user reports of duplicate content issues");
console.log(' - Add logging to track when deduplication is actually used');
console.log(' - Monitor relay behavior over time'); console.log("\n3. MONITORING:");
console.log(' - Collect metrics on duplicate events found'); console.log(
" - Add logging to track when deduplication is actually used",
console.log('\n4. GRADUAL REMOVAL:'); );
console.log(' - Make deduplication configurable (on/off)'); console.log(" - Monitor relay behavior over time");
console.log(' - Test with deduplication disabled in controlled environments'); console.log(" - Collect metrics on duplicate events found");
console.log(' - Monitor for issues before removing completely');
console.log("\n4. GRADUAL REMOVAL:");
console.log('\n5. FALLBACK STRATEGY:'); console.log(" - Make deduplication configurable (on/off)");
console.log(' - Keep deduplication as a fallback option'); console.log(
console.log(' - Allow users to enable it if they experience issues'); " - Test with deduplication disabled in controlled environments",
console.log(' - Maintain the code for potential future use'); );
console.log(" - Monitor for issues before removing completely");
console.log("\n5. FALLBACK STRATEGY:");
console.log(" - Keep deduplication as a fallback option");
console.log(" - Allow users to enable it if they experience issues");
console.log(" - Maintain the code for potential future use");
expect(true).toBe(true); expect(true).toBe(true);
}); });
}); });
describe('Logging and Monitoring Tests', () => { describe("Logging and Monitoring Tests", () => {
it('should verify that logging works when duplicates are found', () => { it("should verify that logging works when duplicates are found", () => {
// Mock console.log to capture output // Mock console.log to capture output
const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
// Create events with duplicates // Create events with duplicates
const duplicateEvents = [ const duplicateEvents = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old version'), new MockNDKEvent(
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'New version'), "event1",
new MockNDKEvent('event3', 30041, 'pubkey1', 1500, 'chapter-1', 'Middle version'), 30041,
"pubkey1",
1000,
"chapter-1",
"Old version",
),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"New version",
),
new MockNDKEvent(
"event3",
30041,
"pubkey1",
1500,
"chapter-1",
"Middle version",
),
]; ];
const eventSets = [new Set(duplicateEvents as NDKEvent[])]; const eventSets = [new Set(duplicateEvents as NDKEvent[])];
const result = deduplicateContentEvents(eventSets); const result = deduplicateContentEvents(eventSets);
// Verify the deduplication worked // Verify the deduplication worked
expect(result.size).toBe(1); expect(result.size).toBe(1);
// Verify that logging was called // Verify that logging was called
expect(consoleSpy).toHaveBeenCalledWith( expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('[eventDeduplication] Found 2 duplicate events out of 3 total events') expect.stringContaining(
"[eventDeduplication] Found 2 duplicate events out of 3 total events",
),
); );
expect(consoleSpy).toHaveBeenCalledWith( expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('[eventDeduplication] Reduced to 1 unique coordinates') expect.stringContaining(
"[eventDeduplication] Reduced to 1 unique coordinates",
),
); );
// Restore console.log // Restore console.log
consoleSpy.mockRestore(); consoleSpy.mockRestore();
}); });
it('should verify that logging works when no duplicates are found', () => { it("should verify that logging works when no duplicates are found", () => {
// Mock console.log to capture output // Mock console.log to capture output
const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
// Create events without duplicates // Create events without duplicates
const uniqueEvents = [ const uniqueEvents = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Content 1'), new MockNDKEvent(
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-2', 'Content 2'), "event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Content 1",
),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-2",
"Content 2",
),
]; ];
const eventSets = [new Set(uniqueEvents as NDKEvent[])]; const eventSets = [new Set(uniqueEvents as NDKEvent[])];
const result = deduplicateContentEvents(eventSets); const result = deduplicateContentEvents(eventSets);
// Verify no deduplication was needed // Verify no deduplication was needed
expect(result.size).toBe(2); expect(result.size).toBe(2);
// Verify that logging was called with "no duplicates" message // Verify that logging was called with "no duplicates" message
expect(consoleSpy).toHaveBeenCalledWith( expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('[eventDeduplication] No duplicates found in 2 events') expect.stringContaining(
"[eventDeduplication] No duplicates found in 2 events",
),
); );
// Restore console.log // Restore console.log
consoleSpy.mockRestore(); consoleSpy.mockRestore();
}); });
it('should verify that deduplicateAndCombineEvents logging works', () => { it("should verify that deduplicateAndCombineEvents logging works", () => {
// Mock console.log to capture output // Mock console.log to capture output
const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
// Create events with duplicates // Create events with duplicates
const duplicateEvents = [ const duplicateEvents = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old version'), new MockNDKEvent(
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'New version'), "event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Old version",
),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"New version",
),
]; ];
const result = deduplicateAndCombineEvents( const result = deduplicateAndCombineEvents(
[] as NDKEvent[], [] as NDKEvent[],
new Set(), new Set(),
new Set(duplicateEvents as NDKEvent[]) new Set(duplicateEvents as NDKEvent[]),
); );
// Verify the deduplication worked // Verify the deduplication worked
expect(result.length).toBe(1); expect(result.length).toBe(1);
// Verify that logging was called // Verify that logging was called
expect(consoleSpy).toHaveBeenCalledWith( expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('[eventDeduplication] deduplicateAndCombineEvents: Found 1 duplicate coordinates') expect.stringContaining(
"[eventDeduplication] deduplicateAndCombineEvents: Found 1 duplicate coordinates",
),
); );
// Restore console.log // Restore console.log
consoleSpy.mockRestore(); consoleSpy.mockRestore();
}); });
}); });

353
tests/unit/tagExpansion.test.ts

@ -1,11 +1,11 @@
import { describe, it, expect, vi, beforeEach } from 'vitest'; import { beforeEach, describe, expect, it, vi } from "vitest";
import type { NDKEvent } from '@nostr-dev-kit/ndk'; import type { NDKEvent } from "@nostr-dev-kit/ndk";
import { import {
fetchProfilesForNewEvents,
fetchTaggedEventsFromRelays, fetchTaggedEventsFromRelays,
findTaggedEventsInFetched, findTaggedEventsInFetched,
fetchProfilesForNewEvents, type TagExpansionResult,
type TagExpansionResult } from "../../src/lib/utils/tag_event_fetch";
} from '../../src/lib/utils/tag_event_fetch';
// Mock NDKEvent for testing // Mock NDKEvent for testing
class MockNDKEvent { class MockNDKEvent {
@ -16,7 +16,14 @@ class MockNDKEvent {
content: string; content: string;
tags: string[][]; tags: string[][];
constructor(id: string, kind: number, pubkey: string, created_at: number, content: string = '', tags: string[][] = []) { constructor(
id: string,
kind: number,
pubkey: string,
created_at: number,
content: string = "",
tags: string[][] = [],
) {
this.id = id; this.id = id;
this.kind = kind; this.kind = kind;
this.pubkey = pubkey; this.pubkey = pubkey;
@ -26,151 +33,192 @@ class MockNDKEvent {
} }
tagValue(tagName: string): string | undefined { tagValue(tagName: string): string | undefined {
const tag = this.tags.find(t => t[0] === tagName); const tag = this.tags.find((t) => t[0] === tagName);
return tag ? tag[1] : undefined; return tag ? tag[1] : undefined;
} }
getMatchingTags(tagName: string): string[][] { getMatchingTags(tagName: string): string[][] {
return this.tags.filter(tag => tag[0] === tagName); return this.tags.filter((tag) => tag[0] === tagName);
} }
} }
// Mock NDK instance // Mock NDK instance
const mockNDK = { const mockNDK = {
fetchEvents: vi.fn() fetchEvents: vi.fn(),
}; };
// Mock the ndkInstance store // Mock the ndkInstance store
vi.mock('../../src/lib/ndk', () => ({ vi.mock("../../src/lib/ndk", () => ({
ndkInstance: { ndkInstance: {
subscribe: vi.fn((fn) => { subscribe: vi.fn((fn) => {
fn(mockNDK); fn(mockNDK);
return { unsubscribe: vi.fn() }; return { unsubscribe: vi.fn() };
}) }),
} },
})); }));
// Mock the profile cache utilities // Mock the profile cache utilities
vi.mock('../../src/lib/utils/profileCache', () => ({ vi.mock("../../src/lib/utils/profileCache", () => ({
extractPubkeysFromEvents: vi.fn((events: NDKEvent[]) => { extractPubkeysFromEvents: vi.fn((events: NDKEvent[]) => {
const pubkeys = new Set<string>(); const pubkeys = new Set<string>();
events.forEach(event => { events.forEach((event) => {
if (event.pubkey) pubkeys.add(event.pubkey); if (event.pubkey) pubkeys.add(event.pubkey);
}); });
return pubkeys; return pubkeys;
}), }),
batchFetchProfiles: vi.fn(async (pubkeys: string[], onProgress: (fetched: number, total: number) => void) => { batchFetchProfiles: vi.fn(
// Simulate progress updates async (
onProgress(0, pubkeys.length); pubkeys: string[],
onProgress(pubkeys.length, pubkeys.length); onProgress: (fetched: number, total: number) => void,
return []; ) => {
}) // Simulate progress updates
onProgress(0, pubkeys.length);
onProgress(pubkeys.length, pubkeys.length);
return [];
},
),
})); }));
describe('Tag Expansion Tests', () => { describe("Tag Expansion Tests", () => {
let mockPublications: MockNDKEvent[]; let mockPublications: MockNDKEvent[];
let mockContentEvents: MockNDKEvent[]; let mockContentEvents: MockNDKEvent[];
let mockAllEvents: MockNDKEvent[]; let mockAllEvents: MockNDKEvent[];
beforeEach(() => { beforeEach(() => {
vi.clearAllMocks(); vi.clearAllMocks();
// Create test publication index events (kind 30040) // Create test publication index events (kind 30040)
mockPublications = [ mockPublications = [
new MockNDKEvent('pub1', 30040, 'author1', 1000, 'Book 1', [ new MockNDKEvent("pub1", 30040, "author1", 1000, "Book 1", [
['t', 'bitcoin'], ["t", "bitcoin"],
['t', 'cryptocurrency'], ["t", "cryptocurrency"],
['a', '30041:author1:chapter-1'], ["a", "30041:author1:chapter-1"],
['a', '30041:author1:chapter-2'] ["a", "30041:author1:chapter-2"],
]),
new MockNDKEvent("pub2", 30040, "author2", 1100, "Book 2", [
["t", "bitcoin"],
["t", "blockchain"],
["a", "30041:author2:chapter-1"],
]), ]),
new MockNDKEvent('pub2', 30040, 'author2', 1100, 'Book 2', [ new MockNDKEvent("pub3", 30040, "author3", 1200, "Book 3", [
['t', 'bitcoin'], ["t", "ethereum"],
['t', 'blockchain'], ["a", "30041:author3:chapter-1"],
['a', '30041:author2:chapter-1']
]), ]),
new MockNDKEvent('pub3', 30040, 'author3', 1200, 'Book 3', [
['t', 'ethereum'],
['a', '30041:author3:chapter-1']
])
]; ];
// Create test content events (kind 30041) // Create test content events (kind 30041)
mockContentEvents = [ mockContentEvents = [
new MockNDKEvent('content1', 30041, 'author1', 1000, 'Chapter 1 content', [['d', 'chapter-1']]), new MockNDKEvent(
new MockNDKEvent('content2', 30041, 'author1', 1100, 'Chapter 2 content', [['d', 'chapter-2']]), "content1",
new MockNDKEvent('content3', 30041, 'author2', 1200, 'Author 2 Chapter 1', [['d', 'chapter-1']]), 30041,
new MockNDKEvent('content4', 30041, 'author3', 1300, 'Author 3 Chapter 1', [['d', 'chapter-1']]) "author1",
1000,
"Chapter 1 content",
[["d", "chapter-1"]],
),
new MockNDKEvent(
"content2",
30041,
"author1",
1100,
"Chapter 2 content",
[["d", "chapter-2"]],
),
new MockNDKEvent(
"content3",
30041,
"author2",
1200,
"Author 2 Chapter 1",
[["d", "chapter-1"]],
),
new MockNDKEvent(
"content4",
30041,
"author3",
1300,
"Author 3 Chapter 1",
[["d", "chapter-1"]],
),
]; ];
// Combine all events for testing // Combine all events for testing
mockAllEvents = [...mockPublications, ...mockContentEvents]; mockAllEvents = [...mockPublications, ...mockContentEvents];
}); });
describe('fetchTaggedEventsFromRelays', () => { describe("fetchTaggedEventsFromRelays", () => {
it('should fetch publications with matching tags from relays', async () => { it("should fetch publications with matching tags from relays", async () => {
// Mock the NDK fetch to return publications with 'bitcoin' tag // Mock the NDK fetch to return publications with 'bitcoin' tag
const bitcoinPublications = mockPublications.filter(pub => const bitcoinPublications = mockPublications.filter((pub) =>
pub.tags.some(tag => tag[0] === 't' && tag[1] === 'bitcoin') pub.tags.some((tag) => tag[0] === "t" && tag[1] === "bitcoin")
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(bitcoinPublications as NDKEvent[]),
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(mockContentEvents as NDKEvent[]),
); );
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(bitcoinPublications as NDKEvent[]));
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockContentEvents as NDKEvent[]));
const existingEventIds = new Set<string>(['existing-event']); const existingEventIds = new Set<string>(["existing-event"]);
const baseEvents: NDKEvent[] = []; const baseEvents: NDKEvent[] = [];
const debug = vi.fn(); const debug = vi.fn();
const result = await fetchTaggedEventsFromRelays( const result = await fetchTaggedEventsFromRelays(
['bitcoin'], ["bitcoin"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
// Should fetch publications with bitcoin tag // Should fetch publications with bitcoin tag
expect(mockNDK.fetchEvents).toHaveBeenCalledWith({ expect(mockNDK.fetchEvents).toHaveBeenCalledWith({
kinds: [30040], kinds: [30040],
"#t": ['bitcoin'], "#t": ["bitcoin"],
limit: 30 limit: 30,
}); });
// Should return the matching publications // Should return the matching publications
expect(result.publications).toHaveLength(2); expect(result.publications).toHaveLength(2);
expect(result.publications.map(p => p.id)).toContain('pub1'); expect(result.publications.map((p) => p.id)).toContain("pub1");
expect(result.publications.map(p => p.id)).toContain('pub2'); expect(result.publications.map((p) => p.id)).toContain("pub2");
// Should fetch content events for the publications // Should fetch content events for the publications
expect(mockNDK.fetchEvents).toHaveBeenCalledWith({ expect(mockNDK.fetchEvents).toHaveBeenCalledWith({
kinds: [30041, 30818], kinds: [30041, 30818],
"#d": ['chapter-1', 'chapter-2'] "#d": ["chapter-1", "chapter-2"],
}); });
}); });
it('should filter out existing events to avoid duplicates', async () => { it("should filter out existing events to avoid duplicates", async () => {
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockPublications as NDKEvent[])); mockNDK.fetchEvents.mockResolvedValueOnce(
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockContentEvents as NDKEvent[])); new Set(mockPublications as NDKEvent[]),
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(mockContentEvents as NDKEvent[]),
);
const existingEventIds = new Set<string>(['pub1']); // pub1 already exists const existingEventIds = new Set<string>(["pub1"]); // pub1 already exists
const baseEvents: NDKEvent[] = []; const baseEvents: NDKEvent[] = [];
const debug = vi.fn(); const debug = vi.fn();
const result = await fetchTaggedEventsFromRelays( const result = await fetchTaggedEventsFromRelays(
['bitcoin'], ["bitcoin"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
// Should exclude pub1 since it already exists // Should exclude pub1 since it already exists
expect(result.publications).toHaveLength(2); expect(result.publications).toHaveLength(2);
expect(result.publications.map(p => p.id)).not.toContain('pub1'); expect(result.publications.map((p) => p.id)).not.toContain("pub1");
expect(result.publications.map(p => p.id)).toContain('pub2'); expect(result.publications.map((p) => p.id)).toContain("pub2");
expect(result.publications.map(p => p.id)).toContain('pub3'); expect(result.publications.map((p) => p.id)).toContain("pub3");
}); });
it('should handle empty tag array gracefully', async () => { it("should handle empty tag array gracefully", async () => {
// Mock empty result for empty tags // Mock empty result for empty tags
mockNDK.fetchEvents.mockResolvedValueOnce(new Set()); mockNDK.fetchEvents.mockResolvedValueOnce(new Set());
const existingEventIds = new Set<string>(); const existingEventIds = new Set<string>();
const baseEvents: NDKEvent[] = []; const baseEvents: NDKEvent[] = [];
const debug = vi.fn(); const debug = vi.fn();
@ -179,7 +227,7 @@ describe('Tag Expansion Tests', () => {
[], [],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
expect(result.publications).toHaveLength(0); expect(result.publications).toHaveLength(0);
@ -187,95 +235,101 @@ describe('Tag Expansion Tests', () => {
}); });
}); });
describe('findTaggedEventsInFetched', () => { describe("findTaggedEventsInFetched", () => {
it('should find publications with matching tags in already fetched events', () => { it("should find publications with matching tags in already fetched events", () => {
const existingEventIds = new Set<string>(['existing-event']); const existingEventIds = new Set<string>(["existing-event"]);
const baseEvents: NDKEvent[] = []; const baseEvents: NDKEvent[] = [];
const debug = vi.fn(); const debug = vi.fn();
const result = findTaggedEventsInFetched( const result = findTaggedEventsInFetched(
mockAllEvents as NDKEvent[], mockAllEvents as NDKEvent[],
['bitcoin'], ["bitcoin"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
// Should find publications with bitcoin tag // Should find publications with bitcoin tag
expect(result.publications).toHaveLength(2); expect(result.publications).toHaveLength(2);
expect(result.publications.map(p => p.id)).toContain('pub1'); expect(result.publications.map((p) => p.id)).toContain("pub1");
expect(result.publications.map(p => p.id)).toContain('pub2'); expect(result.publications.map((p) => p.id)).toContain("pub2");
// Should find content events for those publications // Should find content events for those publications
expect(result.contentEvents).toHaveLength(4); expect(result.contentEvents).toHaveLength(4);
expect(result.contentEvents.map(c => c.id)).toContain('content1'); expect(result.contentEvents.map((c) => c.id)).toContain("content1");
expect(result.contentEvents.map(c => c.id)).toContain('content2'); expect(result.contentEvents.map((c) => c.id)).toContain("content2");
expect(result.contentEvents.map(c => c.id)).toContain('content3'); expect(result.contentEvents.map((c) => c.id)).toContain("content3");
expect(result.contentEvents.map(c => c.id)).toContain('content4'); expect(result.contentEvents.map((c) => c.id)).toContain("content4");
}); });
it('should exclude base events from search results', () => { it("should exclude base events from search results", () => {
const existingEventIds = new Set<string>(['pub1']); // pub1 is a base event const existingEventIds = new Set<string>(["pub1"]); // pub1 is a base event
const baseEvents: NDKEvent[] = []; const baseEvents: NDKEvent[] = [];
const debug = vi.fn(); const debug = vi.fn();
const result = findTaggedEventsInFetched( const result = findTaggedEventsInFetched(
mockAllEvents as NDKEvent[], mockAllEvents as NDKEvent[],
['bitcoin'], ["bitcoin"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
// Should exclude pub1 since it's a base event // Should exclude pub1 since it's a base event
expect(result.publications).toHaveLength(1); expect(result.publications).toHaveLength(1);
expect(result.publications.map(p => p.id)).not.toContain('pub1'); expect(result.publications.map((p) => p.id)).not.toContain("pub1");
expect(result.publications.map(p => p.id)).toContain('pub2'); expect(result.publications.map((p) => p.id)).toContain("pub2");
}); });
it('should handle multiple tags (OR logic)', () => { it("should handle multiple tags (OR logic)", () => {
const existingEventIds = new Set<string>(); const existingEventIds = new Set<string>();
const baseEvents: NDKEvent[] = []; const baseEvents: NDKEvent[] = [];
const debug = vi.fn(); const debug = vi.fn();
const result = findTaggedEventsInFetched( const result = findTaggedEventsInFetched(
mockAllEvents as NDKEvent[], mockAllEvents as NDKEvent[],
['bitcoin', 'ethereum'], ["bitcoin", "ethereum"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
// Should find publications with either bitcoin OR ethereum tags // Should find publications with either bitcoin OR ethereum tags
expect(result.publications).toHaveLength(3); expect(result.publications).toHaveLength(3);
expect(result.publications.map(p => p.id)).toContain('pub1'); // bitcoin expect(result.publications.map((p) => p.id)).toContain("pub1"); // bitcoin
expect(result.publications.map(p => p.id)).toContain('pub2'); // bitcoin expect(result.publications.map((p) => p.id)).toContain("pub2"); // bitcoin
expect(result.publications.map(p => p.id)).toContain('pub3'); // ethereum expect(result.publications.map((p) => p.id)).toContain("pub3"); // ethereum
}); });
it('should handle events without tags gracefully', () => { it("should handle events without tags gracefully", () => {
const eventWithoutTags = new MockNDKEvent('no-tags', 30040, 'author4', 1000, 'No tags'); const eventWithoutTags = new MockNDKEvent(
"no-tags",
30040,
"author4",
1000,
"No tags",
);
const allEventsWithNoTags = [...mockAllEvents, eventWithoutTags]; const allEventsWithNoTags = [...mockAllEvents, eventWithoutTags];
const existingEventIds = new Set<string>(); const existingEventIds = new Set<string>();
const baseEvents: NDKEvent[] = []; const baseEvents: NDKEvent[] = [];
const debug = vi.fn(); const debug = vi.fn();
const result = findTaggedEventsInFetched( const result = findTaggedEventsInFetched(
allEventsWithNoTags as NDKEvent[], allEventsWithNoTags as NDKEvent[],
['bitcoin'], ["bitcoin"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
// Should not include events without tags // Should not include events without tags
expect(result.publications.map(p => p.id)).not.toContain('no-tags'); expect(result.publications.map((p) => p.id)).not.toContain("no-tags");
}); });
}); });
describe('fetchProfilesForNewEvents', () => { describe("fetchProfilesForNewEvents", () => {
it('should extract pubkeys and fetch profiles for new events', async () => { it("should extract pubkeys and fetch profiles for new events", async () => {
const onProgressUpdate = vi.fn(); const onProgressUpdate = vi.fn();
const debug = vi.fn(); const debug = vi.fn();
@ -283,7 +337,7 @@ describe('Tag Expansion Tests', () => {
mockPublications as NDKEvent[], mockPublications as NDKEvent[],
mockContentEvents as NDKEvent[], mockContentEvents as NDKEvent[],
onProgressUpdate, onProgressUpdate,
debug debug,
); );
// Should call progress update with initial state // Should call progress update with initial state
@ -296,7 +350,7 @@ describe('Tag Expansion Tests', () => {
expect(onProgressUpdate).toHaveBeenCalledWith(null); expect(onProgressUpdate).toHaveBeenCalledWith(null);
}); });
it('should handle empty event arrays gracefully', async () => { it("should handle empty event arrays gracefully", async () => {
const onProgressUpdate = vi.fn(); const onProgressUpdate = vi.fn();
const debug = vi.fn(); const debug = vi.fn();
@ -304,7 +358,7 @@ describe('Tag Expansion Tests', () => {
[], [],
[], [],
onProgressUpdate, onProgressUpdate,
debug debug,
); );
// Should not call progress update for empty arrays // Should not call progress update for empty arrays
@ -312,27 +366,31 @@ describe('Tag Expansion Tests', () => {
}); });
}); });
describe('Tag Expansion Integration', () => { describe("Tag Expansion Integration", () => {
it('should demonstrate the complete tag expansion flow', async () => { it("should demonstrate the complete tag expansion flow", async () => {
// This test simulates the complete flow from the visualize page // This test simulates the complete flow from the visualize page
// Step 1: Mock relay fetch for 'bitcoin' tag // Step 1: Mock relay fetch for 'bitcoin' tag
const bitcoinPublications = mockPublications.filter(pub => const bitcoinPublications = mockPublications.filter((pub) =>
pub.tags.some(tag => tag[0] === 't' && tag[1] === 'bitcoin') pub.tags.some((tag) => tag[0] === "t" && tag[1] === "bitcoin")
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(bitcoinPublications as NDKEvent[]),
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(mockContentEvents as NDKEvent[]),
); );
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(bitcoinPublications as NDKEvent[]));
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockContentEvents as NDKEvent[]));
const existingEventIds = new Set<string>(['base-event']); const existingEventIds = new Set<string>(["base-event"]);
const baseEvents: NDKEvent[] = []; const baseEvents: NDKEvent[] = [];
const debug = vi.fn(); const debug = vi.fn();
// Step 2: Fetch from relays // Step 2: Fetch from relays
const relayResult = await fetchTaggedEventsFromRelays( const relayResult = await fetchTaggedEventsFromRelays(
['bitcoin'], ["bitcoin"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
expect(relayResult.publications).toHaveLength(2); expect(relayResult.publications).toHaveLength(2);
@ -341,10 +399,10 @@ describe('Tag Expansion Tests', () => {
// Step 3: Search in fetched events // Step 3: Search in fetched events
const searchResult = findTaggedEventsInFetched( const searchResult = findTaggedEventsInFetched(
mockAllEvents as NDKEvent[], mockAllEvents as NDKEvent[],
['bitcoin'], ["bitcoin"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
expect(searchResult.publications).toHaveLength(2); expect(searchResult.publications).toHaveLength(2);
@ -356,20 +414,27 @@ describe('Tag Expansion Tests', () => {
relayResult.publications, relayResult.publications,
relayResult.contentEvents, relayResult.contentEvents,
onProgressUpdate, onProgressUpdate,
debug debug,
); );
expect(onProgressUpdate).toHaveBeenCalledWith(null); expect(onProgressUpdate).toHaveBeenCalledWith(null);
}); });
}); });
describe('Edge Cases and Error Handling', () => { describe("Edge Cases and Error Handling", () => {
it('should handle malformed a-tags gracefully', () => { it("should handle malformed a-tags gracefully", () => {
const malformedPublication = new MockNDKEvent('malformed', 30040, 'author1', 1000, 'Malformed', [ const malformedPublication = new MockNDKEvent(
['t', 'bitcoin'], "malformed",
['a', 'invalid-tag-format'], // Missing parts 30040,
['a', '30041:author1:chapter-1'] // Valid format "author1",
]); 1000,
"Malformed",
[
["t", "bitcoin"],
["a", "invalid-tag-format"], // Missing parts
["a", "30041:author1:chapter-1"], // Valid format
],
);
const allEventsWithMalformed = [...mockAllEvents, malformedPublication]; const allEventsWithMalformed = [...mockAllEvents, malformedPublication];
const existingEventIds = new Set<string>(); const existingEventIds = new Set<string>();
@ -378,10 +443,10 @@ describe('Tag Expansion Tests', () => {
const result = findTaggedEventsInFetched( const result = findTaggedEventsInFetched(
allEventsWithMalformed as NDKEvent[], allEventsWithMalformed as NDKEvent[],
['bitcoin'], ["bitcoin"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
// Should still work and include the publication with valid a-tags // Should still work and include the publication with valid a-tags
@ -389,32 +454,50 @@ describe('Tag Expansion Tests', () => {
expect(result.contentEvents.length).toBeGreaterThan(0); expect(result.contentEvents.length).toBeGreaterThan(0);
}); });
it('should handle events with d-tags containing colons', () => { it("should handle events with d-tags containing colons", () => {
const publicationWithColonDTag = new MockNDKEvent('colon-pub', 30040, 'author1', 1000, 'Colon d-tag', [ const publicationWithColonDTag = new MockNDKEvent(
['t', 'bitcoin'], "colon-pub",
['a', '30041:author1:chapter:with:colons'] 30040,
]); "author1",
1000,
"Colon d-tag",
[
["t", "bitcoin"],
["a", "30041:author1:chapter:with:colons"],
],
);
const contentWithColonDTag = new MockNDKEvent('colon-content', 30041, 'author1', 1100, 'Content with colon d-tag', [ const contentWithColonDTag = new MockNDKEvent(
['d', 'chapter:with:colons'] "colon-content",
]); 30041,
"author1",
1100,
"Content with colon d-tag",
[
["d", "chapter:with:colons"],
],
);
const allEventsWithColons = [...mockAllEvents, publicationWithColonDTag, contentWithColonDTag]; const allEventsWithColons = [
...mockAllEvents,
publicationWithColonDTag,
contentWithColonDTag,
];
const existingEventIds = new Set<string>(); const existingEventIds = new Set<string>();
const baseEvents: NDKEvent[] = []; const baseEvents: NDKEvent[] = [];
const debug = vi.fn(); const debug = vi.fn();
const result = findTaggedEventsInFetched( const result = findTaggedEventsInFetched(
allEventsWithColons as NDKEvent[], allEventsWithColons as NDKEvent[],
['bitcoin'], ["bitcoin"],
existingEventIds, existingEventIds,
baseEvents, baseEvents,
debug debug,
); );
// Should handle d-tags with colons correctly // Should handle d-tags with colons correctly
expect(result.publications).toHaveLength(3); expect(result.publications).toHaveLength(3);
expect(result.contentEvents.map(c => c.id)).toContain('colon-content'); expect(result.contentEvents.map((c) => c.id)).toContain("colon-content");
}); });
}); });
}); });

8
vite.config.ts

@ -43,18 +43,20 @@ export default defineConfig({
// Expose the app version as a global variable // Expose the app version as a global variable
"import.meta.env.APP_VERSION": JSON.stringify(getAppVersionString()), "import.meta.env.APP_VERSION": JSON.stringify(getAppVersionString()),
// Enable debug logging for relays when needed // Enable debug logging for relays when needed
"process.env.DEBUG_RELAYS": JSON.stringify(process.env.DEBUG_RELAYS || "false"), "process.env.DEBUG_RELAYS": JSON.stringify(
process.env.DEBUG_RELAYS || "false",
),
}, },
optimizeDeps: { optimizeDeps: {
esbuildOptions: { esbuildOptions: {
define: { define: {
global: 'globalThis', global: "globalThis",
}, },
}, },
}, },
server: { server: {
fs: { fs: {
allow: ['..'], allow: [".."],
}, },
hmr: { hmr: {
overlay: false, // Disable HMR overlay to prevent ESM URL scheme errors overlay: false, // Disable HMR overlay to prevent ESM URL scheme errors

Loading…
Cancel
Save