Browse Source

Refactor markup generator for embedded events into Svelte snippets

- Eliminate a component that is no longer needed.
- Reduce duplicate code.
- Tidy up code along the way.
- Ran `deno fmt` to auto-format code (hence the large diff).
master
buttercat1791 7 months ago
parent
commit
d8c64260b3
  1. 40
      README.md
  2. 2617
      deno.lock
  3. 7
      playwright.config.ts
  4. 83
      src/app.css
  5. 4
      src/app.d.ts
  6. 10
      src/app.html
  7. 29
      src/lib/components/CommentViewer.svelte
  8. 33
      src/lib/components/EmbeddedEvent.svelte
  9. 194
      src/lib/components/EventDetails.svelte
  10. 69
      src/lib/components/Notifications.svelte
  11. 1
      src/lib/components/publications/PublicationSection.svelte
  12. 10
      src/lib/components/publications/table_of_contents.svelte.ts
  13. 321
      src/lib/components/util/Notifications.svelte
  14. 7
      src/lib/consts.ts
  15. 85
      src/lib/data_structures/docs/relay_selector_design.md
  16. 310
      src/lib/data_structures/publication_tree.ts
  17. 65
      src/lib/data_structures/websocket_pool.ts
  18. 2
      src/lib/navigator/EventNetwork/types.ts
  19. 2
      src/lib/navigator/EventNetwork/utils/common.ts
  20. 325
      src/lib/navigator/EventNetwork/utils/forceSimulation.ts
  21. 439
      src/lib/navigator/EventNetwork/utils/networkBuilder.ts
  22. 100
      src/lib/navigator/EventNetwork/utils/personNetworkBuilder.ts
  23. 83
      src/lib/navigator/EventNetwork/utils/starForceSimulation.ts
  24. 186
      src/lib/navigator/EventNetwork/utils/starNetworkBuilder.ts
  25. 29
      src/lib/navigator/EventNetwork/utils/tagNetworkBuilder.ts
  26. 298
      src/lib/ndk.ts
  27. 8
      src/lib/parser.ts
  28. 28
      src/lib/services/event_search_service.ts
  29. 57
      src/lib/services/publisher.ts
  30. 20
      src/lib/services/search_state_manager.ts
  31. 2
      src/lib/state.ts
  32. 2
      src/lib/stores/authStore.Svelte.ts
  33. 26
      src/lib/stores/networkStore.ts
  34. 119
      src/lib/stores/userStore.ts
  35. 43
      src/lib/stores/visualizationConfig.ts
  36. 24
      src/lib/utils.ts
  37. 10
      src/lib/utils/ZettelParser.ts
  38. 273
      src/lib/utils/asciidoc_metadata.ts
  39. 4
      src/lib/utils/community_checker.ts
  40. 59
      src/lib/utils/displayLimits.ts
  41. 101
      src/lib/utils/eventColors.ts
  42. 152
      src/lib/utils/eventDeduplication.ts
  43. 72
      src/lib/utils/event_input_utils.ts
  44. 71
      src/lib/utils/event_kind_utils.ts
  45. 78
      src/lib/utils/event_search.ts
  46. 12
      src/lib/utils/image_utils.ts
  47. 76
      src/lib/utils/kind24_utils.ts
  48. 58
      src/lib/utils/markup/MarkupInfo.md
  49. 3
      src/lib/utils/markup/advancedAsciidoctorPostProcessor.ts
  50. 44
      src/lib/utils/markup/advancedMarkupParser.ts
  51. 24
      src/lib/utils/markup/asciidoctorPostProcessor.ts
  52. 51
      src/lib/utils/markup/basicMarkupParser.ts
  53. 56
      src/lib/utils/markup/embeddedMarkupParser.ts
  54. 147
      src/lib/utils/markup/markupServices.ts
  55. 4
      src/lib/utils/markup/tikzRenderer.ts
  56. 2
      src/lib/utils/mime.ts
  57. 106
      src/lib/utils/network_detection.ts
  58. 41
      src/lib/utils/nostrEventService.ts
  59. 124
      src/lib/utils/nostrUtils.ts
  60. 40
      src/lib/utils/nostr_identifiers.ts
  61. 306
      src/lib/utils/notification_utils.ts
  62. 22
      src/lib/utils/npubCache.ts
  63. 69
      src/lib/utils/profileCache.ts
  64. 32
      src/lib/utils/profile_search.ts
  65. 7
      src/lib/utils/relayDiagnostics.ts
  66. 84
      src/lib/utils/relay_info_service.ts
  67. 380
      src/lib/utils/relay_management.ts
  68. 16
      src/lib/utils/search_result_formatter.ts
  69. 14
      src/lib/utils/search_utility.ts
  70. 272
      src/lib/utils/subscription_search.ts
  71. 117
      src/lib/utils/tag_event_fetch.ts
  72. 91
      src/lib/utils/websocket_utils.ts
  73. 141
      src/routes/+layout.ts
  74. 69
      src/routes/events/+page.svelte
  75. 5
      src/routes/proxy+layout.ts
  76. 4
      src/routes/publication/+page.server.ts
  77. 15
      src/routes/publication/[type]/[identifier]/+layout.server.ts
  78. 52
      src/routes/publication/[type]/[identifier]/+page.ts
  79. 10
      src/routes/visualize/+page.ts
  80. 8
      src/styles/notifications.css
  81. 20
      src/styles/publications.css
  82. 6
      src/styles/scrollbar.css
  83. 28
      src/styles/visualize.css
  84. 85
      test_data/LaTeXtestfile.md
  85. 26
      tests/e2e/my_notes_layout.pw.spec.ts
  86. 275
      tests/unit/ZettelEditor.test.ts
  87. 337
      tests/unit/eventInput30040.test.ts
  88. 2
      tests/unit/latexRendering.test.ts
  89. 124
      tests/unit/metadataExtraction.test.ts
  90. 132
      tests/unit/nostr_identifiers.test.ts
  91. 742
      tests/unit/relayDeduplication.test.ts
  92. 353
      tests/unit/tagExpansion.test.ts
  93. 8
      vite.config.ts

40
README.md

@ -3,19 +3,31 @@ @@ -3,19 +3,31 @@
# Alexandria
Alexandria is a reader and writer for curated publications, including e-books.
For a thorough introduction, please refer to our [project documention](https://next-alexandria.gitcitadel.eu/publication?d=gitcitadel-project-documentation-by-stella-v-1), viewable on Alexandria, or to the Alexandria [About page](https://next-alexandria.gitcitadel.eu/about).
For a thorough introduction, please refer to our
[project documention](https://next-alexandria.gitcitadel.eu/publication?d=gitcitadel-project-documentation-by-stella-v-1),
viewable on Alexandria, or to the Alexandria
[About page](https://next-alexandria.gitcitadel.eu/about).
It also contains a [universal event viewer](https://next-alexandria.gitcitadel.eu/events), with which you can search our relays, some aggregator relays, and your own relay list, to find and view event data.
It also contains a
[universal event viewer](https://next-alexandria.gitcitadel.eu/events), with
which you can search our relays, some aggregator relays, and your own relay
list, to find and view event data.
## Issues and Patches
If you would like to suggest a feature or report a bug, please use the [Alexandria Contact page](https://next-alexandria.gitcitadel.eu/contact).
If you would like to suggest a feature or report a bug, please use the
[Alexandria Contact page](https://next-alexandria.gitcitadel.eu/contact).
You can also contact us [on Nostr](https://next-alexandria.gitcitadel.eu/events?id=nprofile1qqsggm4l0xs23qfjwnkfwf6fqcs66s3lz637gaxhl4nwd2vtle8rnfqprfmhxue69uhhg6r9vehhyetnwshxummnw3erztnrdaks5zhueg), directly.
You can also contact us
[on Nostr](https://next-alexandria.gitcitadel.eu/events?id=nprofile1qqsggm4l0xs23qfjwnkfwf6fqcs66s3lz637gaxhl4nwd2vtle8rnfqprfmhxue69uhhg6r9vehhyetnwshxummnw3erztnrdaks5zhueg),
directly.
## Developing
Make sure that you have [Node.js](https://nodejs.org/en/download/package-manager) (v22 or above) or [Deno](https://docs.deno.com/runtime/getting_started/installation/) (v2) installed.
Make sure that you have
[Node.js](https://nodejs.org/en/download/package-manager) (v22 or above) or
[Deno](https://docs.deno.com/runtime/getting_started/installation/) (v2)
installed.
Once you've cloned this repo, install dependencies with NPM:
@ -43,7 +55,8 @@ deno task dev @@ -43,7 +55,8 @@ deno task dev
## Building
Alexandria is configured to run on a Node server. The [Node adapter](https://svelte.dev/docs/kit/adapter-node) works on Deno as well.
Alexandria is configured to run on a Node server. The
[Node adapter](https://svelte.dev/docs/kit/adapter-node) works on Deno as well.
To build a production version of your app with Node, use:
@ -71,7 +84,8 @@ deno task preview @@ -71,7 +84,8 @@ deno task preview
## Docker + Deno
This application is configured to use the Deno runtime. A Docker container is provided to handle builds and deployments.
This application is configured to use the Deno runtime. A Docker container is
provided to handle builds and deployments.
To build the app for local development:
@ -87,9 +101,11 @@ docker run -d -p 3000:3000 local-alexandria @@ -87,9 +101,11 @@ docker run -d -p 3000:3000 local-alexandria
## Testing
_These tests are under development, but will run. They will later be added to the container._
_These tests are under development, but will run. They will later be added to
the container._
To run the Vitest suite we've built, install the program locally and run the tests.
To run the Vitest suite we've built, install the program locally and run the
tests.
```bash
npm run test
@ -103,4 +119,8 @@ npx playwright test @@ -103,4 +119,8 @@ npx playwright test
## Markup Support
Alexandria supports both Markdown and AsciiDoc markup for different content types. For a detailed list of supported tags and features in the basic and advanced markdown parsers, as well as information about AsciiDoc usage for publications and wikis, see [MarkupInfo.md](./src/lib/utils/markup/MarkupInfo.md).
Alexandria supports both Markdown and AsciiDoc markup for different content
types. For a detailed list of supported tags and features in the basic and
advanced markdown parsers, as well as information about AsciiDoc usage for
publications and wikis, see
[MarkupInfo.md](./src/lib/utils/markup/MarkupInfo.md).

2617
deno.lock

File diff suppressed because it is too large Load Diff

7
playwright.config.ts

@ -27,7 +27,7 @@ export default defineConfig({ @@ -27,7 +27,7 @@ export default defineConfig({
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
use: {
/* Base URL to use in actions like `await page.goto('/')`. */
baseURL: 'http://localhost:5173',
baseURL: "http://localhost:5173",
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
trace: "on-first-retry",
@ -49,7 +49,6 @@ export default defineConfig({ @@ -49,7 +49,6 @@ export default defineConfig({
name: "webkit",
use: { ...devices["Desktop Safari"] },
},
/* Test against mobile viewports. */
// {
// name: 'Mobile Chrome',
@ -73,8 +72,8 @@ export default defineConfig({ @@ -73,8 +72,8 @@ export default defineConfig({
/* Run your local dev server before starting the tests */
webServer: {
command: 'npm run dev',
url: 'http://localhost:5173',
command: "npm run dev",
url: "http://localhost:5173",
reuseExistingServer: !process.env.CI,
},

83
src/app.css

@ -28,7 +28,9 @@ @@ -28,7 +28,9 @@
}
div[role="tooltip"] button.btn-leather {
@apply hover:text-primary-600 dark:hover:text-primary-400 hover:border-primary-600 dark:hover:border-primary-400 hover:bg-gray-200 dark:hover:bg-gray-700;
@apply hover:text-primary-600 dark:hover:text-primary-400
hover:border-primary-600 dark:hover:border-primary-400 hover:bg-gray-200
dark:hover:bg-gray-700;
}
.image-border {
@ -36,8 +38,10 @@ @@ -36,8 +38,10 @@
}
div.card-leather {
@apply shadow-none text-primary-1000 border-s-4 bg-highlight border-primary-200 has-[:hover]:border-primary-700;
@apply dark:bg-primary-1000 dark:border-primary-800 dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500;
@apply shadow-none text-primary-1000 border-s-4 bg-highlight
border-primary-200 has-[:hover]:border-primary-700;
@apply dark:bg-primary-1000 dark:border-primary-800
dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500;
}
div.card-leather h1,
@ -46,11 +50,13 @@ @@ -46,11 +50,13 @@
div.card-leather h4,
div.card-leather h5,
div.card-leather h6 {
@apply text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400;
@apply text-gray-900 hover:text-primary-600 dark:text-gray-100
dark:hover:text-primary-400;
}
div.card-leather .font-thin {
@apply text-gray-900 hover:text-primary-700 dark:text-gray-100 dark:hover:text-primary-300;
@apply text-gray-900 hover:text-primary-700 dark:text-gray-100
dark:hover:text-primary-300;
}
main {
@ -74,7 +80,8 @@ @@ -74,7 +80,8 @@
div.note-leather,
p.note-leather,
section.note-leather {
@apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 p-2 rounded;
@apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100
p-2 rounded;
}
.edit div.note-leather:hover:not(:has(.note-leather:hover)),
@ -117,7 +124,8 @@ @@ -117,7 +124,8 @@
}
div.modal-leather > div {
@apply bg-primary-0 dark:bg-primary-950 border-b-[1px] border-primary-100 dark:border-primary-600;
@apply bg-primary-0 dark:bg-primary-950 border-b-[1px] border-primary-100
dark:border-primary-600;
}
div.modal-leather > div > h1,
@ -126,11 +134,14 @@ @@ -126,11 +134,14 @@
div.modal-leather > div > h4,
div.modal-leather > div > h5,
div.modal-leather > div > h6 {
@apply text-gray-900 hover:text-gray-900 dark:text-gray-100 dark:hover:text-gray-100;
@apply text-gray-900 hover:text-gray-900 dark:text-gray-100
dark:hover:text-gray-100;
}
div.modal-leather button {
@apply bg-primary-0 hover:bg-primary-0 dark:bg-primary-950 dark:hover:bg-primary-950 text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400;
@apply bg-primary-0 hover:bg-primary-0 dark:bg-primary-950
dark:hover:bg-primary-950 text-gray-900 hover:text-primary-600
dark:text-gray-100 dark:hover:text-primary-400;
}
/* Navbar */
@ -143,7 +154,8 @@ @@ -143,7 +154,8 @@
}
nav.navbar-leather svg {
@apply fill-gray-900 hover:fill-primary-600 dark:fill-gray-100 dark:hover:fill-primary-400;
@apply fill-gray-900 hover:fill-primary-600 dark:fill-gray-100
dark:hover:fill-primary-400;
}
nav.navbar-leather h1,
@ -152,7 +164,8 @@ @@ -152,7 +164,8 @@
nav.navbar-leather h4,
nav.navbar-leather h5,
nav.navbar-leather h6 {
@apply text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400;
@apply text-gray-900 hover:text-primary-600 dark:text-gray-100
dark:hover:text-primary-400;
}
div.skeleton-leather div {
@ -201,16 +214,16 @@ @@ -201,16 +214,16 @@
.network-node-content {
@apply fill-primary-100;
}
/* Person link colors */
.person-link-signed {
@apply stroke-green-500;
}
.person-link-referenced {
@apply stroke-blue-400;
}
/* Person anchor node */
.person-anchor-node {
@apply fill-green-400 stroke-green-600;
@ -272,11 +285,13 @@ @@ -272,11 +285,13 @@
/* Lists */
.ol-leather li a,
.ul-leather li a {
@apply text-gray-900 hover:text-primary-600 dark:text-gray-100 dark:hover:text-primary-400;
@apply text-gray-900 hover:text-primary-600 dark:text-gray-100
dark:hover:text-primary-400;
}
.link {
@apply underline cursor-pointer hover:text-primary-600 dark:hover:text-primary-400;
@apply underline cursor-pointer hover:text-primary-600
dark:hover:text-primary-400;
}
/* Card with transition */
@ -290,11 +305,14 @@ @@ -290,11 +305,14 @@
}
.tags span {
@apply bg-primary-50 text-primary-800 text-sm font-medium me-2 px-2.5 py-0.5 rounded-sm dark:bg-primary-900 dark:text-primary-200;
@apply bg-primary-50 text-primary-800 text-sm font-medium me-2 px-2.5 py-0.5
rounded-sm dark:bg-primary-900 dark:text-primary-200;
}
.npub-badge {
@apply inline-flex space-x-1 items-center text-primary-600 dark:text-primary-500 hover:underline me-2 px-2 py-0.5 rounded-sm border border-primary-600 dark:border-primary-500;
@apply inline-flex space-x-1 items-center text-primary-600
dark:text-primary-500 hover:underline me-2 px-2 py-0.5 rounded-sm border
border-primary-600 dark:border-primary-500;
svg {
@apply fill-primary-600 dark:fill-primary-500;
@ -305,14 +323,19 @@ @@ -305,14 +323,19 @@
@layer components {
/* Legend */
.leather-legend {
@apply relative m-4 sm:m-0 sm:absolute sm:top-1 sm:left-1 flex-shrink-0 p-2 rounded;
@apply shadow-none text-primary-1000 border border-s-4 bg-highlight border-primary-200 has-[:hover]:border-primary-700;
@apply dark:bg-primary-1000 dark:border-primary-800 dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500;
@apply relative m-4 sm:m-0 sm:absolute sm:top-1 sm:left-1 flex-shrink-0 p-2
rounded;
@apply shadow-none text-primary-1000 border border-s-4 bg-highlight
border-primary-200 has-[:hover]:border-primary-700;
@apply dark:bg-primary-1000 dark:border-primary-800
dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500;
}
/* Tooltip */
.tooltip-leather {
@apply fixed p-4 rounded shadow-lg bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 border border-gray-200 dark:border-gray-700 transition-colors duration-200;
@apply fixed p-4 rounded shadow-lg bg-primary-0 dark:bg-primary-1000
text-gray-900 dark:text-gray-100 border border-gray-200
dark:border-gray-700 transition-colors duration-200;
max-width: 400px;
z-index: 1000;
}
@ -536,13 +559,15 @@ @@ -536,13 +559,15 @@
input[type="tel"],
input[type="url"],
textarea {
@apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100 border-s-4 border-primary-200 rounded shadow-none px-4 py-2;
@apply bg-primary-0 dark:bg-primary-1000 text-gray-900 dark:text-gray-100
border-s-4 border-primary-200 rounded shadow-none px-4 py-2;
@apply focus:border-primary-600 dark:focus:border-primary-400;
}
/* Table of Contents highlighting */
.toc-highlight {
@apply bg-primary-200 dark:bg-primary-700 border-l-4 border-primary-600 dark:border-primary-400 font-medium;
@apply bg-primary-200 dark:bg-primary-700 border-l-4 border-primary-600
dark:border-primary-400 font-medium;
transition: all 0.2s ease-in-out;
}
@ -551,14 +576,8 @@ @@ -551,14 +576,8 @@
}
/* Override prose first-line bold styling */
.prose p:first-line {
font-weight: normal !important;
}
.prose-sm p:first-line {
font-weight: normal !important;
}
.prose p:first-line,
.prose-sm p:first-line,
.prose-invert p:first-line {
font-weight: normal !important;
}

4
src/app.d.ts vendored

@ -23,7 +23,9 @@ declare global { @@ -23,7 +23,9 @@ declare global {
var MathJax: any;
var nostr: NDKNip07Signer & {
getRelays: () => Promise<Record<string, Record<string, boolean | undefined>>>;
getRelays: () => Promise<
Record<string, Record<string, boolean | undefined>>
>;
// deno-lint-ignore no-explicit-any
signEvent: (event: any) => Promise<any>;
};

10
src/app.html

@ -1,4 +1,4 @@ @@ -1,4 +1,4 @@
<!doctype html>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
@ -26,14 +26,18 @@ @@ -26,14 +26,18 @@
},
};
</script>
<script src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script>
<script
src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"
></script>
<!-- highlight.js for code highlighting -->
<link
rel="stylesheet"
href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/github-dark.min.css"
/>
<script src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"></script>
<script
src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js"
></script>
%sveltekit.head%
</head>

29
src/lib/components/CommentViewer.svelte

@ -6,9 +6,7 @@ @@ -6,9 +6,7 @@
import { goto } from "$app/navigation";
import { onMount } from "svelte";
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import { userBadge } from "$lib/snippets/UserSnippets.svelte";
import { parseBasicmarkup } from "$lib/utils/markup/basicMarkupParser";
import { parseRepostContent, parseContent as parseNotificationContent } from "$lib/utils/notification_utils";
import EmbeddedEvent from "./EmbeddedEvent.svelte";
const { event } = $props<{ event: NDKEvent }>();
@ -654,19 +652,6 @@ @@ -654,19 +652,6 @@
return `${actualLevel * 16}px`;
}
async function parseContent(content: string, eventKind?: number): Promise<string> {
if (!content) return "";
// Use parseRepostContent for kind 6 and 16 events (reposts)
if (eventKind === 6 || eventKind === 16) {
return await parseRepostContent(content);
} else {
return await parseNotificationContent(content);
}
}
// AI-NOTE: 2025-01-24 - Get highlight source information
function getHighlightSource(highlightEvent: NDKEvent): { type: string; value: string; url?: string } | null {
// Check for e-tags (nostr events)
@ -785,11 +770,7 @@ @@ -785,11 +770,7 @@
<div class="text-sm text-gray-600 dark:text-gray-400 mb-2">
<span class="font-medium">Comment:</span>
</div>
{#await parseContent(node.event.getMatchingTags("comment")[0]?.[1] || "") then parsedContent}
{@html parsedContent}
{:catch}
{@html node.event.getMatchingTags("comment")[0]?.[1] || ""}
{/await}
<EmbeddedEvent nostrIdentifier={node.event.getMatchingTags("comment")[0]?.[1]} nestingLevel={0} />
</div>
{:else}
<!-- Simple highlight -->
@ -829,11 +810,7 @@ @@ -829,11 +810,7 @@
</div>
{:else}
<!-- Regular comment content -->
{#await parseContent(node.event.content || "", node.event.kind) then parsedContent}
{@html parsedContent}
{:catch}
{@html node.event.content || ""}
{/await}
<EmbeddedEvent nostrIdentifier={node.event.id} nestingLevel={0} />
{/if}
</div>
</div>

33
src/lib/components/EmbeddedEvent.svelte

@ -4,16 +4,14 @@ @@ -4,16 +4,14 @@
import { fetchEventWithFallback } from "$lib/utils/nostrUtils";
import { getUserMetadata, toNpub } from "$lib/utils/nostrUtils";
import { userBadge } from "$lib/snippets/UserSnippets.svelte";
import { parseBasicmarkup } from "$lib/utils/markup/basicMarkupParser";
import { parseEmbeddedMarkup } from "$lib/utils/markup/embeddedMarkupParser";
import { parseRepostContent } from "$lib/utils/notification_utils";
import EmbeddedEventRenderer from "./EmbeddedEventRenderer.svelte";
import { neventEncode, naddrEncode } from "$lib/utils";
import { parsedContent } from "$lib/components/util/Notifications.svelte";
import { naddrEncode } from "$lib/utils";
import { activeInboxRelays, ndkInstance } from "$lib/ndk";
import { goto } from "$app/navigation";
import { getEventType } from "$lib/utils/mime";
import { nip19 } from "nostr-tools";
import { get } from "svelte/store";
import { repostKinds } from "$lib/consts";
const {
nostrIdentifier,
@ -36,7 +34,6 @@ @@ -36,7 +34,6 @@
} | null>(null);
let loading = $state(true);
let error = $state<string | null>(null);
let parsedContent = $state("");
let authorDisplayName = $state<string | undefined>(undefined);
// Maximum nesting level allowed
@ -120,16 +117,6 @@ @@ -120,16 +117,6 @@
}
}
// Parse content if available
if (event?.content) {
if (event.kind === 6 || event.kind === 16) {
parsedContent = await parseRepostContent(event.content);
} else {
// Use embedded markup parser for nested events
parsedContent = await parseEmbeddedMarkup(event.content, nestingLevel + 1);
}
}
// Parse profile if it's a profile event
if (event?.kind === 0) {
try {
@ -196,10 +183,6 @@ @@ -196,10 +183,6 @@
}
}
function getNeventUrl(event: NDKEvent): string {
return neventEncode(event, $activeInboxRelays);
}
function getNaddrUrl(event: NDKEvent): string {
return naddrEncode(event, $activeInboxRelays);
}
@ -303,17 +286,15 @@ @@ -303,17 +286,15 @@
{/if}
<!-- Content for text events -->
{#if event.kind === 1 && parsedContent}
{#if event.kind === 1 || repostKinds.includes(event.kind)}
<div class="prose prose-sm dark:prose-invert max-w-none text-gray-900 dark:text-gray-100 min-w-0 overflow-hidden">
<EmbeddedEventRenderer content={parsedContent.slice(0, 300)} nestingLevel={nestingLevel + 1} />
{#if parsedContent.length > 300}
{@render parsedContent(event.content.slice(0, 300))}
{#if event.content.length > 300}
<span class="text-gray-500 dark:text-gray-400">...</span>
{/if}
</div>
{/if}
<!-- Profile content -->
{#if event.kind === 0 && profile}
{:else if event.kind === 0 && profile}
<div class="space-y-2 min-w-0 overflow-hidden">
{#if profile.picture}
<img

194
src/lib/components/EventDetails.svelte

@ -1,13 +1,9 @@ @@ -1,13 +1,9 @@
<script lang="ts">
import { parseBasicmarkup } from "$lib/utils/markup/basicMarkupParser";
import { parseEmbeddedMarkup } from "$lib/utils/markup/embeddedMarkupParser";
import EmbeddedEventRenderer from "./EmbeddedEventRenderer.svelte";
import { getMimeTags } from "$lib/utils/mime";
import { userBadge } from "$lib/snippets/UserSnippets.svelte";
import { toNpub } from "$lib/utils/nostrUtils";
import { neventEncode, naddrEncode, nprofileEncode } from "$lib/utils";
import { activeInboxRelays, activeOutboxRelays } from "$lib/ndk";
import { searchRelays } from "$lib/consts";
import { activeInboxRelays } from "$lib/ndk";
import type { NDKEvent } from "$lib/utils/nostrUtils";
import { getMatchingTags } from "$lib/utils/nostrUtils";
import ProfileHeader from "$components/cards/ProfileHeader.svelte";
@ -18,13 +14,11 @@ @@ -18,13 +14,11 @@
import { navigateToEvent } from "$lib/utils/nostrEventService";
import ContainingIndexes from "$lib/components/util/ContainingIndexes.svelte";
import Notifications from "$lib/components/Notifications.svelte";
import { parseRepostContent } from "$lib/utils/notification_utils";
import RelayActions from "$lib/components/RelayActions.svelte";
import EmbeddedEvent from "./EmbeddedEvent.svelte";
const {
event,
profile = null,
searchValue = null,
} = $props<{
event: NDKEvent;
profile?: {
@ -37,20 +31,11 @@ @@ -37,20 +31,11 @@
lud16?: string;
nip05?: string;
} | null;
searchValue?: string | null;
}>();
let showFullContent = $state(false);
let parsedContent = $state("");
let contentProcessing = $state(false);
let authorDisplayName = $state<string | undefined>(undefined);
// Determine if content should be truncated
let shouldTruncate = $state(false);
$effect(() => {
shouldTruncate = event.content.length > 250 && !showFullContent;
});
let showFullContent = $state(false);
let shouldTruncate = $derived(event.content.length > 250 && !showFullContent);
function getEventTitle(event: NDKEvent): string {
// First try to get title from title tag
@ -92,109 +77,11 @@ @@ -92,109 +77,11 @@
return getMatchingTags(event, "summary")[0]?.[1] || "";
}
function getEventHashtags(event: NDKEvent): string[] {
return getMatchingTags(event, "t").map((tag: string[]) => tag[1]);
}
function getEventTypeDisplay(event: NDKEvent): string {
const [mTag, MTag] = getMimeTags(event.kind || 0);
return MTag[1].split("/")[1] || `Event Kind ${event.kind}`;
}
function renderTag(tag: string[]): string {
if (tag[0] === "a" && tag.length > 1) {
const parts = tag[1].split(":");
if (parts.length >= 3) {
const [kind, pubkey, d] = parts;
// Validate that pubkey is a valid hex string
if (pubkey && /^[0-9a-fA-F]{64}$/.test(pubkey)) {
try {
const mockEvent = {
kind: +kind,
pubkey,
tags: [["d", d]],
content: "",
id: "",
sig: "",
} as any;
const naddr = naddrEncode(mockEvent, $activeInboxRelays);
return `<a href='/events?id=${naddr}' class='underline text-primary-700'>a:${tag[1]}</a>`;
} catch (error) {
console.warn(
"Failed to encode naddr for a tag in renderTag:",
tag[1],
error,
);
return `<span class='bg-primary-50 text-primary-800 px-2 py-1 rounded text-xs font-mono'>a:${tag[1]}</span>`;
}
} else {
console.warn("Invalid pubkey in a tag in renderTag:", pubkey);
return `<span class='bg-primary-50 text-primary-800 px-2 py-1 rounded text-xs font-mono'>a:${tag[1]}</span>`;
}
} else {
console.warn("Invalid a tag format in renderTag:", tag[1]);
return `<span class='bg-primary-50 text-primary-800 px-2 py-1 rounded text-xs font-mono'>a:${tag[1]}</span>`;
}
} else if (tag[0] === "e" && tag.length > 1) {
// Validate that event ID is a valid hex string
if (/^[0-9a-fA-F]{64}$/.test(tag[1])) {
try {
const mockEvent = {
id: tag[1],
kind: 1,
content: "",
tags: [],
pubkey: "",
sig: "",
} as any;
const nevent = neventEncode(mockEvent, $activeInboxRelays);
return `<a href='/events?id=${nevent}' class='underline text-primary-700'>e:${tag[1]}</a>`;
} catch (error) {
console.warn(
"Failed to encode nevent for e tag in renderTag:",
tag[1],
error,
);
return `<span class='bg-primary-50 text-primary-800 px-2 py-1 rounded text-xs font-mono'>e:${tag[1]}</span>`;
}
} else {
console.warn("Invalid event ID in e tag in renderTag:", tag[1]);
return `<span class='bg-primary-50 text-primary-800 px-2 py-1 rounded text-xs font-mono'>e:${tag[1]}</span>`;
}
} else if (tag[0] === "note" && tag.length > 1) {
// 'note' tags are the same as 'e' tags but with different prefix
if (/^[0-9a-fA-F]{64}$/.test(tag[1])) {
try {
const mockEvent = {
id: tag[1],
kind: 1,
content: "",
tags: [],
pubkey: "",
sig: "",
} as any;
const nevent = neventEncode(mockEvent, $activeInboxRelays);
return `<a href='/events?id=${nevent}' class='underline text-primary-700'>note:${tag[1]}</a>`;
} catch (error) {
console.warn(
"Failed to encode nevent for note tag in renderTag:",
tag[1],
error,
);
return `<span class='bg-primary-50 text-primary-800 px-2 py-1 rounded text-xs font-mono'>note:${tag[1]}</span>`;
}
} else {
console.warn("Invalid event ID in note tag in renderTag:", tag[1]);
return `<span class='bg-primary-50 text-primary-800 px-2 py-1 rounded text-xs font-mono'>note:${tag[1]}</span>`;
}
} else if (tag[0] === "d" && tag.length > 1) {
// 'd' tags are used for identifiers in addressable events
return `<a href='/events?d=${encodeURIComponent(tag[1])}' class='underline text-primary-700'>d:${tag[1]}</a>`;
} else {
return `<span class='bg-primary-50 text-primary-800 px-2 py-1 rounded text-xs font-mono'>${tag[0]}:${tag[1]}</span>`;
}
}
function getTagButtonInfo(tag: string[]): {
text: string;
gotoValue?: string;
@ -303,52 +190,12 @@ @@ -303,52 +190,12 @@
return { text: `${tag[0]}:${tag[1]}` };
}
function getNeventUrl(event: NDKEvent): string {
return neventEncode(event, $activeInboxRelays);
}
function getNaddrUrl(event: NDKEvent): string {
return naddrEncode(event, $activeInboxRelays);
}
function getNprofileUrl(pubkey: string): string {
return nprofileEncode(pubkey, $activeInboxRelays);
}
$effect(() => {
if (event && event.kind !== 0 && event.content) {
contentProcessing = true;
// Use parseRepostContent for kind 6 and 16 events (reposts)
if (event.kind === 6 || event.kind === 16) {
parseRepostContent(event.content).then((html) => {
parsedContent = html;
contentProcessing = false;
}).catch((error) => {
console.error('Error parsing repost content:', error);
contentProcessing = false;
});
} else {
// Use embedded markup parser for better Nostr event support
parseEmbeddedMarkup(event.content, 0).then((html) => {
parsedContent = html;
contentProcessing = false;
}).catch((error) => {
console.error('Error parsing embedded markup:', error);
contentProcessing = false;
});
}
} else {
contentProcessing = false;
parsedContent = "";
}
});
$effect(() => {
if (!event?.pubkey) {
authorDisplayName = undefined;
return;
}
getUserMetadata(toNpub(event.pubkey) as string).then((profile) => {
authorDisplayName =
profile.displayName ||
@ -403,13 +250,6 @@ @@ -403,13 +250,6 @@
return ids;
}
function isCurrentSearch(value: string): boolean {
if (!searchValue) return false;
// Compare ignoring case and possible nostr: prefix
const norm = (s: string) => s.replace(/^nostr:/, "").toLowerCase();
return norm(value) === norm(searchValue);
}
onMount(() => {
function handleInternalLinkClick(event: MouseEvent) {
const target = event.target as HTMLElement;
@ -468,8 +308,6 @@ @@ -468,8 +308,6 @@
</div>
{/if}
<!-- Containing Publications -->
<ContainingIndexes {event} />
@ -479,19 +317,15 @@ @@ -479,19 +317,15 @@
<div class="flex flex-col space-y-1 min-w-0">
<span class="text-gray-700 dark:text-gray-300 font-semibold">Content:</span>
<div class="prose dark:prose-invert max-w-none text-gray-900 dark:text-gray-100 break-words overflow-wrap-anywhere min-w-0">
{#if contentProcessing}
<div class="text-gray-500 dark:text-gray-400 italic">Processing content...</div>
{:else}
<div class={shouldTruncate ? 'max-h-32 overflow-hidden' : ''}>
<EmbeddedEventRenderer content={parsedContent} nestingLevel={0} />
</div>
{#if shouldTruncate}
<button
class="mt-2 text-primary-700 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-200"
onclick={() => (showFullContent = true)}>Show more</button
>
{/if}
{/if}
<div class={shouldTruncate ? 'max-h-32 overflow-hidden' : ''}>
<EmbeddedEvent nostrIdentifier={event.id} nestingLevel={0} />
</div>
{#if shouldTruncate}
<button
class="mt-2 text-primary-700 hover:text-primary-900 dark:text-primary-400 dark:hover:text-primary-200"
onclick={() => (showFullContent = true)}>Show more</button
>
{/if}
</div>
</div>
</div>

69
src/lib/components/Notifications.svelte

@ -1,37 +1,28 @@ @@ -1,37 +1,28 @@
<script lang="ts">
import "../../styles/notifications.css";
import { onMount } from "svelte";
import { Heading, P } from "flowbite-svelte";
import type { NDKEvent } from "$lib/utils/nostrUtils";
import { userStore } from "$lib/stores/userStore";
import { userPubkey, isLoggedIn } from "$lib/stores/authStore.Svelte";
import { ndkInstance, activeInboxRelays } from "$lib/ndk";
import { ndkInstance } from "$lib/ndk";
import { goto } from "$app/navigation";
import { get } from "svelte/store";
import { nip19 } from "nostr-tools";
import { communityRelays, localRelays, anonymousRelays, searchRelays } from "$lib/consts";
import { createKind24Reply, getKind24RelaySet } from "$lib/utils/kind24_utils";
import { anonymousRelays } from "$lib/consts";
import { getKind24RelaySet } from "$lib/utils/kind24_utils";
import { createSignedEvent } from "$lib/utils/nostrEventService";
import RelayDisplay from "$lib/components/RelayDisplay.svelte";
import RelayInfoList from "$lib/components/RelayInfoList.svelte";
import { Modal, Button } from "flowbite-svelte";
import { searchProfiles } from "$lib/utils/search_utility";
import type { NostrProfile } from "$lib/utils/search_types";
import { PlusOutline, ReplyOutline, UserOutline } from "flowbite-svelte-icons";
import {
truncateContent,
truncateRenderedContent,
parseContent,
parseRepostContent,
renderQuotedContent,
getNotificationType,
fetchAuthorProfiles
} from "$lib/utils/notification_utils";
fetchAuthorProfiles,
quotedContent,
} from "$lib/components/util/Notifications.svelte";
import { buildCompleteRelaySet } from "$lib/utils/relay_management";
import { formatDate, neventEncode } from "$lib/utils";
import { toNpub, getUserMetadata, NDKRelaySetFromNDK } from "$lib/utils/nostrUtils";
import { userBadge } from "$lib/snippets/UserSnippets.svelte";
import EmbeddedEventRenderer from "./EmbeddedEventRenderer.svelte";
import { NDKRelaySetFromNDK } from "$lib/utils/nostrUtils";
import EmbeddedEvent from "./EmbeddedEvent.svelte";
const { event } = $props<{ event: NDKEvent }>();
@ -60,7 +51,6 @@ @@ -60,7 +51,6 @@
let notificationMode = $state<"to-me" | "from-me" | "public-messages">("to-me");
let authorProfiles = $state<Map<string, { name?: string; displayName?: string; picture?: string }>>(new Map());
let filteredByUser = $state<string | null>(null);
// New Message Modal state
let showNewMessageModal = $state(false);
@ -69,7 +59,6 @@ @@ -69,7 +59,6 @@
let newMessageRelays = $state<string[]>([]);
let isComposingMessage = $state(false);
let replyToMessage = $state<NDKEvent | null>(null);
let quotedContent = $state<string>("");
// Recipient Selection Modal state
let showRecipientModal = $state(false);
@ -166,8 +155,6 @@ @@ -166,8 +155,6 @@
filteredByUser = null;
}
// AI-NOTE: New Message Modal Functions
function openNewMessageModal(messageToReplyTo?: NDKEvent) {
showNewMessageModal = true;
@ -178,12 +165,7 @@ @@ -178,12 +165,7 @@
replyToMessage = messageToReplyTo || null;
// If replying, set up the quote and pre-select all original recipients plus sender
if (messageToReplyTo) {
// Store clean content for UI display (no markdown formatting)
quotedContent = messageToReplyTo.content.length > 200
? messageToReplyTo.content.slice(0, 200) + "..."
: messageToReplyTo.content;
if (messageToReplyTo) {
// Collect all recipients: original sender + all p-tag recipients
const recipientPubkeys = new Set<string>();
@ -218,8 +200,6 @@ @@ -218,8 +200,6 @@
}).filter(recipient => recipient.pubkey); // Ensure we have valid pubkeys
console.log(`Pre-loaded ${selectedRecipients.length} recipients for reply:`, selectedRecipients.map(r => r.displayName || r.name || r.pubkey?.slice(0, 8)));
} else {
quotedContent = "";
}
}
@ -230,7 +210,6 @@ @@ -230,7 +210,6 @@
newMessageRelays = [];
isComposingMessage = false;
replyToMessage = null;
quotedContent = "";
}
// AI-NOTE: Recipient Selection Modal Functions
@ -580,8 +559,6 @@ @@ -580,8 +559,6 @@
}
}
// Check if user is viewing their own profile
$effect(() => {
if ($userStore.signedIn && $userStore.pubkey && event.pubkey) {
@ -607,8 +584,6 @@ @@ -607,8 +584,6 @@
}
});
// AI-NOTE: Refactored to avoid blocking $effect with async operations
// Calculate relay set when recipients change - non-blocking approach
$effect(() => {
@ -838,21 +813,13 @@ @@ -838,21 +813,13 @@
{#if message.getMatchingTags("q").length > 0}
<div class="text-sm text-gray-800 dark:text-gray-200 mb-2 leading-relaxed">
{#await renderQuotedContent(message, publicMessages) then quotedHtml}
{@html quotedHtml}
{:catch}
<!-- Fallback if quoted content fails to render -->
{/await}
{@render quotedContent(message, publicMessages)}
</div>
{/if}
{#if message.content}
<div class="text-sm text-gray-800 dark:text-gray-200 mb-2 leading-relaxed">
<div class="px-2">
{#await ((message.kind === 6 || message.kind === 16) ? parseRepostContent(message.content) : parseContent(message.content)) then parsedContent}
<EmbeddedEventRenderer content={parsedContent} nestingLevel={0} />
{:catch}
{@html message.content}
{/await}
<EmbeddedEvent nostrIdentifier={message.id} nestingLevel={0} />
</div>
</div>
{/if}
@ -929,11 +896,7 @@ @@ -929,11 +896,7 @@
{#if notification.content}
<div class="text-sm text-gray-800 dark:text-gray-200 mb-2 leading-relaxed">
<div class="px-2">
{#await ((notification.kind === 6 || notification.kind === 16) ? parseRepostContent(notification.content) : parseContent(notification.content)) then parsedContent}
<EmbeddedEventRenderer content={parsedContent} nestingLevel={0} />
{:catch}
{@html truncateContent(notification.content)}
{/await}
<EmbeddedEvent nostrIdentifier={notification.id} nestingLevel={0} />
</div>
</div>
{/if}
@ -964,15 +927,11 @@ @@ -964,15 +927,11 @@
</div>
<!-- Quoted Content Display -->
{#if quotedContent}
{#if replyToMessage}
<div class="quoted-content mb-4 p-3 rounded-r-lg">
<div class="text-sm text-gray-600 dark:text-gray-400 mb-1">Replying to:</div>
<div class="text-sm text-gray-800 dark:text-gray-200">
{#await parseContent(quotedContent) then parsedContent}
<EmbeddedEventRenderer content={parsedContent} nestingLevel={0} />
{:catch}
{@html quotedContent}
{/await}
<EmbeddedEvent nostrIdentifier={replyToMessage.id} nestingLevel={0} />
</div>
</div>
{/if}

1
src/lib/components/publications/PublicationSection.svelte

@ -1,5 +1,4 @@ @@ -1,5 +1,4 @@
<script lang="ts">
import type { PublicationTree } from "$lib/data_structures/publication_tree";
import {
contentParagraph,
sectionHeading,

10
src/lib/components/publications/table_of_contents.svelte.ts

@ -159,7 +159,7 @@ export class TableOfContents { @@ -159,7 +159,7 @@ export class TableOfContents {
// Handle any other nodes that have already been resolved in parallel.
await Promise.all(
Array.from(this.#publicationTree.resolvedAddresses).map((address) =>
this.#buildTocEntryFromResolvedNode(address),
this.#buildTocEntryFromResolvedNode(address)
),
);
@ -274,10 +274,10 @@ export class TableOfContents { @@ -274,10 +274,10 @@ export class TableOfContents {
});
entry.children.sort((a, b) => {
const aOrdinal =
addressToOrdinal.get(a.address) ?? Number.MAX_SAFE_INTEGER;
const bOrdinal =
addressToOrdinal.get(b.address) ?? Number.MAX_SAFE_INTEGER;
const aOrdinal = addressToOrdinal.get(a.address) ??
Number.MAX_SAFE_INTEGER;
const bOrdinal = addressToOrdinal.get(b.address) ??
Number.MAX_SAFE_INTEGER;
return aOrdinal - bOrdinal;
});
}

321
src/lib/components/util/Notifications.svelte

@ -0,0 +1,321 @@ @@ -0,0 +1,321 @@
<script module lang="ts">
import type { NDKEvent } from "$lib/utils/nostrUtils";
import { NDKRelaySetFromNDK, toNpub, getUserMetadata } from "$lib/utils/nostrUtils";
import { get } from "svelte/store";
import { ndkInstance } from "$lib/ndk";
import { searchRelays } from "$lib/consts";
import { userStore, type UserState } from "$lib/stores/userStore";
import { buildCompleteRelaySet } from "$lib/utils/relay_management";
import { nip19 } from "nostr-tools";
import type NDK from "@nostr-dev-kit/ndk";
import { parseEmbeddedMarkup } from "$lib/utils/markup/embeddedMarkupParser";
export {
parsedContent,
repostContent,
quotedContent,
truncateContent,
truncateRenderedContent,
getNotificationType,
fetchAuthorProfiles
};
/**
 * Shortens plain text to at most `maxLength` characters, appending an
 * ellipsis ("...") whenever anything was cut off.
 *
 * @param content - The raw text to shorten.
 * @param maxLength - Maximum number of characters to keep (default 300).
 * @returns The original string when it already fits, otherwise the
 *   truncated prefix followed by "...".
 */
function truncateContent(content: string, maxLength: number = 300): string {
  // Only cut when the text actually exceeds the limit.
  if (content.length > maxLength) {
    return `${content.slice(0, maxLength)}...`;
  }
  return content;
}
/**
 * Truncates already-rendered HTML to roughly `maxLength` characters while
 * keeping any clickable "jump-to-message" quote boxes intact.
 *
 * Quote boxes are temporarily swapped for placeholders so only the
 * surrounding text is measured and cut; for plain HTML the cut point is
 * adjusted so a tag is never split in half.
 *
 * @param renderedHtml - HTML string produced by the markup renderer.
 * @param maxLength - Approximate maximum length to keep (default 300).
 * @returns The truncated HTML, or the input unchanged when it fits.
 */
function truncateRenderedContent(renderedHtml: string, maxLength: number = 300): string {
  if (renderedHtml.length <= maxLength) return renderedHtml;

  // Quote boxes are recognized by their jump-to-message click handler.
  if (renderedHtml.includes('jump-to-message')) {
    const quoteBoxPattern = /<div class="block w-fit my-2 px-3 py-2 bg-gray-200[^>]*onclick="window\.dispatchEvent\(new CustomEvent\('jump-to-message'[^>]*>[^<]*<\/div>/g;
    const quoteBoxes = renderedHtml.match(quoteBoxPattern) ?? [];

    // Replace each quote box with a placeholder so only plain text length
    // counts toward the truncation budget.
    let textOnly = renderedHtml.replace(quoteBoxPattern, '|||QUOTEBOX|||');
    if (textOnly.length > maxLength) {
      // The text budget is whatever remains after the boxes' own length,
      // with a floor of 50 characters so some text is always shown.
      const availableLength = maxLength - quoteBoxes.join('').length;
      const cutAt = availableLength > 50 ? availableLength : 50;
      textOnly = textOnly.slice(0, cutAt) + "...";
    }

    // Restore the boxes one by one; placeholders lost to the cut are
    // simply never matched, so their boxes drop out.
    let result = textOnly;
    for (const box of quoteBoxes) {
      result = result.replace('|||QUOTEBOX|||', box);
    }
    return result;
  }

  // No markup at all: a plain character cut is safe.
  if (!renderedHtml.includes('<')) {
    return renderedHtml.slice(0, maxLength) + "...";
  }

  // Avoid cutting inside an HTML tag: when the last '<' in the truncated
  // prefix has no matching '>', back up to just before that '<'.
  const truncated = renderedHtml.slice(0, maxLength);
  const lastTagStart = truncated.lastIndexOf('<');
  const lastTagEnd = truncated.lastIndexOf('>');
  if (lastTagStart > lastTagEnd) {
    return renderedHtml.slice(0, lastTagStart) + "...";
  }
  return truncated + "...";
}
/**
 * Maps a Nostr event kind to the human-readable notification label shown
 * in the notifications list.
 *
 * @param event - The event whose `kind` determines the label.
 * @returns A known label for recognized kinds, otherwise the generic
 *   "Kind N" fallback.
 */
function getNotificationType(event: NDKEvent): string {
  const labels: Record<number, string> = {
    1: "Reply",
    1111: "Custom Reply",
    9802: "Highlight",
    6: "Repost",
    16: "Generic Repost",
    24: "Public Message",
  };
  // Unrecognized kinds fall through to the generic label.
  return labels[event.kind] ?? `Kind ${event.kind}`;
}
/**
 * Fetches author profiles for a list of events.
 *
 * Collects the unique pubkeys across `events` and resolves each to a
 * minimal profile (name / displayName / picture), trying three sources in
 * order: the local metadata cache (`getUserMetadata`), each configured
 * search relay, and finally the user's complete inbox/outbox relay set.
 * All pubkeys are resolved in parallel; any failure is logged and the
 * pubkey is simply left out of the result.
 *
 * @param events - Events whose authors should be resolved.
 * @returns Map from author pubkey (hex) to the profile fields found.
 */
async function fetchAuthorProfiles(events: NDKEvent[]): Promise<Map<string, { name?: string; displayName?: string; picture?: string }>> {
  const authorProfiles = new Map<string, { name?: string; displayName?: string; picture?: string }>();

  // Deduplicate authors so each pubkey is looked up at most once.
  const uniquePubkeys = new Set<string>();
  events.forEach(event => {
    if (event.pubkey) uniquePubkeys.add(event.pubkey);
  });

  const profilePromises = Array.from(uniquePubkeys).map(async (pubkey) => {
    try {
      const npub = toNpub(pubkey);
      if (!npub) return;
      // Try cache first
      let profile = await getUserMetadata(npub, false);
      if (profile && (profile.name || profile.displayName || profile.picture)) {
        authorProfiles.set(pubkey, profile);
        return;
      }
      // Try search relays, one at a time, stopping at the first hit.
      for (const relay of searchRelays) {
        try {
          const ndk: NDK | undefined = get(ndkInstance);
          if (!ndk) break;
          const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], ndk);
          // Kind 0 is the Nostr profile-metadata event.
          const profileEvent = await ndk.fetchEvent(
            { kinds: [0], authors: [pubkey] },
            undefined,
            relaySet
          );
          if (profileEvent) {
            const profileData = JSON.parse(profileEvent.content);
            authorProfiles.set(pubkey, {
              name: profileData.name,
              // Profiles in the wild use both snake_case and camelCase keys.
              displayName: profileData.display_name || profileData.displayName,
              picture: profileData.picture || profileData.image
            });
            return;
          }
        } catch (error) {
          console.warn(`[fetchAuthorProfiles] Failed to fetch profile from ${relay}:`, error);
        }
      }
      // Try all available relays as fallback
      try {
        const ndk: NDK | undefined = get(ndkInstance);
        if (!ndk) return;
        const userStoreValue: UserState = get(userStore);
        // Only build a user-specific relay set when someone is signed in.
        const user = userStoreValue.signedIn && userStoreValue.pubkey ? ndk.getUser({ pubkey: userStoreValue.pubkey }) : null;
        const relaySet = await buildCompleteRelaySet(ndk, user);
        const allRelays = [...relaySet.inboxRelays, ...relaySet.outboxRelays];
        if (allRelays.length > 0) {
          const ndkRelaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
          const profileEvent = await ndk.fetchEvent(
            { kinds: [0], authors: [pubkey] },
            undefined,
            ndkRelaySet
          );
          if (profileEvent) {
            const profileData = JSON.parse(profileEvent.content);
            authorProfiles.set(pubkey, {
              name: profileData.name,
              displayName: profileData.display_name || profileData.displayName,
              picture: profileData.picture || profileData.image
            });
          }
        }
      } catch (error) {
        console.warn(`[fetchAuthorProfiles] Failed to fetch profile from all relays:`, error);
      }
    } catch (error) {
      console.warn(`[fetchAuthorProfiles] Error processing profile for ${pubkey}:`, error);
    }
  });

  // Resolve all authors in parallel before returning the map.
  await Promise.all(profilePromises);
  return authorProfiles;
}
/**
 * Resolves the event referenced by a quote ("q") tag.
 *
 * Looks for the event in the already-loaded `publicMessages` first, and
 * only falls back to a relay fetch (inbox + outbox + search relays) on a
 * local miss. Fetch failures degrade to "not found" rather than throwing.
 *
 * @param eventId - Hex event id from the q-tag; must be 64 hex characters.
 * @param publicMessages - Locally cached messages searched before fetching.
 * @returns The quoted event, or `undefined` when the id is malformed or
 *   the event cannot be found.
 */
async function findQuotedMessage(eventId: string, publicMessages: NDKEvent[]): Promise<NDKEvent | undefined> {
  // Validate eventId format (should be 64 character hex string)
  const isValidEventId = /^[a-fA-F0-9]{64}$/.test(eventId);
  if (!isValidEventId) return undefined;

  // First try to find in local messages
  let quotedMessage = publicMessages.find(msg => msg.id === eventId);

  // If not found locally, fetch from relays
  if (!quotedMessage) {
    try {
      const ndk: NDK | undefined = get(ndkInstance);
      if (ndk) {
        const userStoreValue: UserState = get(userStore);
        // Signed-in users contribute their own inbox/outbox relays to the lookup.
        const user = userStoreValue.signedIn && userStoreValue.pubkey ? ndk.getUser({ pubkey: userStoreValue.pubkey }) : null;
        const relaySet = await buildCompleteRelaySet(ndk, user);
        const allRelays = [...relaySet.inboxRelays, ...relaySet.outboxRelays, ...searchRelays];
        if (allRelays.length > 0) {
          const ndkRelaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
          const fetchedEvent = await ndk.fetchEvent({ ids: [eventId], limit: 1 }, undefined, ndkRelaySet);
          // fetchEvent yields null on a miss; normalize to undefined for the caller.
          quotedMessage = fetchedEvent || undefined;
        }
      }
    } catch (error) {
      // Best-effort: a failed fetch is reported but treated as "not found".
      console.warn(`[findQuotedMessage] Failed to fetch quoted event ${eventId}:`, error);
    }
  }
  return quotedMessage;
}
</script>
<!-- Renders arbitrary note content: parses embedded Nostr markup via
     parseEmbeddedMarkup (nesting level 0) and injects the resulting HTML.
     Nothing is shown until parsing resolves. -->
{#snippet parsedContent(content: string)}
  {#await parseEmbeddedMarkup(content, 0) then parsed}
    {@html parsed}
  {/await}
{/snippet}
<!-- Renders a repost (kind 6/16). The repost's content field is expected to
     be the original event serialized as JSON; when it parses, the original
     event is shown in a framed card with author/date metadata. When parsing
     fails, the raw content is rendered as ordinary embedded markup. -->
{#snippet repostContent(content: string)}
  <!-- Parse defensively: malformed JSON yields null and triggers the fallback branch. -->
  {@const originalEvent = (() => {
    try {
      return JSON.parse(content);
    } catch {
      return null;
    }
  })()}
  {#if originalEvent}
    {@const originalContent = originalEvent.content || ""}
    {@const originalAuthor = originalEvent.pubkey || ""}
    {@const originalCreatedAt = originalEvent.created_at || 0}
    {@const originalKind = originalEvent.kind || 1}
    {@const formattedDate = originalCreatedAt ? new Date(originalCreatedAt * 1000).toLocaleDateString() : "Unknown date"}
    <!-- Abbreviated pubkey: first 8 and last 4 hex characters. -->
    {@const shortAuthor = originalAuthor ? `${originalAuthor.slice(0, 8)}...${originalAuthor.slice(-4)}` : "Unknown"}
    <div class="embedded-repost bg-gray-50 dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4 my-2">
      <!-- Event header -->
      <div class="flex items-center justify-between mb-3 min-w-0">
        <div class="flex items-center space-x-2 min-w-0">
          <span class="text-xs text-gray-500 dark:text-gray-400 font-mono flex-shrink-0">
            Kind {originalKind}
          </span>
          <span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0">
            (repost)
          </span>
          <span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0"></span>
          <span class="text-xs text-gray-600 dark:text-gray-400 flex-shrink-0">Author:</span>
          <span class="text-xs text-gray-700 dark:text-gray-300 font-mono">
            {shortAuthor}
          </span>
          <span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0"></span>
          <span class="text-xs text-gray-500 dark:text-gray-400">
            {formattedDate}
          </span>
        </div>
        <!-- Deep link to the full event view. -->
        <button
          class="text-xs text-primary-600 dark:text-primary-500 hover:underline flex-shrink-0"
          onclick={() => window.location.href=`/events?id=${originalEvent.id || 'unknown'}`}
        >
          View full event →
        </button>
      </div>
      <!-- Reposted content -->
      <div class="text-sm text-gray-800 dark:text-gray-200 leading-relaxed">
        {#await parseEmbeddedMarkup(originalContent, 0) then parsedOriginalContent}
          {@html parsedOriginalContent}
        {/await}
      </div>
    </div>
  {:else}
    <!-- Fallback: content was not valid JSON, render it as plain markup. -->
    {#await parseEmbeddedMarkup(content, 0) then parsedContent}
      {@html parsedContent}
    {/await}
  {/if}
{/snippet}
<!-- Renders the quote box for a message carrying a "q" tag. Only the first
     q-tag is used. The referenced event is resolved via findQuotedMessage;
     when found, a preview (first 200 chars, parsed as markup) is shown in a
     clickable box that dispatches a jump-to-message event. When not found,
     a placeholder box is shown — clickable (linking to the nevent) when the
     id is a valid 64-char hex string and encodable, otherwise static. -->
{#snippet quotedContent(message: NDKEvent, publicMessages: NDKEvent[])}
  {@const qTags = message.getMatchingTags("q")}
  {#if qTags.length > 0}
    {@const qTag = qTags[0]}
    {@const eventId = qTag[1]}
    {#if eventId}
      {#await findQuotedMessage(eventId, publicMessages) then quotedMessage}
        {#if quotedMessage}
          <!-- Preview the quoted text, capped at 200 characters. -->
          {@const quotedContent = quotedMessage.content ? quotedMessage.content.slice(0, 200) : "No content"}
          {#await parseEmbeddedMarkup(quotedContent, 0) then parsedContent}
            <button type="button" class="block text-left w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded cursor-pointer hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors text-sm text-gray-600 dark:text-gray-300" onclick={() => window.dispatchEvent(new CustomEvent('jump-to-message', { detail: eventId }))}>
              {@html parsedContent}
            </button>
          {/await}
        {:else}
          {@const isValidEventId = /^[a-fA-F0-9]{64}$/.test(eventId)}
          {#if isValidEventId}
            <!-- Encode defensively: neventEncode can throw on bad input. -->
            {@const nevent = (() => {
              try {
                return nip19.neventEncode({ id: eventId });
              } catch (error) {
                console.warn(`[quotedContent] Failed to encode nevent for ${eventId}:`, error);
                return null;
              }
            })()}
            {#if nevent}
              <button type="button" class="block text-left w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded cursor-pointer hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors text-sm text-gray-600 dark:text-gray-300" onclick={() => window.location.href=`/events?id=${nevent}`}>
                Quoted message not found. Click to view event {eventId.slice(0, 8)}...
              </button>
            {:else}
              <div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded text-sm text-gray-600 dark:text-gray-300">
                Quoted message not found. Event ID: {eventId.slice(0, 8)}...
              </div>
            {/if}
          {:else}
            <div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded text-sm text-gray-600 dark:text-gray-300">
              Invalid quoted message reference
            </div>
          {/if}
        {/if}
      {/await}
    {/if}
  {/if}
{/snippet}

7
src/lib/consts.ts

@ -3,6 +3,7 @@ @@ -3,6 +3,7 @@
export const wikiKind = 30818;
export const indexKind = 30040;
export const zettelKinds = [30041, 30818, 30023];
export const repostKinds = [6, 16];
export const communityRelays = [
"wss://theforest.nostr1.com",
@ -16,7 +17,7 @@ export const searchRelays = [ @@ -16,7 +17,7 @@ export const searchRelays = [
"wss://nostr.wine",
"wss://relay.damus.io",
"wss://relay.nostr.band",
"wss://freelay.sovbit.host"
"wss://freelay.sovbit.host",
];
export const secondaryRelays = [
@ -32,7 +33,7 @@ export const secondaryRelays = [ @@ -32,7 +33,7 @@ export const secondaryRelays = [
export const anonymousRelays = [
"wss://freelay.sovbit.host",
"wss://thecitadel.nostr1.com"
"wss://thecitadel.nostr1.com",
];
export const lowbandwidthRelays = [
@ -44,7 +45,7 @@ export const lowbandwidthRelays = [ @@ -44,7 +45,7 @@ export const lowbandwidthRelays = [
export const localRelays: string[] = [
"ws://localhost:8080",
"ws://localhost:4869",
"ws://localhost:3334"
"ws://localhost:3334",
];
export enum FeedType {

85
src/lib/data_structures/docs/relay_selector_design.md

@ -1,6 +1,11 @@ @@ -1,6 +1,11 @@
# Relay Selector Class Design
The relay selector will be a singleton that tracks, rates, and ranks Nostr relays to help the application determine which relay should be used to handle each request. It will weight relays based on observed characteristics, then use these weights to implement a weighted round robin algorithm for selecting relays, with some additional modifications to account for domain-specific features of Nostr.
The relay selector will be a singleton that tracks, rates, and ranks Nostr
relays to help the application determine which relay should be used to handle
each request. It will weight relays based on observed characteristics, then use
these weights to implement a weighted round robin algorithm for selecting
relays, with some additional modifications to account for domain-specific
features of Nostr.
## Relay Weights
@ -9,63 +14,92 @@ The relay selector will be a singleton that tracks, rates, and ranks Nostr relay @@ -9,63 +14,92 @@ The relay selector will be a singleton that tracks, rates, and ranks Nostr relay
Relays are broadly divided into three categories:
1. **Public**: no authorization is required
2. **Private Write**: authorization is required to write to this relay, but not to read
3. **Private Read and Write**: authorization is required to use any features of this relay
2. **Private Write**: authorization is required to write to this relay, but not
to read
3. **Private Read and Write**: authorization is required to use any features of
this relay
The broadest level of relay selection is based on these categories.
- For users that are not logged in, public relays are used exclusively.
- For logged-in users, public and private read relays are initially rated equally for read operations.
- For logged-in users, private write relays are preferred above public relays for write operations.
- For logged-in users, public and private read relays are initially rated
equally for read operations.
- For logged-in users, private write relays are preferred above public relays
for write operations.
### User Preferences
The relay selector will respect user relay preferences while still attempting to optimize for responsiveness and success rate.
- User inbox relays will be stored in a separate list from general-purpose relays, and weighted and sorted separately using the same algorithm as the general-purpose relay list.
- Local relays (beginning with `wss://localhost` or `ws://localhost`) will be stored _unranked_ in a separate list, and used when the relay selector is operating on a web browser (as opposed to a server).
- When a caller requests relays from the relay selector, the selector will return:
The relay selector will respect user relay preferences while still attempting to
optimize for responsiveness and success rate.
- User inbox relays will be stored in a separate list from general-purpose
relays, and weighted and sorted separately using the same algorithm as the
general-purpose relay list.
- Local relays (beginning with `wss://localhost` or `ws://localhost`) will be
stored _unranked_ in a separate list, and used when the relay selector is
operating on a web browser (as opposed to a server).
- When a caller requests relays from the relay selector, the selector will
return:
- The highest-ranked general-purpose relay
- The highest-ranked user inbox relay
- (If on browser) any local relays
### Weighted Metrics
Several weighted metrics are used to compute a relay's score. The score is used to rank relays to determine which to prefer when fetching events.
Several weighted metrics are used to compute a relay's score. The score is used
to rank relays to determine which to prefer when fetching events.
#### Response Time
The response time weight of each relay is computed according to the logarithmic function $`r(t) = -log(t) + 1`$, where $`t`$ is the median response time in seconds. This function has a few features which make it useful:
The response time weight of each relay is computed according to the logarithmic
function $`r(t) = -log(t) + 1`$, where $`t`$ is the median response time in
seconds. This function has a few features which make it useful:
- $`r(1) = 1`$, making a response time of 1s the neutral point. This causes the algorithm to prefer relays that respond in under 1s.
- $`r(0.3) \approx 1.5`$ and $`r(3) \approx 0.5`$. This clusters the 0.5 to 1.5 weight range in the 300ms to 3s response time range, which is a sufficiently rapid response time to keep users from switching context.
- The function has a long tail, so it doesn't discount slower response times too heavily, too quickly.
- $`r(1) = 1`$, making a response time of 1s the neutral point. This causes the
algorithm to prefer relays that respond in under 1s.
- $`r(0.3) \approx 1.5`$ and $`r(3) \approx 0.5`$. This clusters the 0.5 to 1.5
weight range in the 300ms to 3s response time range, which is a sufficiently
rapid response time to keep users from switching context.
- The function has a long tail, so it doesn't discount slower response times too
heavily, too quickly.
#### Success Rate
The success rate $`s(x)`$ is computed as the fraction of total requests sent to the relay that returned at least one event in response. The optimal score is 1, meaning the relay successfully responds to 100% of requests.
The success rate $`s(x)`$ is computed as the fraction of total requests sent to
the relay that returned at least one event in response. The optimal score is 1,
meaning the relay successfully responds to 100% of requests.
#### Trust Level
Certain relays may be assigned a constant "trust level" score $`T`$. This modifier is a number in the range $`[-0.5, 0.5]`$ that indicates how much a relay is trusted by the GitCitadel organization.
Certain relays may be assigned a constant "trust level" score $`T`$. This
modifier is a number in the range $`[-0.5, 0.5]`$ that indicates how much a
relay is trusted by the GitCitadel organization.
A few factors contribute to a higher trust rating:
- Effective filtering of spam and abusive content.
- Good data transparency, including such policies as honoring deletion requests.
- Event aggregation policies that aim at synchronization with the broader relay network.
- Event aggregation policies that aim at synchronization with the broader relay
network.
#### Preferred Vendors
Certain relays may be assigned a constant "preferred vendor" score $`V`$. This modifier is a number in the range $`[0, 0.5]`$. It is used to increase the priority of GitCitadel's preferred relay vendors.
Certain relays may be assigned a constant "preferred vendor" score $`V`$. This
modifier is a number in the range $`[0, 0.5]`$. It is used to increase the
priority of GitCitadel's preferred relay vendors.
### Overall Weight
The overall weight of a relay is calculated as $`w(t, x) = r(t) \times s(x) + T + V`$. The `RelaySelector` class maintains a list of relays sorted by their overall weights. The weights may be updated at runtime when $`t`$ or $`x`$ change. On update, the relay list is re-sorted to account for the new weights.
The overall weight of a relay is calculated as
$`w(t, x) = r(t) \times s(x) + T + V`$. The `RelaySelector` class maintains a
list of relays sorted by their overall weights. The weights may be updated at
runtime when $`t`$ or $`x`$ change. On update, the relay list is re-sorted to
account for the new weights.
## Algorithm
The relay weights contribute to a weighted round robin (WRR) algorithm for relay selection. Pseudocode for the algorithm is given below:
The relay weights contribute to a weighted round robin (WRR) algorithm for relay
selection. Pseudocode for the algorithm is given below:
```pseudocode
Constants and Variables:
@ -86,11 +120,13 @@ Function getRelay: @@ -86,11 +120,13 @@ Function getRelay:
## Class Methods
The `RelaySelector` class should expose the following methods to support updates to relay weights. Pseudocode for each method is given below.
The `RelaySelector` class should expose the following methods to support updates
to relay weights. Pseudocode for each method is given below.
### Add Response Time Datum
This function updates the class state by side effect. Locking should be used in concurrent use cases.
This function updates the class state by side effect. Locking should be used in
concurrent use cases.
```pseudocode
Constants and Variables:
@ -123,7 +159,8 @@ Function addResponseTimeDatum: @@ -123,7 +159,8 @@ Function addResponseTimeDatum:
### Add Success Rate Datum
This function updates the class state by side effect. Locking should be used in concurrent use cases.
This function updates the class state by side effect. Locking should be used in
concurrent use cases.
```pseudocode
Constants and Variables:

310
src/lib/data_structures/publication_tree.ts

@ -2,7 +2,10 @@ import { Lazy } from "./lazy.ts"; @@ -2,7 +2,10 @@ import { Lazy } from "./lazy.ts";
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type NDK from "@nostr-dev-kit/ndk";
import { fetchEventById } from "../utils/websocket_utils.ts";
import { fetchEventWithFallback, NDKRelaySetFromNDK } from "../utils/nostrUtils.ts";
import {
fetchEventWithFallback,
NDKRelaySetFromNDK,
} from "../utils/nostrUtils.ts";
import { get } from "svelte/store";
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
import { searchRelays, secondaryRelays } from "../consts.ts";
@ -50,7 +53,7 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -50,7 +53,7 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
* A map of addresses in the tree to their corresponding events.
*/
#events: Map<string, NDKEvent>;
/**
* Simple cache for fetched events to avoid re-fetching.
*/
@ -486,7 +489,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -486,7 +489,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
continue;
}
if (this.#cursor.target && this.#cursor.target.status === PublicationTreeNodeStatus.Error) {
if (
this.#cursor.target &&
this.#cursor.target.status === PublicationTreeNodeStatus.Error
) {
return { done: false, value: null };
}
@ -494,7 +500,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -494,7 +500,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
}
} while (this.#cursor.tryMoveToParent());
if (this.#cursor.target && this.#cursor.target.status === PublicationTreeNodeStatus.Error) {
if (
this.#cursor.target &&
this.#cursor.target.status === PublicationTreeNodeStatus.Error
) {
return { done: false, value: null };
}
@ -533,7 +542,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -533,7 +542,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
}
} while (this.#cursor.tryMoveToParent());
if (this.#cursor.target && this.#cursor.target.status === PublicationTreeNodeStatus.Error) {
if (
this.#cursor.target &&
this.#cursor.target.status === PublicationTreeNodeStatus.Error
) {
return { done: false, value: null };
}
@ -588,47 +600,84 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -588,47 +600,84 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
.filter((tag) => tag[0] === "a")
.map((tag) => tag[1]);
console.debug(`[PublicationTree] Current event ${currentEvent.id} has ${currentEvent.tags.length} tags:`, currentEvent.tags);
console.debug(`[PublicationTree] Found ${currentChildAddresses.length} a-tags in current event:`, currentChildAddresses);
console.debug(
`[PublicationTree] Current event ${currentEvent.id} has ${currentEvent.tags.length} tags:`,
currentEvent.tags,
);
console.debug(
`[PublicationTree] Found ${currentChildAddresses.length} a-tags in current event:`,
currentChildAddresses,
);
// If no a-tags found, try e-tags as fallback
if (currentChildAddresses.length === 0) {
const eTags = currentEvent.tags
.filter((tag) => tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1]));
console.debug(`[PublicationTree] Found ${eTags.length} e-tags for current event ${currentEvent.id}:`, eTags.map(tag => tag[1]));
.filter((tag) =>
tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1])
);
console.debug(
`[PublicationTree] Found ${eTags.length} e-tags for current event ${currentEvent.id}:`,
eTags.map((tag) => tag[1]),
);
// For e-tags with hex IDs, fetch the referenced events to get their addresses
const eTagPromises = eTags.map(async (tag) => {
try {
console.debug(`[PublicationTree] Fetching event for e-tag ${tag[1]} in depthFirstRetrieve`);
console.debug(
`[PublicationTree] Fetching event for e-tag ${
tag[1]
} in depthFirstRetrieve`,
);
const referencedEvent = await fetchEventById(tag[1]);
if (referencedEvent) {
// Construct the proper address format from the referenced event
const dTag = referencedEvent.tags.find(tag => tag[0] === "d")?.[1];
const dTag = referencedEvent.tags.find((tag) => tag[0] === "d")
?.[1];
if (dTag) {
const address = `${referencedEvent.kind}:${referencedEvent.pubkey}:${dTag}`;
console.debug(`[PublicationTree] Constructed address from e-tag in depthFirstRetrieve: ${address}`);
const address =
`${referencedEvent.kind}:${referencedEvent.pubkey}:${dTag}`;
console.debug(
`[PublicationTree] Constructed address from e-tag in depthFirstRetrieve: ${address}`,
);
return address;
} else {
console.debug(`[PublicationTree] Referenced event ${tag[1]} has no d-tag in depthFirstRetrieve`);
console.debug(
`[PublicationTree] Referenced event ${
tag[1]
} has no d-tag in depthFirstRetrieve`,
);
}
} else {
console.debug(`[PublicationTree] Failed to fetch event for e-tag ${tag[1]} in depthFirstRetrieve - event not found`);
console.debug(
`[PublicationTree] Failed to fetch event for e-tag ${
tag[1]
} in depthFirstRetrieve - event not found`,
);
}
return null;
} catch (error) {
console.warn(`[PublicationTree] Failed to fetch event for e-tag ${tag[1]} in depthFirstRetrieve:`, error);
console.warn(
`[PublicationTree] Failed to fetch event for e-tag ${
tag[1]
} in depthFirstRetrieve:`,
error,
);
return null;
}
});
const resolvedAddresses = await Promise.all(eTagPromises);
const validAddresses = resolvedAddresses.filter(addr => addr !== null) as string[];
console.debug(`[PublicationTree] Resolved ${validAddresses.length} valid addresses from e-tags in depthFirstRetrieve:`, validAddresses);
const validAddresses = resolvedAddresses.filter((addr) =>
addr !== null
) as string[];
console.debug(
`[PublicationTree] Resolved ${validAddresses.length} valid addresses from e-tags in depthFirstRetrieve:`,
validAddresses,
);
if (validAddresses.length > 0) {
currentChildAddresses.push(...validAddresses);
}
@ -646,9 +695,9 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -646,9 +695,9 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
// Augment the tree with the children of the current event.
const childPromises = currentChildAddresses
.filter(childAddress => !this.#nodes.has(childAddress))
.map(childAddress => this.#addNode(childAddress, currentNode!));
.filter((childAddress) => !this.#nodes.has(childAddress))
.map((childAddress) => this.#addNode(childAddress, currentNode!));
await Promise.all(childPromises);
// Push the popped address's children onto the stack for the next iteration.
@ -663,7 +712,7 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -663,7 +712,7 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
#addNode(address: string, parentNode: PublicationTreeNode) {
const lazyNode = new Lazy<PublicationTreeNode>(() =>
this.#resolveNode(address, parentNode),
this.#resolveNode(address, parentNode)
);
parentNode.children!.push(lazyNode);
this.#nodes.set(address, lazyNode);
@ -686,10 +735,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -686,10 +735,10 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
): Promise<PublicationTreeNode> {
// Check cache first
let event = this.#eventCache.get(address);
if (!event) {
const [kind, pubkey, dTag] = address.split(":");
// AI-NOTE: 2025-01-24 - Enhanced event fetching with comprehensive fallback
// First try to fetch using the enhanced fetchEventWithFallback function
// which includes search relay fallback logic
@ -698,33 +747,50 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -698,33 +747,50 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
authors: [pubkey],
"#d": [dTag],
}, 5000) // 5 second timeout for publication events
.then(fetchedEvent => {
.then((fetchedEvent) => {
if (fetchedEvent) {
// Cache the event if found
this.#eventCache.set(address, fetchedEvent);
event = fetchedEvent;
}
if (!event) {
console.warn(
`[PublicationTree] Event with address ${address} not found on primary relays, trying search relays.`,
);
// If still not found, try a more aggressive search using search relays
return this.#trySearchRelayFallback(address, kind, pubkey, dTag, parentNode);
// If still not found, try a more aggressive search using search relays
return this.#trySearchRelayFallback(
address,
kind,
pubkey,
dTag,
parentNode,
);
}
return this.#buildNodeFromEvent(event, address, parentNode);
})
.catch(error => {
console.warn(`[PublicationTree] Error fetching event for address ${address}:`, error);
// Try search relay fallback even on error
return this.#trySearchRelayFallback(address, kind, pubkey, dTag, parentNode);
.catch((error) => {
console.warn(
`[PublicationTree] Error fetching event for address ${address}:`,
error,
);
// Try search relay fallback even on error
return this.#trySearchRelayFallback(
address,
kind,
pubkey,
dTag,
parentNode,
);
});
}
return Promise.resolve(this.#buildNodeFromEvent(event, address, parentNode));
return Promise.resolve(
this.#buildNodeFromEvent(event, address, parentNode),
);
}
/**
@ -732,54 +798,75 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -732,54 +798,75 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
* This method tries to find events on search relays when they're not found on primary relays
*/
async #trySearchRelayFallback(
address: string,
kind: string,
pubkey: string,
address: string,
kind: string,
pubkey: string,
dTag: string,
parentNode: PublicationTreeNode
parentNode: PublicationTreeNode,
): Promise<PublicationTreeNode> {
try {
console.log(`[PublicationTree] Trying search relay fallback for address: ${address}`);
console.log(
`[PublicationTree] Trying search relay fallback for address: ${address}`,
);
// Get current relay configuration
const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays);
// Create a comprehensive relay set including search relays
const allRelays = [...inboxRelays, ...outboxRelays, ...searchRelays, ...secondaryRelays];
const allRelays = [
...inboxRelays,
...outboxRelays,
...searchRelays,
...secondaryRelays,
];
const uniqueRelays = [...new Set(allRelays)]; // Remove duplicates
console.log(`[PublicationTree] Trying ${uniqueRelays.length} relays for fallback search:`, uniqueRelays);
console.log(
`[PublicationTree] Trying ${uniqueRelays.length} relays for fallback search:`,
uniqueRelays,
);
// Try each relay individually with a shorter timeout
for (const relay of uniqueRelays) {
try {
const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], this.#ndk);
const fetchedEvent = await this.#ndk.fetchEvent({
kinds: [parseInt(kind)],
authors: [pubkey],
"#d": [dTag],
}, undefined, relaySet).withTimeout(3000); // 3 second timeout per relay
const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], this.#ndk);
const fetchedEvent = await this.#ndk.fetchEvent(
{
kinds: [parseInt(kind)],
authors: [pubkey],
"#d": [dTag],
},
undefined,
relaySet,
).withTimeout(3000); // 3 second timeout per relay
if (fetchedEvent) {
console.log(`[PublicationTree] Found event ${fetchedEvent.id} on search relay: ${relay}`);
console.log(
`[PublicationTree] Found event ${fetchedEvent.id} on search relay: ${relay}`,
);
// Cache the event
this.#eventCache.set(address, fetchedEvent);
this.#events.set(address, fetchedEvent);
return this.#buildNodeFromEvent(fetchedEvent, address, parentNode);
}
} catch (error) {
console.debug(`[PublicationTree] Failed to fetch from relay ${relay}:`, error);
console.debug(
`[PublicationTree] Failed to fetch from relay ${relay}:`,
error,
);
continue; // Try next relay
}
}
// If we get here, the event was not found on any relay
console.warn(`[PublicationTree] Event with address ${address} not found on any relay after fallback search.`);
console.warn(
`[PublicationTree] Event with address ${address} not found on any relay after fallback search.`,
);
return {
type: PublicationTreeNodeType.Leaf,
status: PublicationTreeNodeStatus.Error,
@ -787,10 +874,12 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -787,10 +874,12 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
parent: parentNode,
children: [],
};
} catch (error) {
console.error(`[PublicationTree] Error in search relay fallback for ${address}:`, error);
console.error(
`[PublicationTree] Error in search relay fallback for ${address}:`,
error,
);
return {
type: PublicationTreeNodeType.Leaf,
status: PublicationTreeNodeStatus.Error,
@ -806,9 +895,9 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -806,9 +895,9 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
* This extracts the common logic for building nodes from events
*/
#buildNodeFromEvent(
event: NDKEvent,
address: string,
parentNode: PublicationTreeNode
event: NDKEvent,
address: string,
parentNode: PublicationTreeNode,
): PublicationTreeNode {
this.#events.set(address, event);
@ -816,46 +905,68 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -816,46 +905,68 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
.filter((tag) => tag[0] === "a")
.map((tag) => tag[1]);
console.debug(`[PublicationTree] Event ${event.id} has ${event.tags.length} tags:`, event.tags);
console.debug(`[PublicationTree] Found ${childAddresses.length} a-tags:`, childAddresses);
console.debug(
`[PublicationTree] Event ${event.id} has ${event.tags.length} tags:`,
event.tags,
);
console.debug(
`[PublicationTree] Found ${childAddresses.length} a-tags:`,
childAddresses,
);
// If no a-tags found, try e-tags as fallback
if (childAddresses.length === 0) {
const eTags = event.tags
.filter((tag) => tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1]));
console.debug(`[PublicationTree] Found ${eTags.length} e-tags for event ${event.id}:`, eTags.map(tag => tag[1]));
.filter((tag) =>
tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1])
);
console.debug(
`[PublicationTree] Found ${eTags.length} e-tags for event ${event.id}:`,
eTags.map((tag) => tag[1]),
);
// For e-tags with hex IDs, fetch the referenced events to get their addresses
const eTagPromises = eTags.map(async (tag) => {
try {
console.debug(`[PublicationTree] Fetching event for e-tag ${tag[1]}`);
const referencedEvent = await fetchEventById(tag[1]);
if (referencedEvent) {
// Construct the proper address format from the referenced event
const dTag = referencedEvent.tags.find(tag => tag[0] === "d")?.[1];
const dTag = referencedEvent.tags.find((tag) => tag[0] === "d")
?.[1];
if (dTag) {
const address = `${referencedEvent.kind}:${referencedEvent.pubkey}:${dTag}`;
console.debug(`[PublicationTree] Constructed address from e-tag: ${address}`);
const address =
`${referencedEvent.kind}:${referencedEvent.pubkey}:${dTag}`;
console.debug(
`[PublicationTree] Constructed address from e-tag: ${address}`,
);
return address;
} else {
console.debug(`[PublicationTree] Referenced event ${tag[1]} has no d-tag`);
console.debug(
`[PublicationTree] Referenced event ${tag[1]} has no d-tag`,
);
}
} else {
console.debug(`[PublicationTree] Failed to fetch event for e-tag ${tag[1]}`);
console.debug(
`[PublicationTree] Failed to fetch event for e-tag ${tag[1]}`,
);
}
return null;
} catch (error) {
console.warn(`[PublicationTree] Failed to fetch event for e-tag ${tag[1]}:`, error);
console.warn(
`[PublicationTree] Failed to fetch event for e-tag ${tag[1]}:`,
error,
);
return null;
}
});
// Note: We can't await here since this is a synchronous method
// The e-tag resolution will happen when the children are processed
// For now, we'll add the e-tags as potential child addresses
const eTagAddresses = eTags.map(tag => tag[1]);
const eTagAddresses = eTags.map((tag) => tag[1]);
childAddresses.push(...eTagAddresses);
}
@ -868,11 +979,14 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -868,11 +979,14 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
};
// Add children asynchronously
const childPromises = childAddresses.map(address =>
const childPromises = childAddresses.map((address) =>
this.addEventByAddress(address, event)
);
Promise.all(childPromises).catch(error => {
console.warn(`[PublicationTree] Error adding children for ${address}:`, error);
Promise.all(childPromises).catch((error) => {
console.warn(
`[PublicationTree] Error adding children for ${address}:`,
error,
);
});
this.#nodeResolvedObservers.forEach((observer) => observer(address));
@ -881,10 +995,14 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> { @@ -881,10 +995,14 @@ export class PublicationTree implements AsyncIterable<NDKEvent | null> {
}
#getNodeType(event: NDKEvent): PublicationTreeNodeType {
if (event.kind === 30040 && (
event.tags.some((tag) => tag[0] === "a") ||
event.tags.some((tag) => tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1]))
)) {
if (
event.kind === 30040 && (
event.tags.some((tag) => tag[0] === "a") ||
event.tags.some((tag) =>
tag[0] === "e" && tag[1] && /^[0-9a-fA-F]{64}$/.test(tag[1])
)
)
) {
return PublicationTreeNodeType.Branch;
}

65
src/lib/data_structures/websocket_pool.ts

@ -42,7 +42,10 @@ export class WebSocketPool { @@ -42,7 +42,10 @@ export class WebSocketPool {
* @param maxConnections - The maximum number of simultaneous WebSocket connections. Defaults to
* 16.
*/
private constructor(idleTimeoutMs: number = 60000, maxConnections: number = 16) {
private constructor(
idleTimeoutMs: number = 60000,
maxConnections: number = 16,
) {
this.#idleTimeoutMs = idleTimeoutMs;
this.#maxConnections = maxConnections;
}
@ -71,15 +74,17 @@ export class WebSocketPool { @@ -71,15 +74,17 @@ export class WebSocketPool {
}
if (limit == null || isNaN(limit)) {
throw new Error('[WebSocketPool] Connection limit must be a number.');
throw new Error("[WebSocketPool] Connection limit must be a number.");
}
if (limit <= 0) {
throw new Error('[WebSocketPool] Connection limit must be greater than 0.');
throw new Error(
"[WebSocketPool] Connection limit must be greater than 0.",
);
}
if (!Number.isInteger(limit)) {
throw new Error('[WebSocketPool] Connection limit must be an integer.');
throw new Error("[WebSocketPool] Connection limit must be an integer.");
}
this.#maxConnections = limit;
@ -106,15 +111,15 @@ export class WebSocketPool { @@ -106,15 +111,15 @@ export class WebSocketPool {
}
if (timeoutMs == null || isNaN(timeoutMs)) {
throw new Error('[WebSocketPool] Idle timeout must be a number.');
throw new Error("[WebSocketPool] Idle timeout must be a number.");
}
if (timeoutMs <= 0) {
throw new Error('[WebSocketPool] Idle timeout must be greater than 0.');
throw new Error("[WebSocketPool] Idle timeout must be greater than 0.");
}
if (!Number.isInteger(timeoutMs)) {
throw new Error('[WebSocketPool] Idle timeout must be an integer.');
throw new Error("[WebSocketPool] Idle timeout must be an integer.");
}
this.#idleTimeoutMs = timeoutMs;
@ -151,9 +156,9 @@ export class WebSocketPool { @@ -151,9 +156,9 @@ export class WebSocketPool {
if (this.#pool.size >= this.#maxConnections) {
return new Promise((resolve, reject) => {
this.#waitingQueue.push({
url: normalizedUrl,
resolve: (handle) => resolve(handle.ws),
this.#waitingQueue.push({
url: normalizedUrl,
resolve: (handle) => resolve(handle.ws),
reject,
});
});
@ -163,7 +168,7 @@ export class WebSocketPool { @@ -163,7 +168,7 @@ export class WebSocketPool {
return newHandle.ws;
} catch (error) {
throw new Error(
`[WebSocketPool] Failed to acquire connection for ${normalizedUrl}: ${error}`
`[WebSocketPool] Failed to acquire connection for ${normalizedUrl}: ${error}`,
);
}
}
@ -179,7 +184,9 @@ export class WebSocketPool { @@ -179,7 +184,9 @@ export class WebSocketPool {
const normalizedUrl = this.#normalizeUrl(ws.url);
const handle = this.#pool.get(normalizedUrl);
if (!handle) {
throw new Error('[WebSocketPool] Attempted to release an unmanaged WebSocket connection.');
throw new Error(
"[WebSocketPool] Attempted to release an unmanaged WebSocket connection.",
);
}
if (--handle.refCount === 0) {
@ -191,8 +198,10 @@ export class WebSocketPool { @@ -191,8 +198,10 @@ export class WebSocketPool {
* Closes all WebSocket connections and "drains" the pool.
*/
public drain(): void {
console.debug(`[WebSocketPool] Draining pool with ${this.#pool.size} connections and ${this.#waitingQueue.length} waiting requests`);
console.debug(
`[WebSocketPool] Draining pool with ${this.#pool.size} connections and ${this.#waitingQueue.length} waiting requests`,
);
// Clear all idle timers first
for (const handle of this.#pool.values()) {
this.#clearIdleTimer(handle);
@ -200,7 +209,7 @@ export class WebSocketPool { @@ -200,7 +209,7 @@ export class WebSocketPool {
// Reject all waiting requests
for (const { reject } of this.#waitingQueue) {
reject(new Error('[WebSocketPool] Draining pool.'));
reject(new Error("[WebSocketPool] Draining pool."));
}
this.#waitingQueue = [];
@ -211,8 +220,8 @@ export class WebSocketPool { @@ -211,8 +220,8 @@ export class WebSocketPool {
}
}
this.#pool.clear();
console.debug('[WebSocketPool] Pool drained successfully');
console.debug("[WebSocketPool] Pool drained successfully");
}
// #endregion
@ -239,7 +248,9 @@ export class WebSocketPool { @@ -239,7 +248,9 @@ export class WebSocketPool {
this.#removeSocket(handle);
this.#processWaitingQueue();
reject(
new Error(`[WebSocketPool] WebSocket connection failed for ${url}: ${event.type}`)
new Error(
`[WebSocketPool] WebSocket connection failed for ${url}: ${event.type}`,
),
);
};
} catch (error) {
@ -251,7 +262,7 @@ export class WebSocketPool { @@ -251,7 +262,7 @@ export class WebSocketPool {
#removeSocket(handle: WebSocketHandle): void {
this.#clearIdleTimer(handle);
// Clean up event listeners to prevent memory leaks
// AI-NOTE: Code that checks out connections should clean up its own listener callbacks before
// releasing the connection to the pool.
@ -261,11 +272,13 @@ export class WebSocketPool { @@ -261,11 +272,13 @@ export class WebSocketPool {
handle.ws.onclose = null;
handle.ws.onmessage = null;
}
const url = this.#normalizeUrl(handle.ws.url);
this.#pool.delete(url);
console.debug(`[WebSocketPool] Removed socket for ${url}, pool size: ${this.#pool.size}`);
console.debug(
`[WebSocketPool] Removed socket for ${url}, pool size: ${this.#pool.size}`,
);
this.#processWaitingQueue();
}
@ -283,7 +296,9 @@ export class WebSocketPool { @@ -283,7 +296,9 @@ export class WebSocketPool {
handle.idleTimer = setTimeout(() => {
const refCount = handle.refCount;
if (refCount === 0 && handle.ws.readyState === WebSocket.OPEN) {
console.debug(`[WebSocketPool] Closing idle connection to ${handle.ws.url}`);
console.debug(
`[WebSocketPool] Closing idle connection to ${handle.ws.url}`,
);
handle.ws.close();
this.#removeSocket(handle);
}
@ -331,7 +346,7 @@ export class WebSocketPool { @@ -331,7 +346,7 @@ export class WebSocketPool {
#checkOut(handle: WebSocketHandle): void {
if (handle.refCount == null) {
throw new Error('[WebSocketPool] Handle refCount unexpectedly null.');
throw new Error("[WebSocketPool] Handle refCount unexpectedly null.");
}
++handle.refCount;
@ -346,10 +361,10 @@ export class WebSocketPool { @@ -346,10 +361,10 @@ export class WebSocketPool {
// The logic to remove a trailing slash for connection coalescing can be kept,
// but should be done on the normalized string.
if (urlObj.pathname !== '/' && normalized.endsWith('/')) {
if (urlObj.pathname !== "/" && normalized.endsWith("/")) {
normalized = normalized.slice(0, -1);
}
return normalized;
} catch {
// If URL is invalid, return it as-is and let WebSocket constructor handle the error.

2
src/lib/navigator/EventNetwork/types.ts

@ -53,7 +53,7 @@ export interface NetworkNode extends SimulationNodeDatum { @@ -53,7 +53,7 @@ export interface NetworkNode extends SimulationNodeDatum {
tagType?: string; // Type of tag (t, p, e, etc.)
tagValue?: string; // The tag value
connectedNodes?: string[]; // IDs of nodes that have this tag
// Person anchor specific fields
isPersonAnchor?: boolean; // Whether this is a person anchor node
pubkey?: string; // The person's public key

2
src/lib/navigator/EventNetwork/utils/common.ts

@ -38,4 +38,4 @@ export function createDebugFunction(prefix: string) { @@ -38,4 +38,4 @@ export function createDebugFunction(prefix: string) {
console.log(`[${prefix}]`, ...args);
}
};
}
}

325
src/lib/navigator/EventNetwork/utils/forceSimulation.ts

@ -1,11 +1,11 @@ @@ -1,11 +1,11 @@
/**
* D3 Force Simulation Utilities
*
*
* This module provides utilities for creating and managing D3 force-directed
* graph simulations for the event network visualization.
*/
import type { NetworkNode, NetworkLink } from "../types";
import type { NetworkLink, NetworkNode } from "../types";
import * as d3 from "d3";
import { createDebugFunction } from "./common";
@ -21,18 +21,18 @@ const debug = createDebugFunction("ForceSimulation"); @@ -21,18 +21,18 @@ const debug = createDebugFunction("ForceSimulation");
* Provides type safety for simulation operations
*/
export interface Simulation<NodeType, LinkType> {
nodes(): NodeType[];
nodes(nodes: NodeType[]): this;
alpha(): number;
alpha(alpha: number): this;
alphaTarget(): number;
alphaTarget(target: number): this;
restart(): this;
stop(): this;
tick(): this;
on(type: string, listener: (this: this) => void): this;
force(name: string): any;
force(name: string, force: any): this;
nodes(): NodeType[];
nodes(nodes: NodeType[]): this;
alpha(): number;
alpha(alpha: number): this;
alphaTarget(): number;
alphaTarget(target: number): this;
restart(): this;
stop(): this;
tick(): this;
on(type: string, listener: (this: this) => void): this;
force(name: string): any;
force(name: string, force: any): this;
}
/**
@ -40,175 +40,192 @@ export interface Simulation<NodeType, LinkType> { @@ -40,175 +40,192 @@ export interface Simulation<NodeType, LinkType> {
* Provides type safety for drag operations
*/
export interface D3DragEvent<GElement extends Element, Datum, Subject> {
active: number;
sourceEvent: any;
subject: Subject;
x: number;
y: number;
dx: number;
dy: number;
identifier: string | number;
active: number;
sourceEvent: any;
subject: Subject;
x: number;
y: number;
dx: number;
dy: number;
identifier: string | number;
}
/**
* Updates a node's velocity by applying a force
*
*
* @param node - The node to update
* @param deltaVx - Change in x velocity
* @param deltaVy - Change in y velocity
*/
export function updateNodeVelocity(
node: NetworkNode,
deltaVx: number,
deltaVy: number
node: NetworkNode,
deltaVx: number,
deltaVy: number,
) {
debug("Updating node velocity", {
nodeId: node.id,
currentVx: node.vx,
currentVy: node.vy,
deltaVx,
deltaVy
});
if (typeof node.vx === "number" && typeof node.vy === "number") {
node.vx = node.vx - deltaVx;
node.vy = node.vy - deltaVy;
debug("New velocity", { nodeId: node.id, vx: node.vx, vy: node.vy });
} else {
debug("Node velocity not defined", { nodeId: node.id });
}
debug("Updating node velocity", {
nodeId: node.id,
currentVx: node.vx,
currentVy: node.vy,
deltaVx,
deltaVy,
});
if (typeof node.vx === "number" && typeof node.vy === "number") {
node.vx = node.vx - deltaVx;
node.vy = node.vy - deltaVy;
debug("New velocity", { nodeId: node.id, vx: node.vx, vy: node.vy });
} else {
debug("Node velocity not defined", { nodeId: node.id });
}
}
/**
* Applies a logarithmic gravity force pulling the node toward the center
*
*
* The logarithmic scale ensures that nodes far from the center experience
* stronger gravity, preventing them from drifting too far away.
*
*
* @param node - The node to apply gravity to
* @param centerX - X coordinate of the center
* @param centerY - Y coordinate of the center
* @param alpha - Current simulation alpha (cooling factor)
*/
export function applyGlobalLogGravity(
node: NetworkNode,
centerX: number,
centerY: number,
alpha: number,
node: NetworkNode,
centerX: number,
centerY: number,
alpha: number,
) {
// Tag anchors and person anchors should not be affected by gravity
if (node.isTagAnchor || node.isPersonAnchor) return;
const dx = (node.x ?? 0) - centerX;
const dy = (node.y ?? 0) - centerY;
const distance = Math.sqrt(dx * dx + dy * dy);
// Tag anchors and person anchors should not be affected by gravity
if (node.isTagAnchor || node.isPersonAnchor) return;
const dx = (node.x ?? 0) - centerX;
const dy = (node.y ?? 0) - centerY;
const distance = Math.sqrt(dx * dx + dy * dy);
if (distance === 0) return;
if (distance === 0) return;
const force = Math.log(distance + 1) * GRAVITY_STRENGTH * alpha;
updateNodeVelocity(node, (dx / distance) * force, (dy / distance) * force);
const force = Math.log(distance + 1) * GRAVITY_STRENGTH * alpha;
updateNodeVelocity(node, (dx / distance) * force, (dy / distance) * force);
}
/**
* Applies gravity between connected nodes
*
*
* This creates a cohesive force that pulls connected nodes toward their
* collective center of gravity, creating more meaningful clusters.
*
*
* @param node - The node to apply connected gravity to
* @param links - All links in the network
* @param alpha - Current simulation alpha (cooling factor)
*/
export function applyConnectedGravity(
node: NetworkNode,
links: NetworkLink[],
alpha: number,
node: NetworkNode,
links: NetworkLink[],
alpha: number,
) {
// Tag anchors and person anchors should not be affected by connected gravity
if (node.isTagAnchor || node.isPersonAnchor) return;
// Find all nodes connected to this node (excluding tag anchors and person anchors)
const connectedNodes = links
.filter(link => link.source.id === node.id || link.target.id === node.id)
.map(link => link.source.id === node.id ? link.target : link.source)
.filter(n => !n.isTagAnchor && !n.isPersonAnchor);
// Tag anchors and person anchors should not be affected by connected gravity
if (node.isTagAnchor || node.isPersonAnchor) return;
if (connectedNodes.length === 0) return;
// Find all nodes connected to this node (excluding tag anchors and person anchors)
const connectedNodes = links
.filter((link) => link.source.id === node.id || link.target.id === node.id)
.map((link) => link.source.id === node.id ? link.target : link.source)
.filter((n) => !n.isTagAnchor && !n.isPersonAnchor);
// Calculate center of gravity of connected nodes
const cogX = d3.mean(connectedNodes, (n: NetworkNode) => n.x);
const cogY = d3.mean(connectedNodes, (n: NetworkNode) => n.y);
if (connectedNodes.length === 0) return;
if (cogX === undefined || cogY === undefined) return;
// Calculate center of gravity of connected nodes
const cogX = d3.mean(connectedNodes, (n: NetworkNode) => n.x);
const cogY = d3.mean(connectedNodes, (n: NetworkNode) => n.y);
// Calculate force direction and magnitude
const dx = (node.x ?? 0) - cogX;
const dy = (node.y ?? 0) - cogY;
const distance = Math.sqrt(dx * dx + dy * dy);
if (cogX === undefined || cogY === undefined) return;
if (distance === 0) return;
// Calculate force direction and magnitude
const dx = (node.x ?? 0) - cogX;
const dy = (node.y ?? 0) - cogY;
const distance = Math.sqrt(dx * dx + dy * dy);
// Apply force proportional to distance
const force = distance * CONNECTED_GRAVITY_STRENGTH * alpha;
updateNodeVelocity(node, (dx / distance) * force, (dy / distance) * force);
if (distance === 0) return;
// Apply force proportional to distance
const force = distance * CONNECTED_GRAVITY_STRENGTH * alpha;
updateNodeVelocity(node, (dx / distance) * force, (dy / distance) * force);
}
/**
* Sets up drag behavior for nodes
*
*
* This enables interactive dragging of nodes in the visualization.
*
*
* @param simulation - The D3 force simulation
* @param warmupClickEnergy - Alpha target when dragging starts (0-1)
* @returns D3 drag behavior configured for the simulation
*/
export function setupDragHandlers(
simulation: Simulation<NetworkNode, NetworkLink>,
warmupClickEnergy: number = 0.9
simulation: Simulation<NetworkNode, NetworkLink>,
warmupClickEnergy: number = 0.9,
) {
return d3
.drag()
.on("start", (event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>, d: NetworkNode) => {
// Tag anchors and person anchors retain their anchor behavior
if (d.isTagAnchor || d.isPersonAnchor) {
// Still allow dragging but maintain anchor status
d.fx = d.x;
d.fy = d.y;
return;
}
// Warm up simulation if it's cooled down
if (!event.active) {
simulation.alphaTarget(warmupClickEnergy).restart();
}
// Fix node position at current location
d.fx = d.x;
d.fy = d.y;
})
.on("drag", (event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>, d: NetworkNode) => {
// Update position for all nodes including anchors
// Update fixed position to mouse position
d.fx = event.x;
d.fy = event.y;
})
.on("end", (event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>, d: NetworkNode) => {
// Cool down simulation when drag ends
if (!event.active) {
simulation.alphaTarget(0);
}
// Keep all nodes fixed after dragging
// This allows users to manually position any node type
d.fx = d.x;
d.fy = d.y;
});
return d3
.drag()
.on(
"start",
(
event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>,
d: NetworkNode,
) => {
// Tag anchors and person anchors retain their anchor behavior
if (d.isTagAnchor || d.isPersonAnchor) {
// Still allow dragging but maintain anchor status
d.fx = d.x;
d.fy = d.y;
return;
}
// Warm up simulation if it's cooled down
if (!event.active) {
simulation.alphaTarget(warmupClickEnergy).restart();
}
// Fix node position at current location
d.fx = d.x;
d.fy = d.y;
},
)
.on(
"drag",
(
event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>,
d: NetworkNode,
) => {
// Update position for all nodes including anchors
// Update fixed position to mouse position
d.fx = event.x;
d.fy = event.y;
},
)
.on(
"end",
(
event: D3DragEvent<SVGGElement, NetworkNode, NetworkNode>,
d: NetworkNode,
) => {
// Cool down simulation when drag ends
if (!event.active) {
simulation.alphaTarget(0);
}
// Keep all nodes fixed after dragging
// This allows users to manually position any node type
d.fx = d.x;
d.fy = d.y;
},
);
}
/**
* Creates a D3 force simulation for the network
*
*
* @param nodes - Array of network nodes
* @param links - Array of network links
* @param nodeRadius - Radius of node circles
@ -216,34 +233,34 @@ export function setupDragHandlers( @@ -216,34 +233,34 @@ export function setupDragHandlers(
* @returns Configured D3 force simulation
*/
export function createSimulation(
nodes: NetworkNode[],
links: NetworkLink[],
nodeRadius: number,
linkDistance: number
nodes: NetworkNode[],
links: NetworkLink[],
nodeRadius: number,
linkDistance: number,
): Simulation<NetworkNode, NetworkLink> {
debug("Creating simulation", {
nodeCount: nodes.length,
linkCount: links.length,
nodeRadius,
linkDistance
});
try {
// Create the simulation with nodes
const simulation = d3
.forceSimulation(nodes)
.force(
"link",
d3.forceLink(links)
.id((d: NetworkNode) => d.id)
.distance(linkDistance * 0.1)
)
.force("collide", d3.forceCollide().radius(nodeRadius * 4));
debug("Simulation created successfully");
return simulation;
} catch (error) {
console.error("Error creating simulation:", error);
throw error;
}
debug("Creating simulation", {
nodeCount: nodes.length,
linkCount: links.length,
nodeRadius,
linkDistance,
});
try {
// Create the simulation with nodes
const simulation = d3
.forceSimulation(nodes)
.force(
"link",
d3.forceLink(links)
.id((d: NetworkNode) => d.id)
.distance(linkDistance * 0.1),
)
.force("collide", d3.forceCollide().radius(nodeRadius * 4));
debug("Simulation created successfully");
return simulation;
} catch (error) {
console.error("Error creating simulation:", error);
throw error;
}
}

439
src/lib/navigator/EventNetwork/utils/networkBuilder.ts

@ -1,16 +1,16 @@ @@ -1,16 +1,16 @@
/**
* Network Builder Utilities
*
*
* This module provides utilities for building a network graph from Nostr events.
* It handles the creation of nodes and links, and the processing of event relationships.
*/
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { NetworkNode, NetworkLink, GraphData, GraphState } from "../types";
import type { GraphData, GraphState, NetworkLink, NetworkNode } from "../types";
import { nip19 } from "nostr-tools";
import { communityRelays } from "$lib/consts";
import { getMatchingTags } from '$lib/utils/nostrUtils';
import { getDisplayNameSync } from '$lib/utils/profileCache';
import { getMatchingTags } from "$lib/utils/nostrUtils";
import { getDisplayNameSync } from "$lib/utils/profileCache";
import { createDebugFunction } from "./common";
// Configuration
@ -22,165 +22,173 @@ const debug = createDebugFunction("NetworkBuilder"); @@ -22,165 +22,173 @@ const debug = createDebugFunction("NetworkBuilder");
/**
* Creates a NetworkNode from an NDKEvent
*
*
* Extracts relevant information from the event and creates a node representation
* for the visualization.
*
*
* @param event - The Nostr event to convert to a node
* @param level - The hierarchy level of the node (default: 0)
* @returns A NetworkNode object representing the event
*/
export function createNetworkNode(
event: NDKEvent,
level: number = 0
event: NDKEvent,
level: number = 0,
): NetworkNode {
debug("Creating network node", { eventId: event.id, kind: event.kind, level });
const isContainer = event.kind === INDEX_EVENT_KIND;
const nodeType = isContainer ? "Index" : event.kind === CONTENT_EVENT_KIND || event.kind === 30818 ? "Content" : `Kind ${event.kind}`;
debug("Creating network node", {
eventId: event.id,
kind: event.kind,
level,
});
// Create the base node with essential properties
const node: NetworkNode = {
const isContainer = event.kind === INDEX_EVENT_KIND;
const nodeType = isContainer
? "Index"
: event.kind === CONTENT_EVENT_KIND || event.kind === 30818
? "Content"
: `Kind ${event.kind}`;
// Create the base node with essential properties
const node: NetworkNode = {
id: event.id,
event,
isContainer,
level,
title: event.getMatchingTags("title")?.[0]?.[1] || "Untitled",
content: event.content || "",
author: event.pubkey ? getDisplayNameSync(event.pubkey) : "",
kind: event.kind !== undefined ? event.kind : CONTENT_EVENT_KIND, // Default to content event kind only if truly undefined
type: nodeType as "Index" | "Content" | "TagAnchor",
};
// Add NIP-19 identifiers if possible
if (event.kind && event.pubkey) {
try {
const dTag = event.getMatchingTags("d")?.[0]?.[1] || "";
// Create naddr (NIP-19 address) for the event
node.naddr = nip19.naddrEncode({
pubkey: event.pubkey,
identifier: dTag,
kind: event.kind,
relays: communityRelays,
});
// Create nevent (NIP-19 event reference) for the event
node.nevent = nip19.neventEncode({
id: event.id,
event,
isContainer,
level,
title: event.getMatchingTags("title")?.[0]?.[1] || "Untitled",
content: event.content || "",
author: event.pubkey ? getDisplayNameSync(event.pubkey) : "",
kind: event.kind !== undefined ? event.kind : CONTENT_EVENT_KIND, // Default to content event kind only if truly undefined
type: nodeType as "Index" | "Content" | "TagAnchor",
};
// Add NIP-19 identifiers if possible
if (event.kind && event.pubkey) {
try {
const dTag = event.getMatchingTags("d")?.[0]?.[1] || "";
// Create naddr (NIP-19 address) for the event
node.naddr = nip19.naddrEncode({
pubkey: event.pubkey,
identifier: dTag,
kind: event.kind,
relays: communityRelays,
});
// Create nevent (NIP-19 event reference) for the event
node.nevent = nip19.neventEncode({
id: event.id,
relays: communityRelays,
kind: event.kind,
});
} catch (error) {
console.warn("Failed to generate identifiers for node:", error);
}
relays: communityRelays,
kind: event.kind,
});
} catch (error) {
console.warn("Failed to generate identifiers for node:", error);
}
}
return node;
return node;
}
/**
* Creates a map of event IDs to events for quick lookup
*
*
* @param events - Array of Nostr events
* @returns Map of event IDs to events
*/
export function createEventMap(events: NDKEvent[]): Map<string, NDKEvent> {
debug("Creating event map", { eventCount: events.length });
const eventMap = new Map<string, NDKEvent>();
events.forEach((event) => {
if (event.id) {
eventMap.set(event.id, event);
}
});
debug("Event map created", { mapSize: eventMap.size });
return eventMap;
debug("Creating event map", { eventCount: events.length });
const eventMap = new Map<string, NDKEvent>();
events.forEach((event) => {
if (event.id) {
eventMap.set(event.id, event);
}
});
debug("Event map created", { mapSize: eventMap.size });
return eventMap;
}
/**
* Extracts an event ID from an 'a' tag
*
*
* @param tag - The tag array from a Nostr event
* @returns The event ID or null if not found
*/
export function extractEventIdFromATag(tag: string[]): string | null {
return tag[3] || null;
return tag[3] || null;
}
/**
* Generates a deterministic color for an event based on its ID
*
*
* This creates visually distinct colors for different index events
* while ensuring the same event always gets the same color.
*
*
* @param eventId - The event ID to generate a color for
* @returns An HSL color string
*/
export function getEventColor(eventId: string): string {
// Use first 4 characters of event ID as a hex number
const num = parseInt(eventId.slice(0, 4), 16);
// Convert to a hue value (0-359)
const hue = num % 360;
// Use fixed saturation and lightness for pastel colors
const saturation = 70;
const lightness = 75;
return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
// Use first 4 characters of event ID as a hex number
const num = parseInt(eventId.slice(0, 4), 16);
// Convert to a hue value (0-359)
const hue = num % 360;
// Use fixed saturation and lightness for pastel colors
const saturation = 70;
const lightness = 75;
return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
}
/**
* Initializes the graph state from a set of events
*
*
* Creates nodes for all events and identifies referenced events.
*
*
* @param events - Array of Nostr events
* @returns Initial graph state
*/
export function initializeGraphState(events: NDKEvent[]): GraphState {
debug("Initializing graph state", { eventCount: events.length });
const nodeMap = new Map<string, NetworkNode>();
const eventMap = createEventMap(events);
// Create initial nodes for all events
events.forEach((event) => {
if (!event.id) return;
const node = createNetworkNode(event);
nodeMap.set(event.id, node);
debug("Initializing graph state", { eventCount: events.length });
const nodeMap = new Map<string, NetworkNode>();
const eventMap = createEventMap(events);
// Create initial nodes for all events
events.forEach((event) => {
if (!event.id) return;
const node = createNetworkNode(event);
nodeMap.set(event.id, node);
});
debug("Node map created", { nodeCount: nodeMap.size });
// Build set of referenced event IDs to identify root events
const referencedIds = new Set<string>();
events.forEach((event) => {
const aTags = getMatchingTags(event, "a");
debug("Processing a-tags for event", {
eventId: event.id,
aTagCount: aTags.length,
});
debug("Node map created", { nodeCount: nodeMap.size });
// Build set of referenced event IDs to identify root events
const referencedIds = new Set<string>();
events.forEach((event) => {
const aTags = getMatchingTags(event, "a");
debug("Processing a-tags for event", {
eventId: event.id,
aTagCount: aTags.length
});
aTags.forEach((tag) => {
const id = extractEventIdFromATag(tag);
if (id) referencedIds.add(id);
});
aTags.forEach((tag) => {
const id = extractEventIdFromATag(tag);
if (id) referencedIds.add(id);
});
debug("Referenced IDs set created", { referencedCount: referencedIds.size });
return {
nodeMap,
links: [],
eventMap,
referencedIds,
};
});
debug("Referenced IDs set created", { referencedCount: referencedIds.size });
return {
nodeMap,
links: [],
eventMap,
referencedIds,
};
}
/**
* Processes a sequence of nodes referenced by an index event
*
*
* Creates links between the index and its content, and between sequential content nodes.
* Also processes nested indices recursively up to the maximum level.
*
*
* @param sequence - Array of nodes in the sequence
* @param indexEvent - The index event referencing the sequence
* @param level - Current hierarchy level
@ -188,156 +196,157 @@ export function initializeGraphState(events: NDKEvent[]): GraphState { @@ -188,156 +196,157 @@ export function initializeGraphState(events: NDKEvent[]): GraphState {
* @param maxLevel - Maximum hierarchy level to process
*/
export function processSequence(
sequence: NetworkNode[],
indexEvent: NDKEvent,
level: number,
state: GraphState,
maxLevel: number,
sequence: NetworkNode[],
indexEvent: NDKEvent,
level: number,
state: GraphState,
maxLevel: number,
): void {
// Stop if we've reached max level or have no nodes
if (level >= maxLevel || sequence.length === 0) return;
// Stop if we've reached max level or have no nodes
if (level >= maxLevel || sequence.length === 0) return;
// Set levels for all nodes in the sequence
sequence.forEach((node) => {
node.level = level + 1;
});
// Set levels for all nodes in the sequence
sequence.forEach((node) => {
node.level = level + 1;
// Create link from index to first content node
const indexNode = state.nodeMap.get(indexEvent.id);
if (indexNode && sequence[0]) {
state.links.push({
source: indexNode,
target: sequence[0],
isSequential: true,
});
}
// Create link from index to first content node
const indexNode = state.nodeMap.get(indexEvent.id);
if (indexNode && sequence[0]) {
state.links.push({
source: indexNode,
target: sequence[0],
isSequential: true,
});
}
// Create sequential links between content nodes
for (let i = 0; i < sequence.length - 1; i++) {
const currentNode = sequence[i];
const nextNode = sequence[i + 1];
// Create sequential links between content nodes
for (let i = 0; i < sequence.length - 1; i++) {
const currentNode = sequence[i];
const nextNode = sequence[i + 1];
state.links.push({
source: currentNode,
target: nextNode,
isSequential: true,
});
// Process nested indices recursively
if (currentNode.isContainer) {
processNestedIndex(currentNode, level + 1, state, maxLevel);
}
}
state.links.push({
source: currentNode,
target: nextNode,
isSequential: true,
});
// Process the last node if it's an index
const lastNode = sequence[sequence.length - 1];
if (lastNode?.isContainer) {
processNestedIndex(lastNode, level + 1, state, maxLevel);
// Process nested indices recursively
if (currentNode.isContainer) {
processNestedIndex(currentNode, level + 1, state, maxLevel);
}
}
// Process the last node if it's an index
const lastNode = sequence[sequence.length - 1];
if (lastNode?.isContainer) {
processNestedIndex(lastNode, level + 1, state, maxLevel);
}
}
/**
* Processes a nested index node
*
*
* @param node - The index node to process
* @param level - Current hierarchy level
* @param state - Current graph state
* @param maxLevel - Maximum hierarchy level to process
*/
export function processNestedIndex(
node: NetworkNode,
level: number,
state: GraphState,
maxLevel: number,
node: NetworkNode,
level: number,
state: GraphState,
maxLevel: number,
): void {
if (!node.isContainer || level >= maxLevel) return;
if (!node.isContainer || level >= maxLevel) return;
const nestedEvent = state.eventMap.get(node.id);
if (nestedEvent) {
processIndexEvent(nestedEvent, level, state, maxLevel);
}
const nestedEvent = state.eventMap.get(node.id);
if (nestedEvent) {
processIndexEvent(nestedEvent, level, state, maxLevel);
}
}
/**
* Processes an index event and its referenced content
*
*
* @param indexEvent - The index event to process
* @param level - Current hierarchy level
* @param state - Current graph state
* @param maxLevel - Maximum hierarchy level to process
*/
export function processIndexEvent(
indexEvent: NDKEvent,
level: number,
state: GraphState,
maxLevel: number,
indexEvent: NDKEvent,
level: number,
state: GraphState,
maxLevel: number,
): void {
if (level >= maxLevel) return;
if (level >= maxLevel) return;
// Extract the sequence of nodes referenced by this index
const sequence = getMatchingTags(indexEvent, "a")
.map((tag) => extractEventIdFromATag(tag))
.filter((id): id is string => id !== null)
.map((id) => state.nodeMap.get(id))
.filter((node): node is NetworkNode => node !== undefined);
// Extract the sequence of nodes referenced by this index
const sequence = getMatchingTags(indexEvent, "a")
.map((tag) => extractEventIdFromATag(tag))
.filter((id): id is string => id !== null)
.map((id) => state.nodeMap.get(id))
.filter((node): node is NetworkNode => node !== undefined);
processSequence(sequence, indexEvent, level, state, maxLevel);
processSequence(sequence, indexEvent, level, state, maxLevel);
}
/**
* Generates a complete graph from a set of events
*
*
* This is the main entry point for building the network visualization.
*
*
* @param events - Array of Nostr events
* @param maxLevel - Maximum hierarchy level to process
* @returns Complete graph data for visualization
*/
export function generateGraph(
events: NDKEvent[],
maxLevel: number
events: NDKEvent[],
maxLevel: number,
): GraphData {
debug("Generating graph", { eventCount: events.length, maxLevel });
// Initialize the graph state
const state = initializeGraphState(events);
// Find root events (index events not referenced by others, and all non-publication events)
const publicationKinds = [30040, 30041, 30818];
const rootEvents = events.filter(
(e) => e.id && (
// Index events not referenced by others
(e.kind === INDEX_EVENT_KIND && !state.referencedIds.has(e.id)) ||
// All non-publication events are treated as roots
(e.kind !== undefined && !publicationKinds.includes(e.kind))
)
);
debug("Found root events", {
rootCount: rootEvents.length,
rootIds: rootEvents.map(e => e.id)
});
// Process each root event
rootEvents.forEach((rootEvent) => {
debug("Processing root event", {
rootId: rootEvent.id,
kind: rootEvent.kind,
aTags: getMatchingTags(rootEvent, "a").length
});
processIndexEvent(rootEvent, 0, state, maxLevel);
});
debug("Generating graph", { eventCount: events.length, maxLevel });
// Create the final graph data
const result = {
nodes: Array.from(state.nodeMap.values()),
links: state.links,
};
debug("Graph generation complete", {
nodeCount: result.nodes.length,
linkCount: result.links.length
// Initialize the graph state
const state = initializeGraphState(events);
// Find root events (index events not referenced by others, and all non-publication events)
const publicationKinds = [30040, 30041, 30818];
const rootEvents = events.filter(
(e) =>
e.id && (
// Index events not referenced by others
(e.kind === INDEX_EVENT_KIND && !state.referencedIds.has(e.id)) ||
// All non-publication events are treated as roots
(e.kind !== undefined && !publicationKinds.includes(e.kind))
),
);
debug("Found root events", {
rootCount: rootEvents.length,
rootIds: rootEvents.map((e) => e.id),
});
// Process each root event
rootEvents.forEach((rootEvent) => {
debug("Processing root event", {
rootId: rootEvent.id,
kind: rootEvent.kind,
aTags: getMatchingTags(rootEvent, "a").length,
});
return result;
processIndexEvent(rootEvent, 0, state, maxLevel);
});
// Create the final graph data
const result = {
nodes: Array.from(state.nodeMap.values()),
links: state.links,
};
debug("Graph generation complete", {
nodeCount: result.nodes.length,
linkCount: result.links.length,
});
return result;
}

100
src/lib/navigator/EventNetwork/utils/personNetworkBuilder.ts

@ -5,9 +5,9 @@ @@ -5,9 +5,9 @@
*/
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { NetworkNode, NetworkLink } from "../types";
import type { NetworkLink, NetworkNode } from "../types";
import { getDisplayNameSync } from "$lib/utils/profileCache";
import { SeededRandom, createDebugFunction } from "./common";
import { createDebugFunction, SeededRandom } from "./common";
const PERSON_ANCHOR_RADIUS = 15;
const PERSON_ANCHOR_PLACEMENT_RADIUS = 1000;
@ -16,7 +16,6 @@ const MAX_PERSON_NODES = 20; // Default limit for person nodes @@ -16,7 +16,6 @@ const MAX_PERSON_NODES = 20; // Default limit for person nodes
// Debug function
const debug = createDebugFunction("PersonNetworkBuilder");
/**
* Creates a deterministic seed from a string
*/
@ -42,13 +41,16 @@ export interface PersonConnection { @@ -42,13 +41,16 @@ export interface PersonConnection {
*/
export function extractUniquePersons(
events: NDKEvent[],
followListEvents?: NDKEvent[]
followListEvents?: NDKEvent[],
): Map<string, PersonConnection> {
// Map of pubkey -> PersonConnection
const personMap = new Map<string, PersonConnection>();
debug("Extracting unique persons", { eventCount: events.length, followListCount: followListEvents?.length || 0 });
debug("Extracting unique persons", {
eventCount: events.length,
followListCount: followListEvents?.length || 0,
});
// First collect pubkeys from follow list events
const followListPubkeys = new Set<string>();
if (followListEvents && followListEvents.length > 0) {
@ -60,10 +62,10 @@ export function extractUniquePersons( @@ -60,10 +62,10 @@ export function extractUniquePersons(
// People in follow lists (p tags)
if (event.tags) {
event.tags
.filter(tag => {
tag[0] === 'p'
.filter((tag) => {
tag[0] === "p";
})
.forEach(tag => {
.forEach((tag) => {
followListPubkeys.add(tag[1]);
});
}
@ -79,7 +81,7 @@ export function extractUniquePersons( @@ -79,7 +81,7 @@ export function extractUniquePersons(
personMap.set(event.pubkey, {
signedByEventIds: new Set(),
referencedInEventIds: new Set(),
isFromFollowList: followListPubkeys.has(event.pubkey)
isFromFollowList: followListPubkeys.has(event.pubkey),
});
}
personMap.get(event.pubkey)!.signedByEventIds.add(event.id);
@ -87,14 +89,14 @@ export function extractUniquePersons( @@ -87,14 +89,14 @@ export function extractUniquePersons(
// Track referenced connections from "p" tags
if (event.tags) {
event.tags.forEach(tag => {
event.tags.forEach((tag) => {
if (tag[0] === "p" && tag[1]) {
const referencedPubkey = tag[1];
if (!personMap.has(referencedPubkey)) {
personMap.set(referencedPubkey, {
signedByEventIds: new Set(),
referencedInEventIds: new Set(),
isFromFollowList: followListPubkeys.has(referencedPubkey)
isFromFollowList: followListPubkeys.has(referencedPubkey),
});
}
personMap.get(referencedPubkey)!.referencedInEventIds.add(event.id);
@ -102,7 +104,7 @@ export function extractUniquePersons( @@ -102,7 +104,7 @@ export function extractUniquePersons(
});
}
});
debug("Extracted persons", { personCount: personMap.size });
return personMap;
@ -115,7 +117,7 @@ function buildEligiblePerson( @@ -115,7 +117,7 @@ function buildEligiblePerson(
pubkey: string,
connection: PersonConnection,
showSignedBy: boolean,
showReferenced: boolean
showReferenced: boolean,
): {
pubkey: string;
connection: PersonConnection;
@ -125,11 +127,11 @@ function buildEligiblePerson( @@ -125,11 +127,11 @@ function buildEligiblePerson(
const connectedEventIds = new Set<string>();
if (showSignedBy) {
connection.signedByEventIds.forEach(id => connectedEventIds.add(id));
connection.signedByEventIds.forEach((id) => connectedEventIds.add(id));
}
if (showReferenced) {
connection.referencedInEventIds.forEach(id => connectedEventIds.add(id));
connection.referencedInEventIds.forEach((id) => connectedEventIds.add(id));
}
if (connectedEventIds.size === 0) {
@ -140,7 +142,7 @@ function buildEligiblePerson( @@ -140,7 +142,7 @@ function buildEligiblePerson(
pubkey,
connection,
connectedEventIds,
totalConnections: connectedEventIds.size
totalConnections: connectedEventIds.size,
};
}
@ -155,7 +157,7 @@ function getEligiblePersons( @@ -155,7 +157,7 @@ function getEligiblePersons(
personMap: Map<string, PersonConnection>,
showSignedBy: boolean,
showReferenced: boolean,
limit: number
limit: number,
): EligiblePerson[] {
// Build eligible persons and keep only top N using a min-heap or partial sort
const eligible: EligiblePerson[] = [];
@ -163,16 +165,20 @@ function getEligiblePersons( @@ -163,16 +165,20 @@ function getEligiblePersons(
for (const [pubkey, connection] of personMap) {
let totalConnections = 0;
if (showSignedBy) totalConnections += connection.signedByEventIds.size;
if (showReferenced) totalConnections += connection.referencedInEventIds.size;
if (showReferenced) {
totalConnections += connection.referencedInEventIds.size;
}
if (totalConnections === 0) continue;
// Only build the set if this person is eligible
const connectedEventIds = new Set<string>();
if (showSignedBy) {
connection.signedByEventIds.forEach(id => connectedEventIds.add(id));
connection.signedByEventIds.forEach((id) => connectedEventIds.add(id));
}
if (showReferenced) {
connection.referencedInEventIds.forEach(id => connectedEventIds.add(id));
connection.referencedInEventIds.forEach((id) =>
connectedEventIds.add(id)
);
}
eligible.push({ pubkey, connection, totalConnections, connectedEventIds });
@ -192,22 +198,27 @@ export function createPersonAnchorNodes( @@ -192,22 +198,27 @@ export function createPersonAnchorNodes(
height: number,
showSignedBy: boolean,
showReferenced: boolean,
limit: number = MAX_PERSON_NODES
): { nodes: NetworkNode[], totalCount: number } {
limit: number = MAX_PERSON_NODES,
): { nodes: NetworkNode[]; totalCount: number } {
const anchorNodes: NetworkNode[] = [];
const centerX = width / 2;
const centerY = height / 2;
// Calculate eligible persons and their connection counts
const eligiblePersons = getEligiblePersons(personMap, showSignedBy, showReferenced, limit);
const eligiblePersons = getEligiblePersons(
personMap,
showSignedBy,
showReferenced,
limit,
);
// Create nodes for the limited set
debug("Creating person anchor nodes", {
eligibleCount: eligiblePersons.length,
debug("Creating person anchor nodes", {
eligibleCount: eligiblePersons.length,
limitedCount: eligiblePersons.length,
showSignedBy,
showReferenced
showReferenced,
});
eligiblePersons.forEach(({ pubkey, connection, connectedEventIds }) => {
@ -226,7 +237,8 @@ export function createPersonAnchorNodes( @@ -226,7 +237,8 @@ export function createPersonAnchorNodes(
const anchorNode: NetworkNode = {
id: `person-anchor-${pubkey}`,
title: displayName,
content: `${connection.signedByEventIds.size} signed, ${connection.referencedInEventIds.size} referenced`,
content:
`${connection.signedByEventIds.size} signed, ${connection.referencedInEventIds.size} referenced`,
author: "",
kind: 0, // Special kind for anchors
type: "PersonAnchor",
@ -245,11 +257,14 @@ export function createPersonAnchorNodes( @@ -245,11 +257,14 @@ export function createPersonAnchorNodes(
anchorNodes.push(anchorNode);
});
debug("Created person anchor nodes", { count: anchorNodes.length, totalEligible: eligiblePersons.length });
debug("Created person anchor nodes", {
count: anchorNodes.length,
totalEligible: eligiblePersons.length,
});
return {
nodes: anchorNodes,
totalCount: eligiblePersons.length
totalCount: eligiblePersons.length,
};
}
@ -264,10 +279,13 @@ export interface PersonLink extends NetworkLink { @@ -264,10 +279,13 @@ export interface PersonLink extends NetworkLink {
export function createPersonLinks(
personAnchors: NetworkNode[],
nodes: NetworkNode[],
personMap: Map<string, PersonConnection>
personMap: Map<string, PersonConnection>,
): PersonLink[] {
debug("Creating person links", { anchorCount: personAnchors.length, nodeCount: nodes.length });
debug("Creating person links", {
anchorCount: personAnchors.length,
nodeCount: nodes.length,
});
const nodeMap = new Map(nodes.map((n) => [n.id, n]));
const links: PersonLink[] = personAnchors.flatMap((anchor) => {
@ -286,11 +304,11 @@ export function createPersonLinks( @@ -286,11 +304,11 @@ export function createPersonLinks(
return undefined;
}
let connectionType: 'signed-by' | 'referenced' | undefined;
let connectionType: "signed-by" | "referenced" | undefined;
if (connection.signedByEventIds.has(nodeId)) {
connectionType = 'signed-by';
connectionType = "signed-by";
} else if (connection.referencedInEventIds.has(nodeId)) {
connectionType = 'referenced';
connectionType = "referenced";
}
const link: PersonLink = {
@ -299,7 +317,7 @@ export function createPersonLinks( @@ -299,7 +317,7 @@ export function createPersonLinks(
isSequential: false,
connectionType,
};
return link;
}).filter((link): link is PersonLink => link !== undefined); // Remove undefineds and type guard
});
@ -324,9 +342,9 @@ export interface PersonAnchorInfo { @@ -324,9 +342,9 @@ export interface PersonAnchorInfo {
*/
export function extractPersonAnchorInfo(
personAnchors: NetworkNode[],
personMap: Map<string, PersonConnection>
personMap: Map<string, PersonConnection>,
): PersonAnchorInfo[] {
return personAnchors.map(anchor => {
return personAnchors.map((anchor) => {
const connection = personMap.get(anchor.pubkey || "");
return {
pubkey: anchor.pubkey || "",
@ -336,4 +354,4 @@ export function extractPersonAnchorInfo( @@ -336,4 +354,4 @@ export function extractPersonAnchorInfo(
isFromFollowList: connection?.isFromFollowList || false,
};
});
}
}

83
src/lib/navigator/EventNetwork/utils/starForceSimulation.ts

@ -1,25 +1,25 @@ @@ -1,25 +1,25 @@
/**
* Star Network Force Simulation
*
*
* Custom force simulation optimized for star network layouts.
* Provides stronger connections between star centers and their content nodes,
* with specialized forces to maintain hierarchical structure.
*/
import * as d3 from "d3";
import type { NetworkNode, NetworkLink } from "../types";
import type { NetworkLink, NetworkNode } from "../types";
import type { Simulation } from "./forceSimulation";
import { createTagGravityForce } from "./tagNetworkBuilder";
// Configuration for star network forces
const STAR_CENTER_CHARGE = -300; // Stronger repulsion between star centers
const CONTENT_NODE_CHARGE = -50; // Weaker repulsion for content nodes
const STAR_LINK_STRENGTH = 0.5; // Moderate connection to star center
const STAR_CENTER_CHARGE = -300; // Stronger repulsion between star centers
const CONTENT_NODE_CHARGE = -50; // Weaker repulsion for content nodes
const STAR_LINK_STRENGTH = 0.5; // Moderate connection to star center
const INTER_STAR_LINK_STRENGTH = 0.2; // Weaker connection between stars
const STAR_LINK_DISTANCE = 80; // Fixed distance from center to content
const INTER_STAR_DISTANCE = 200; // Distance between star centers
const CENTER_GRAVITY = 0.02; // Gentle pull toward canvas center
const STAR_CENTER_WEIGHT = 10; // Weight multiplier for star centers
const STAR_LINK_DISTANCE = 80; // Fixed distance from center to content
const INTER_STAR_DISTANCE = 200; // Distance between star centers
const CENTER_GRAVITY = 0.02; // Gentle pull toward canvas center
const STAR_CENTER_WEIGHT = 10; // Weight multiplier for star centers
/**
* Creates a custom force simulation for star networks
@ -28,15 +28,18 @@ export function createStarSimulation( @@ -28,15 +28,18 @@ export function createStarSimulation(
nodes: NetworkNode[],
links: NetworkLink[],
width: number,
height: number
height: number,
): Simulation<NetworkNode, NetworkLink> {
// Create the simulation
const simulation = d3.forceSimulation(nodes) as any
const simulation = d3.forceSimulation(nodes) as any;
simulation
.force("center", d3.forceCenter(width / 2, height / 2).strength(CENTER_GRAVITY))
.force(
"center",
d3.forceCenter(width / 2, height / 2).strength(CENTER_GRAVITY),
)
.velocityDecay(0.2) // Lower decay for more responsive simulation
.alphaDecay(0.0001) // Much slower alpha decay to prevent freezing
.alphaMin(0.001); // Keep minimum energy to prevent complete freeze
.alphaDecay(0.0001) // Much slower alpha decay to prevent freezing
.alphaMin(0.001); // Keep minimum energy to prevent complete freeze
// Custom charge force that varies by node type
const chargeForce = d3.forceManyBody()
@ -91,9 +94,9 @@ export function createStarSimulation( @@ -91,9 +94,9 @@ export function createStarSimulation(
// Custom radial force to keep content nodes around their star center
simulation.force("radial", createRadialForce(nodes, links));
// Add tag gravity force if there are tag anchors
const hasTagAnchors = nodes.some(n => n.isTagAnchor);
const hasTagAnchors = nodes.some((n) => n.isTagAnchor);
if (hasTagAnchors) {
simulation.force("tagGravity", createTagGravityForce(nodes, links));
}
@ -122,9 +125,9 @@ function applyRadialForce( @@ -122,9 +125,9 @@ function applyRadialForce(
nodes: NetworkNode[],
nodeToCenter: Map<string, NetworkNode>,
targetDistance: number,
alpha: number
alpha: number,
): void {
nodes.forEach(node => {
nodes.forEach((node) => {
if (node.kind === 30041) {
const center = nodeToCenter.get(node.id);
if (
@ -157,7 +160,7 @@ function createRadialForce(nodes: NetworkNode[], links: NetworkLink[]): any { @@ -157,7 +160,7 @@ function createRadialForce(nodes: NetworkNode[], links: NetworkLink[]): any {
// Build a map of content nodes to their star centers
const nodeToCenter = new Map<string, NetworkNode>();
links.forEach(link => {
links.forEach((link) => {
const source = link.source as NetworkNode;
const target = link.target as NetworkNode;
if (source.kind === 30040 && target.kind === 30041) {
@ -169,7 +172,7 @@ function createRadialForce(nodes: NetworkNode[], links: NetworkLink[]): any { @@ -169,7 +172,7 @@ function createRadialForce(nodes: NetworkNode[], links: NetworkLink[]): any {
applyRadialForce(nodes, nodeToCenter, STAR_LINK_DISTANCE, alpha);
}
force.initialize = function(_: NetworkNode[]) {
force.initialize = function (_: NetworkNode[]) {
nodes = _;
};
@ -183,14 +186,14 @@ export function applyInitialStarPositions( @@ -183,14 +186,14 @@ export function applyInitialStarPositions(
nodes: NetworkNode[],
links: NetworkLink[],
width: number,
height: number
height: number,
): void {
// Group nodes by their star centers
const starGroups = new Map<string, NetworkNode[]>();
const starCenters: NetworkNode[] = [];
// Identify star centers
nodes.forEach(node => {
nodes.forEach((node) => {
if (node.isContainer && node.kind === 30040) {
starCenters.push(node);
starGroups.set(node.id, []);
@ -198,7 +201,7 @@ export function applyInitialStarPositions( @@ -198,7 +201,7 @@ export function applyInitialStarPositions(
});
// Assign content nodes to their star centers
links.forEach(link => {
links.forEach((link) => {
const source = link.source as NetworkNode;
const target = link.target as NetworkNode;
if (source.kind === 30040 && target.kind === 30041) {
@ -222,7 +225,7 @@ export function applyInitialStarPositions( @@ -222,7 +225,7 @@ export function applyInitialStarPositions(
const centerY = height / 2;
const radius = Math.min(width, height) * 0.3;
const angleStep = (2 * Math.PI) / starCenters.length;
starCenters.forEach((center, i) => {
const angle = i * angleStep;
center.x = centerX + radius * Math.cos(angle);
@ -233,9 +236,9 @@ export function applyInitialStarPositions( @@ -233,9 +236,9 @@ export function applyInitialStarPositions(
// Position content nodes around their star centers
starGroups.forEach((contentNodes, centerId) => {
const center = nodes.find(n => n.id === centerId);
const center = nodes.find((n) => n.id === centerId);
if (!center) return;
const angleStep = (2 * Math.PI) / Math.max(contentNodes.length, 1);
contentNodes.forEach((node, i) => {
const angle = i * angleStep;
@ -252,7 +255,11 @@ export function applyInitialStarPositions( @@ -252,7 +255,11 @@ export function applyInitialStarPositions(
* @param d - The node being dragged
* @param simulation - The d3 force simulation instance
*/
function dragstarted(event: any, d: NetworkNode, simulation: Simulation<NetworkNode, NetworkLink>) {
function dragstarted(
event: any,
d: NetworkNode,
simulation: Simulation<NetworkNode, NetworkLink>,
) {
// If no other drag is active, set a low alpha target to keep the simulation running smoothly
if (!event.active) {
simulation.alphaTarget(0.1).restart();
@ -281,7 +288,11 @@ function dragged(event: any, d: NetworkNode) { @@ -281,7 +288,11 @@ function dragged(event: any, d: NetworkNode) {
* @param d - The node being dragged
* @param simulation - The d3 force simulation instance
*/
function dragended(event: any, d: NetworkNode, simulation: Simulation<NetworkNode, NetworkLink>) {
function dragended(
event: any,
d: NetworkNode,
simulation: Simulation<NetworkNode, NetworkLink>,
) {
// If no other drag is active, lower the alpha target to let the simulation cool down
if (!event.active) {
simulation.alphaTarget(0);
@ -297,12 +308,16 @@ function dragended(event: any, d: NetworkNode, simulation: Simulation<NetworkNod @@ -297,12 +308,16 @@ function dragended(event: any, d: NetworkNode, simulation: Simulation<NetworkNod
* @returns The d3 drag behavior
*/
export function createStarDragHandler(
simulation: Simulation<NetworkNode, NetworkLink>
simulation: Simulation<NetworkNode, NetworkLink>,
): any {
// These handlers are now top-level functions, so we use closures to pass simulation to them.
// This is a common pattern in JavaScript/TypeScript when you need to pass extra arguments to event handlers.
return d3.drag()
.on('start', function(event: any, d: NetworkNode) { dragstarted(event, d, simulation); })
.on('drag', dragged)
.on('end', function(event: any, d: NetworkNode) { dragended(event, d, simulation); });
}
.on("start", function (event: any, d: NetworkNode) {
dragstarted(event, d, simulation);
})
.on("drag", dragged)
.on("end", function (event: any, d: NetworkNode) {
dragended(event, d, simulation);
});
}

186
src/lib/navigator/EventNetwork/utils/starNetworkBuilder.ts

@ -1,19 +1,23 @@ @@ -1,19 +1,23 @@
/**
* Star Network Builder for NKBIP-01 Events
*
*
* This module provides utilities for building star network visualizations specifically
* for NKBIP-01 events (kinds 30040 and 30041). Unlike the sequential network builder,
* this creates star formations where index events (30040) are central nodes with
* this creates star formations where index events (30040) are central nodes with
* content events (30041) arranged around them.
*/
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { NetworkNode, NetworkLink, GraphData, GraphState } from "../types";
import { getMatchingTags } from '$lib/utils/nostrUtils';
import { createNetworkNode, createEventMap, extractEventIdFromATag, getEventColor } from './networkBuilder';
import { createDebugFunction } from './common';
import { wikiKind, indexKind, zettelKinds } from '$lib/consts';
import type { GraphData, GraphState, NetworkLink, NetworkNode } from "../types";
import { getMatchingTags } from "$lib/utils/nostrUtils";
import {
createEventMap,
createNetworkNode,
extractEventIdFromATag,
getEventColor,
} from "./networkBuilder";
import { createDebugFunction } from "./common";
import { indexKind, wikiKind, zettelKinds } from "$lib/consts";
// Debug function
const debug = createDebugFunction("StarNetworkBuilder");
@ -22,14 +26,14 @@ const debug = createDebugFunction("StarNetworkBuilder"); @@ -22,14 +26,14 @@ const debug = createDebugFunction("StarNetworkBuilder");
* Represents a star network with a central index node and peripheral content nodes
*/
export interface StarNetwork {
center: NetworkNode; // Central index node (30040)
center: NetworkNode; // Central index node (30040)
peripheralNodes: NetworkNode[]; // Content nodes (30041) and connected indices (30040)
links: NetworkLink[]; // Links within this star
links: NetworkLink[]; // Links within this star
}
/**
* Creates a star network from an index event and its references
*
*
* @param indexEvent - The central index event (30040)
* @param state - Current graph state
* @param level - Hierarchy level for this star
@ -38,10 +42,10 @@ export interface StarNetwork { @@ -38,10 +42,10 @@ export interface StarNetwork {
export function createStarNetwork(
indexEvent: NDKEvent,
state: GraphState,
level: number = 0
level: number = 0,
): StarNetwork | null {
debug("Creating star network", { indexId: indexEvent.id, level });
const centerNode = state.nodeMap.get(indexEvent.id);
if (!centerNode) {
debug("Center node not found for index event", indexEvent.id);
@ -50,32 +54,35 @@ export function createStarNetwork( @@ -50,32 +54,35 @@ export function createStarNetwork(
// Set the center node level
centerNode.level = level;
// Extract referenced event IDs from 'a' tags
const referencedIds = getMatchingTags(indexEvent, "a")
.map(tag => extractEventIdFromATag(tag))
.map((tag) => extractEventIdFromATag(tag))
.filter((id): id is string => id !== null);
debug("Found referenced IDs", { count: referencedIds.length, ids: referencedIds });
debug("Found referenced IDs", {
count: referencedIds.length,
ids: referencedIds,
});
// Get peripheral nodes (both content and nested indices)
const peripheralNodes: NetworkNode[] = [];
const links: NetworkLink[] = [];
referencedIds.forEach(id => {
referencedIds.forEach((id) => {
const node = state.nodeMap.get(id);
if (node) {
// Set the peripheral node level
node.level += 1;
peripheralNodes.push(node);
// Create link from center to peripheral node
links.push({
source: centerNode,
target: node,
isSequential: false // Star links are not sequential
isSequential: false, // Star links are not sequential
});
debug("Added peripheral node", { nodeId: id, nodeType: node.type });
}
});
@ -83,13 +90,13 @@ export function createStarNetwork( @@ -83,13 +90,13 @@ export function createStarNetwork(
return {
center: centerNode,
peripheralNodes,
links
links,
};
}
/**
* Processes all index events to create star networks
*
*
* @param events - Array of all events
* @param maxLevel - Maximum nesting level to process
* @returns Array of star networks
@ -97,17 +104,17 @@ export function createStarNetwork( @@ -97,17 +104,17 @@ export function createStarNetwork(
export function createStarNetworks(
events: NDKEvent[],
maxLevel: number,
existingNodeMap?: Map<string, NetworkNode>
existingNodeMap?: Map<string, NetworkNode>,
): StarNetwork[] {
debug("Creating star networks", { eventCount: events.length, maxLevel });
// Use existing node map or create new one
const nodeMap = existingNodeMap || new Map<string, NetworkNode>();
const eventMap = createEventMap(events);
// Create nodes for all events if not using existing map
if (!existingNodeMap) {
events.forEach(event => {
events.forEach((event) => {
if (!event.id) return;
const node = createNetworkNode(event);
nodeMap.set(event.id, node);
@ -118,16 +125,16 @@ export function createStarNetworks( @@ -118,16 +125,16 @@ export function createStarNetworks(
nodeMap,
links: [],
eventMap,
referencedIds: new Set<string>()
referencedIds: new Set<string>(),
};
// Find all index events and non-publication events
const publicationKinds = [wikiKind, indexKind, ...zettelKinds];
const indexEvents = events.filter(event => event.kind === indexKind);
const nonPublicationEvents = events.filter(event =>
const indexEvents = events.filter((event) => event.kind === indexKind);
const nonPublicationEvents = events.filter((event) =>
event.kind !== undefined && !publicationKinds.includes(event.kind)
);
debug("Found index events", { count: indexEvents.length });
debug("Found non-publication events", { count: nonPublicationEvents.length });
@ -135,34 +142,34 @@ export function createStarNetworks( @@ -135,34 +142,34 @@ export function createStarNetworks(
const processedIndices = new Set<string>();
// Process all index events regardless of level
indexEvents.forEach(indexEvent => {
indexEvents.forEach((indexEvent) => {
if (!indexEvent.id || processedIndices.has(indexEvent.id)) return;
const star = createStarNetwork(indexEvent, state, 0);
if (star && star.peripheralNodes.length > 0) {
starNetworks.push(star);
processedIndices.add(indexEvent.id);
debug("Created star network", {
centerId: star.center.id,
peripheralCount: star.peripheralNodes.length
debug("Created star network", {
centerId: star.center.id,
peripheralCount: star.peripheralNodes.length,
});
}
});
// Add non-publication events as standalone nodes (stars with no peripherals)
nonPublicationEvents.forEach(event => {
nonPublicationEvents.forEach((event) => {
if (!event.id || !nodeMap.has(event.id)) return;
const node = nodeMap.get(event.id)!;
const star: StarNetwork = {
center: node,
peripheralNodes: [],
links: []
links: [],
};
starNetworks.push(star);
debug("Created standalone star for non-publication event", {
debug("Created standalone star for non-publication event", {
eventId: event.id,
kind: event.kind
kind: event.kind,
});
});
@ -171,36 +178,40 @@ export function createStarNetworks( @@ -171,36 +178,40 @@ export function createStarNetworks(
/**
* Creates inter-star connections between star networks
*
*
* @param starNetworks - Array of star networks
* @returns Additional links connecting different star networks
*/
export function createInterStarConnections(starNetworks: StarNetwork[]): NetworkLink[] {
export function createInterStarConnections(
starNetworks: StarNetwork[],
): NetworkLink[] {
debug("Creating inter-star connections", { starCount: starNetworks.length });
const interStarLinks: NetworkLink[] = [];
// Create a map of center nodes for quick lookup
const centerNodeMap = new Map<string, NetworkNode>();
starNetworks.forEach(star => {
starNetworks.forEach((star) => {
centerNodeMap.set(star.center.id, star.center);
});
// For each star, check if any of its peripheral nodes are centers of other stars
starNetworks.forEach(star => {
star.peripheralNodes.forEach(peripheralNode => {
starNetworks.forEach((star) => {
star.peripheralNodes.forEach((peripheralNode) => {
// If this peripheral node is the center of another star, create an inter-star link
if (peripheralNode.isContainer && centerNodeMap.has(peripheralNode.id)) {
const targetStar = starNetworks.find(s => s.center.id === peripheralNode.id);
const targetStar = starNetworks.find((s) =>
s.center.id === peripheralNode.id
);
if (targetStar) {
interStarLinks.push({
source: star.center,
target: targetStar.center,
isSequential: false
isSequential: false,
});
debug("Created inter-star connection", {
from: star.center.id,
to: targetStar.center.id
debug("Created inter-star connection", {
from: star.center.id,
to: targetStar.center.id,
});
}
}
@ -212,7 +223,7 @@ export function createInterStarConnections(starNetworks: StarNetwork[]): Network @@ -212,7 +223,7 @@ export function createInterStarConnections(starNetworks: StarNetwork[]): Network
/**
* Applies star-specific positioning to nodes using a radial layout
*
*
* @param starNetworks - Array of star networks
* @param width - Canvas width
* @param height - Canvas height
@ -220,61 +231,62 @@ export function createInterStarConnections(starNetworks: StarNetwork[]): Network @@ -220,61 +231,62 @@ export function createInterStarConnections(starNetworks: StarNetwork[]): Network
export function applyStarLayout(
starNetworks: StarNetwork[],
width: number,
height: number
height: number,
): void {
debug("Applying star layout", {
starCount: starNetworks.length,
dimensions: { width, height }
debug("Applying star layout", {
starCount: starNetworks.length,
dimensions: { width, height },
});
const centerX = width / 2;
const centerY = height / 2;
// If only one star, center it
if (starNetworks.length === 1) {
const star = starNetworks[0];
// Position center node
star.center.x = centerX;
star.center.y = centerY;
star.center.fx = centerX; // Fix center position
star.center.fy = centerY;
// Position peripheral nodes in a circle around center
const radius = Math.min(width, height) * 0.25;
const angleStep = (2 * Math.PI) / star.peripheralNodes.length;
star.peripheralNodes.forEach((node, index) => {
const angle = index * angleStep;
node.x = centerX + radius * Math.cos(angle);
node.y = centerY + radius * Math.sin(angle);
});
return;
}
// For multiple stars, arrange them in a grid or circle
const starsPerRow = Math.ceil(Math.sqrt(starNetworks.length));
const starSpacingX = width / (starsPerRow + 1);
const starSpacingY = height / (Math.ceil(starNetworks.length / starsPerRow) + 1);
const starSpacingY = height /
(Math.ceil(starNetworks.length / starsPerRow) + 1);
starNetworks.forEach((star, index) => {
const row = Math.floor(index / starsPerRow);
const col = index % starsPerRow;
const starCenterX = (col + 1) * starSpacingX;
const starCenterY = (row + 1) * starSpacingY;
// Position center node
star.center.x = starCenterX;
star.center.y = starCenterY;
star.center.fx = starCenterX; // Fix center position
star.center.fy = starCenterY;
// Position peripheral nodes around this star's center
const radius = Math.min(starSpacingX, starSpacingY) * 0.3;
const angleStep = (2 * Math.PI) / Math.max(star.peripheralNodes.length, 1);
star.peripheralNodes.forEach((node, nodeIndex) => {
const angle = nodeIndex * angleStep;
node.x = starCenterX + radius * Math.cos(angle);
@ -285,69 +297,69 @@ export function applyStarLayout( @@ -285,69 +297,69 @@ export function applyStarLayout(
/**
* Generates a complete star network graph from events
*
*
* @param events - Array of Nostr events
* @param maxLevel - Maximum hierarchy level to process
* @returns Complete graph data with star network layout
*/
export function generateStarGraph(
events: NDKEvent[],
maxLevel: number
maxLevel: number,
): GraphData {
debug("Generating star graph", { eventCount: events.length, maxLevel });
// Guard against empty events
if (!events || events.length === 0) {
return { nodes: [], links: [] };
}
// Initialize all nodes first
const nodeMap = new Map<string, NetworkNode>();
events.forEach(event => {
events.forEach((event) => {
if (!event.id) return;
const node = createNetworkNode(event);
nodeMap.set(event.id, node);
});
// Create star networks with the existing node map
const starNetworks = createStarNetworks(events, maxLevel, nodeMap);
// Create inter-star connections
const interStarLinks = createInterStarConnections(starNetworks);
// Collect nodes that are part of stars
const nodesInStars = new Set<string>();
const allLinks: NetworkLink[] = [];
// Add nodes and links from all stars
starNetworks.forEach(star => {
starNetworks.forEach((star) => {
nodesInStars.add(star.center.id);
star.peripheralNodes.forEach(node => {
star.peripheralNodes.forEach((node) => {
nodesInStars.add(node.id);
});
allLinks.push(...star.links);
});
// Add inter-star links
allLinks.push(...interStarLinks);
// Include orphaned nodes (those not in any star)
const allNodes: NetworkNode[] = [];
nodeMap.forEach((node, id) => {
allNodes.push(node);
});
const result = {
nodes: allNodes,
links: allLinks
links: allLinks,
};
debug("Star graph generation complete", {
nodeCount: result.nodes.length,
debug("Star graph generation complete", {
nodeCount: result.nodes.length,
linkCount: result.links.length,
starCount: starNetworks.length,
orphanedNodes: allNodes.length - nodesInStars.size
orphanedNodes: allNodes.length - nodesInStars.size,
});
return result;
}
}

29
src/lib/navigator/EventNetwork/utils/tagNetworkBuilder.ts

@ -6,9 +6,9 @@ @@ -6,9 +6,9 @@
*/
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { NetworkNode, NetworkLink, GraphData } from "../types";
import type { GraphData, NetworkLink, NetworkNode } from "../types";
import { getDisplayNameSync } from "$lib/utils/profileCache";
import { SeededRandom, createDebugFunction } from "./common";
import { createDebugFunction, SeededRandom } from "./common";
// Configuration
const TAG_ANCHOR_RADIUS = 15;
@ -18,7 +18,6 @@ const TAG_ANCHOR_PLACEMENT_RADIUS = 1250; // Radius from center within which to @@ -18,7 +18,6 @@ const TAG_ANCHOR_PLACEMENT_RADIUS = 1250; // Radius from center within which to
// Debug function
const debug = createDebugFunction("TagNetworkBuilder");
/**
* Creates a deterministic seed from a string
*/
@ -63,7 +62,10 @@ export function extractUniqueTagsForType( @@ -63,7 +62,10 @@ export function extractUniqueTagsForType(
): Map<string, Set<string>> {
// Map of tagValue -> Set of event IDs
const tagMap = new Map<string, Set<string>>();
debug("Extracting unique tags for type", { tagType, eventCount: events.length });
debug("Extracting unique tags for type", {
tagType,
eventCount: events.length,
});
events.forEach((event) => {
if (!event.tags || !event.id) return;
@ -83,7 +85,7 @@ export function extractUniqueTagsForType( @@ -83,7 +85,7 @@ export function extractUniqueTagsForType(
tagMap.get(tagValue)!.add(event.id);
});
});
debug("Extracted tags", { tagCount: tagMap.size });
return tagMap;
@ -110,7 +112,7 @@ export function createTagAnchorNodes( @@ -110,7 +112,7 @@ export function createTagAnchorNodes(
);
if (validTags.length === 0) return [];
// Sort all tags by number of connections (events) descending
validTags.sort((a, b) => b[1].size - a[1].size);
@ -172,8 +174,11 @@ export function createTagLinks( @@ -172,8 +174,11 @@ export function createTagLinks(
tagAnchors: NetworkNode[],
nodes: NetworkNode[],
): NetworkLink[] {
debug("Creating tag links", { anchorCount: tagAnchors.length, nodeCount: nodes.length });
debug("Creating tag links", {
anchorCount: tagAnchors.length,
nodeCount: nodes.length,
});
const links: NetworkLink[] = [];
const nodeMap = new Map(nodes.map((n) => [n.id, n]));
@ -208,13 +213,13 @@ export function enhanceGraphWithTags( @@ -208,13 +213,13 @@ export function enhanceGraphWithTags(
displayLimit?: number,
): GraphData {
debug("Enhancing graph with tags", { tagType, displayLimit });
// Extract unique tags for the specified type
const tagMap = extractUniqueTagsForType(events, tagType);
// Create tag anchor nodes
let tagAnchors = createTagAnchorNodes(tagMap, tagType, width, height);
// Apply display limit if provided
if (displayLimit && displayLimit > 0 && tagAnchors.length > displayLimit) {
// Sort by connection count (already done in createTagAnchorNodes)
@ -242,7 +247,7 @@ export function enhanceGraphWithTags( @@ -242,7 +247,7 @@ export function enhanceGraphWithTags(
export function applyTagGravity(
nodes: NetworkNode[],
nodeToAnchors: Map<string, NetworkNode[]>,
alpha: number
alpha: number,
): void {
nodes.forEach((node) => {
if (node.isTagAnchor) return; // Tag anchors don't move
@ -301,7 +306,7 @@ export function createTagGravityForce( @@ -301,7 +306,7 @@ export function createTagGravityForce(
});
debug("Creating tag gravity force");
function force(alpha: number) {
applyTagGravity(nodes, nodeToAnchors, alpha);
}

298
src/lib/ndk.ts

@ -1,27 +1,27 @@ @@ -1,27 +1,27 @@
import NDK, {
NDKEvent,
NDKNip07Signer,
NDKRelay,
NDKRelayAuthPolicies,
NDKRelaySet,
NDKUser,
NDKEvent,
} from "@nostr-dev-kit/ndk";
import { writable, get, type Writable } from "svelte/store";
import {
loginStorageKey,
anonymousRelays,
} from "./consts.ts";
import { get, type Writable, writable } from "svelte/store";
import { anonymousRelays, loginStorageKey } from "./consts.ts";
import {
buildCompleteRelaySet,
testRelayConnection,
deduplicateRelayUrls,
testRelayConnection,
} from "./utils/relay_management.ts";
// Re-export testRelayConnection for components that need it
export { testRelayConnection };
import { userStore } from "./stores/userStore.ts";
import { userPubkey } from "./stores/authStore.Svelte.ts";
import { startNetworkStatusMonitoring, stopNetworkStatusMonitoring } from "./stores/networkStore.ts";
import {
startNetworkStatusMonitoring,
stopNetworkStatusMonitoring,
} from "./stores/networkStore.ts";
import { WebSocketPool } from "./data_structures/websocket_pool.ts";
export const ndkInstance: Writable<NDK> = writable();
@ -35,34 +35,39 @@ export const activeInboxRelays = writable<string[]>([]); @@ -35,34 +35,39 @@ export const activeInboxRelays = writable<string[]>([]);
export const activeOutboxRelays = writable<string[]>([]);
// AI-NOTE: 2025-01-08 - Persistent relay storage to avoid recalculation
let persistentRelaySet: { inboxRelays: string[]; outboxRelays: string[] } | null = null;
let persistentRelaySet:
| { inboxRelays: string[]; outboxRelays: string[] }
| null = null;
let relaySetLastUpdated: number = 0;
const RELAY_SET_CACHE_DURATION = 5 * 60 * 1000; // 5 minutes
const RELAY_SET_STORAGE_KEY = 'alexandria/relay_set_cache';
const RELAY_SET_STORAGE_KEY = "alexandria/relay_set_cache";
/**
* Load persistent relay set from localStorage
*/
function loadPersistentRelaySet(): { relaySet: { inboxRelays: string[]; outboxRelays: string[] } | null; lastUpdated: number } {
function loadPersistentRelaySet(): {
relaySet: { inboxRelays: string[]; outboxRelays: string[] } | null;
lastUpdated: number;
} {
// Only load from localStorage on client-side
if (typeof window === 'undefined') return { relaySet: null, lastUpdated: 0 };
if (typeof window === "undefined") return { relaySet: null, lastUpdated: 0 };
try {
const stored = localStorage.getItem(RELAY_SET_STORAGE_KEY);
if (!stored) return { relaySet: null, lastUpdated: 0 };
const data = JSON.parse(stored);
const now = Date.now();
// Check if cache is expired
if (now - data.timestamp > RELAY_SET_CACHE_DURATION) {
localStorage.removeItem(RELAY_SET_STORAGE_KEY);
return { relaySet: null, lastUpdated: 0 };
}
return { relaySet: data.relaySet, lastUpdated: data.timestamp };
} catch (error) {
console.warn('[NDK.ts] Failed to load persistent relay set:', error);
console.warn("[NDK.ts] Failed to load persistent relay set:", error);
localStorage.removeItem(RELAY_SET_STORAGE_KEY);
return { relaySet: null, lastUpdated: 0 };
}
@ -71,18 +76,20 @@ function loadPersistentRelaySet(): { relaySet: { inboxRelays: string[]; outboxRe @@ -71,18 +76,20 @@ function loadPersistentRelaySet(): { relaySet: { inboxRelays: string[]; outboxRe
/**
* Save persistent relay set to localStorage
*/
function savePersistentRelaySet(relaySet: { inboxRelays: string[]; outboxRelays: string[] }): void {
function savePersistentRelaySet(
relaySet: { inboxRelays: string[]; outboxRelays: string[] },
): void {
// Only save to localStorage on client-side
if (typeof window === 'undefined') return;
if (typeof window === "undefined") return;
try {
const data = {
relaySet,
timestamp: Date.now()
timestamp: Date.now(),
};
localStorage.setItem(RELAY_SET_STORAGE_KEY, JSON.stringify(data));
} catch (error) {
console.warn('[NDK.ts] Failed to save persistent relay set:', error);
console.warn("[NDK.ts] Failed to save persistent relay set:", error);
}
}
@ -91,12 +98,12 @@ function savePersistentRelaySet(relaySet: { inboxRelays: string[]; outboxRelays: @@ -91,12 +98,12 @@ function savePersistentRelaySet(relaySet: { inboxRelays: string[]; outboxRelays:
*/
function clearPersistentRelaySet(): void {
// Only clear from localStorage on client-side
if (typeof window === 'undefined') return;
if (typeof window === "undefined") return;
try {
localStorage.removeItem(RELAY_SET_STORAGE_KEY);
} catch (error) {
console.warn('[NDK.ts] Failed to clear persistent relay set:', error);
console.warn("[NDK.ts] Failed to clear persistent relay set:", error);
}
}
@ -230,8 +237,7 @@ class CustomRelayAuthPolicy { @@ -230,8 +237,7 @@ class CustomRelayAuthPolicy {
export function checkEnvironmentForWebSocketDowngrade(): void {
console.debug("[NDK.ts] Environment Check for WebSocket Protocol:");
const isLocalhost =
globalThis.location.hostname === "localhost" ||
const isLocalhost = globalThis.location.hostname === "localhost" ||
globalThis.location.hostname === "127.0.0.1";
const isHttp = globalThis.location.protocol === "http:";
const isHttps = globalThis.location.protocol === "https:";
@ -281,8 +287,6 @@ export function checkWebSocketSupport(): void { @@ -281,8 +287,6 @@ export function checkWebSocketSupport(): void {
}
}
/**
* Gets the user's pubkey from local storage, if it exists.
* @returns The user's pubkey, or null if there is no logged-in user.
@ -291,8 +295,8 @@ export function checkWebSocketSupport(): void { @@ -291,8 +295,8 @@ export function checkWebSocketSupport(): void {
*/
export function getPersistedLogin(): string | null {
// Only access localStorage on client-side
if (typeof window === 'undefined') return null;
if (typeof window === "undefined") return null;
const pubkey = localStorage.getItem(loginStorageKey);
return pubkey;
}
@ -305,8 +309,8 @@ export function getPersistedLogin(): string | null { @@ -305,8 +309,8 @@ export function getPersistedLogin(): string | null {
*/
export function persistLogin(user: NDKUser): void {
// Only access localStorage on client-side
if (typeof window === 'undefined') return;
if (typeof window === "undefined") return;
localStorage.setItem(loginStorageKey, user.pubkey);
}
@ -316,8 +320,8 @@ export function persistLogin(user: NDKUser): void { @@ -316,8 +320,8 @@ export function persistLogin(user: NDKUser): void {
*/
export function clearLogin(): void {
// Only access localStorage on client-side
if (typeof window === 'undefined') return;
if (typeof window === "undefined") return;
localStorage.removeItem(loginStorageKey);
}
@ -333,8 +337,8 @@ function getRelayStorageKey(user: NDKUser, type: "inbox" | "outbox"): string { @@ -333,8 +337,8 @@ function getRelayStorageKey(user: NDKUser, type: "inbox" | "outbox"): string {
export function clearPersistedRelays(user: NDKUser): void {
// Only access localStorage on client-side
if (typeof window === 'undefined') return;
if (typeof window === "undefined") return;
localStorage.removeItem(getRelayStorageKey(user, "inbox"));
localStorage.removeItem(getRelayStorageKey(user, "outbox"));
}
@ -346,11 +350,11 @@ export function clearPersistedRelays(user: NDKUser): void { @@ -346,11 +350,11 @@ export function clearPersistedRelays(user: NDKUser): void {
*/
function ensureSecureWebSocket(url: string): string {
// For localhost, always use ws:// (never wss://)
if (url.includes('localhost') || url.includes('127.0.0.1')) {
if (url.includes("localhost") || url.includes("127.0.0.1")) {
// Convert any wss://localhost to ws://localhost
return url.replace(/^wss:\/\//, "ws://");
}
// Replace ws:// with wss:// for remote relays
const secureUrl = url.replace(/^ws:\/\//, "wss://");
@ -369,7 +373,7 @@ function ensureSecureWebSocket(url: string): string { @@ -369,7 +373,7 @@ function ensureSecureWebSocket(url: string): string {
function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
try {
// Reduce verbosity in development - only log relay creation if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) {
if (process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS) {
console.debug(`[NDK.ts] Creating relay with URL: ${url}`);
}
@ -387,7 +391,9 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay { @@ -387,7 +391,9 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
const connectionTimeout = setTimeout(() => {
try {
// Only log connection timeouts if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) {
if (
process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS
) {
console.debug(`[NDK.ts] Connection timeout for ${secureUrl}`);
}
relay.disconnect();
@ -402,7 +408,9 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay { @@ -402,7 +408,9 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
relay.on("connect", () => {
try {
// Only log successful connections if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) {
if (
process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS
) {
console.debug(`[NDK.ts] Relay connected: ${secureUrl}`);
}
clearTimeout(connectionTimeout);
@ -415,7 +423,9 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay { @@ -415,7 +423,9 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
relay.on("connect", () => {
try {
// Only log successful connections if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) {
if (
process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS
) {
console.debug(`[NDK.ts] Relay connected: ${secureUrl}`);
}
clearTimeout(connectionTimeout);
@ -438,46 +448,66 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay { @@ -438,46 +448,66 @@ function createRelayWithAuth(url: string, ndk: NDK): NDKRelay {
return relay;
} catch (error) {
// If relay creation fails, try to use an anonymous relay as fallback
console.debug(`[NDK.ts] Failed to create relay for ${url}, trying anonymous relay fallback`);
console.debug(
`[NDK.ts] Failed to create relay for ${url}, trying anonymous relay fallback`,
);
// Find an anonymous relay that's not the same as the failed URL
const fallbackUrl = anonymousRelays.find(relay => relay !== url) || anonymousRelays[0];
const fallbackUrl = anonymousRelays.find((relay) => relay !== url) ||
anonymousRelays[0];
if (fallbackUrl) {
console.debug(`[NDK.ts] Using anonymous relay as fallback: ${fallbackUrl}`);
console.debug(
`[NDK.ts] Using anonymous relay as fallback: ${fallbackUrl}`,
);
try {
const fallbackRelay = new NDKRelay(fallbackUrl, NDKRelayAuthPolicies.signIn({ ndk }), ndk);
const fallbackRelay = new NDKRelay(
fallbackUrl,
NDKRelayAuthPolicies.signIn({ ndk }),
ndk,
);
return fallbackRelay;
} catch (fallbackError) {
console.debug(`[NDK.ts] Fallback relay creation also failed: ${fallbackError}`);
console.debug(
`[NDK.ts] Fallback relay creation also failed: ${fallbackError}`,
);
}
}
// If all else fails, create a minimal relay that will fail gracefully
console.debug(`[NDK.ts] All fallback attempts failed, creating minimal relay for ${url}`);
console.debug(
`[NDK.ts] All fallback attempts failed, creating minimal relay for ${url}`,
);
const minimalRelay = new NDKRelay(url, undefined, ndk);
return minimalRelay;
}
}
/**
* Gets the active relay set for the current user
* @param ndk NDK instance
* @returns Promise that resolves to object with inbox and outbox relay arrays
*/
export async function getActiveRelaySet(ndk: NDK): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> {
export async function getActiveRelaySet(
ndk: NDK,
): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> {
const user = get(userStore);
console.debug('[NDK.ts] getActiveRelaySet: User state:', { signedIn: user.signedIn, hasNdkUser: !!user.ndkUser, pubkey: user.pubkey });
console.debug("[NDK.ts] getActiveRelaySet: User state:", {
signedIn: user.signedIn,
hasNdkUser: !!user.ndkUser,
pubkey: user.pubkey,
});
if (user.signedIn && user.ndkUser) {
console.debug('[NDK.ts] getActiveRelaySet: Building relay set for authenticated user:', user.ndkUser.pubkey);
console.debug(
"[NDK.ts] getActiveRelaySet: Building relay set for authenticated user:",
user.ndkUser.pubkey,
);
return await buildCompleteRelaySet(ndk, user.ndkUser);
} else {
console.debug('[NDK.ts] getActiveRelaySet: Building relay set for anonymous user');
console.debug(
"[NDK.ts] getActiveRelaySet: Building relay set for anonymous user",
);
return await buildCompleteRelaySet(ndk, null);
}
}
@ -487,61 +517,88 @@ export async function getActiveRelaySet(ndk: NDK): Promise<{ inboxRelays: string @@ -487,61 +517,88 @@ export async function getActiveRelaySet(ndk: NDK): Promise<{ inboxRelays: string
* @param ndk NDK instance
* @param forceUpdate Force update even if cached (default: false)
*/
export async function updateActiveRelayStores(ndk: NDK, forceUpdate: boolean = false): Promise<void> {
export async function updateActiveRelayStores(
ndk: NDK,
forceUpdate: boolean = false,
): Promise<void> {
try {
// AI-NOTE: 2025-01-08 - Use persistent relay set to avoid recalculation
const now = Date.now();
const cacheExpired = now - relaySetLastUpdated > RELAY_SET_CACHE_DURATION;
// Load from persistent storage if not already loaded
if (!persistentRelaySet) {
const loaded = loadPersistentRelaySet();
persistentRelaySet = loaded.relaySet;
relaySetLastUpdated = loaded.lastUpdated;
}
if (!forceUpdate && persistentRelaySet && !cacheExpired) {
console.debug('[NDK.ts] updateActiveRelayStores: Using cached relay set');
console.debug("[NDK.ts] updateActiveRelayStores: Using cached relay set");
activeInboxRelays.set(persistentRelaySet.inboxRelays);
activeOutboxRelays.set(persistentRelaySet.outboxRelays);
return;
}
console.debug('[NDK.ts] updateActiveRelayStores: Starting relay store update');
console.debug(
"[NDK.ts] updateActiveRelayStores: Starting relay store update",
);
// Get the active relay set from the relay management system
const relaySet = await getActiveRelaySet(ndk);
console.debug('[NDK.ts] updateActiveRelayStores: Got relay set:', relaySet);
console.debug("[NDK.ts] updateActiveRelayStores: Got relay set:", relaySet);
// Cache the relay set
persistentRelaySet = relaySet;
relaySetLastUpdated = now;
savePersistentRelaySet(relaySet); // Save to persistent storage
// Update the stores with the new relay configuration
activeInboxRelays.set(relaySet.inboxRelays);
activeOutboxRelays.set(relaySet.outboxRelays);
console.debug('[NDK.ts] updateActiveRelayStores: Updated stores with inbox:', relaySet.inboxRelays.length, 'outbox:', relaySet.outboxRelays.length);
console.debug(
"[NDK.ts] updateActiveRelayStores: Updated stores with inbox:",
relaySet.inboxRelays.length,
"outbox:",
relaySet.outboxRelays.length,
);
// Add relays to NDK pool (deduplicated)
const allRelayUrls = deduplicateRelayUrls([...relaySet.inboxRelays, ...relaySet.outboxRelays]);
const allRelayUrls = deduplicateRelayUrls([
...relaySet.inboxRelays,
...relaySet.outboxRelays,
]);
// Reduce verbosity in development - only log relay addition if debug mode is enabled
if (process.env.NODE_ENV === 'development' && process.env.DEBUG_RELAYS) {
console.debug('[NDK.ts] updateActiveRelayStores: Adding', allRelayUrls.length, 'relays to NDK pool');
if (process.env.NODE_ENV === "development" && process.env.DEBUG_RELAYS) {
console.debug(
"[NDK.ts] updateActiveRelayStores: Adding",
allRelayUrls.length,
"relays to NDK pool",
);
}
for (const url of allRelayUrls) {
try {
const relay = createRelayWithAuth(url, ndk);
ndk.pool?.addRelay(relay);
} catch (error) {
console.debug('[NDK.ts] updateActiveRelayStores: Failed to add relay', url, ':', error);
console.debug(
"[NDK.ts] updateActiveRelayStores: Failed to add relay",
url,
":",
error,
);
}
}
console.debug('[NDK.ts] updateActiveRelayStores: Relay store update completed');
console.debug(
"[NDK.ts] updateActiveRelayStores: Relay store update completed",
);
} catch (error) {
console.warn('[NDK.ts] updateActiveRelayStores: Error updating relay stores:', error);
console.warn(
"[NDK.ts] updateActiveRelayStores: Error updating relay stores:",
error,
);
}
}
@ -551,23 +608,25 @@ export async function updateActiveRelayStores(ndk: NDK, forceUpdate: boolean = f @@ -551,23 +608,25 @@ export async function updateActiveRelayStores(ndk: NDK, forceUpdate: boolean = f
export function logCurrentRelayConfiguration(): void {
const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays);
console.log('🔌 Current Relay Configuration:');
console.log('📥 Inbox Relays:', inboxRelays);
console.log('📤 Outbox Relays:', outboxRelays);
console.log(`📊 Total: ${inboxRelays.length} inbox, ${outboxRelays.length} outbox`);
console.log("🔌 Current Relay Configuration:");
console.log("📥 Inbox Relays:", inboxRelays);
console.log("📤 Outbox Relays:", outboxRelays);
console.log(
`📊 Total: ${inboxRelays.length} inbox, ${outboxRelays.length} outbox`,
);
}
/**
* Clears the relay set cache to force a rebuild
*/
export function clearRelaySetCache(): void {
console.debug('[NDK.ts] Clearing relay set cache');
console.debug("[NDK.ts] Clearing relay set cache");
persistentRelaySet = null;
relaySetLastUpdated = 0;
// Clear from localStorage as well (client-side only)
if (typeof window !== 'undefined') {
localStorage.removeItem('alexandria/relay_set_cache');
if (typeof window !== "undefined") {
localStorage.removeItem("alexandria/relay_set_cache");
}
}
@ -576,7 +635,7 @@ export function clearRelaySetCache(): void { @@ -576,7 +635,7 @@ export function clearRelaySetCache(): void {
* @param ndk NDK instance
*/
export async function refreshRelayStores(ndk: NDK): Promise<void> {
console.debug('[NDK.ts] Refreshing relay stores due to user state change');
console.debug("[NDK.ts] Refreshing relay stores due to user state change");
clearRelaySetCache(); // Clear cache when user state changes
await updateActiveRelayStores(ndk, true); // Force update
}
@ -585,8 +644,12 @@ export async function refreshRelayStores(ndk: NDK): Promise<void> { @@ -585,8 +644,12 @@ export async function refreshRelayStores(ndk: NDK): Promise<void> {
* Updates relay stores when network condition changes
* @param ndk NDK instance
*/
export async function refreshRelayStoresOnNetworkChange(ndk: NDK): Promise<void> {
console.debug('[NDK.ts] Refreshing relay stores due to network condition change');
export async function refreshRelayStoresOnNetworkChange(
ndk: NDK,
): Promise<void> {
console.debug(
"[NDK.ts] Refreshing relay stores due to network condition change",
);
await updateActiveRelayStores(ndk);
}
@ -606,10 +669,10 @@ export function startNetworkMonitoringForRelays(): void { @@ -606,10 +669,10 @@ export function startNetworkMonitoringForRelays(): void {
* @returns NDKRelaySet
*/
function createRelaySetFromUrls(relayUrls: string[], ndk: NDK): NDKRelaySet {
const relays = relayUrls.map(url =>
const relays = relayUrls.map((url) =>
new NDKRelay(url, NDKRelayAuthPolicies.signIn({ ndk }), ndk)
);
return new NDKRelaySet(new Set(relays), ndk);
}
@ -621,11 +684,11 @@ function createRelaySetFromUrls(relayUrls: string[], ndk: NDK): NDKRelaySet { @@ -621,11 +684,11 @@ function createRelaySetFromUrls(relayUrls: string[], ndk: NDK): NDKRelaySet {
*/
export async function getActiveRelaySetAsNDKRelaySet(
ndk: NDK,
useInbox: boolean = true
useInbox: boolean = true,
): Promise<NDKRelaySet> {
const relaySet = await getActiveRelaySet(ndk);
const urls = useInbox ? relaySet.inboxRelays : relaySet.outboxRelays;
return createRelaySetFromUrls(urls, ndk);
}
@ -650,11 +713,11 @@ export function initNdk(): NDK { @@ -650,11 +713,11 @@ export function initNdk(): NDK {
const attemptConnection = async () => {
// Only attempt connection on client-side
if (typeof window === 'undefined') {
if (typeof window === "undefined") {
console.debug("[NDK.ts] Skipping NDK connection during SSR");
return;
}
try {
await ndk.connect();
console.debug("[NDK.ts] NDK connected successfully");
@ -664,17 +727,21 @@ export function initNdk(): NDK { @@ -664,17 +727,21 @@ export function initNdk(): NDK {
startNetworkMonitoringForRelays();
} catch (error) {
console.warn("[NDK.ts] Failed to connect NDK:", error);
// Only retry a limited number of times
if (retryCount < maxRetries) {
retryCount++;
console.debug(`[NDK.ts] Attempting to reconnect (${retryCount}/${maxRetries})...`);
console.debug(
`[NDK.ts] Attempting to reconnect (${retryCount}/${maxRetries})...`,
);
// Use a more reasonable retry delay and prevent memory leaks
setTimeout(() => {
attemptConnection();
}, 2000 * retryCount); // Exponential backoff
} else {
console.warn("[NDK.ts] Max retries reached, continuing with limited functionality");
console.warn(
"[NDK.ts] Max retries reached, continuing with limited functionality",
);
// Still try to update relay stores even if connection failed
try {
await updateActiveRelayStores(ndk);
@ -687,21 +754,24 @@ export function initNdk(): NDK { @@ -687,21 +754,24 @@ export function initNdk(): NDK {
};
// Only attempt connection on client-side
if (typeof window !== 'undefined') {
if (typeof window !== "undefined") {
attemptConnection();
}
// AI-NOTE: Set up userStore subscription after NDK initialization to prevent initialization errors
userStore.subscribe(async (userState) => {
ndkSignedIn.set(userState.signedIn);
// Refresh relay stores when user state changes
const ndk = get(ndkInstance);
if (ndk) {
try {
await refreshRelayStores(ndk);
} catch (error) {
console.warn('[NDK.ts] Failed to refresh relay stores on user state change:', error);
console.warn(
"[NDK.ts] Failed to refresh relay stores on user state change:",
error,
);
}
}
});
@ -715,7 +785,7 @@ export function initNdk(): NDK { @@ -715,7 +785,7 @@ export function initNdk(): NDK {
*/
export function cleanupNdk(): void {
console.debug("[NDK.ts] Cleaning up NDK resources");
const ndk = get(ndkInstance);
if (ndk) {
try {
@ -725,13 +795,13 @@ export function cleanupNdk(): void { @@ -725,13 +795,13 @@ export function cleanupNdk(): void {
relay.disconnect();
}
}
// Drain the WebSocket pool
WebSocketPool.instance.drain();
// Stop network monitoring
stopNetworkStatusMonitoring();
console.debug("[NDK.ts] NDK cleanup completed");
} catch (error) {
console.warn("[NDK.ts] Error during NDK cleanup:", error);
@ -761,7 +831,7 @@ export async function loginWithExtension( @@ -761,7 +831,7 @@ export async function loginWithExtension(
userPubkey.set(signerUser.pubkey);
const user = ndk.getUser({ pubkey: signerUser.pubkey });
// Update relay stores with the new system
await updateActiveRelayStores(ndk);
@ -787,22 +857,20 @@ export function logout(user: NDKUser): void { @@ -787,22 +857,20 @@ export function logout(user: NDKUser): void {
activePubkey.set(null);
userPubkey.set(null);
ndkSignedIn.set(false);
// Clear relay stores
activeInboxRelays.set([]);
activeOutboxRelays.set([]);
// AI-NOTE: 2025-01-08 - Clear persistent relay set on logout
persistentRelaySet = null;
relaySetLastUpdated = 0;
clearPersistentRelaySet(); // Clear persistent storage
// Stop network monitoring
stopNetworkStatusMonitoring();
// Re-initialize with anonymous instance
const newNdk = initNdk();
ndkInstance.set(newNdk);
}

8
src/lib/parser.ts

@ -7,11 +7,11 @@ import type { @@ -7,11 +7,11 @@ import type {
Block,
Document,
Extensions,
Section,
ProcessorOptions,
Section,
} from "asciidoctor";
import he from "he";
import { writable, type Writable } from "svelte/store";
import { type Writable, writable } from "svelte/store";
import { zettelKinds } from "./consts.ts";
import { getMatchingTags } from "./utils/nostrUtils.ts";
@ -906,13 +906,13 @@ export default class Pharos { @@ -906,13 +906,13 @@ export default class Pharos {
["#d", nodeId],
...this.extractAndNormalizeWikilinks(content!),
];
// Extract image from content if present
const imageUrl = this.extractImageFromContent(content!);
if (imageUrl) {
event.tags.push(["image", imageUrl]);
}
event.created_at = Date.now();
event.pubkey = pubkey;

28
src/lib/services/event_search_service.ts

@ -8,33 +8,37 @@ export class EventSearchService { @@ -8,33 +8,37 @@ export class EventSearchService {
*/
getSearchType(query: string): { type: string; term: string } | null {
const lowerQuery = query.toLowerCase();
if (lowerQuery.startsWith("d:")) {
const dTag = query.slice(2).trim().toLowerCase();
return dTag ? { type: "d", term: dTag } : null;
}
if (lowerQuery.startsWith("t:")) {
const searchTerm = query.slice(2).trim();
return searchTerm ? { type: "t", term: searchTerm } : null;
}
if (lowerQuery.startsWith("n:")) {
const searchTerm = query.slice(2).trim();
return searchTerm ? { type: "n", term: searchTerm } : null;
}
if (query.includes("@")) {
return { type: "nip05", term: query };
}
return null;
}
/**
* Checks if a search value matches the current event
*/
isCurrentEventMatch(searchValue: string, event: any, relays: string[]): boolean {
isCurrentEventMatch(
searchValue: string,
event: any,
relays: string[],
): boolean {
const currentEventId = event.id;
let currentNaddr = null;
let currentNevent = null;
@ -42,21 +46,23 @@ export class EventSearchService { @@ -42,21 +46,23 @@ export class EventSearchService {
let currentNprofile = null;
try {
const { neventEncode, naddrEncode, nprofileEncode } = require("$lib/utils");
const { neventEncode, naddrEncode, nprofileEncode } = require(
"$lib/utils",
);
const { getMatchingTags, toNpub } = require("$lib/utils/nostrUtils");
currentNevent = neventEncode(event, relays);
} catch {}
try {
const { naddrEncode } = require("$lib/utils");
const { getMatchingTags } = require("$lib/utils/nostrUtils");
currentNaddr = getMatchingTags(event, "d")[0]?.[1]
? naddrEncode(event, relays)
: null;
} catch {}
try {
const { toNpub } = require("$lib/utils/nostrUtils");
currentNpub = event.kind === 0 ? toNpub(event.pubkey) : null;

57
src/lib/services/publisher.ts

@ -1,8 +1,11 @@ @@ -1,8 +1,11 @@
import { get } from "svelte/store";
import { ndkInstance } from "../ndk.ts";
import { getMimeTags } from "../utils/mime.ts";
import { parseAsciiDocWithMetadata, metadataToTags } from "../utils/asciidoc_metadata.ts";
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk";
import {
metadataToTags,
parseAsciiDocWithMetadata,
} from "../utils/asciidoc_metadata.ts";
import { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk";
import { nip19 } from "nostr-tools";
export interface PublishResult {
@ -97,8 +100,9 @@ export async function publishZettel( @@ -97,8 +100,9 @@ export async function publishZettel(
throw new Error("Failed to publish to any relays");
}
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : "Unknown error";
const errorMessage = error instanceof Error
? error.message
: "Unknown error";
onError?.(errorMessage);
return { success: false, error: errorMessage };
}
@ -115,14 +119,14 @@ export async function publishMultipleZettels( @@ -115,14 +119,14 @@ export async function publishMultipleZettels(
const { content, kind = 30041, onError } = options;
if (!content.trim()) {
const error = 'Please enter some content';
const error = "Please enter some content";
onError?.(error);
return [{ success: false, error }];
}
const ndk = get(ndkInstance);
if (!ndk?.activeUser) {
const error = 'Please log in first';
const error = "Please log in first";
onError?.(error);
return [{ success: false, error }];
}
@ -130,12 +134,14 @@ export async function publishMultipleZettels( @@ -130,12 +134,14 @@ export async function publishMultipleZettels(
try {
const parsed = parseAsciiDocWithMetadata(content);
if (parsed.sections.length === 0) {
throw new Error('No valid sections found in content');
throw new Error("No valid sections found in content");
}
const allRelayUrls = Array.from(ndk.pool?.relays.values() || []).map((r) => r.url);
const allRelayUrls = Array.from(ndk.pool?.relays.values() || []).map((r) =>
r.url
);
if (allRelayUrls.length === 0) {
throw new Error('No relays available in NDK pool');
throw new Error("No relays available in NDK pool");
}
const relaySet = NDKRelaySet.fromRelayUrls(allRelayUrls, ndk);
@ -164,31 +170,42 @@ export async function publishMultipleZettels( @@ -164,31 +170,42 @@ export async function publishMultipleZettels(
results.push({ success: true, eventId: ndkEvent.id });
publishedEvents.push(ndkEvent);
} else {
results.push({ success: false, error: 'Failed to publish to any relays' });
results.push({
success: false,
error: "Failed to publish to any relays",
});
}
} catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Unknown error';
const errorMessage = err instanceof Error
? err.message
: "Unknown error";
results.push({ success: false, error: errorMessage });
}
}
// Debug: extract and log 'e' and 'a' tags from all published events
publishedEvents.forEach(ev => {
publishedEvents.forEach((ev) => {
// Extract d-tag from tags
const dTagEntry = ev.tags.find(t => t[0] === 'd');
const dTag = dTagEntry ? dTagEntry[1] : '';
const dTagEntry = ev.tags.find((t) => t[0] === "d");
const dTag = dTagEntry ? dTagEntry[1] : "";
const aTag = `${ev.kind}:${ev.pubkey}:${dTag}`;
console.log(`Event ${ev.id} tags:`);
console.log(' e:', ev.id);
console.log(' a:', aTag);
console.log(" e:", ev.id);
console.log(" a:", aTag);
// Print nevent and naddr using nip19
const nevent = nip19.neventEncode({ id: ev.id });
const naddr = nip19.naddrEncode({ kind: ev.kind, pubkey: ev.pubkey, identifier: dTag });
console.log(' nevent:', nevent);
console.log(' naddr:', naddr);
const naddr = nip19.naddrEncode({
kind: ev.kind,
pubkey: ev.pubkey,
identifier: dTag,
});
console.log(" nevent:", nevent);
console.log(" naddr:", naddr);
});
return results;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
const errorMessage = error instanceof Error
? error.message
: "Unknown error";
onError?.(errorMessage);
return [{ success: false, error: errorMessage }];
}

20
src/lib/services/search_state_manager.ts

@ -13,7 +13,7 @@ export class SearchStateManager { @@ -13,7 +13,7 @@ export class SearchStateManager {
searchResultCount: number | null;
searchResultType: string | null;
},
onLoadingChange?: (loading: boolean) => void
onLoadingChange?: (loading: boolean) => void,
): void {
if (onLoadingChange) {
onLoadingChange(state.searching);
@ -25,10 +25,16 @@ export class SearchStateManager { @@ -25,10 +25,16 @@ export class SearchStateManager {
*/
resetSearchState(
callbacks: {
onSearchResults: (events: any[], secondOrder: any[], tTagEvents: any[], eventIds: Set<string>, addresses: Set<string>) => void;
onSearchResults: (
events: any[],
secondOrder: any[],
tTagEvents: any[],
eventIds: Set<string>,
addresses: Set<string>,
) => void;
cleanupSearch: () => void;
clearTimeout: () => void;
}
},
): void {
callbacks.cleanupSearch();
callbacks.onSearchResults([], [], [], new Set(), new Set());
@ -46,16 +52,18 @@ export class SearchStateManager { @@ -46,16 +52,18 @@ export class SearchStateManager {
cleanupSearch: () => void;
updateSearchState: (state: any) => void;
resetProcessingFlags: () => void;
}
},
): void {
const errorMessage = error instanceof Error ? error.message : defaultMessage;
const errorMessage = error instanceof Error
? error.message
: defaultMessage;
callbacks.setLocalError(errorMessage);
callbacks.cleanupSearch();
callbacks.updateSearchState({
searching: false,
searchCompleted: false,
searchResultCount: null,
searchResultType: null
searchResultType: null,
});
callbacks.resetProcessingFlags();
}

2
src/lib/state.ts

@ -1,5 +1,5 @@ @@ -1,5 +1,5 @@
import { browser } from "$app/environment";
import { writable, type Writable } from "svelte/store";
import { type Writable, writable } from "svelte/store";
import type { Tab } from "./types.ts";
export const pathLoaded: Writable<boolean> = writable(false);

2
src/lib/stores/authStore.Svelte.ts

@ -1,4 +1,4 @@ @@ -1,4 +1,4 @@
import { writable, derived } from "svelte/store";
import { derived, writable } from "svelte/store";
/**
* Stores the user's public key if logged in, or null otherwise.

26
src/lib/stores/networkStore.ts

@ -1,8 +1,14 @@ @@ -1,8 +1,14 @@
import { writable } from "svelte/store";
import { detectNetworkCondition, NetworkCondition, startNetworkMonitoring } from '../utils/network_detection.ts';
import {
detectNetworkCondition,
NetworkCondition,
startNetworkMonitoring,
} from "../utils/network_detection.ts";
// Network status store
export const networkCondition = writable<NetworkCondition>(NetworkCondition.ONLINE);
export const networkCondition = writable<NetworkCondition>(
NetworkCondition.ONLINE,
);
export const isNetworkChecking = writable<boolean>(false);
// Network monitoring state
@ -16,14 +22,16 @@ export function startNetworkStatusMonitoring(): void { @@ -16,14 +22,16 @@ export function startNetworkStatusMonitoring(): void {
return; // Already monitoring
}
console.debug('[networkStore.ts] Starting network status monitoring');
console.debug("[networkStore.ts] Starting network status monitoring");
stopNetworkMonitoring = startNetworkMonitoring(
(condition: NetworkCondition) => {
console.debug(`[networkStore.ts] Network condition changed to: ${condition}`);
console.debug(
`[networkStore.ts] Network condition changed to: ${condition}`,
);
networkCondition.set(condition);
},
60000 // Check every 60 seconds to reduce spam
60000, // Check every 60 seconds to reduce spam
);
}
@ -32,7 +40,7 @@ export function startNetworkStatusMonitoring(): void { @@ -32,7 +40,7 @@ export function startNetworkStatusMonitoring(): void {
*/
export function stopNetworkStatusMonitoring(): void {
if (stopNetworkMonitoring) {
console.debug('[networkStore.ts] Stopping network status monitoring');
console.debug("[networkStore.ts] Stopping network status monitoring");
stopNetworkMonitoring();
stopNetworkMonitoring = null;
}
@ -47,9 +55,9 @@ export async function checkNetworkStatus(): Promise<void> { @@ -47,9 +55,9 @@ export async function checkNetworkStatus(): Promise<void> {
const condition = await detectNetworkCondition();
networkCondition.set(condition);
} catch (error) {
console.warn('[networkStore.ts] Failed to check network status:', error);
console.warn("[networkStore.ts] Failed to check network status:", error);
networkCondition.set(NetworkCondition.OFFLINE);
} finally {
isNetworkChecking.set(false);
}
}
}

119
src/lib/stores/userStore.ts

@ -1,14 +1,19 @@ @@ -1,14 +1,19 @@
import { writable, get } from "svelte/store";
import { get, writable } from "svelte/store";
import type { NostrProfile } from "../utils/nostrUtils.ts";
import type { NDKUser, NDKSigner } from "@nostr-dev-kit/ndk";
import type { NDKSigner, NDKUser } from "@nostr-dev-kit/ndk";
import NDK, {
NDKNip07Signer,
NDKRelay,
NDKRelayAuthPolicies,
NDKRelaySet,
NDKRelay,
} from "@nostr-dev-kit/ndk";
import { getUserMetadata } from "../utils/nostrUtils.ts";
import { ndkInstance, activeInboxRelays, activeOutboxRelays, updateActiveRelayStores } from "../ndk.ts";
import {
activeInboxRelays,
activeOutboxRelays,
ndkInstance,
updateActiveRelayStores,
} from "../ndk.ts";
import { loginStorageKey } from "../consts.ts";
import { nip19 } from "nostr-tools";
import { userPubkey } from "../stores/authStore.Svelte.ts";
@ -46,8 +51,8 @@ function persistRelays( @@ -46,8 +51,8 @@ function persistRelays(
outboxes: Set<NDKRelay>,
): void {
// Only access localStorage on client-side
if (typeof window === 'undefined') return;
if (typeof window === "undefined") return;
localStorage.setItem(
getRelayStorageKey(user, "inbox"),
JSON.stringify(Array.from(inboxes).map((relay) => relay.url)),
@ -60,10 +65,10 @@ function persistRelays( @@ -60,10 +65,10 @@ function persistRelays(
function getPersistedRelays(user: NDKUser): [Set<string>, Set<string>] {
// Only access localStorage on client-side
if (typeof window === 'undefined') {
if (typeof window === "undefined") {
return [new Set<string>(), new Set<string>()];
}
const inboxes = new Set<string>(
JSON.parse(localStorage.getItem(getRelayStorageKey(user, "inbox")) ?? "[]"),
);
@ -79,7 +84,10 @@ function getPersistedRelays(user: NDKUser): [Set<string>, Set<string>] { @@ -79,7 +84,10 @@ function getPersistedRelays(user: NDKUser): [Set<string>, Set<string>] {
async function getUserPreferredRelays(
ndk: NDK,
user: NDKUser,
fallbacks: readonly string[] = [...get(activeInboxRelays), ...get(activeOutboxRelays)],
fallbacks: readonly string[] = [
...get(activeInboxRelays),
...get(activeOutboxRelays),
],
): Promise<[Set<NDKRelay>, Set<NDKRelay>]> {
const relayList = await ndk.fetchEvent(
{
@ -144,8 +152,8 @@ export const loginMethodStorageKey = "alexandria/login/method"; @@ -144,8 +152,8 @@ export const loginMethodStorageKey = "alexandria/login/method";
function persistLogin(user: NDKUser, method: "extension" | "amber" | "npub") {
// Only access localStorage on client-side
if (typeof window === 'undefined') return;
if (typeof window === "undefined") return;
localStorage.setItem(loginStorageKey, user.pubkey);
localStorage.setItem(loginMethodStorageKey, method);
}
@ -165,9 +173,9 @@ export async function loginWithExtension() { @@ -165,9 +173,9 @@ export async function loginWithExtension() {
const signer = new NDKNip07Signer();
const user = await signer.user();
const npub = user.npub;
console.log("Login with extension - fetching profile for npub:", npub);
// Try to fetch user metadata, but don't fail if it times out
let profile: NostrProfile | null = null;
try {
@ -183,7 +191,7 @@ export async function loginWithExtension() { @@ -183,7 +191,7 @@ export async function loginWithExtension() {
};
console.log("Login with extension - using fallback profile:", profile);
}
// Fetch user's preferred relays
const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user);
for (const relay of persistedInboxes) {
@ -193,7 +201,7 @@ export async function loginWithExtension() { @@ -193,7 +201,7 @@ export async function loginWithExtension() {
persistRelays(user, inboxes, outboxes);
ndk.signer = signer;
ndk.activeUser = user;
const userState = {
pubkey: user.pubkey,
npub,
@ -209,22 +217,27 @@ export async function loginWithExtension() { @@ -209,22 +217,27 @@ export async function loginWithExtension() {
signer,
signedIn: true,
};
console.log("Login with extension - setting userStore with:", userState);
userStore.set(userState);
userPubkey.set(user.pubkey);
// Update relay stores with the new user's relays
try {
console.debug('[userStore.ts] loginWithExtension: Updating relay stores for authenticated user');
console.debug(
"[userStore.ts] loginWithExtension: Updating relay stores for authenticated user",
);
await updateActiveRelayStores(ndk, true); // Force update to rebuild relay set for authenticated user
} catch (error) {
console.warn('[userStore.ts] loginWithExtension: Failed to update relay stores:', error);
console.warn(
"[userStore.ts] loginWithExtension: Failed to update relay stores:",
error,
);
}
clearLogin();
// Only access localStorage on client-side
if (typeof window !== 'undefined') {
if (typeof window !== "undefined") {
localStorage.removeItem("alexandria/logout/flag");
}
persistLogin(user, "extension");
@ -238,9 +251,9 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) { @@ -238,9 +251,9 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
if (!ndk) throw new Error("NDK not initialized");
// Only clear previous login state after successful login
const npub = user.npub;
console.log("Login with Amber - fetching profile for npub:", npub);
let profile: NostrProfile | null = null;
try {
profile = await getUserMetadata(npub, true); // Force fresh fetch
@ -254,7 +267,7 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) { @@ -254,7 +267,7 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
};
console.log("Login with Amber - using fallback profile:", profile);
}
const [persistedInboxes, persistedOutboxes] = getPersistedRelays(user);
for (const relay of persistedInboxes) {
ndk.addExplicitRelay(relay);
@ -263,7 +276,7 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) { @@ -263,7 +276,7 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
persistRelays(user, inboxes, outboxes);
ndk.signer = amberSigner;
ndk.activeUser = user;
const userState = {
pubkey: user.pubkey,
npub,
@ -279,22 +292,27 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) { @@ -279,22 +292,27 @@ export async function loginWithAmber(amberSigner: NDKSigner, user: NDKUser) {
signer: amberSigner,
signedIn: true,
};
console.log("Login with Amber - setting userStore with:", userState);
userStore.set(userState);
userPubkey.set(user.pubkey);
// Update relay stores with the new user's relays
try {
console.debug('[userStore.ts] loginWithAmber: Updating relay stores for authenticated user');
console.debug(
"[userStore.ts] loginWithAmber: Updating relay stores for authenticated user",
);
await updateActiveRelayStores(ndk, true); // Force update to rebuild relay set for authenticated user
} catch (error) {
console.warn('[userStore.ts] loginWithAmber: Failed to update relay stores:', error);
console.warn(
"[userStore.ts] loginWithAmber: Failed to update relay stores:",
error,
);
}
clearLogin();
// Only access localStorage on client-side
if (typeof window !== 'undefined') {
if (typeof window !== "undefined") {
localStorage.removeItem("alexandria/logout/flag");
}
persistLogin(user, "amber");
@ -331,23 +349,28 @@ export async function loginWithNpub(pubkeyOrNpub: string) { @@ -331,23 +349,28 @@ export async function loginWithNpub(pubkeyOrNpub: string) {
console.error("Failed to encode npub from hex pubkey:", hexPubkey, e);
throw e;
}
console.log("Login with npub - fetching profile for npub:", npub);
const user = ndk.getUser({ npub });
let profile: NostrProfile | null = null;
// First, update relay stores to ensure we have relays available
try {
console.debug('[userStore.ts] loginWithNpub: Updating relay stores for authenticated user');
console.debug(
"[userStore.ts] loginWithNpub: Updating relay stores for authenticated user",
);
await updateActiveRelayStores(ndk);
} catch (error) {
console.warn('[userStore.ts] loginWithNpub: Failed to update relay stores:', error);
console.warn(
"[userStore.ts] loginWithNpub: Failed to update relay stores:",
error,
);
}
// Wait a moment for relay stores to be properly initialized
await new Promise(resolve => setTimeout(resolve, 500));
await new Promise((resolve) => setTimeout(resolve, 500));
try {
profile = await getUserMetadata(npub, true); // Force fresh fetch
console.log("Login with npub - fetched profile:", profile);
@ -360,10 +383,10 @@ export async function loginWithNpub(pubkeyOrNpub: string) { @@ -360,10 +383,10 @@ export async function loginWithNpub(pubkeyOrNpub: string) {
};
console.log("Login with npub - using fallback profile:", profile);
}
ndk.signer = undefined;
ndk.activeUser = user;
const userState = {
pubkey: user.pubkey,
npub,
@ -374,14 +397,14 @@ export async function loginWithNpub(pubkeyOrNpub: string) { @@ -374,14 +397,14 @@ export async function loginWithNpub(pubkeyOrNpub: string) {
signer: null,
signedIn: true,
};
console.log("Login with npub - setting userStore with:", userState);
userStore.set(userState);
userPubkey.set(user.pubkey);
clearLogin();
// Only access localStorage on client-side
if (typeof window !== 'undefined') {
if (typeof window !== "undefined") {
localStorage.removeItem("alexandria/logout/flag");
}
persistLogin(user, "npub");
@ -393,13 +416,15 @@ export async function loginWithNpub(pubkeyOrNpub: string) { @@ -393,13 +416,15 @@ export async function loginWithNpub(pubkeyOrNpub: string) {
export function logoutUser() {
console.log("Logging out user...");
const currentUser = get(userStore);
// Only access localStorage on client-side
if (typeof window !== 'undefined') {
if (typeof window !== "undefined") {
if (currentUser.ndkUser) {
// Clear persisted relays for the user
localStorage.removeItem(getRelayStorageKey(currentUser.ndkUser, "inbox"));
localStorage.removeItem(getRelayStorageKey(currentUser.ndkUser, "outbox"));
localStorage.removeItem(
getRelayStorageKey(currentUser.ndkUser, "outbox"),
);
}
// Clear all possible login states from localStorage

43
src/lib/stores/visualizationConfig.ts

@ -1,4 +1,4 @@ @@ -1,4 +1,4 @@
import { writable, derived, get } from "svelte/store";
import { derived, get, writable } from "svelte/store";
export interface EventKindConfig {
kind: number;
@ -39,8 +39,10 @@ function createVisualizationConfig() { @@ -39,8 +39,10 @@ function createVisualizationConfig() {
eventConfigs: DEFAULT_EVENT_CONFIGS,
searchThroughFetched: true,
};
const { subscribe, set, update } = writable<VisualizationConfig>(initialConfig);
const { subscribe, set, update } = writable<VisualizationConfig>(
initialConfig,
);
function reset() {
set(initialConfig);
@ -52,19 +54,19 @@ function createVisualizationConfig() { @@ -52,19 +54,19 @@ function createVisualizationConfig() {
if (config.eventConfigs.some((ec) => ec.kind === kind)) {
return config;
}
const newConfig: EventKindConfig = { kind, limit, enabled: true };
// Add nestedLevels for 30040
if (kind === 30040) {
newConfig.nestedLevels = 1;
}
// Add depth for kind 3
if (kind === 3) {
newConfig.depth = 0;
}
return {
...config,
eventConfigs: [...config.eventConfigs, newConfig],
@ -83,7 +85,7 @@ function createVisualizationConfig() { @@ -83,7 +85,7 @@ function createVisualizationConfig() {
update((config) => ({
...config,
eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === kind ? { ...ec, limit } : ec,
ec.kind === kind ? { ...ec, limit } : ec
),
}));
}
@ -92,7 +94,7 @@ function createVisualizationConfig() { @@ -92,7 +94,7 @@ function createVisualizationConfig() {
update((config) => ({
...config,
eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === 30040 ? { ...ec, nestedLevels: levels } : ec,
ec.kind === 30040 ? { ...ec, nestedLevels: levels } : ec
),
}));
}
@ -101,7 +103,7 @@ function createVisualizationConfig() { @@ -101,7 +103,7 @@ function createVisualizationConfig() {
update((config) => ({
...config,
eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === 3 ? { ...ec, depth: depth } : ec,
ec.kind === 3 ? { ...ec, depth: depth } : ec
),
}));
}
@ -110,7 +112,7 @@ function createVisualizationConfig() { @@ -110,7 +112,7 @@ function createVisualizationConfig() {
update((config) => ({
...config,
eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === kind ? { ...ec, showAll: !ec.showAll } : ec,
ec.kind === kind ? { ...ec, showAll: !ec.showAll } : ec
),
}));
}
@ -134,7 +136,7 @@ function createVisualizationConfig() { @@ -134,7 +136,7 @@ function createVisualizationConfig() {
update((config) => ({
...config,
eventConfigs: config.eventConfigs.map((ec) =>
ec.kind === kind ? { ...ec, enabled: !ec.enabled } : ec,
ec.kind === kind ? { ...ec, enabled: !ec.enabled } : ec
),
}));
}
@ -158,10 +160,12 @@ function createVisualizationConfig() { @@ -158,10 +160,12 @@ function createVisualizationConfig() {
export const visualizationConfig = createVisualizationConfig();
// Helper to get all enabled event kinds
export const enabledEventKinds = derived(visualizationConfig, ($config) =>
$config.eventConfigs
.filter((ec) => ec.enabled !== false)
.map((ec) => ec.kind),
export const enabledEventKinds = derived(
visualizationConfig,
($config) =>
$config.eventConfigs
.filter((ec) => ec.enabled !== false)
.map((ec) => ec.kind),
);
/**
@ -169,7 +173,10 @@ export const enabledEventKinds = derived(visualizationConfig, ($config) => @@ -169,7 +173,10 @@ export const enabledEventKinds = derived(visualizationConfig, ($config) =>
* @param config - The VisualizationConfig object.
* @param kind - The event kind number to check.
*/
export function isKindEnabledFn(config: VisualizationConfig, kind: number): boolean {
export function isKindEnabledFn(
config: VisualizationConfig,
kind: number,
): boolean {
const eventConfig = config.eventConfigs.find((ec) => ec.kind === kind);
// If not found, return false. Otherwise, return true unless explicitly disabled.
return !!eventConfig && eventConfig.enabled !== false;
@ -178,5 +185,5 @@ export function isKindEnabledFn(config: VisualizationConfig, kind: number): bool @@ -178,5 +185,5 @@ export function isKindEnabledFn(config: VisualizationConfig, kind: number): bool
// Derived store: returns a function that checks if a kind is enabled in the current config.
export const isKindEnabledStore = derived(
visualizationConfig,
($config) => (kind: number) => isKindEnabledFn($config, kind)
($config) => (kind: number) => isKindEnabledFn($config, kind),
);

24
src/lib/utils.ts

@ -26,7 +26,7 @@ export function neventEncode(event: NDKEvent, relays: string[]) { @@ -26,7 +26,7 @@ export function neventEncode(event: NDKEvent, relays: string[]) {
relays,
author: event.pubkey,
});
return nevent;
} catch (error) {
console.error(`[neventEncode] Error encoding nevent:`, error);
@ -54,7 +54,10 @@ export function naddrEncode(event: NDKEvent, relays: string[]) { @@ -54,7 +54,10 @@ export function naddrEncode(event: NDKEvent, relays: string[]) {
* @param relays Optional relay list for the address
* @returns A tag address string
*/
export function createTagAddress(event: NostrEvent, relays: string[] = []): string {
export function createTagAddress(
event: NostrEvent,
relays: string[] = [],
): string {
const dTag = event.tags.find((tag: string[]) => tag[0] === "d")?.[1];
if (!dTag) {
throw new Error("Event does not have a d tag");
@ -144,10 +147,9 @@ export function next(): number { @@ -144,10 +147,9 @@ export function next(): number {
export function scrollTabIntoView(el: string | HTMLElement, wait: boolean) {
function scrollTab() {
const element =
typeof el === "string"
? document.querySelector(`[id^="wikitab-v0-${el}"]`)
: el;
const element = typeof el === "string"
? document.querySelector(`[id^="wikitab-v0-${el}"]`)
: el;
if (!element) return;
element.scrollIntoView({
@ -166,10 +168,9 @@ export function scrollTabIntoView(el: string | HTMLElement, wait: boolean) { @@ -166,10 +168,9 @@ export function scrollTabIntoView(el: string | HTMLElement, wait: boolean) {
}
export function isElementInViewport(el: string | HTMLElement) {
const element =
typeof el === "string"
? document.querySelector(`[id^="wikitab-v0-${el}"]`)
: el;
const element = typeof el === "string"
? document.querySelector(`[id^="wikitab-v0-${el}"]`)
: el;
if (!element) return;
const rect = element.getBoundingClientRect();
@ -179,7 +180,8 @@ export function isElementInViewport(el: string | HTMLElement) { @@ -179,7 +180,8 @@ export function isElementInViewport(el: string | HTMLElement) {
rect.left >= 0 &&
rect.bottom <=
(globalThis.innerHeight || document.documentElement.clientHeight) &&
rect.right <= (globalThis.innerWidth || document.documentElement.clientWidth)
rect.right <=
(globalThis.innerWidth || document.documentElement.clientWidth)
);
}

10
src/lib/utils/ZettelParser.ts

@ -41,7 +41,7 @@ export function parseZettelSection(section: string): ZettelSection { @@ -41,7 +41,7 @@ export function parseZettelSection(section: string): ZettelSection {
const trimmed = line.trim();
if (trimmed.startsWith("==")) {
title = trimmed.replace(/^==+/, "").trim();
// Process header metadata (everything after title until blank line)
let j = i + 1;
while (j < lines.length && lines[j].trim() !== "") {
@ -54,12 +54,12 @@ export function parseZettelSection(section: string): ZettelSection { @@ -54,12 +54,12 @@ export function parseZettelSection(section: string): ZettelSection {
j++;
}
}
// Skip the blank line
if (j < lines.length && lines[j].trim() === "") {
j++;
}
// Everything after the blank line is content
for (let k = j; k < lines.length; k++) {
contentLines.push(lines[k]);
@ -100,13 +100,13 @@ export function extractTags(content: string): string[][] { @@ -100,13 +100,13 @@ export function extractTags(content: string): string[][] {
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
const trimmed = line.trim();
if (trimmed.startsWith("==")) {
// Process header metadata (everything after title until blank line)
let j = i + 1;
while (j < lines.length && lines[j].trim() !== "") {
const headerLine = lines[j].trim();
if (headerLine.startsWith(":")) {
// Parse AsciiDoc attribute format: :tagname: value
const match = headerLine.match(/^:([^:]+):\s*(.*)$/);

273
src/lib/utils/asciidoc_metadata.ts

@ -1,6 +1,6 @@ @@ -1,6 +1,6 @@
/**
* AsciiDoc Metadata Extraction Service using Asciidoctor
*
*
* Thin wrapper around Asciidoctor's built-in metadata extraction capabilities.
* Leverages the existing Pharos parser to avoid duplication.
*/
@ -23,7 +23,7 @@ export interface AsciiDocMetadata { @@ -23,7 +23,7 @@ export interface AsciiDocMetadata {
source?: string;
publishedBy?: string;
type?: string;
autoUpdate?: 'yes' | 'ask' | 'no';
autoUpdate?: "yes" | "ask" | "no";
}
export type SectionMetadata = AsciiDocMetadata;
@ -41,29 +41,29 @@ export interface ParsedAsciiDoc { @@ -41,29 +41,29 @@ export interface ParsedAsciiDoc {
// Shared attribute mapping based on Asciidoctor standard attributes
const ATTRIBUTE_MAP: Record<string, keyof AsciiDocMetadata> = {
// Standard Asciidoctor attributes
'author': 'authors',
'description': 'summary',
'keywords': 'tags',
'revnumber': 'version',
'revdate': 'publicationDate',
'revremark': 'edition',
'title': 'title',
"author": "authors",
"description": "summary",
"keywords": "tags",
"revnumber": "version",
"revdate": "publicationDate",
"revremark": "edition",
"title": "title",
// Custom attributes for Alexandria
'published_by': 'publishedBy',
'publisher': 'publisher',
'summary': 'summary',
'image': 'coverImage',
'cover': 'coverImage',
'isbn': 'isbn',
'source': 'source',
'type': 'type',
'auto-update': 'autoUpdate',
'version': 'version',
'edition': 'edition',
'published_on': 'publicationDate',
'date': 'publicationDate',
'version-label': 'version',
"published_by": "publishedBy",
"publisher": "publisher",
"summary": "summary",
"image": "coverImage",
"cover": "coverImage",
"isbn": "isbn",
"source": "source",
"type": "type",
"auto-update": "autoUpdate",
"version": "version",
"edition": "edition",
"published_on": "publicationDate",
"date": "publicationDate",
"version-label": "version",
};
/**
@ -78,37 +78,41 @@ function createProcessor() { @@ -78,37 +78,41 @@ function createProcessor() {
*/
function extractTagsFromAttributes(attributes: Record<string, any>): string[] {
const tags: string[] = [];
const attrTags = attributes['tags'];
const attrKeywords = attributes['keywords'];
if (attrTags && typeof attrTags === 'string') {
tags.push(...attrTags.split(',').map(tag => tag.trim()));
const attrTags = attributes["tags"];
const attrKeywords = attributes["keywords"];
if (attrTags && typeof attrTags === "string") {
tags.push(...attrTags.split(",").map((tag) => tag.trim()));
}
if (attrKeywords && typeof attrKeywords === 'string') {
tags.push(...attrKeywords.split(',').map(tag => tag.trim()));
if (attrKeywords && typeof attrKeywords === "string") {
tags.push(...attrKeywords.split(",").map((tag) => tag.trim()));
}
return [...new Set(tags)]; // Remove duplicates
}
/**
* Maps attributes to metadata with special handling for authors and tags
*/
function mapAttributesToMetadata(attributes: Record<string, any>, metadata: AsciiDocMetadata, isDocument: boolean = false): void {
function mapAttributesToMetadata(
attributes: Record<string, any>,
metadata: AsciiDocMetadata,
isDocument: boolean = false,
): void {
for (const [key, value] of Object.entries(attributes)) {
const metadataKey = ATTRIBUTE_MAP[key.toLowerCase()];
if (metadataKey && value && typeof value === 'string') {
if (metadataKey === 'authors' && isDocument) {
if (metadataKey && value && typeof value === "string") {
if (metadataKey === "authors" && isDocument) {
// Skip author mapping for documents since it's handled manually
continue;
} else if (metadataKey === 'authors' && !isDocument) {
} else if (metadataKey === "authors" && !isDocument) {
// For sections, append author to existing authors array
if (!metadata.authors) {
metadata.authors = [];
}
metadata.authors.push(value);
} else if (metadataKey === 'tags') {
} else if (metadataKey === "tags") {
// Skip tags mapping since it's handled by extractTagsFromAttributes
continue;
} else {
@ -121,11 +125,14 @@ function mapAttributesToMetadata(attributes: Record<string, any>, metadata: Asci @@ -121,11 +125,14 @@ function mapAttributesToMetadata(attributes: Record<string, any>, metadata: Asci
/**
* Extracts authors from header line (document or section)
*/
function extractAuthorsFromHeader(sourceContent: string, isSection: boolean = false): string[] {
function extractAuthorsFromHeader(
sourceContent: string,
isSection: boolean = false,
): string[] {
const authors: string[] = [];
const lines = sourceContent.split(/\r?\n/);
const headerPattern = isSection ? /^==\s+/ : /^=\s+/;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
if (line.match(headerPattern)) {
@ -133,51 +140,60 @@ function extractAuthorsFromHeader(sourceContent: string, isSection: boolean = fa @@ -133,51 +140,60 @@ function extractAuthorsFromHeader(sourceContent: string, isSection: boolean = fa
let j = i + 1;
while (j < lines.length) {
const authorLine = lines[j];
// Stop if we hit a blank line or content that's not an author
if (authorLine.trim() === '') {
if (authorLine.trim() === "") {
break;
}
if (authorLine.includes('<') && !authorLine.startsWith(':')) {
if (authorLine.includes("<") && !authorLine.startsWith(":")) {
// This is an author line like "John Doe <john@example.com>"
const authorName = authorLine.split('<')[0].trim();
const authorName = authorLine.split("<")[0].trim();
if (authorName) {
authors.push(authorName);
}
} else if (isSection && authorLine.match(/^[A-Za-z\s]+$/) && authorLine.trim() !== '' && authorLine.trim().split(/\s+/).length <= 2) {
} else if (
isSection && authorLine.match(/^[A-Za-z\s]+$/) &&
authorLine.trim() !== "" && authorLine.trim().split(/\s+/).length <= 2
) {
// This is a simple author name without email (for sections)
authors.push(authorLine.trim());
} else if (authorLine.startsWith(':')) {
} else if (authorLine.startsWith(":")) {
// This is an attribute line, skip it - attributes are handled by mapAttributesToMetadata
// Don't break here, continue to next line
} else {
// Not an author line, stop looking
break;
}
j++;
}
break;
}
}
return authors;
}
/**
* Strips header and attribute lines from content
*/
function stripHeaderAndAttributes(content: string, isSection: boolean = false): string {
function stripHeaderAndAttributes(
content: string,
isSection: boolean = false,
): string {
const lines = content.split(/\r?\n/);
let contentStart = 0;
const headerPattern = isSection ? /^==\s+/ : /^=\s+/;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
// Skip title line, author line, revision line, and attribute lines
if (!line.match(headerPattern) && !line.includes('<') && !line.match(/^.+,\s*.+:\s*.+$/) &&
!line.match(/^:[^:]+:\s*.+$/) && line.trim() !== '') {
if (
!line.match(headerPattern) && !line.includes("<") &&
!line.match(/^.+,\s*.+:\s*.+$/) &&
!line.match(/^:[^:]+:\s*.+$/) && line.trim() !== ""
) {
contentStart = i;
break;
}
@ -185,20 +201,26 @@ function stripHeaderAndAttributes(content: string, isSection: boolean = false): @@ -185,20 +201,26 @@ function stripHeaderAndAttributes(content: string, isSection: boolean = false):
// Filter out all attribute lines and author lines from the content
const contentLines = lines.slice(contentStart);
const filteredLines = contentLines.filter(line => {
const filteredLines = contentLines.filter((line) => {
// Skip attribute lines
if (line.match(/^:[^:]+:\s*.+$/)) {
return false;
}
// Skip author lines (simple names without email)
if (isSection && line.match(/^[A-Za-z\s]+$/) && line.trim() !== '' && line.trim().split(/\s+/).length <= 2) {
if (
isSection && line.match(/^[A-Za-z\s]+$/) && line.trim() !== "" &&
line.trim().split(/\s+/).length <= 2
) {
return false;
}
return true;
});
// Remove extra blank lines and normalize newlines
return filteredLines.join('\n').replace(/\n\s*\n\s*\n/g, '\n\n').replace(/\n\s*\n/g, '\n').trim();
return filteredLines.join("\n").replace(/\n\s*\n\s*\n/g, "\n\n").replace(
/\n\s*\n/g,
"\n",
).trim();
}
/**
@ -207,7 +229,7 @@ function stripHeaderAndAttributes(content: string, isSection: boolean = false): @@ -207,7 +229,7 @@ function stripHeaderAndAttributes(content: string, isSection: boolean = false):
function parseSectionAttributes(sectionContent: string): Record<string, any> {
const attributes: Record<string, any> = {};
const lines = sectionContent.split(/\r?\n/);
for (const line of lines) {
const match = line.match(/^:([^:]+):\s*(.+)$/);
if (match) {
@ -215,14 +237,10 @@ function parseSectionAttributes(sectionContent: string): Record<string, any> { @@ -215,14 +237,10 @@ function parseSectionAttributes(sectionContent: string): Record<string, any> {
attributes[key.trim()] = value.trim();
}
}
return attributes;
}
/**
* Extracts metadata from AsciiDoc document using Asciidoctor
*/
@ -231,7 +249,9 @@ export function extractDocumentMetadata(inputContent: string): { @@ -231,7 +249,9 @@ export function extractDocumentMetadata(inputContent: string): {
content: string;
} {
const asciidoctor = createProcessor();
const document = asciidoctor.load(inputContent, { standalone: false }) as Document;
const document = asciidoctor.load(inputContent, {
standalone: false,
}) as Document;
const metadata: AsciiDocMetadata = {};
const attributes = document.getAttributes();
@ -242,13 +262,16 @@ export function extractDocumentMetadata(inputContent: string): { @@ -242,13 +262,16 @@ export function extractDocumentMetadata(inputContent: string): {
// Handle multiple authors - combine header line and attributes
const authors = extractAuthorsFromHeader(document.getSource());
// Get authors from attributes (but avoid duplicates)
const attrAuthor = attributes['author'];
if (attrAuthor && typeof attrAuthor === 'string' && !authors.includes(attrAuthor)) {
const attrAuthor = attributes["author"];
if (
attrAuthor && typeof attrAuthor === "string" &&
!authors.includes(attrAuthor)
) {
authors.push(attrAuthor);
}
if (authors.length > 0) {
metadata.authors = [...new Set(authors)]; // Remove duplicates
}
@ -265,12 +288,12 @@ export function extractDocumentMetadata(inputContent: string): { @@ -265,12 +288,12 @@ export function extractDocumentMetadata(inputContent: string): {
// Map attributes to metadata (but skip version and publishedBy if we already have them from revision)
mapAttributesToMetadata(attributes, metadata, true);
// If we got version from revision, don't override it with attribute
if (revisionNumber) {
metadata.version = revisionNumber;
}
// If we got publishedBy from revision, don't override it with attribute
if (revisionRemark) {
metadata.publishedBy = revisionRemark;
@ -295,17 +318,19 @@ export function extractSectionMetadata(inputSectionContent: string): { @@ -295,17 +318,19 @@ export function extractSectionMetadata(inputSectionContent: string): {
title: string;
} {
const asciidoctor = createProcessor();
const document = asciidoctor.load(`= Temp\n\n${inputSectionContent}`, { standalone: false }) as Document;
const document = asciidoctor.load(`= Temp\n\n${inputSectionContent}`, {
standalone: false,
}) as Document;
const sections = document.getSections();
if (sections.length === 0) {
return { metadata: {}, content: inputSectionContent, title: '' };
return { metadata: {}, content: inputSectionContent, title: "" };
}
const section = sections[0];
const title = section.getTitle() || '';
const title = section.getTitle() || "";
const metadata: SectionMetadata = { title };
// Parse attributes from the section content
const attributes = parseSectionAttributes(inputSectionContent);
@ -335,7 +360,7 @@ export function parseAsciiDocWithMetadata(content: string): ParsedAsciiDoc { @@ -335,7 +360,7 @@ export function parseAsciiDocWithMetadata(content: string): ParsedAsciiDoc {
const asciidoctor = createProcessor();
const document = asciidoctor.load(content, { standalone: false }) as Document;
const { metadata: docMetadata } = extractDocumentMetadata(content);
// Parse the original content to find section attributes
const lines = content.split(/\r?\n/);
const sectionsWithMetadata: Array<{
@ -345,15 +370,15 @@ export function parseAsciiDocWithMetadata(content: string): ParsedAsciiDoc { @@ -345,15 +370,15 @@ export function parseAsciiDocWithMetadata(content: string): ParsedAsciiDoc {
}> = [];
let currentSection: string | null = null;
let currentSectionContent: string[] = [];
for (const line of lines) {
if (line.match(/^==\s+/)) {
// Save previous section if exists
if (currentSection) {
const sectionContent = currentSectionContent.join('\n');
const sectionContent = currentSectionContent.join("\n");
sectionsWithMetadata.push(extractSectionMetadata(sectionContent));
}
// Start new section
currentSection = line;
currentSectionContent = [line];
@ -361,42 +386,46 @@ export function parseAsciiDocWithMetadata(content: string): ParsedAsciiDoc { @@ -361,42 +386,46 @@ export function parseAsciiDocWithMetadata(content: string): ParsedAsciiDoc {
currentSectionContent.push(line);
}
}
// Save the last section
if (currentSection) {
const sectionContent = currentSectionContent.join('\n');
const sectionContent = currentSectionContent.join("\n");
sectionsWithMetadata.push(extractSectionMetadata(sectionContent));
}
return {
metadata: docMetadata,
content: document.getSource(),
sections: sectionsWithMetadata
sections: sectionsWithMetadata,
};
}
/**
* Converts metadata to Nostr event tags
*/
export function metadataToTags(metadata: AsciiDocMetadata | SectionMetadata): [string, string][] {
export function metadataToTags(
metadata: AsciiDocMetadata | SectionMetadata,
): [string, string][] {
const tags: [string, string][] = [];
if (metadata.title) tags.push(['title', metadata.title]);
if (metadata.title) tags.push(["title", metadata.title]);
if (metadata.authors?.length) {
metadata.authors.forEach(author => tags.push(['author', author]));
metadata.authors.forEach((author) => tags.push(["author", author]));
}
if (metadata.version) tags.push(["version", metadata.version]);
if (metadata.edition) tags.push(["edition", metadata.edition]);
if (metadata.publicationDate) {
tags.push(["published_on", metadata.publicationDate]);
}
if (metadata.version) tags.push(['version', metadata.version]);
if (metadata.edition) tags.push(['edition', metadata.edition]);
if (metadata.publicationDate) tags.push(['published_on', metadata.publicationDate]);
if (metadata.publishedBy) tags.push(['published_by', metadata.publishedBy]);
if (metadata.summary) tags.push(['summary', metadata.summary]);
if (metadata.coverImage) tags.push(['image', metadata.coverImage]);
if (metadata.isbn) tags.push(['i', metadata.isbn]);
if (metadata.source) tags.push(['source', metadata.source]);
if (metadata.type) tags.push(['type', metadata.type]);
if (metadata.autoUpdate) tags.push(['auto-update', metadata.autoUpdate]);
if (metadata.publishedBy) tags.push(["published_by", metadata.publishedBy]);
if (metadata.summary) tags.push(["summary", metadata.summary]);
if (metadata.coverImage) tags.push(["image", metadata.coverImage]);
if (metadata.isbn) tags.push(["i", metadata.isbn]);
if (metadata.source) tags.push(["source", metadata.source]);
if (metadata.type) tags.push(["type", metadata.type]);
if (metadata.autoUpdate) tags.push(["auto-update", metadata.autoUpdate]);
if (metadata.tags?.length) {
metadata.tags.forEach(tag => tags.push(['t', tag]));
metadata.tags.forEach((tag) => tags.push(["t", tag]));
}
return tags;
@ -408,7 +437,7 @@ export function metadataToTags(metadata: AsciiDocMetadata | SectionMetadata): [s @@ -408,7 +437,7 @@ export function metadataToTags(metadata: AsciiDocMetadata | SectionMetadata): [s
export function removeMetadataFromContent(content: string): string {
const { content: cleanedContent } = extractDocumentMetadata(content);
return cleanedContent;
}
}
/**
* Extracts metadata from content that only contains sections (no document header)
@ -424,19 +453,19 @@ export function extractMetadataFromSectionsOnly(content: string): { @@ -424,19 +453,19 @@ export function extractMetadataFromSectionsOnly(content: string): {
content: string;
title: string;
}> = [];
let currentSection: string | null = null;
let currentSectionContent: string[] = [];
// Parse sections from the content
for (const line of lines) {
if (line.match(/^==\s+/)) {
// Save previous section if exists
if (currentSection) {
const sectionContent = currentSectionContent.join('\n');
const sectionContent = currentSectionContent.join("\n");
sections.push(extractSectionMetadata(sectionContent));
}
// Start new section
currentSection = line;
currentSectionContent = [line];
@ -444,20 +473,20 @@ export function extractMetadataFromSectionsOnly(content: string): { @@ -444,20 +473,20 @@ export function extractMetadataFromSectionsOnly(content: string): {
currentSectionContent.push(line);
}
}
// Save the last section
if (currentSection) {
const sectionContent = currentSectionContent.join('\n');
const sectionContent = currentSectionContent.join("\n");
sections.push(extractSectionMetadata(sectionContent));
}
// For section-only content, we don't have document metadata
// Return the first section's title as the document title if available
const metadata: AsciiDocMetadata = {};
if (sections.length > 0 && sections[0].title) {
metadata.title = sections[0].title;
}
return { metadata, content };
}
@ -470,31 +499,31 @@ export function extractSmartMetadata(content: string): { @@ -470,31 +499,31 @@ export function extractSmartMetadata(content: string): {
} {
// Check if content has a document header
const hasDocumentHeader = content.match(/^=\s+/m);
if (hasDocumentHeader) {
// Check if it's a minimal document header (just title, no other metadata)
const lines = content.split(/\r?\n/);
const titleLine = lines.find(line => line.match(/^=\s+/));
const hasOtherMetadata = lines.some(line =>
line.includes('<') || // author line
const titleLine = lines.find((line) => line.match(/^=\s+/));
const hasOtherMetadata = lines.some((line) =>
line.includes("<") || // author line
line.match(/^.+,\s*.+:\s*.+$/) // revision line
);
if (hasOtherMetadata) {
// Full document with metadata - use standard extraction
return extractDocumentMetadata(content);
} else {
// Minimal document header (just title) - preserve the title line for 30040 events
const title = titleLine?.replace(/^=\s+/, '').trim();
const metadata: AsciiDocMetadata = {};
if (title) {
metadata.title = title;
}
// Keep the title line in content for 30040 events
return { metadata, content };
}
} else {
// Minimal document header (just title) - preserve the title line for 30040 events
const title = titleLine?.replace(/^=\s+/, "").trim();
const metadata: AsciiDocMetadata = {};
if (title) {
metadata.title = title;
}
// Keep the title line in content for 30040 events
return { metadata, content };
}
} else {
return extractMetadataFromSectionsOnly(content);
}
}
}

4
src/lib/utils/community_checker.ts

@ -43,7 +43,7 @@ export async function checkCommunity(pubkey: string): Promise<boolean> { @@ -43,7 +43,7 @@ export async function checkCommunity(pubkey: string): Promise<boolean> {
}
};
});
if (result) {
return true;
}
@ -52,7 +52,7 @@ export async function checkCommunity(pubkey: string): Promise<boolean> { @@ -52,7 +52,7 @@ export async function checkCommunity(pubkey: string): Promise<boolean> {
continue;
}
}
// If we get here, no relay found the user
communityCache.set(pubkey, false);
return false;

59
src/lib/utils/displayLimits.ts

@ -1,7 +1,7 @@ @@ -1,7 +1,7 @@
import type { NDKEvent } from '@nostr-dev-kit/ndk';
import type { VisualizationConfig } from '$lib/stores/visualizationConfig';
import { isEventId, isCoordinate, parseCoordinate } from './nostr_identifiers';
import type { NostrEventId } from './nostr_identifiers';
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import type { VisualizationConfig } from "$lib/stores/visualizationConfig";
import { isCoordinate, isEventId, parseCoordinate } from "./nostr_identifiers";
import type { NostrEventId } from "./nostr_identifiers";
/**
* Filters events based on visualization configuration
@ -9,7 +9,10 @@ import type { NostrEventId } from './nostr_identifiers'; @@ -9,7 +9,10 @@ import type { NostrEventId } from './nostr_identifiers';
* @param config - Visualization configuration
* @returns Filtered events that should be displayed
*/
export function filterByDisplayLimits(events: NDKEvent[], config: VisualizationConfig): NDKEvent[] {
export function filterByDisplayLimits(
events: NDKEvent[],
config: VisualizationConfig,
): NDKEvent[] {
const result: NDKEvent[] = [];
const kindCounts = new Map<number, number>();
@ -18,13 +21,13 @@ export function filterByDisplayLimits(events: NDKEvent[], config: VisualizationC @@ -18,13 +21,13 @@ export function filterByDisplayLimits(events: NDKEvent[], config: VisualizationC
if (kind === undefined) continue;
// Get the config for this event kind
const eventConfig = config.eventConfigs.find(ec => ec.kind === kind);
const eventConfig = config.eventConfigs.find((ec) => ec.kind === kind);
// Skip if the kind is disabled
if (eventConfig && eventConfig.enabled === false) {
continue;
}
const limit = eventConfig?.limit;
// Special handling for content kinds (30041, 30818) with showAll option
@ -58,48 +61,48 @@ export function filterByDisplayLimits(events: NDKEvent[], config: VisualizationC @@ -58,48 +61,48 @@ export function filterByDisplayLimits(events: NDKEvent[], config: VisualizationC
* @returns Set of missing event identifiers
*/
export function detectMissingEvents(
events: NDKEvent[],
events: NDKEvent[],
existingIds: Set<NostrEventId>,
existingCoordinates?: Map<string, NDKEvent>
existingCoordinates?: Map<string, NDKEvent>,
): Set<string> {
const missing = new Set<string>();
for (const event of events) {
// Check 'e' tags for direct event references (hex IDs)
const eTags = event.getMatchingTags('e');
const eTags = event.getMatchingTags("e");
for (const eTag of eTags) {
if (eTag.length < 2) continue;
const eventId = eTag[1];
// Type check: ensure it's a valid hex event ID
if (!isEventId(eventId)) {
console.warn('Invalid event ID in e tag:', eventId);
console.warn("Invalid event ID in e tag:", eventId);
continue;
}
if (!existingIds.has(eventId)) {
missing.add(eventId);
}
}
// Check 'a' tags for NIP-33 references (kind:pubkey:d-tag)
const aTags = event.getMatchingTags('a');
const aTags = event.getMatchingTags("a");
for (const aTag of aTags) {
if (aTag.length < 2) continue;
const identifier = aTag[1];
// Type check: ensure it's a valid coordinate
if (!isCoordinate(identifier)) {
console.warn('Invalid coordinate in a tag:', identifier);
console.warn("Invalid coordinate in a tag:", identifier);
continue;
}
// Parse the coordinate
const parsed = parseCoordinate(identifier);
if (!parsed) continue;
// If we have existing coordinates, check if this one exists
if (existingCoordinates) {
if (!existingCoordinates.has(identifier)) {
@ -108,7 +111,10 @@ export function detectMissingEvents( @@ -108,7 +111,10 @@ export function detectMissingEvents(
} else {
// Without coordinate map, we can't detect missing NIP-33 events
// This is a limitation when we only have hex IDs
console.debug('Cannot detect missing NIP-33 events without coordinate map:', identifier);
console.debug(
"Cannot detect missing NIP-33 events without coordinate map:",
identifier,
);
}
}
}
@ -123,20 +129,19 @@ export function detectMissingEvents( @@ -123,20 +129,19 @@ export function detectMissingEvents(
*/
export function buildCoordinateMap(events: NDKEvent[]): Map<string, NDKEvent> {
const coordinateMap = new Map<string, NDKEvent>();
for (const event of events) {
// Only process replaceable events (kinds 30000-39999)
if (event.kind && event.kind >= 30000 && event.kind < 40000) {
const dTag = event.tagValue('d');
const dTag = event.tagValue("d");
const author = event.pubkey;
if (dTag && author) {
const coordinate = `${event.kind}:${author}:${dTag}`;
coordinateMap.set(coordinate, event);
}
}
}
return coordinateMap;
}

101
src/lib/utils/eventColors.ts

@ -13,11 +13,11 @@ const GOLDEN_RATIO = (1 + Math.sqrt(5)) / 2; @@ -13,11 +13,11 @@ const GOLDEN_RATIO = (1 + Math.sqrt(5)) / 2;
export function getEventKindColor(kind: number): string {
// Use golden ratio for better distribution
const hue = (kind * GOLDEN_RATIO * 360) % 360;
// Use different saturation/lightness for better visibility
const saturation = 65 + (kind % 20); // 65-85%
const lightness = 55 + ((kind * 3) % 15); // 55-70%
return `hsl(${Math.round(hue)}, ${saturation}%, ${lightness}%)`;
}
@ -28,55 +28,54 @@ export function getEventKindColor(kind: number): string { @@ -28,55 +28,54 @@ export function getEventKindColor(kind: number): string {
*/
export function getEventKindName(kind: number): string {
const kindNames: Record<number, string> = {
0: 'Metadata',
1: 'Text Note',
2: 'Recommend Relay',
3: 'Contact List',
4: 'Encrypted DM',
5: 'Event Deletion',
6: 'Repost',
7: 'Reaction',
8: 'Badge Award',
16: 'Generic Repost',
40: 'Channel Creation',
41: 'Channel Metadata',
42: 'Channel Message',
43: 'Channel Hide Message',
44: 'Channel Mute User',
1984: 'Reporting',
9734: 'Zap Request',
9735: 'Zap',
10000: 'Mute List',
10001: 'Pin List',
10002: 'Relay List',
22242: 'Client Authentication',
24133: 'Nostr Connect',
27235: 'HTTP Auth',
30000: 'Categorized People List',
30001: 'Categorized Bookmark List',
30008: 'Profile Badges',
30009: 'Badge Definition',
30017: 'Create or update a stall',
30018: 'Create or update a product',
30023: 'Long-form Content',
30024: 'Draft Long-form Content',
30040: 'Publication Index',
30041: 'Publication Content',
30078: 'Application-specific Data',
30311: 'Live Event',
30402: 'Classified Listing',
30403: 'Draft Classified Listing',
30617: 'Repository',
30818: 'Wiki Page',
31922: 'Date-Based Calendar Event',
31923: 'Time-Based Calendar Event',
31924: 'Calendar',
31925: 'Calendar Event RSVP',
31989: 'Handler recommendation',
31990: 'Handler information',
34550: 'Community Definition',
0: "Metadata",
1: "Text Note",
2: "Recommend Relay",
3: "Contact List",
4: "Encrypted DM",
5: "Event Deletion",
6: "Repost",
7: "Reaction",
8: "Badge Award",
16: "Generic Repost",
40: "Channel Creation",
41: "Channel Metadata",
42: "Channel Message",
43: "Channel Hide Message",
44: "Channel Mute User",
1984: "Reporting",
9734: "Zap Request",
9735: "Zap",
10000: "Mute List",
10001: "Pin List",
10002: "Relay List",
22242: "Client Authentication",
24133: "Nostr Connect",
27235: "HTTP Auth",
30000: "Categorized People List",
30001: "Categorized Bookmark List",
30008: "Profile Badges",
30009: "Badge Definition",
30017: "Create or update a stall",
30018: "Create or update a product",
30023: "Long-form Content",
30024: "Draft Long-form Content",
30040: "Publication Index",
30041: "Publication Content",
30078: "Application-specific Data",
30311: "Live Event",
30402: "Classified Listing",
30403: "Draft Classified Listing",
30617: "Repository",
30818: "Wiki Page",
31922: "Date-Based Calendar Event",
31923: "Time-Based Calendar Event",
31924: "Calendar",
31925: "Calendar Event RSVP",
31989: "Handler recommendation",
31990: "Handler information",
34550: "Community Definition",
};
return kindNames[kind] || `Kind ${kind}`;
}

152
src/lib/utils/eventDeduplication.ts

@ -1,69 +1,88 @@ @@ -1,69 +1,88 @@
import type { NDKEvent } from '@nostr-dev-kit/ndk';
import type { NDKEvent } from "@nostr-dev-kit/ndk";
/**
* Deduplicate content events by keeping only the most recent version
* @param contentEventSets Array of event sets from different sources
* @returns Map of coordinate to most recent event
*/
export function deduplicateContentEvents(contentEventSets: Set<NDKEvent>[]): Map<string, NDKEvent> {
export function deduplicateContentEvents(
contentEventSets: Set<NDKEvent>[],
): Map<string, NDKEvent> {
const eventsByCoordinate = new Map<string, NDKEvent>();
// Track statistics for debugging
let totalEvents = 0;
let duplicateCoordinates = 0;
const duplicateDetails: Array<{ coordinate: string; count: number; events: string[] }> = [];
const duplicateDetails: Array<
{ coordinate: string; count: number; events: string[] }
> = [];
contentEventSets.forEach((eventSet) => {
eventSet.forEach(event => {
eventSet.forEach((event) => {
totalEvents++;
const dTag = event.tagValue("d");
const author = event.pubkey;
const kind = event.kind;
if (dTag && author && kind) {
const coordinate = `${kind}:${author}:${dTag}`;
const existing = eventsByCoordinate.get(coordinate);
if (existing) {
// We found a duplicate coordinate
duplicateCoordinates++;
// Track details for the first few duplicates
if (duplicateDetails.length < 5) {
const existingDetails = duplicateDetails.find(d => d.coordinate === coordinate);
const existingDetails = duplicateDetails.find((d) =>
d.coordinate === coordinate
);
if (existingDetails) {
existingDetails.count++;
existingDetails.events.push(`${event.id} (created_at: ${event.created_at})`);
existingDetails.events.push(
`${event.id} (created_at: ${event.created_at})`,
);
} else {
duplicateDetails.push({
coordinate,
count: 2, // existing + current
events: [
`${existing.id} (created_at: ${existing.created_at})`,
`${event.id} (created_at: ${event.created_at})`
]
`${event.id} (created_at: ${event.created_at})`,
],
});
}
}
}
// Keep the most recent event (highest created_at)
if (!existing || (event.created_at !== undefined && existing.created_at !== undefined && event.created_at > existing.created_at)) {
if (
!existing ||
(event.created_at !== undefined &&
existing.created_at !== undefined &&
event.created_at > existing.created_at)
) {
eventsByCoordinate.set(coordinate, event);
}
}
});
});
// Log deduplication results if any duplicates were found
if (duplicateCoordinates > 0) {
console.log(`[eventDeduplication] Found ${duplicateCoordinates} duplicate events out of ${totalEvents} total events`);
console.log(`[eventDeduplication] Reduced to ${eventsByCoordinate.size} unique coordinates`);
console.log(
`[eventDeduplication] Found ${duplicateCoordinates} duplicate events out of ${totalEvents} total events`,
);
console.log(
`[eventDeduplication] Reduced to ${eventsByCoordinate.size} unique coordinates`,
);
console.log(`[eventDeduplication] Duplicate details:`, duplicateDetails);
} else if (totalEvents > 0) {
console.log(`[eventDeduplication] No duplicates found in ${totalEvents} events`);
console.log(
`[eventDeduplication] No duplicates found in ${totalEvents} events`,
);
}
return eventsByCoordinate;
}
@ -77,83 +96,95 @@ export function deduplicateContentEvents(contentEventSets: Set<NDKEvent>[]): Map @@ -77,83 +96,95 @@ export function deduplicateContentEvents(contentEventSets: Set<NDKEvent>[]): Map
export function deduplicateAndCombineEvents(
nonPublicationEvents: NDKEvent[],
validIndexEvents: Set<NDKEvent>,
contentEvents: Set<NDKEvent>
contentEvents: Set<NDKEvent>,
): NDKEvent[] {
// Track statistics for debugging
const initialCount = nonPublicationEvents.length + validIndexEvents.size + contentEvents.size;
const initialCount = nonPublicationEvents.length + validIndexEvents.size +
contentEvents.size;
let replaceableEventsProcessed = 0;
let duplicateCoordinatesFound = 0;
const duplicateDetails: Array<{ coordinate: string; count: number; events: string[] }> = [];
const duplicateDetails: Array<
{ coordinate: string; count: number; events: string[] }
> = [];
// First, build coordinate map for replaceable events
const coordinateMap = new Map<string, NDKEvent>();
const allEventsToProcess = [
...nonPublicationEvents, // Non-publication events fetched earlier
...Array.from(validIndexEvents),
...Array.from(contentEvents)
...Array.from(validIndexEvents),
...Array.from(contentEvents),
];
// First pass: identify the most recent version of each replaceable event
allEventsToProcess.forEach(event => {
allEventsToProcess.forEach((event) => {
if (!event.id) return;
// For replaceable events (30000-39999), track by coordinate
if (event.kind && event.kind >= 30000 && event.kind < 40000) {
replaceableEventsProcessed++;
const dTag = event.tagValue("d");
const author = event.pubkey;
if (dTag && author) {
const coordinate = `${event.kind}:${author}:${dTag}`;
const existing = coordinateMap.get(coordinate);
if (existing) {
// We found a duplicate coordinate
duplicateCoordinatesFound++;
// Track details for the first few duplicates
if (duplicateDetails.length < 5) {
const existingDetails = duplicateDetails.find(d => d.coordinate === coordinate);
const existingDetails = duplicateDetails.find((d) =>
d.coordinate === coordinate
);
if (existingDetails) {
existingDetails.count++;
existingDetails.events.push(`${event.id} (created_at: ${event.created_at})`);
existingDetails.events.push(
`${event.id} (created_at: ${event.created_at})`,
);
} else {
duplicateDetails.push({
coordinate,
count: 2, // existing + current
events: [
`${existing.id} (created_at: ${existing.created_at})`,
`${event.id} (created_at: ${event.created_at})`
]
`${event.id} (created_at: ${event.created_at})`,
],
});
}
}
}
// Keep the most recent version
if (!existing || (event.created_at !== undefined && existing.created_at !== undefined && event.created_at > existing.created_at)) {
if (
!existing ||
(event.created_at !== undefined &&
existing.created_at !== undefined &&
event.created_at > existing.created_at)
) {
coordinateMap.set(coordinate, event);
}
}
}
});
// Second pass: build final event map
const finalEventMap = new Map<string, NDKEvent>();
const seenCoordinates = new Set<string>();
allEventsToProcess.forEach(event => {
allEventsToProcess.forEach((event) => {
if (!event.id) return;
// For replaceable events, only add if it's the chosen version
if (event.kind && event.kind >= 30000 && event.kind < 40000) {
const dTag = event.tagValue("d");
const author = event.pubkey;
if (dTag && author) {
const coordinate = `${event.kind}:${author}:${dTag}`;
const chosenEvent = coordinateMap.get(coordinate);
// Only add this event if it's the chosen one for this coordinate
if (chosenEvent && chosenEvent.id === event.id) {
if (!seenCoordinates.has(coordinate)) {
@ -164,23 +195,32 @@ export function deduplicateAndCombineEvents( @@ -164,23 +195,32 @@ export function deduplicateAndCombineEvents(
return;
}
}
// Non-replaceable events are added directly
finalEventMap.set(event.id, event);
});
const finalCount = finalEventMap.size;
const reduction = initialCount - finalCount;
// Log deduplication results if any duplicates were found
if (duplicateCoordinatesFound > 0) {
console.log(`[eventDeduplication] deduplicateAndCombineEvents: Found ${duplicateCoordinatesFound} duplicate coordinates out of ${replaceableEventsProcessed} replaceable events`);
console.log(`[eventDeduplication] deduplicateAndCombineEvents: Reduced from ${initialCount} to ${finalCount} events (${reduction} removed)`);
console.log(`[eventDeduplication] deduplicateAndCombineEvents: Duplicate details:`, duplicateDetails);
console.log(
`[eventDeduplication] deduplicateAndCombineEvents: Found ${duplicateCoordinatesFound} duplicate coordinates out of ${replaceableEventsProcessed} replaceable events`,
);
console.log(
`[eventDeduplication] deduplicateAndCombineEvents: Reduced from ${initialCount} to ${finalCount} events (${reduction} removed)`,
);
console.log(
`[eventDeduplication] deduplicateAndCombineEvents: Duplicate details:`,
duplicateDetails,
);
} else if (replaceableEventsProcessed > 0) {
console.log(`[eventDeduplication] deduplicateAndCombineEvents: No duplicates found in ${replaceableEventsProcessed} replaceable events`);
console.log(
`[eventDeduplication] deduplicateAndCombineEvents: No duplicates found in ${replaceableEventsProcessed} replaceable events`,
);
}
return Array.from(finalEventMap.values());
}
@ -202,13 +242,13 @@ export function getEventCoordinate(event: NDKEvent): string | null { @@ -202,13 +242,13 @@ export function getEventCoordinate(event: NDKEvent): string | null {
if (!isReplaceableEvent(event)) {
return null;
}
const dTag = event.tagValue("d");
const author = event.pubkey;
if (!dTag || !author) {
return null;
}
return `${event.kind}:${author}:${dTag}`;
}
}

72
src/lib/utils/event_input_utils.ts

@ -3,12 +3,12 @@ import { get } from "svelte/store"; @@ -3,12 +3,12 @@ import { get } from "svelte/store";
import { ndkInstance } from "../ndk.ts";
import { NDKEvent as NDKEventClass } from "@nostr-dev-kit/ndk";
import { EVENT_KINDS } from "./search_constants";
import {
extractDocumentMetadata,
extractSectionMetadata,
parseAsciiDocWithMetadata,
import {
extractDocumentMetadata,
extractSectionMetadata,
metadataToTags,
removeMetadataFromContent
parseAsciiDocWithMetadata,
removeMetadataFromContent,
} from "./asciidoc_metadata";
// =========================
@ -92,12 +92,14 @@ export function validate30040EventSet(content: string): { @@ -92,12 +92,14 @@ export function validate30040EventSet(content: string): {
const lines = content.split(/\r?\n/);
const { metadata } = extractDocumentMetadata(content);
const documentTitle = metadata.title;
const nonEmptyLines = lines.filter(line => line.trim() !== "").map(line => line.trim());
const isIndexCardFormat = documentTitle &&
nonEmptyLines.length === 2 &&
nonEmptyLines[0].startsWith("=") &&
const nonEmptyLines = lines.filter((line) => line.trim() !== "").map((line) =>
line.trim()
);
const isIndexCardFormat = documentTitle &&
nonEmptyLines.length === 2 &&
nonEmptyLines[0].startsWith("=") &&
nonEmptyLines[1].toLowerCase() === "index card";
if (isIndexCardFormat) {
return { valid: true };
}
@ -125,18 +127,20 @@ export function validate30040EventSet(content: string): { @@ -125,18 +127,20 @@ export function validate30040EventSet(content: string): {
if (documentHeaderMatches && documentHeaderMatches.length > 1) {
return {
valid: false,
reason: '30040 events must have exactly one document title ("="). Found multiple document headers.',
reason:
'30040 events must have exactly one document title ("="). Found multiple document headers.',
};
}
// Parse the content to check sections
const parsed = parseAsciiDocWithMetadata(content);
const hasSections = parsed.sections.length > 0;
if (!hasSections) {
return {
valid: true,
warning: "No section headers (==) found. This will create a 30040 index event and a single 30041 preamble section. Continue?",
warning:
"No section headers (==) found. This will create a 30040 index event and a single 30041 preamble section. Continue?",
};
}
@ -147,7 +151,9 @@ export function validate30040EventSet(content: string): { @@ -147,7 +151,9 @@ export function validate30040EventSet(content: string): {
}
// Check for empty sections
const emptySections = parsed.sections.filter(section => section.content.trim() === "");
const emptySections = parsed.sections.filter((section) =>
section.content.trim() === ""
);
if (emptySections.length > 0) {
return {
valid: true,
@ -226,21 +232,23 @@ export function build30040EventSet( @@ -226,21 +232,23 @@ export function build30040EventSet(
// Check if this is an "index card" format (no sections, just title + "index card")
const lines = content.split(/\r?\n/);
const documentTitle = parsed.metadata.title;
// For index card format, the content should be exactly: title + "index card"
const nonEmptyLines = lines.filter(line => line.trim() !== "").map(line => line.trim());
const isIndexCardFormat = documentTitle &&
nonEmptyLines.length === 2 &&
nonEmptyLines[0].startsWith("=") &&
const nonEmptyLines = lines.filter((line) => line.trim() !== "").map((line) =>
line.trim()
);
const isIndexCardFormat = documentTitle &&
nonEmptyLines.length === 2 &&
nonEmptyLines[0].startsWith("=") &&
nonEmptyLines[1].toLowerCase() === "index card";
if (isIndexCardFormat) {
console.log("Creating index card format (no sections)");
const indexDTag = normalizeDTagValue(documentTitle);
// Convert document metadata to tags
const metadataTags = metadataToTags(parsed.metadata);
const indexEvent: NDKEvent = new NDKEventClass(ndk, {
kind: 30040,
content: "",
@ -253,7 +261,7 @@ export function build30040EventSet( @@ -253,7 +261,7 @@ export function build30040EventSet(
pubkey: baseEvent.pubkey,
created_at: baseEvent.created_at,
});
console.log("Final index event (index card):", indexEvent);
console.log("=== build30040EventSet completed (index card) ===");
return { indexEvent, sectionEvents: [] };
@ -266,24 +274,24 @@ export function build30040EventSet( @@ -266,24 +274,24 @@ export function build30040EventSet(
// Create section events with their metadata
const sectionEvents: NDKEvent[] = parsed.sections.map((section, i) => {
const sectionDTag = `${indexDTag}-${normalizeDTagValue(section.title)}`;
console.log(`Creating section ${i}:`, {
title: section.title,
dTag: sectionDTag,
console.log(`Creating section ${i}:`, {
title: section.title,
dTag: sectionDTag,
content: section.content,
metadata: section.metadata
metadata: section.metadata,
});
// Convert section metadata to tags
const sectionMetadataTags = metadataToTags(section.metadata);
return new NDKEventClass(ndk, {
kind: 30041,
content: section.content,
tags: [
...tags,
...sectionMetadataTags,
["d", sectionDTag],
["title", section.title]
["d", sectionDTag],
["title", section.title],
],
pubkey: baseEvent.pubkey,
created_at: baseEvent.created_at,
@ -291,7 +299,7 @@ export function build30040EventSet( @@ -291,7 +299,7 @@ export function build30040EventSet(
});
// Create proper a tags with format: kind:pubkey:d-tag
const aTags = sectionEvents.map(event => {
const aTags = sectionEvents.map((event) => {
const dTag = event.tags.find(([k]) => k === "d")?.[1];
return ["a", `30041:${baseEvent.pubkey}:${dTag}`] as [string, string];
});

71
src/lib/utils/event_kind_utils.ts

@ -1,4 +1,4 @@ @@ -1,4 +1,4 @@
import type { EventKindConfig } from '$lib/stores/visualizationConfig';
import type { EventKindConfig } from "$lib/stores/visualizationConfig";
/**
* Validates an event kind input value.
@ -7,29 +7,29 @@ import type { EventKindConfig } from '$lib/stores/visualizationConfig'; @@ -7,29 +7,29 @@ import type { EventKindConfig } from '$lib/stores/visualizationConfig';
* @returns The validated kind number, or null if validation fails.
*/
export function validateEventKind(
value: string | number,
existingKinds: number[]
value: string | number,
existingKinds: number[],
): { kind: number | null; error: string } {
// Convert to string for consistent handling
const strValue = String(value);
if (strValue === null || strValue === undefined || strValue.trim() === '') {
return { kind: null, error: '' };
if (strValue === null || strValue === undefined || strValue.trim() === "") {
return { kind: null, error: "" };
}
const kind = parseInt(strValue.trim());
if (isNaN(kind)) {
return { kind: null, error: 'Must be a number' };
return { kind: null, error: "Must be a number" };
}
if (kind < 0) {
return { kind: null, error: 'Must be non-negative' };
return { kind: null, error: "Must be non-negative" };
}
if (existingKinds.includes(kind)) {
return { kind: null, error: 'Already added' };
return { kind: null, error: "Already added" };
}
return { kind, error: '' };
return { kind, error: "" };
}
/**
@ -44,20 +44,20 @@ export function handleAddEventKind( @@ -44,20 +44,20 @@ export function handleAddEventKind(
newKind: string,
existingKinds: number[],
addKindFunction: (kind: number) => void,
resetStateFunction: () => void
resetStateFunction: () => void,
): { success: boolean; error: string } {
console.log('[handleAddEventKind] called with:', newKind);
console.log("[handleAddEventKind] called with:", newKind);
const validation = validateEventKind(newKind, existingKinds);
console.log('[handleAddEventKind] Validation result:', validation);
console.log("[handleAddEventKind] Validation result:", validation);
if (validation.kind !== null) {
console.log('[handleAddEventKind] Adding event kind:', validation.kind);
console.log("[handleAddEventKind] Adding event kind:", validation.kind);
addKindFunction(validation.kind);
resetStateFunction();
return { success: true, error: '' };
return { success: true, error: "" };
} else {
console.log('[handleAddEventKind] Validation failed:', validation.error);
console.log("[handleAddEventKind] Validation failed:", validation.error);
return { success: false, error: validation.error };
}
}
@ -71,11 +71,11 @@ export function handleAddEventKind( @@ -71,11 +71,11 @@ export function handleAddEventKind(
export function handleEventKindKeydown(
e: KeyboardEvent,
onEnter: () => void,
onEscape: () => void
onEscape: () => void,
): void {
if (e.key === 'Enter') {
if (e.key === "Enter") {
onEnter();
} else if (e.key === 'Escape') {
} else if (e.key === "Escape") {
onEscape();
}
}
@ -87,12 +87,19 @@ export function handleEventKindKeydown( @@ -87,12 +87,19 @@ export function handleEventKindKeydown(
*/
export function getEventKindDisplayName(kind: number): string {
switch (kind) {
case 30040: return 'Publication Index';
case 30041: return 'Publication Content';
case 30818: return 'Wiki';
case 1: return 'Text Note';
case 0: return 'Metadata';
case 3: return 'Follow List';
default: return `Kind ${kind}`;
case 30040:
return "Publication Index";
case 30041:
return "Publication Content";
case 30818:
return "Wiki";
case 1:
return "Text Note";
case 0:
return "Metadata";
case 3:
return "Follow List";
default:
return `Kind ${kind}`;
}
}
}

78
src/lib/utils/event_search.ts

@ -4,7 +4,7 @@ import { nip19 } from "nostr-tools"; @@ -4,7 +4,7 @@ import { nip19 } from "nostr-tools";
import { NDKEvent } from "@nostr-dev-kit/ndk";
import type { Filter } from "./search_types.ts";
import { get } from "svelte/store";
import { wellKnownUrl, isValidNip05Address } from "./search_utils.ts";
import { isValidNip05Address, wellKnownUrl } from "./search_utils.ts";
import { TIMEOUTS, VALIDATION } from "./search_constants.ts";
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
@ -22,31 +22,39 @@ export async function searchEvent(query: string): Promise<NDKEvent | null> { @@ -22,31 +22,39 @@ export async function searchEvent(query: string): Promise<NDKEvent | null> {
// This ensures searches can proceed even if some relay types are not available
let attempts = 0;
const maxAttempts = 5; // Reduced since we'll use fallback relays
while (attempts < maxAttempts) {
// Check if we have any relays in the pool
if (ndk.pool.relays.size > 0) {
console.log(`[Search] Found ${ndk.pool.relays.size} relays in NDK pool`);
break;
}
// Also check if we have any active relays
const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays);
if (inboxRelays.length > 0 || outboxRelays.length > 0) {
console.log(`[Search] Found active relays - inbox: ${inboxRelays.length}, outbox: ${outboxRelays.length}`);
console.log(
`[Search] Found active relays - inbox: ${inboxRelays.length}, outbox: ${outboxRelays.length}`,
);
break;
}
console.log(`[Search] Waiting for relays to be available (attempt ${attempts + 1}/${maxAttempts})`);
await new Promise(resolve => setTimeout(resolve, 500));
console.log(
`[Search] Waiting for relays to be available (attempt ${
attempts + 1
}/${maxAttempts})`,
);
await new Promise((resolve) => setTimeout(resolve, 500));
attempts++;
}
// AI-NOTE: 2025-01-24 - Don't fail if no relays are available, let fetchEventWithFallback handle fallbacks
// The fetchEventWithFallback function will use all available relays including fallback relays
if (ndk.pool.relays.size === 0) {
console.warn("[Search] No relays in pool, but proceeding with search - fallback relays will be used");
console.warn(
"[Search] No relays in pool, but proceeding with search - fallback relays will be used",
);
}
// Clean the query and normalize to lowercase
@ -89,50 +97,70 @@ export async function searchEvent(query: string): Promise<NDKEvent | null> { @@ -89,50 +97,70 @@ export async function searchEvent(query: string): Promise<NDKEvent | null> {
try {
const decoded = nip19.decode(cleanedQuery);
if (!decoded) throw new Error("Invalid identifier");
console.log(`[Search] Decoded identifier:`, {
type: decoded.type,
data: decoded.data,
query: cleanedQuery
query: cleanedQuery,
});
switch (decoded.type) {
case "nevent":
console.log(`[Search] Processing nevent:`, {
id: decoded.data.id,
kind: decoded.data.kind,
relays: decoded.data.relays
relays: decoded.data.relays,
});
// Use the relays from the nevent if available
if (decoded.data.relays && decoded.data.relays.length > 0) {
console.log(`[Search] Using relays from nevent:`, decoded.data.relays);
console.log(
`[Search] Using relays from nevent:`,
decoded.data.relays,
);
// Try to fetch the event using the nevent's relays
try {
// Create a temporary relay set for this search
const neventRelaySet = NDKRelaySetFromNDK.fromRelayUrls(decoded.data.relays, ndk);
const neventRelaySet = NDKRelaySetFromNDK.fromRelayUrls(
decoded.data.relays,
ndk,
);
if (neventRelaySet.relays.size > 0) {
console.log(`[Search] Created relay set with ${neventRelaySet.relays.size} relays from nevent`);
console.log(
`[Search] Created relay set with ${neventRelaySet.relays.size} relays from nevent`,
);
// Try to fetch the event using the nevent's relays
const event = await ndk
.fetchEvent({ ids: [decoded.data.id] }, undefined, neventRelaySet)
.fetchEvent(
{ ids: [decoded.data.id] },
undefined,
neventRelaySet,
)
.withTimeout(TIMEOUTS.EVENT_FETCH);
if (event) {
console.log(`[Search] Found event using nevent relays:`, event.id);
console.log(
`[Search] Found event using nevent relays:`,
event.id,
);
return event;
} else {
console.log(`[Search] Event not found on nevent relays, trying default relays`);
console.log(
`[Search] Event not found on nevent relays, trying default relays`,
);
}
}
} catch (error) {
console.warn(`[Search] Error fetching from nevent relays:`, error);
console.warn(
`[Search] Error fetching from nevent relays:`,
error,
);
}
}
filterOrId = decoded.data.id;
break;
case "note":

12
src/lib/utils/image_utils.ts

@ -11,14 +11,16 @@ export function generateDarkPastelColor(seed: string): string { @@ -11,14 +11,16 @@ export function generateDarkPastelColor(seed: string): string {
hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32-bit integer
}
// Use the hash to generate lighter pastel colors
// Keep values in the 120-200 range for better pastel effect
const r = Math.abs(hash) % 80 + 120; // 120-200 range
const g = Math.abs(hash >> 8) % 80 + 120; // 120-200 range
const g = Math.abs(hash >> 8) % 80 + 120; // 120-200 range
const b = Math.abs(hash >> 16) % 80 + 120; // 120-200 range
return `#${r.toString(16).padStart(2, '0')}${g.toString(16).padStart(2, '0')}${b.toString(16).padStart(2, '0')}`;
return `#${r.toString(16).padStart(2, "0")}${
g.toString(16).padStart(2, "0")
}${b.toString(16).padStart(2, "0")}`;
}
/**
@ -28,4 +30,4 @@ export function generateDarkPastelColor(seed: string): string { @@ -28,4 +30,4 @@ export function generateDarkPastelColor(seed: string): string {
*/
export function testColorGeneration(eventId: string): string {
return generateDarkPastelColor(eventId);
}
}

76
src/lib/utils/kind24_utils.ts

@ -18,7 +18,7 @@ import { buildCompleteRelaySet } from "./relay_management"; @@ -18,7 +18,7 @@ import { buildCompleteRelaySet } from "./relay_management";
*/
export async function getKind24RelaySet(
senderPubkey: string,
recipientPubkey: string
recipientPubkey: string,
): Promise<string[]> {
const ndk = get(ndkInstance);
if (!ndk) {
@ -27,14 +27,16 @@ export async function getKind24RelaySet( @@ -27,14 +27,16 @@ export async function getKind24RelaySet(
const senderPrefix = senderPubkey.slice(0, 8);
const recipientPrefix = recipientPubkey.slice(0, 8);
console.log(`[getKind24RelaySet] Getting relays for ${senderPrefix} -> ${recipientPrefix}`);
console.log(
`[getKind24RelaySet] Getting relays for ${senderPrefix} -> ${recipientPrefix}`,
);
try {
// Fetch both users' complete relay sets using existing utilities
const [senderRelaySet, recipientRelaySet] = await Promise.all([
buildCompleteRelaySet(ndk, ndk.getUser({ pubkey: senderPubkey })),
buildCompleteRelaySet(ndk, ndk.getUser({ pubkey: recipientPubkey }))
buildCompleteRelaySet(ndk, ndk.getUser({ pubkey: recipientPubkey })),
]);
// Use sender's outbox relays and recipient's inbox relays
@ -42,24 +44,33 @@ export async function getKind24RelaySet( @@ -42,24 +44,33 @@ export async function getKind24RelaySet(
const recipientInboxRelays = recipientRelaySet.inboxRelays;
// Prioritize common relays for better privacy
const commonRelays = senderOutboxRelays.filter(relay =>
const commonRelays = senderOutboxRelays.filter((relay) =>
recipientInboxRelays.includes(relay)
);
const senderOnlyRelays = senderOutboxRelays.filter(relay =>
const senderOnlyRelays = senderOutboxRelays.filter((relay) =>
!recipientInboxRelays.includes(relay)
);
const recipientOnlyRelays = recipientInboxRelays.filter(relay =>
const recipientOnlyRelays = recipientInboxRelays.filter((relay) =>
!senderOutboxRelays.includes(relay)
);
// Prioritize: common relays first, then sender outbox, then recipient inbox
const finalRelays = [...commonRelays, ...senderOnlyRelays, ...recipientOnlyRelays];
console.log(`[getKind24RelaySet] ${senderPrefix}->${recipientPrefix} - Common: ${commonRelays.length}, Sender-only: ${senderOnlyRelays.length}, Recipient-only: ${recipientOnlyRelays.length}, Total: ${finalRelays.length}`);
const finalRelays = [
...commonRelays,
...senderOnlyRelays,
...recipientOnlyRelays,
];
console.log(
`[getKind24RelaySet] ${senderPrefix}->${recipientPrefix} - Common: ${commonRelays.length}, Sender-only: ${senderOnlyRelays.length}, Recipient-only: ${recipientOnlyRelays.length}, Total: ${finalRelays.length}`,
);
return finalRelays;
} catch (error) {
console.error(`[getKind24RelaySet] Error getting relay set for ${senderPrefix}->${recipientPrefix}:`, error);
console.error(
`[getKind24RelaySet] Error getting relay set for ${senderPrefix}->${recipientPrefix}:`,
error,
);
throw error;
}
}
@ -74,8 +85,10 @@ export async function getKind24RelaySet( @@ -74,8 +85,10 @@ export async function getKind24RelaySet(
export async function createKind24Reply(
content: string,
recipientPubkey: string,
originalEvent?: NDKEvent
): Promise<{ success: boolean; eventId?: string; error?: string; relays?: string[] }> {
originalEvent?: NDKEvent,
): Promise<
{ success: boolean; eventId?: string; error?: string; relays?: string[] }
> {
const ndk = get(ndkInstance);
if (!ndk?.activeUser) {
return { success: false, error: "Not logged in" };
@ -87,49 +100,56 @@ export async function createKind24Reply( @@ -87,49 +100,56 @@ export async function createKind24Reply(
try {
// Get optimal relay set for this sender-recipient pair
const targetRelays = await getKind24RelaySet(ndk.activeUser.pubkey, recipientPubkey);
const targetRelays = await getKind24RelaySet(
ndk.activeUser.pubkey,
recipientPubkey,
);
if (targetRelays.length === 0) {
return { success: false, error: "No relays available for publishing" };
}
// Build tags for the kind 24 event
const tags: string[][] = [
["p", recipientPubkey, targetRelays[0]] // Use first relay as primary
["p", recipientPubkey, targetRelays[0]], // Use first relay as primary
];
// Add q tag if replying to an original event
if (originalEvent) {
tags.push(["q", originalEvent.id, targetRelays[0] || anonymousRelays[0]]);
}
// Create and sign the event
const { event: signedEventData } = await createSignedEvent(
content,
ndk.activeUser.pubkey,
24,
tags
tags,
);
// Create NDKEvent and publish
const event = new NDKEvent(ndk, signedEventData);
const relaySet = NDKRelaySet.fromRelayUrls(targetRelays, ndk);
const publishedToRelays = await event.publish(relaySet);
if (publishedToRelays.size > 0) {
console.log(`[createKind24Reply] Successfully published to ${publishedToRelays.size} relays`);
console.log(
`[createKind24Reply] Successfully published to ${publishedToRelays.size} relays`,
);
return { success: true, eventId: event.id, relays: targetRelays };
} else {
console.warn(`[createKind24Reply] Failed to publish to any relays`);
return { success: false, error: "Failed to publish to any relays", relays: targetRelays };
return {
success: false,
error: "Failed to publish to any relays",
relays: targetRelays,
};
}
} catch (error) {
console.error("[createKind24Reply] Error creating kind 24 reply:", error);
return {
success: false,
error: error instanceof Error ? error.message : "Unknown error"
return {
success: false,
error: error instanceof Error ? error.message : "Unknown error",
};
}
}

58
src/lib/utils/markup/MarkupInfo.md

@ -1,10 +1,14 @@ @@ -1,10 +1,14 @@
# Markup Support in Alexandria
Alexandria supports multiple markup formats for different use cases. Below is a summary of the supported tags and features for each parser, as well as the formats used for publications and wikis.
Alexandria supports multiple markup formats for different use cases. Below is a
summary of the supported tags and features for each parser, as well as the
formats used for publications and wikis.
## Basic Markup Parser
The **basic markup parser** follows the [Nostr best-practice guidelines](https://github.com/nostrability/nostrability/issues/146) and supports:
The **basic markup parser** follows the
[Nostr best-practice guidelines](https://github.com/nostrability/nostrability/issues/146)
and supports:
- **Headers:**
- ATX-style: `# H1` through `###### H6`
@ -18,7 +22,8 @@ The **basic markup parser** follows the [Nostr best-practice guidelines](https:/ @@ -18,7 +22,8 @@ The **basic markup parser** follows the [Nostr best-practice guidelines](https:/
- **Links:** `[text](url)`
- **Images:** `![alt](url)`
- **Hashtags:** `#hashtag`
- **Nostr identifiers:** npub, nprofile, nevent, naddr, note, with or without `nostr:` prefix (note is deprecated)
- **Nostr identifiers:** npub, nprofile, nevent, naddr, note, with or without
`nostr:` prefix (note is deprecated)
- **Emoji shortcodes:** `:smile:` will render as 😄
## Advanced Markup Parser
@ -26,17 +31,25 @@ The **basic markup parser** follows the [Nostr best-practice guidelines](https:/ @@ -26,17 +31,25 @@ The **basic markup parser** follows the [Nostr best-practice guidelines](https:/
The **advanced markup parser** includes all features of the basic parser, plus:
- **Inline code:** `` `code` ``
- **Syntax highlighting:** for code blocks in many programming languages (from [highlight.js](https://highlightjs.org/))
- **Syntax highlighting:** for code blocks in many programming languages (from
[highlight.js](https://highlightjs.org/))
- **Tables:** Pipe-delimited tables with or without headers
- **Footnotes:** `[^1]` or `[^Smith]`, which should appear where the footnote shall be placed, and will be displayed as unique, consecutive numbers
- **Footnote References:** `[^1]: footnote text` or `[^Smith]: Smith, Adam. 1984 "The Wiggle Mysteries`, which will be listed in order, at the bottom of the event, with back-reference links to the footnote, and text footnote labels appended
- **Wikilinks:** `[[NIP-54]]` will render as a hyperlink and goes to [NIP-54](./events?d=nip-54)
- **Footnotes:** `[^1]` or `[^Smith]`, which should appear where the footnote
shall be placed, and will be displayed as unique, consecutive numbers
- **Footnote References:** `[^1]: footnote text` or
`[^Smith]: Smith, Adam. 1984 "The Wiggle Mysteries`, which will be listed in
order, at the bottom of the event, with back-reference links to the footnote,
and text footnote labels appended
- **Wikilinks:** `[[NIP-54]]` will render as a hyperlink and goes to
[NIP-54](./events?d=nip-54)
## Publications and Wikis
**Publications** and **wikis** in Alexandria use **AsciiDoc** as their primary markup language, not Markdown.
**Publications** and **wikis** in Alexandria use **AsciiDoc** as their primary
markup language, not Markdown.
AsciiDoc supports a much broader set of formatting, semantic, and structural features, including:
AsciiDoc supports a much broader set of formatting, semantic, and structural
features, including:
- Section and document structure
- Advanced tables, callouts, admonitions
@ -48,7 +61,8 @@ AsciiDoc supports a much broader set of formatting, semantic, and structural fea @@ -48,7 +61,8 @@ AsciiDoc supports a much broader set of formatting, semantic, and structural fea
### Advanced Content Types
Alexandria supports rendering of advanced content types commonly used in academic, technical, and business documents:
Alexandria supports rendering of advanced content types commonly used in
academic, technical, and business documents:
#### Math Rendering
@ -113,18 +127,26 @@ TikZ diagrams for mathematical illustrations: @@ -113,18 +127,26 @@ TikZ diagrams for mathematical illustrations:
### Rendering Features
- **Automatic Detection**: Content types are automatically detected based on syntax
- **Fallback Display**: If rendering fails, the original source code is displayed
- **Automatic Detection**: Content types are automatically detected based on
syntax
- **Fallback Display**: If rendering fails, the original source code is
displayed
- **Source Code**: Click "Show source" to view the original code
- **Responsive Design**: All rendered content is responsive and works on mobile devices
- **Responsive Design**: All rendered content is responsive and works on mobile
devices
For more information on AsciiDoc, see the [AsciiDoc documentation](https://asciidoc.org/).
For more information on AsciiDoc, see the
[AsciiDoc documentation](https://asciidoc.org/).
---
**Note:**
- The markdown parsers are primarily used for comments, issues, and other user-generated content.
- Publications and wikis are rendered using AsciiDoc for maximum expressiveness and compatibility.
- All URLs are sanitized to remove tracking parameters, and YouTube links are presented in a clean, privacy-friendly format.
- [Here is a test markup file](/tests/integration/markupTestfile.md) that you can use to test out the parser and see how things should be formatted.
- The markdown parsers are primarily used for comments, issues, and other
user-generated content.
- Publications and wikis are rendered using AsciiDoc for maximum expressiveness
and compatibility.
- All URLs are sanitized to remove tracking parameters, and YouTube links are
presented in a clean, privacy-friendly format.
- [Here is a test markup file](/tests/integration/markupTestfile.md) that you
can use to test out the parser and see how things should be formatted.

3
src/lib/utils/markup/advancedAsciidoctorPostProcessor.ts

@ -188,7 +188,8 @@ function processPlantUMLBlocks(html: string): string { @@ -188,7 +188,8 @@ function processPlantUMLBlocks(html: string): string {
try {
const rawContent = decodeHTMLEntities(content);
const encoded = plantumlEncoder.encode(rawContent);
const plantUMLUrl = `https://www.plantuml.com/plantuml/svg/${encoded}`;
const plantUMLUrl =
`https://www.plantuml.com/plantuml/svg/${encoded}`;
return `<div class="plantuml-block my-4">
<img src="${plantUMLUrl}" alt="PlantUML diagram"
class="plantuml-diagram max-w-full h-auto rounded-lg shadow-lg"

44
src/lib/utils/markup/advancedMarkupParser.ts

@ -10,8 +10,9 @@ hljs.configure({ @@ -10,8 +10,9 @@ hljs.configure({
// Escapes HTML characters for safe display
function escapeHtml(text: string): string {
const div =
typeof document !== "undefined" ? document.createElement("div") : null;
const div = typeof document !== "undefined"
? document.createElement("div")
: null;
if (div) {
div.textContent = text;
return div.innerHTML;
@ -100,8 +101,8 @@ function processTables(content: string): string { @@ -100,8 +101,8 @@ function processTables(content: string): string {
};
// Check if second row is a delimiter row (only hyphens)
const hasHeader =
rows.length > 1 && rows[1].trim().match(/^\|[-\s|]+\|$/);
const hasHeader = rows.length > 1 &&
rows[1].trim().match(/^\|[-\s|]+\|$/);
// Extract header and body rows
let headerCells: string[] = [];
@ -124,7 +125,8 @@ function processTables(content: string): string { @@ -124,7 +125,8 @@ function processTables(content: string): string {
if (hasHeader) {
html += "<thead>\n<tr>\n";
headerCells.forEach((cell) => {
html += `<th class="py-2 px-4 text-left border-b-2 border-gray-200 dark:border-gray-700 font-semibold">${cell}</th>\n`;
html +=
`<th class="py-2 px-4 text-left border-b-2 border-gray-200 dark:border-gray-700 font-semibold">${cell}</th>\n`;
});
html += "</tr>\n</thead>\n";
}
@ -135,7 +137,8 @@ function processTables(content: string): string { @@ -135,7 +137,8 @@ function processTables(content: string): string {
const cells = processCells(row);
html += "<tr>\n";
cells.forEach((cell) => {
html += `<td class="py-2 px-4 text-left border-b border-gray-200 dark:border-gray-700">${cell}</td>\n`;
html +=
`<td class="py-2 px-4 text-left border-b border-gray-200 dark:border-gray-700">${cell}</td>\n`;
});
html += "</tr>\n";
});
@ -197,7 +200,9 @@ function processFootnotes(content: string): string { @@ -197,7 +200,9 @@ function processFootnotes(content: string): string {
if (!referenceMap.has(id)) referenceMap.set(id, []);
referenceMap.get(id)!.push(refNum);
referenceOrder.push({ id, refNum, label: id });
return `<sup><a href="#fn-${id}" id="fnref-${id}-${referenceMap.get(id)!.length}" class="text-primary-600 hover:underline">[${refNum}]</a></sup>`;
return `<sup><a href="#fn-${id}" id="fnref-${id}-${
referenceMap.get(id)!.length
}" class="text-primary-600 hover:underline">[${refNum}]</a></sup>`;
},
);
@ -216,12 +221,15 @@ function processFootnotes(content: string): string { @@ -216,12 +221,15 @@ function processFootnotes(content: string): string {
const backrefs = refs
.map(
(num, i) =>
`<a href=\"#fnref-${id}-${i + 1}\" class=\"text-primary-600 hover:underline footnote-backref\">↩${num}</a>`,
`<a href=\"#fnref-${id}-${
i + 1
}\" class=\"text-primary-600 hover:underline footnote-backref\">${num}</a>`,
)
.join(" ");
// If label is not a number, show it after all backrefs
const labelSuffix = isNaN(Number(label)) ? ` ${label}` : "";
processedContent += `<li id=\"fn-${id}\"><span class=\"marker\">${text}</span> ${backrefs}${labelSuffix}</li>\n`;
processedContent +=
`<li id=\"fn-${id}\"><span class=\"marker\">${text}</span> ${backrefs}${labelSuffix}</li>\n`;
}
processedContent += "</ol>";
}
@ -233,8 +241,6 @@ function processFootnotes(content: string): string { @@ -233,8 +241,6 @@ function processFootnotes(content: string): string {
}
}
/**
* Process code blocks by finding consecutive code lines and preserving their content
*/
@ -357,13 +363,17 @@ function restoreCodeBlocks(text: string, blocks: Map<string, string>): string { @@ -357,13 +363,17 @@ function restoreCodeBlocks(text: string, blocks: Map<string, string>): string {
language,
ignoreIllegals: true,
}).value;
html = `<pre class="code-block"><code class="hljs language-${language}">${highlighted}</code></pre>`;
html =
`<pre class="code-block"><code class="hljs language-${language}">${highlighted}</code></pre>`;
} catch (e: unknown) {
console.warn("Failed to highlight code block:", e);
html = `<pre class="code-block"><code class="hljs ${language ? `language-${language}` : ""}">${code}</code></pre>`;
html = `<pre class="code-block"><code class="hljs ${
language ? `language-${language}` : ""
}">${code}</code></pre>`;
}
} else {
html = `<pre class="code-block"><code class="hljs">${code}</code></pre>`;
html =
`<pre class="code-block"><code class="hljs">${code}</code></pre>`;
}
result = result.replace(id, html);
@ -672,8 +682,6 @@ function isLaTeXContent(content: string): boolean { @@ -672,8 +682,6 @@ function isLaTeXContent(content: string): boolean {
return latexPatterns.some((pattern) => pattern.test(trimmed));
}
/**
* Parse markup text with advanced formatting
*/
@ -711,6 +719,8 @@ export async function parseAdvancedmarkup(text: string): Promise<string> { @@ -711,6 +719,8 @@ export async function parseAdvancedmarkup(text: string): Promise<string> {
return processedText;
} catch (e: unknown) {
console.error("Error in parseAdvancedmarkup:", e);
return `<div class="text-red-500">Error processing markup: ${(e as Error)?.message ?? "Unknown error"}</div>`;
return `<div class="text-red-500">Error processing markup: ${
(e as Error)?.message ?? "Unknown error"
}</div>`;
}
}

24
src/lib/utils/markup/asciidoctorPostProcessor.ts

@ -1,6 +1,9 @@ @@ -1,6 +1,9 @@
import { processImageWithReveal, processNostrIdentifiersInText, processWikilinks, processAsciiDocAnchors } from "./markupServices";
import {
processAsciiDocAnchors,
processImageWithReveal,
processNostrIdentifiersInText,
processWikilinks,
} from "./markupServices";
/**
* Processes nostr addresses in HTML content, but skips addresses that are
@ -41,8 +44,7 @@ async function processNostrAddresses(html: string): Promise<string> { @@ -41,8 +44,7 @@ async function processNostrAddresses(html: string): Promise<string> {
const processedMatch = await processNostrIdentifiersInText(fullMatch);
// Replace the match in the HTML
processedHtml =
processedHtml.slice(0, matchIndex) +
processedHtml = processedHtml.slice(0, matchIndex) +
processedMatch +
processedHtml.slice(matchIndex + fullMatch.length);
}
@ -61,18 +63,18 @@ function processImageBlocks(html: string): string { @@ -61,18 +63,18 @@ function processImageBlocks(html: string): string {
// Extract src and alt from img attributes
const srcMatch = imgAttributes.match(/src="([^"]+)"/);
const altMatch = imgAttributes.match(/alt="([^"]*)"/);
const src = srcMatch ? srcMatch[1] : '';
const alt = altMatch ? altMatch[1] : '';
const titleHtml = title ? `<div class="title">${title}</div>` : '';
const src = srcMatch ? srcMatch[1] : "";
const alt = altMatch ? altMatch[1] : "";
const titleHtml = title ? `<div class="title">${title}</div>` : "";
return `<div class="imageblock">
<div class="content">
${processImageWithReveal(src, alt)}
</div>
${titleHtml}
</div>`;
}
},
);
}

51
src/lib/utils/markup/basicMarkupParser.ts

@ -1,16 +1,16 @@ @@ -1,16 +1,16 @@
import * as emoji from "node-emoji";
import { nip19 } from "nostr-tools";
import {
processImageWithReveal,
processMediaUrl,
processNostrIdentifiersInText,
processEmojiShortcodes,
processWebSocketUrls,
processHashtags,
import {
processBasicTextFormatting,
processBlockquotes,
processEmojiShortcodes,
processHashtags,
processImageWithReveal,
processMediaUrl,
processNostrIdentifiersInText,
processWebSocketUrls,
processWikilinks,
stripTrackingParams
stripTrackingParams,
} from "./markupServices";
/* Regex constants for basic markup parsing */
@ -21,8 +21,6 @@ const MARKUP_IMAGE = /!\[([^\]]*)\]\(([^)]+)\)/g; @@ -21,8 +21,6 @@ const MARKUP_IMAGE = /!\[([^\]]*)\]\(([^)]+)\)/g;
// AI-NOTE: 2025-01-24 - Added negative lookbehind (?<!\]\() to prevent processing URLs in markdown syntax
const DIRECT_LINK = /(?<!["'=])(?<!\]\()(https?:\/\/[^\s<>"]+)(?!["'])/g;
// Add this helper function near the top:
function replaceAlexandriaNostrLinks(text: string): string {
// Regex for Alexandria/localhost URLs
@ -82,12 +80,6 @@ function replaceAlexandriaNostrLinks(text: string): string { @@ -82,12 +80,6 @@ function replaceAlexandriaNostrLinks(text: string): string {
return text;
}
function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string {
function parseList(
start: number,
@ -96,7 +88,9 @@ function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string { @@ -96,7 +88,9 @@ function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string {
): [string, number] {
let html = "";
let i = start;
html += `<${type} class="${type === "ol" ? "list-decimal" : "list-disc"} ml-6 mb-2">`;
html += `<${type} class="${
type === "ol" ? "list-decimal" : "list-disc"
} ml-6 mb-2">`;
while (i < lines.length) {
const line = lines[i];
const match = line.match(/^([ \t]*)([*+-]|\d+\.)[ \t]+(.*)$/);
@ -168,7 +162,9 @@ function processBasicFormatting(content: string): string { @@ -168,7 +162,9 @@ function processBasicFormatting(content: string): string {
processedText = processedText.replace(
MARKUP_LINK,
(_match, text, url) =>
`<a href="${stripTrackingParams(url)}" class="text-primary-600 dark:text-primary-500 hover:underline" target="_blank" rel="noopener noreferrer">${text}</a>`,
`<a href="${
stripTrackingParams(url)
}" class="text-primary-600 dark:text-primary-500 hover:underline" target="_blank" rel="noopener noreferrer">${text}</a>`,
);
// Process WebSocket URLs using shared services
@ -181,7 +177,7 @@ function processBasicFormatting(content: string): string { @@ -181,7 +177,7 @@ function processBasicFormatting(content: string): string {
// Process text formatting using shared services
processedText = processBasicTextFormatting(processedText);
// Process hashtags using shared services
processedText = processHashtags(processedText);
@ -220,12 +216,6 @@ function processBasicFormatting(content: string): string { @@ -220,12 +216,6 @@ function processBasicFormatting(content: string): string {
return processedText;
}
export async function parseBasicmarkup(text: string): Promise<string> {
if (!text) return "";
@ -249,9 +239,10 @@ export async function parseBasicmarkup(text: string): Promise<string> { @@ -249,9 +239,10 @@ export async function parseBasicmarkup(text: string): Promise<string> {
// AI-NOTE: 2025-01-24 - Added img tag to skip wrapping to prevent image rendering issues
// Skip wrapping if para already contains block-level elements, math blocks, or images
if (
/(<div[^>]*class=["'][^"']*math-block[^"']*["'])|<(div|h[1-6]|blockquote|table|pre|ul|ol|hr|img)/i.test(
para,
)
/(<div[^>]*class=["'][^"']*math-block[^"']*["'])|<(div|h[1-6]|blockquote|table|pre|ul|ol|hr|img)/i
.test(
para,
)
) {
return para;
}
@ -268,6 +259,8 @@ export async function parseBasicmarkup(text: string): Promise<string> { @@ -268,6 +259,8 @@ export async function parseBasicmarkup(text: string): Promise<string> {
return processedText;
} catch (e: unknown) {
console.error("Error in parseBasicmarkup:", e);
return `<div class="text-red-500">Error processing markup: ${(e as Error)?.message ?? "Unknown error"}</div>`;
return `<div class="text-red-500">Error processing markup: ${
(e as Error)?.message ?? "Unknown error"
}</div>`;
}
}

56
src/lib/utils/markup/embeddedMarkupParser.ts

@ -1,18 +1,18 @@ @@ -1,18 +1,18 @@
import * as emoji from "node-emoji";
import { nip19 } from "nostr-tools";
import {
processImageWithReveal,
processMediaUrl,
processNostrIdentifiersInText,
processEmojiShortcodes,
processWebSocketUrls,
processHashtags,
import {
processBasicTextFormatting,
processBlockquotes,
processWikilinks,
processEmojiShortcodes,
processHashtags,
processImageWithReveal,
processMediaUrl,
processNostrIdentifiersInText,
processNostrIdentifiersWithEmbeddedEvents,
stripTrackingParams
} from "./markupServices";
processWebSocketUrls,
processWikilinks,
stripTrackingParams,
} from "./markupServices.ts";
/* Regex constants for basic markup parsing */
@ -89,7 +89,9 @@ function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string { @@ -89,7 +89,9 @@ function renderListGroup(lines: string[], typeHint?: "ol" | "ul"): string {
): [string, number] {
let html = "";
let i = start;
html += `<${type} class="${type === "ol" ? "list-decimal" : "list-disc"} ml-6 mb-2">`;
html += `<${type} class="${
type === "ol" ? "list-decimal" : "list-disc"
} ml-6 mb-2">`;
while (i < lines.length) {
const line = lines[i];
const match = line.match(/^([ \t]*)([*+-]|\d+\.)[ \t]+(.*)$/);
@ -161,7 +163,9 @@ function processBasicFormatting(content: string): string { @@ -161,7 +163,9 @@ function processBasicFormatting(content: string): string {
processedText = processedText.replace(
MARKUP_LINK,
(_match, text, url) =>
`<a href="${stripTrackingParams(url)}" class="text-primary-600 dark:text-primary-500 hover:underline" target="_blank" rel="noopener noreferrer">${text}</a>`,
`<a href="${
stripTrackingParams(url)
}" class="text-primary-600 dark:text-primary-500 hover:underline" target="_blank" rel="noopener noreferrer">${text}</a>`,
);
// Process WebSocket URLs using shared services
@ -174,7 +178,7 @@ function processBasicFormatting(content: string): string { @@ -174,7 +178,7 @@ function processBasicFormatting(content: string): string {
// Process text formatting using shared services
processedText = processBasicTextFormatting(processedText);
// Process hashtags using shared services
processedText = processHashtags(processedText);
@ -218,7 +222,10 @@ function processBasicFormatting(content: string): string { @@ -218,7 +222,10 @@ function processBasicFormatting(content: string): string {
* AI-NOTE: 2025-01-24 - Enhanced markup parser that supports nested Nostr event embedding
* Up to 3 levels of nesting are supported, after which events are shown as links
*/
export async function parseEmbeddedMarkup(text: string, nestingLevel: number = 0): Promise<string> {
export async function parseEmbeddedMarkup(
text: string,
nestingLevel: number = 0,
): Promise<string> {
if (!text) return "";
try {
@ -233,29 +240,30 @@ export async function parseEmbeddedMarkup(text: string, nestingLevel: number = 0 @@ -233,29 +240,30 @@ export async function parseEmbeddedMarkup(text: string, nestingLevel: number = 0
// Process paragraphs - split by double newlines and wrap in p tags
// Skip wrapping if content already contains block-level elements
const blockLevelEls =
/(<div[^>]*class=["'][^"']*math-block[^"']*["'])|<(div|h[1-6]|blockquote|table|pre|ul|ol|hr|img)/i;
processedText = processedText
.split(/\n\n+/)
.map((para) => para.trim())
.filter((para) => para.length > 0)
.map((para) => {
// AI-NOTE: 2025-01-24 - Added img tag to skip wrapping to prevent image rendering issues
// Skip wrapping if para already contains block-level elements, math blocks, or images
if (
/(<div[^>]*class=["'][^"']*math-block[^"']*["'])|<(div|h[1-6]|blockquote|table|pre|ul|ol|hr|img)/i.test(
para,
)
) {
if (blockLevelEls.test(para)) {
return para;
}
return `<p class="my-1">${para}</p>`;
})
.join("\n");
// Process profile identifiers (npub, nprofile) first using the regular processor
processedText = await processNostrIdentifiersInText(processedText);
// Then process event identifiers with embedded events (only event-related identifiers)
processedText = processNostrIdentifiersWithEmbeddedEvents(processedText, nestingLevel);
processedText = processNostrIdentifiersWithEmbeddedEvents(
processedText,
nestingLevel,
);
// Replace wikilinks
processedText = processWikilinks(processedText);
@ -263,6 +271,8 @@ export async function parseEmbeddedMarkup(text: string, nestingLevel: number = 0 @@ -263,6 +271,8 @@ export async function parseEmbeddedMarkup(text: string, nestingLevel: number = 0
return processedText;
} catch (e: unknown) {
console.error("Error in parseEmbeddedMarkup:", e);
return `<div class="text-red-500">Error processing markup: ${(e as Error)?.message ?? "Unknown error"}</div>`;
return `<div class="text-red-500">Error processing markup: ${
(e as Error)?.message ?? "Unknown error"
}</div>`;
}
}

147
src/lib/utils/markup/markupServices.ts

@ -1,18 +1,25 @@ @@ -1,18 +1,25 @@
import { processNostrIdentifiers, NOSTR_PROFILE_REGEX } from "../nostrUtils.ts";
import {
createProfileLink,
getUserMetadata,
NOSTR_PROFILE_REGEX,
} from "../nostrUtils.ts";
import * as emoji from "node-emoji";
// Media URL patterns
const IMAGE_EXTENSIONS = /\.(jpg|jpeg|gif|png|webp|svg)$/i;
const VIDEO_URL_REGEX = /https?:\/\/[^\s<]+\.(?:mp4|webm|mov|avi)(?:[^\s<]*)?/i;
const AUDIO_URL_REGEX = /https?:\/\/[^\s<]+\.(?:mp3|wav|ogg|m4a)(?:[^\s<]*)?/i;
const YOUTUBE_URL_REGEX = /https?:\/\/(?:www\.)?(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/|youtube-nocookie\.com\/embed\/)([a-zA-Z0-9_-]{11})(?:[^\s<]*)?/;
const YOUTUBE_URL_REGEX =
/https?:\/\/(?:www\.)?(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/|youtube-nocookie\.com\/embed\/)([a-zA-Z0-9_-]{11})(?:[^\s<]*)?/;
/**
* Shared service for processing images with expand functionality
*/
export function processImageWithReveal(src: string, alt: string = "Image"): string {
export function processImageWithReveal(
src: string,
alt: string = "Image",
): string {
if (!src || !IMAGE_EXTENSIONS.test(src.split("?")[0])) {
return `<img src="${src}" alt="${alt}">`;
}
@ -43,26 +50,32 @@ export function processImageWithReveal(src: string, alt: string = "Image"): stri @@ -43,26 +50,32 @@ export function processImageWithReveal(src: string, alt: string = "Image"): stri
*/
export function processMediaUrl(url: string, alt?: string): string {
const clean = stripTrackingParams(url);
if (YOUTUBE_URL_REGEX.test(clean)) {
const videoId = extractYouTubeVideoId(clean);
if (videoId) {
return `<iframe class="w-full aspect-video rounded-lg shadow-lg my-2" src="https://www.youtube-nocookie.com/embed/${videoId}" title="${alt || "YouTube video"}" frameborder="0" allow="fullscreen" sandbox="allow-scripts allow-same-origin allow-presentation"></iframe>`;
return `<iframe class="w-full aspect-video rounded-lg shadow-lg my-2" src="https://www.youtube-nocookie.com/embed/${videoId}" title="${
alt || "YouTube video"
}" frameborder="0" allow="fullscreen" sandbox="allow-scripts allow-same-origin allow-presentation"></iframe>`;
}
}
if (VIDEO_URL_REGEX.test(clean)) {
return `<video controls class="max-w-full rounded-lg shadow-lg my-2" preload="none" playsinline><source src="${clean}">${alt || "Video"}</video>`;
return `<video controls class="max-w-full rounded-lg shadow-lg my-2" preload="none" playsinline><source src="${clean}">${
alt || "Video"
}</video>`;
}
if (AUDIO_URL_REGEX.test(clean)) {
return `<audio controls class="w-full my-2" preload="none"><source src="${clean}">${alt || "Audio"}</audio>`;
return `<audio controls class="w-full my-2" preload="none"><source src="${clean}">${
alt || "Audio"
}</audio>`;
}
if (IMAGE_EXTENSIONS.test(clean.split("?")[0])) {
return processImageWithReveal(clean, alt || "Embedded media");
}
// Default to clickable link
return `<a href="${clean}" target="_blank" rel="noopener noreferrer" class="text-blue-500 hover:text-blue-600 dark:text-blue-400 dark:hover:text-blue-300">${clean}</a>`;
}
@ -70,40 +83,45 @@ export function processMediaUrl(url: string, alt?: string): string { @@ -70,40 +83,45 @@ export function processMediaUrl(url: string, alt?: string): string {
/**
* Shared service for processing nostr identifiers
*/
export async function processNostrIdentifiersInText(text: string): Promise<string> {
export async function processNostrIdentifiersInText(
text: string,
): Promise<string> {
let processedText = text;
// Find all profile-related nostr addresses (only npub and nprofile)
const matches = Array.from(processedText.matchAll(NOSTR_PROFILE_REGEX));
// Process them in reverse order to avoid index shifting issues
for (let i = matches.length - 1; i >= 0; i--) {
const match = matches[i];
const [fullMatch] = match;
const matchIndex = match.index ?? 0;
// Skip if part of a URL
const before = processedText.slice(Math.max(0, matchIndex - 12), matchIndex);
const before = processedText.slice(
Math.max(0, matchIndex - 12),
matchIndex,
);
if (/https?:\/\/$|www\.$/i.test(before)) {
continue;
}
// Process the nostr identifier directly
let identifier = fullMatch;
if (!identifier.startsWith("nostr:")) {
identifier = "nostr:" + identifier;
}
// Get user metadata and create link
const { getUserMetadata, createProfileLink } = await import("../nostrUtils.ts");
const metadata = await getUserMetadata(identifier);
const displayText = metadata.displayName || metadata.name;
const link = createProfileLink(identifier, displayText);
// Replace the match in the text
processedText = processedText.slice(0, matchIndex) + link + processedText.slice(matchIndex + fullMatch.length);
processedText = processedText.slice(0, matchIndex) + link +
processedText.slice(matchIndex + fullMatch.length);
}
return processedText;
}
@ -112,37 +130,45 @@ export async function processNostrIdentifiersInText(text: string): Promise<strin @@ -112,37 +130,45 @@ export async function processNostrIdentifiersInText(text: string): Promise<strin
* Replaces nostr: links with embedded event placeholders
* Only processes event-related identifiers (nevent, naddr, note), not profile identifiers (npub, nprofile)
*/
export function processNostrIdentifiersWithEmbeddedEvents(text: string, nestingLevel: number = 0): string {
export function processNostrIdentifiersWithEmbeddedEvents(
text: string,
nestingLevel: number = 0,
): string {
const eventPattern = /nostr:(note|nevent|naddr)[a-zA-Z0-9]{20,}/g;
let processedText = text;
// Maximum nesting level allowed
const MAX_NESTING_LEVEL = 3;
// Find all event-related nostr addresses
const matches = Array.from(processedText.matchAll(eventPattern));
// Process them in reverse order to avoid index shifting issues
for (let i = matches.length - 1; i >= 0; i--) {
const match = matches[i];
const [fullMatch] = match;
const matchIndex = match.index ?? 0;
let replacement: string;
if (nestingLevel >= MAX_NESTING_LEVEL) {
// At max nesting level, just show the link
replacement = `<a href="/events?id=${fullMatch}" class="text-primary-600 dark:text-primary-500 hover:underline break-all">${fullMatch}</a>`;
replacement =
`<a href="/events?id=${fullMatch}" class="text-primary-600 dark:text-primary-500 hover:underline break-all">${fullMatch}</a>`;
} else {
// Create a placeholder for embedded event
const componentId = `embedded-event-${Math.random().toString(36).substr(2, 9)}`;
replacement = `<div class="embedded-event-placeholder" data-nostr-id="${fullMatch}" data-nesting-level="${nestingLevel}" id="${componentId}"></div>`;
const componentId = `embedded-event-${
Math.random().toString(36).substr(2, 9)
}`;
replacement =
`<div class="embedded-event-placeholder" data-nostr-id="${fullMatch}" data-nesting-level="${nestingLevel}" id="${componentId}"></div>`;
}
// Replace the match in the text
processedText = processedText.slice(0, matchIndex) + replacement + processedText.slice(matchIndex + fullMatch.length);
processedText = processedText.slice(0, matchIndex) + replacement +
processedText.slice(matchIndex + fullMatch.length);
}
return processedText;
}
@ -169,7 +195,10 @@ export function processWebSocketUrls(text: string): string { @@ -169,7 +195,10 @@ export function processWebSocketUrls(text: string): string {
*/
export function processHashtags(text: string): string {
const hashtagRegex = /(?<![^\s])#([a-zA-Z0-9_]+)(?!\w)/g;
return text.replace(hashtagRegex, '<button class="text-primary-600 dark:text-primary-500 hover:underline cursor-pointer" onclick="window.location.href=\'/events?t=$1\'">#$1</button>');
return text.replace(
hashtagRegex,
'<button class="text-primary-600 dark:text-primary-500 hover:underline cursor-pointer" onclick="window.location.href=\'/events?t=$1\'">#$1</button>',
);
}
/**
@ -177,20 +206,26 @@ export function processHashtags(text: string): string { @@ -177,20 +206,26 @@ export function processHashtags(text: string): string {
*/
export function processBasicTextFormatting(text: string): string {
// Bold: **text** or *text*
text = text.replace(/(\*\*|[*])((?:[^*\n]|\*(?!\*))+)\1/g, "<strong>$2</strong>");
text = text.replace(
/(\*\*|[*])((?:[^*\n]|\*(?!\*))+)\1/g,
"<strong>$2</strong>",
);
// Italic: _text_ or __text__
text = text.replace(/\b(_[^_\n]+_|\b__[^_\n]+__)\b/g, (match) => {
const text = match.replace(/^_+|_+$/g, "");
return `<em>${text}</em>`;
});
// Strikethrough: ~~text~~ or ~text~
text = text.replace(/~~([^~\n]+)~~|~([^~\n]+)~/g, (_match, doubleText, singleText) => {
const text = doubleText || singleText;
return `<del class="line-through">${text}</del>`;
});
text = text.replace(
/~~([^~\n]+)~~|~([^~\n]+)~/g,
(_match, doubleText, singleText) => {
const text = doubleText || singleText;
return `<del class="line-through">${text}</del>`;
},
);
return text;
}
@ -203,7 +238,9 @@ export function processBlockquotes(text: string): string { @@ -203,7 +238,9 @@ export function processBlockquotes(text: string): string {
const lines = match.split("\n").map((line) => {
return line.replace(/^[ \t]*>[ \t]?/, "").trim();
});
return `<blockquote class="pl-4 border-l-4 border-gray-300 dark:border-gray-600 my-4">${lines.join("\n")}</blockquote>`;
return `<blockquote class="pl-4 border-l-4 border-gray-300 dark:border-gray-600 my-4">${
lines.join("\n")
}</blockquote>`;
});
}
@ -212,8 +249,16 @@ export function stripTrackingParams(url: string): string { @@ -212,8 +249,16 @@ export function stripTrackingParams(url: string): string {
try {
const urlObj = new URL(url);
// Remove common tracking parameters
const trackingParams = ['utm_source', 'utm_medium', 'utm_campaign', 'utm_term', 'utm_content', 'fbclid', 'gclid'];
trackingParams.forEach(param => urlObj.searchParams.delete(param));
const trackingParams = [
"utm_source",
"utm_medium",
"utm_campaign",
"utm_term",
"utm_content",
"fbclid",
"gclid",
];
trackingParams.forEach((param) => urlObj.searchParams.delete(param));
return urlObj.toString();
} catch {
return url;
@ -221,7 +266,9 @@ export function stripTrackingParams(url: string): string { @@ -221,7 +266,9 @@ export function stripTrackingParams(url: string): string {
}
function extractYouTubeVideoId(url: string): string | null {
const match = url.match(/(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/|youtube-nocookie\.com\/embed\/)([a-zA-Z0-9_-]{11})/);
const match = url.match(
/(?:youtube\.com\/(?:watch\?v=|embed\/)|youtu\.be\/|youtube-nocookie\.com\/embed\/)([a-zA-Z0-9_-]{11})/,
);
return match ? match[1] : null;
}
@ -263,4 +310,4 @@ export function processAsciiDocAnchors(text: string): string { @@ -263,4 +310,4 @@ export function processAsciiDocAnchors(text: string): string {
const url = `/events?d=${normalized}`;
return `<a class="wikilink text-primary-600 dark:text-primary-500 hover:underline" data-dtag="${normalized}" data-url="${url}" href="${url}">${id}</a>`;
});
}
}

4
src/lib/utils/markup/tikzRenderer.ts

@ -44,7 +44,9 @@ function createBasicSVG(tikzCode: string): string { @@ -44,7 +44,9 @@ function createBasicSVG(tikzCode: string): string {
</text>
<foreignObject x="10" y="60" width="${width - 20}" height="${height - 70}">
<div xmlns="http://www.w3.org/1999/xhtml" style="font-family: monospace; font-size: 10px; color: #666; overflow: hidden;">
<pre style="margin: 0; white-space: pre-wrap; word-break: break-all;">${escapeHtml(tikzCode)}</pre>
<pre style="margin: 0; white-space: pre-wrap; word-break: break-all;">${
escapeHtml(tikzCode)
}</pre>
</div>
</foreignObject>
</svg>`;

2
src/lib/utils/mime.ts

@ -104,7 +104,7 @@ export function getMimeTags(kind: number): [string, string][] { @@ -104,7 +104,7 @@ export function getMimeTags(kind: number): [string, string][] {
MTag = ["M", `article/long-form/${replaceability}`];
break;
// Add more cases as needed...
// Add more cases as needed...
}
return [mTag, MTag];

106
src/lib/utils/network_detection.ts

@ -4,18 +4,18 @@ import { deduplicateRelayUrls } from "./relay_management.ts"; @@ -4,18 +4,18 @@ import { deduplicateRelayUrls } from "./relay_management.ts";
* Network conditions for relay selection
*/
export enum NetworkCondition {
ONLINE = 'online',
SLOW = 'slow',
OFFLINE = 'offline'
ONLINE = "online",
SLOW = "slow",
OFFLINE = "offline",
}
/**
* Network connectivity test endpoints
*/
const NETWORK_ENDPOINTS = [
'https://www.google.com/favicon.ico',
'https://httpbin.org/status/200',
'https://api.github.com/zen'
"https://www.google.com/favicon.ico",
"https://httpbin.org/status/200",
"https://api.github.com/zen",
];
/**
@ -27,20 +27,23 @@ export async function isNetworkOnline(): Promise<boolean> { @@ -27,20 +27,23 @@ export async function isNetworkOnline(): Promise<boolean> {
try {
// Use a simple fetch without HEAD method to avoid CORS issues
await fetch(endpoint, {
method: 'GET',
cache: 'no-cache',
method: "GET",
cache: "no-cache",
signal: AbortSignal.timeout(3000),
mode: 'no-cors' // Use no-cors mode to avoid CORS issues
mode: "no-cors", // Use no-cors mode to avoid CORS issues
});
// With no-cors mode, we can't check response.ok, so we assume success if no error
return true;
} catch (error) {
console.debug(`[network_detection.ts] Failed to reach ${endpoint}:`, error);
console.debug(
`[network_detection.ts] Failed to reach ${endpoint}:`,
error,
);
continue;
}
}
console.debug('[network_detection.ts] All network endpoints failed');
console.debug("[network_detection.ts] All network endpoints failed");
return false;
}
@ -50,25 +53,30 @@ export async function isNetworkOnline(): Promise<boolean> { @@ -50,25 +53,30 @@ export async function isNetworkOnline(): Promise<boolean> {
*/
export async function testNetworkSpeed(): Promise<number> {
const startTime = performance.now();
for (const endpoint of NETWORK_ENDPOINTS) {
try {
await fetch(endpoint, {
method: 'GET',
cache: 'no-cache',
method: "GET",
cache: "no-cache",
signal: AbortSignal.timeout(5000),
mode: 'no-cors' // Use no-cors mode to avoid CORS issues
mode: "no-cors", // Use no-cors mode to avoid CORS issues
});
const endTime = performance.now();
return endTime - startTime;
} catch (error) {
console.debug(`[network_detection.ts] Speed test failed for ${endpoint}:`, error);
console.debug(
`[network_detection.ts] Speed test failed for ${endpoint}:`,
error,
);
continue;
}
}
console.debug('[network_detection.ts] Network speed test failed for all endpoints');
console.debug(
"[network_detection.ts] Network speed test failed for all endpoints",
);
return Infinity; // Very slow if it fails
}
@ -78,21 +86,25 @@ export async function testNetworkSpeed(): Promise<number> { @@ -78,21 +86,25 @@ export async function testNetworkSpeed(): Promise<number> {
*/
export async function detectNetworkCondition(): Promise<NetworkCondition> {
const isOnline = await isNetworkOnline();
if (!isOnline) {
console.debug('[network_detection.ts] Network condition: OFFLINE');
console.debug("[network_detection.ts] Network condition: OFFLINE");
return NetworkCondition.OFFLINE;
}
const speed = await testNetworkSpeed();
// Consider network slow if response time > 2000ms
if (speed > 2000) {
console.debug(`[network_detection.ts] Network condition: SLOW (${speed.toFixed(0)}ms)`);
console.debug(
`[network_detection.ts] Network condition: SLOW (${speed.toFixed(0)}ms)`,
);
return NetworkCondition.SLOW;
}
console.debug(`[network_detection.ts] Network condition: ONLINE (${speed.toFixed(0)}ms)`);
console.debug(
`[network_detection.ts] Network condition: ONLINE (${speed.toFixed(0)}ms)`,
);
return NetworkCondition.ONLINE;
}
@ -108,39 +120,49 @@ export function getRelaySetForNetworkCondition( @@ -108,39 +120,49 @@ export function getRelaySetForNetworkCondition(
networkCondition: NetworkCondition,
discoveredLocalRelays: string[],
lowbandwidthRelays: string[],
fullRelaySet: { inboxRelays: string[]; outboxRelays: string[] }
fullRelaySet: { inboxRelays: string[]; outboxRelays: string[] },
): { inboxRelays: string[]; outboxRelays: string[] } {
switch (networkCondition) {
case NetworkCondition.OFFLINE:
// When offline, use local relays if available, otherwise rely on cache
// This will be improved when IndexedDB local relay is implemented
if (discoveredLocalRelays.length > 0) {
console.debug('[network_detection.ts] Using local relays (offline)');
console.debug("[network_detection.ts] Using local relays (offline)");
return {
inboxRelays: discoveredLocalRelays,
outboxRelays: discoveredLocalRelays
outboxRelays: discoveredLocalRelays,
};
} else {
console.debug('[network_detection.ts] No local relays available, will rely on cache (offline)');
console.debug(
"[network_detection.ts] No local relays available, will rely on cache (offline)",
);
return {
inboxRelays: [],
outboxRelays: []
outboxRelays: [],
};
}
case NetworkCondition.SLOW: {
// Local relays + low bandwidth relays when slow (deduplicated)
console.debug('[network_detection.ts] Using local + low bandwidth relays (slow network)');
const slowInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]);
const slowOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...lowbandwidthRelays]);
console.debug(
"[network_detection.ts] Using local + low bandwidth relays (slow network)",
);
const slowInboxRelays = deduplicateRelayUrls([
...discoveredLocalRelays,
...lowbandwidthRelays,
]);
const slowOutboxRelays = deduplicateRelayUrls([
...discoveredLocalRelays,
...lowbandwidthRelays,
]);
return {
inboxRelays: slowInboxRelays,
outboxRelays: slowOutboxRelays
outboxRelays: slowOutboxRelays,
};
}
case NetworkCondition.ONLINE:
default:
// Full relay set when online
console.debug('[network_detection.ts] Using full relay set (online)');
console.debug("[network_detection.ts] Using full relay set (online)");
return fullRelaySet;
}
}
@ -161,14 +183,16 @@ export function startNetworkMonitoring( @@ -161,14 +183,16 @@ export function startNetworkMonitoring(
const checkNetwork = async () => {
try {
const currentCondition = await detectNetworkCondition();
if (currentCondition !== lastCondition) {
console.debug(`[network_detection.ts] Network condition changed: ${lastCondition} -> ${currentCondition}`);
console.debug(
`[network_detection.ts] Network condition changed: ${lastCondition} -> ${currentCondition}`,
);
lastCondition = currentCondition;
onNetworkChange(currentCondition);
}
} catch (error) {
console.warn('[network_detection.ts] Network monitoring error:', error);
console.warn("[network_detection.ts] Network monitoring error:", error);
}
};
@ -185,4 +209,4 @@ export function startNetworkMonitoring( @@ -185,4 +209,4 @@ export function startNetworkMonitoring(
intervalId = null;
}
};
}
}

41
src/lib/utils/nostrEventService.ts

@ -1,11 +1,11 @@ @@ -1,11 +1,11 @@
import { nip19 } from "nostr-tools";
import { getEventHash, signEvent, prefixNostrAddresses } from "./nostrUtils.ts";
import { getEventHash, prefixNostrAddresses, signEvent } from "./nostrUtils.ts";
import { get } from "svelte/store";
import { goto } from "$app/navigation";
import { EVENT_KINDS, TIME_CONSTANTS } from "./search_constants.ts";
import { EXPIRATION_DURATION } from "../consts.ts";
import { ndkInstance } from "../ndk.ts";
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk";
import { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk";
export interface RootEventInfo {
rootId: string;
@ -96,21 +96,21 @@ export function extractRootEventInfo(parent: NDKEvent): RootEventInfo { @@ -96,21 +96,21 @@ export function extractRootEventInfo(parent: NDKEvent): RootEventInfo {
rootInfo.rootId = rootE[1];
rootInfo.rootRelay = getRelayString(rootE[2]);
rootInfo.rootPubkey = getPubkeyString(rootE[3] || rootInfo.rootPubkey);
rootInfo.rootKind =
Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind;
rootInfo.rootKind = Number(getTagValue(parent.tags, "K")) ||
rootInfo.rootKind;
} else if (rootA) {
rootInfo.rootAddress = rootA[1];
rootInfo.rootRelay = getRelayString(rootA[2]);
rootInfo.rootPubkey = getPubkeyString(
getTagValue(parent.tags, "P") || rootInfo.rootPubkey,
);
rootInfo.rootKind =
Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind;
rootInfo.rootKind = Number(getTagValue(parent.tags, "K")) ||
rootInfo.rootKind;
} else if (rootI) {
rootInfo.rootIValue = rootI[1];
rootInfo.rootIRelay = getRelayString(rootI[2]);
rootInfo.rootKind =
Number(getTagValue(parent.tags, "K")) || rootInfo.rootKind;
rootInfo.rootKind = Number(getTagValue(parent.tags, "K")) ||
rootInfo.rootKind;
}
return rootInfo;
@ -224,7 +224,8 @@ export function buildReplyTags( @@ -224,7 +224,8 @@ export function buildReplyTags(
if (isParentReplaceable) {
const dTag = getTagValue(parent.tags || [], "d");
if (dTag) {
const parentAddress = `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`;
const parentAddress =
`${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`;
addTags(tags, createTag("a", parentAddress, "", "root"));
}
}
@ -233,7 +234,8 @@ export function buildReplyTags( @@ -233,7 +234,8 @@ export function buildReplyTags(
if (isParentReplaceable) {
const dTag = getTagValue(parent.tags || [], "d");
if (dTag) {
const parentAddress = `${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`;
const parentAddress =
`${parentInfo.parentKind}:${parentInfo.parentPubkey}:${dTag}`;
if (isReplyToComment) {
// Root scope (uppercase) - use the original article
@ -317,14 +319,16 @@ export async function createSignedEvent( @@ -317,14 +319,16 @@ export async function createSignedEvent(
pubkey: string,
kind: number,
tags: string[][],
// deno-lint-ignore no-explicit-any
// deno-lint-ignore no-explicit-any
): Promise<{ id: string; sig: string; event: any }> {
const prefixedContent = prefixNostrAddresses(content);
// Add expiration tag for kind 24 events (NIP-40)
const finalTags = [...tags];
if (kind === 24) {
const expirationTimestamp = Math.floor(Date.now() / TIME_CONSTANTS.UNIX_TIMESTAMP_FACTOR) + EXPIRATION_DURATION;
const expirationTimestamp =
Math.floor(Date.now() / TIME_CONSTANTS.UNIX_TIMESTAMP_FACTOR) +
EXPIRATION_DURATION;
finalTags.push(["expiration", String(expirationTimestamp)]);
}
@ -344,7 +348,10 @@ export async function createSignedEvent( @@ -344,7 +348,10 @@ export async function createSignedEvent(
};
let sig, id;
if (typeof window !== "undefined" && globalThis.nostr && globalThis.nostr.signEvent) {
if (
typeof window !== "undefined" && globalThis.nostr &&
globalThis.nostr.signEvent
) {
const signed = await globalThis.nostr.signEvent(eventToSign);
sig = signed.sig as string;
id = "id" in signed ? (signed.id as string) : getEventHash(eventToSign);
@ -387,7 +394,7 @@ export async function publishEvent( @@ -387,7 +394,7 @@ export async function publishEvent(
try {
// If event is a plain object, create an NDKEvent from it
let ndkEvent: NDKEvent;
if (event.publish && typeof event.publish === 'function') {
if (event.publish && typeof event.publish === "function") {
// It's already an NDKEvent
ndkEvent = event;
} else {
@ -397,15 +404,15 @@ export async function publishEvent( @@ -397,15 +404,15 @@ export async function publishEvent(
// Publish with timeout
await ndkEvent.publish(relaySet).withTimeout(5000);
// For now, assume all relays were successful
// In a more sophisticated implementation, you'd track individual relay responses
successfulRelays.push(...relayUrls);
console.debug("[nostrEventService] Published event successfully:", {
eventId: ndkEvent.id,
relayCount: relayUrls.length,
successfulRelays
successfulRelays,
});
} catch (error) {
console.error("[nostrEventService] Failed to publish event:", error);

124
src/lib/utils/nostrUtils.ts

@ -5,7 +5,12 @@ import { npubCache } from "./npubCache.ts"; @@ -5,7 +5,12 @@ import { npubCache } from "./npubCache.ts";
import NDK, { NDKEvent, NDKRelaySet, NDKUser } from "@nostr-dev-kit/ndk";
import type { NDKKind, NostrEvent } from "@nostr-dev-kit/ndk";
import type { Filter } from "./search_types.ts";
import { communityRelays, secondaryRelays, searchRelays, anonymousRelays } from "../consts.ts";
import {
anonymousRelays,
communityRelays,
searchRelays,
secondaryRelays,
} from "../consts.ts";
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
import { NDKRelaySet as NDKRelaySetFromNDK } from "@nostr-dev-kit/ndk";
import { sha256 } from "@noble/hashes/sha2.js";
@ -55,7 +60,7 @@ function escapeHtml(text: string): string { @@ -55,7 +60,7 @@ function escapeHtml(text: string): string {
* Escape regex special characters
*/
function escapeRegExp(string: string): string {
return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
return string.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
@ -68,7 +73,12 @@ export async function getUserMetadata( @@ -68,7 +73,12 @@ export async function getUserMetadata(
// Remove nostr: prefix if present
const cleanId = identifier.replace(/^nostr:/, "");
console.log("getUserMetadata called with identifier:", identifier, "force:", force);
console.log(
"getUserMetadata called with identifier:",
identifier,
"force:",
force,
);
if (!force && npubCache.has(cleanId)) {
const cached = npubCache.get(cleanId)!;
@ -100,7 +110,10 @@ export async function getUserMetadata( @@ -100,7 +110,10 @@ export async function getUserMetadata(
} else if (decoded.type === "nprofile") {
pubkey = decoded.data.pubkey;
} else {
console.warn("getUserMetadata: Unsupported identifier type:", decoded.type);
console.warn(
"getUserMetadata: Unsupported identifier type:",
decoded.type,
);
npubCache.set(cleanId, fallback);
return fallback;
}
@ -111,13 +124,12 @@ export async function getUserMetadata( @@ -111,13 +124,12 @@ export async function getUserMetadata(
kinds: [0],
authors: [pubkey],
});
console.log("getUserMetadata: Profile event found:", profileEvent);
const profile =
profileEvent && profileEvent.content
? JSON.parse(profileEvent.content)
: null;
const profile = profileEvent && profileEvent.content
? JSON.parse(profileEvent.content)
: null;
console.log("getUserMetadata: Parsed profile:", profile);
@ -199,7 +211,7 @@ export async function createProfileLinkWithVerification( @@ -199,7 +211,7 @@ export async function createProfileLinkWithVerification(
};
const allRelays = [
...searchRelays, // Include search relays for profile searches
...searchRelays, // Include search relays for profile searches
...communityRelays,
...userRelays,
...secondaryRelays,
@ -223,8 +235,7 @@ export async function createProfileLinkWithVerification( @@ -223,8 +235,7 @@ export async function createProfileLinkWithVerification(
const defaultText = `${cleanId.slice(0, 8)}...${cleanId.slice(-4)}`;
const escapedText = escapeHtml(displayText || defaultText);
const displayIdentifier =
profile?.displayName ??
const displayIdentifier = profile?.displayName ??
profile?.display_name ??
profile?.name ??
escapedText;
@ -287,7 +298,10 @@ export async function processNostrIdentifiers( @@ -287,7 +298,10 @@ export async function processNostrIdentifiers(
const displayText = metadata.displayName || metadata.name;
const link = createProfileLink(identifier, displayText);
// Replace all occurrences of this exact match
processedContent = processedContent.replace(new RegExp(escapeRegExp(fullMatch), 'g'), link);
processedContent = processedContent.replace(
new RegExp(escapeRegExp(fullMatch), "g"),
link,
);
}
// Process notes (nevent, note, naddr)
@ -304,7 +318,10 @@ export async function processNostrIdentifiers( @@ -304,7 +318,10 @@ export async function processNostrIdentifiers(
}
const link = createNoteLink(identifier);
// Replace all occurrences of this exact match
processedContent = processedContent.replace(new RegExp(escapeRegExp(fullMatch), 'g'), link);
processedContent = processedContent.replace(
new RegExp(escapeRegExp(fullMatch), "g"),
link,
);
}
return processedContent;
@ -409,7 +426,7 @@ export function withTimeout<T>( @@ -409,7 +426,7 @@ export function withTimeout<T>(
return Promise.race([
promise,
new Promise<T>((_, reject) =>
setTimeout(() => reject(new Error("Timeout")), timeoutMs),
setTimeout(() => reject(new Error("Timeout")), timeoutMs)
),
]);
}
@ -420,7 +437,7 @@ export function withTimeout<T>( @@ -420,7 +437,7 @@ export function withTimeout<T>(
return Promise.race([
promise,
new Promise<T>((_, reject) =>
setTimeout(() => reject(new Error("Timeout")), timeoutMs),
setTimeout(() => reject(new Error("Timeout")), timeoutMs)
),
]);
}
@ -455,40 +472,54 @@ export async function fetchEventWithFallback( @@ -455,40 +472,54 @@ export async function fetchEventWithFallback(
): Promise<NDKEvent | null> {
// AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive event discovery
// This ensures we don't miss events that might be on any available relay
// Get all relays from NDK pool first (most comprehensive)
const poolRelays = Array.from(ndk.pool.relays.values()).map((r: any) => r.url);
const poolRelays = Array.from(ndk.pool.relays.values()).map((r: any) =>
r.url
);
const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays);
// Combine all available relays, prioritizing pool relays
let allRelays = [...new Set([...poolRelays, ...inboxRelays, ...outboxRelays])];
let allRelays = [
...new Set([...poolRelays, ...inboxRelays, ...outboxRelays]),
];
console.log("fetchEventWithFallback: Using pool relays:", poolRelays);
console.log("fetchEventWithFallback: Using inbox relays:", inboxRelays);
console.log("fetchEventWithFallback: Using outbox relays:", outboxRelays);
console.log("fetchEventWithFallback: Total unique relays:", allRelays.length);
// Check if we have any relays available
if (allRelays.length === 0) {
console.warn("fetchEventWithFallback: No relays available for event fetch, using fallback relays");
console.warn(
"fetchEventWithFallback: No relays available for event fetch, using fallback relays",
);
// Use fallback relays when no relays are available
allRelays = [...secondaryRelays, ...searchRelays, ...anonymousRelays];
console.log("fetchEventWithFallback: Using fallback relays:", allRelays);
}
// Create relay set from all available relays
const relaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
try {
if (relaySet.relays.size === 0) {
console.warn("fetchEventWithFallback: No relays in relay set for event fetch");
console.warn(
"fetchEventWithFallback: No relays in relay set for event fetch",
);
return null;
}
console.log("fetchEventWithFallback: Relay set size:", relaySet.relays.size);
console.log(
"fetchEventWithFallback: Relay set size:",
relaySet.relays.size,
);
console.log("fetchEventWithFallback: Filter:", filterOrId);
console.log("fetchEventWithFallback: Relay URLs:", Array.from(relaySet.relays).map((r) => r.url));
console.log(
"fetchEventWithFallback: Relay URLs:",
Array.from(relaySet.relays).map((r) => r.url),
);
let found: NDKEvent | null = null;
@ -500,8 +531,9 @@ export async function fetchEventWithFallback( @@ -500,8 +531,9 @@ export async function fetchEventWithFallback(
.fetchEvent({ ids: [filterOrId] }, undefined, relaySet)
.withTimeout(timeoutMs);
} else {
const filter =
typeof filterOrId === "string" ? { ids: [filterOrId] } : filterOrId;
const filter = typeof filterOrId === "string"
? { ids: [filterOrId] }
: filterOrId;
const results = await ndk
.fetchEvents(filter, undefined, relaySet)
.withTimeout(timeoutMs);
@ -512,7 +544,9 @@ export async function fetchEventWithFallback( @@ -512,7 +544,9 @@ export async function fetchEventWithFallback(
if (!found) {
const timeoutSeconds = timeoutMs / 1000;
const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(", ");
const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(
", ",
);
console.warn(
`fetchEventWithFallback: Event not found after ${timeoutSeconds}s timeout. Tried inbox relays: ${relayUrls}. Some relays may be offline or slow.`,
);
@ -523,14 +557,19 @@ export async function fetchEventWithFallback( @@ -523,14 +557,19 @@ export async function fetchEventWithFallback(
// Always wrap as NDKEvent
return found instanceof NDKEvent ? found : new NDKEvent(ndk, found);
} catch (err) {
if (err instanceof Error && err.message === 'Timeout') {
if (err instanceof Error && err.message === "Timeout") {
const timeoutSeconds = timeoutMs / 1000;
const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(", ");
const relayUrls = Array.from(relaySet.relays).map((r) => r.url).join(
", ",
);
console.warn(
`fetchEventWithFallback: Event fetch timed out after ${timeoutSeconds}s. Tried inbox relays: ${relayUrls}. Some relays may be offline or slow.`,
);
} else {
console.error("fetchEventWithFallback: Error in fetchEventWithFallback:", err);
console.error(
"fetchEventWithFallback: Error in fetchEventWithFallback:",
err,
);
}
return null;
}
@ -545,20 +584,22 @@ export function toNpub(pubkey: string | undefined): string | null { @@ -545,20 +584,22 @@ export function toNpub(pubkey: string | undefined): string | null {
try {
// If it's already an npub, return it
if (pubkey.startsWith("npub")) return pubkey;
// If it's a hex pubkey, convert to npub
if (new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, "i").test(pubkey)) {
return nip19.npubEncode(pubkey);
}
// If it's an nprofile, decode and extract npub
if (pubkey.startsWith("nprofile")) {
const decoded = nip19.decode(pubkey);
if (decoded.type === 'nprofile') {
return decoded.data.pubkey ? nip19.npubEncode(decoded.data.pubkey) : null;
if (decoded.type === "nprofile") {
return decoded.data.pubkey
? nip19.npubEncode(decoded.data.pubkey)
: null;
}
}
return null;
} catch {
return null;
@ -573,7 +614,10 @@ export function createRelaySetFromUrls(relayUrls: string[], ndk: NDK) { @@ -573,7 +614,10 @@ export function createRelaySetFromUrls(relayUrls: string[], ndk: NDK) {
return NDKRelaySetFromNDK.fromRelayUrls(relayUrls, ndk);
}
export function createNDKEvent(ndk: NDK, rawEvent: NDKEvent | NostrEvent | undefined) {
export function createNDKEvent(
ndk: NDK,
rawEvent: NDKEvent | NostrEvent | undefined,
) {
return new NDKEvent(ndk, rawEvent);
}

40
src/lib/utils/nostr_identifiers.ts

@ -1,4 +1,4 @@ @@ -1,4 +1,4 @@
import { VALIDATION } from './search_constants';
import { VALIDATION } from "./search_constants";
/**
* Nostr identifier types
@ -22,7 +22,7 @@ export interface ParsedCoordinate { @@ -22,7 +22,7 @@ export interface ParsedCoordinate {
* @returns True if it's a valid hex event ID
*/
export function isEventId(id: string): id is NostrEventId {
return new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, 'i').test(id);
return new RegExp(`^[a-f0-9]{${VALIDATION.HEX_LENGTH}}$`, "i").test(id);
}
/**
@ -30,22 +30,24 @@ export function isEventId(id: string): id is NostrEventId { @@ -30,22 +30,24 @@ export function isEventId(id: string): id is NostrEventId {
* @param coordinate The string to check
* @returns True if it's a valid coordinate
*/
export function isCoordinate(coordinate: string): coordinate is NostrCoordinate {
const parts = coordinate.split(':');
export function isCoordinate(
coordinate: string,
): coordinate is NostrCoordinate {
const parts = coordinate.split(":");
if (parts.length < 3) return false;
const [kindStr, pubkey, ...dTagParts] = parts;
// Check if kind is a valid number
const kind = parseInt(kindStr, 10);
if (isNaN(kind) || kind < 0) return false;
// Check if pubkey is a valid hex string
if (!isEventId(pubkey)) return false;
// Check if d-tag exists (can contain colons)
if (dTagParts.length === 0) return false;
return true;
}
@ -56,14 +58,14 @@ export function isCoordinate(coordinate: string): coordinate is NostrCoordinate @@ -56,14 +58,14 @@ export function isCoordinate(coordinate: string): coordinate is NostrCoordinate
*/
export function parseCoordinate(coordinate: string): ParsedCoordinate | null {
if (!isCoordinate(coordinate)) return null;
const parts = coordinate.split(':');
const parts = coordinate.split(":");
const [kindStr, pubkey, ...dTagParts] = parts;
return {
kind: parseInt(kindStr, 10),
pubkey,
dTag: dTagParts.join(':') // Rejoin in case d-tag contains colons
dTag: dTagParts.join(":"), // Rejoin in case d-tag contains colons
};
}
@ -74,7 +76,11 @@ export function parseCoordinate(coordinate: string): ParsedCoordinate | null { @@ -74,7 +76,11 @@ export function parseCoordinate(coordinate: string): ParsedCoordinate | null {
* @param dTag The d-tag value
* @returns The coordinate string
*/
export function createCoordinate(kind: number, pubkey: string, dTag: string): NostrCoordinate {
export function createCoordinate(
kind: number,
pubkey: string,
dTag: string,
): NostrCoordinate {
return `${kind}:${pubkey}:${dTag}`;
}
@ -83,6 +89,8 @@ export function createCoordinate(kind: number, pubkey: string, dTag: string): No @@ -83,6 +89,8 @@ export function createCoordinate(kind: number, pubkey: string, dTag: string): No
* @param identifier The string to check
* @returns True if it's a valid Nostr identifier
*/
export function isNostrIdentifier(identifier: string): identifier is NostrIdentifier {
export function isNostrIdentifier(
identifier: string,
): identifier is NostrIdentifier {
return isEventId(identifier) || isCoordinate(identifier);
}
}

306
src/lib/utils/notification_utils.ts

@ -1,306 +0,0 @@ @@ -1,306 +0,0 @@
import type { NDKEvent } from "$lib/utils/nostrUtils";
import { getUserMetadata, NDKRelaySetFromNDK, toNpub } from "$lib/utils/nostrUtils";
import { get } from "svelte/store";
import { ndkInstance } from "$lib/ndk";
import { searchRelays } from "$lib/consts";
import { userStore, type UserState } from "$lib/stores/userStore";
import { buildCompleteRelaySet } from "$lib/utils/relay_management";
import { neventEncode } from "$lib/utils";
import { nip19 } from "nostr-tools";
import type NDK from "@nostr-dev-kit/ndk";
import { parseEmbeddedMarkup } from "./markup/embeddedMarkupParser";
// AI-NOTE: Notification-specific utility functions that don't exist elsewhere
/**
* Truncates content to a specified length
*/
export function truncateContent(content: string, maxLength: number = 300): string {
if (content.length <= maxLength) return content;
return content.slice(0, maxLength) + "...";
}
/**
* Truncates rendered HTML content while preserving quote boxes
*/
export function truncateRenderedContent(renderedHtml: string, maxLength: number = 300): string {
if (renderedHtml.length <= maxLength) return renderedHtml;
const hasQuoteBoxes = renderedHtml.includes('jump-to-message');
if (hasQuoteBoxes) {
const quoteBoxPattern = /<div class="block w-fit my-2 px-3 py-2 bg-gray-200[^>]*onclick="window\.dispatchEvent\(new CustomEvent\('jump-to-message'[^>]*>[^<]*<\/div>/g;
const quoteBoxes = renderedHtml.match(quoteBoxPattern) || [];
let textOnly = renderedHtml.replace(quoteBoxPattern, '|||QUOTEBOX|||');
if (textOnly.length > maxLength) {
const availableLength = maxLength - (quoteBoxes.join('').length);
if (availableLength > 50) {
textOnly = textOnly.slice(0, availableLength) + "...";
} else {
textOnly = textOnly.slice(0, 50) + "...";
}
}
let result = textOnly;
quoteBoxes.forEach(box => {
result = result.replace('|||QUOTEBOX|||', box);
});
return result;
} else {
if (renderedHtml.includes('<')) {
const truncated = renderedHtml.slice(0, maxLength);
const lastTagStart = truncated.lastIndexOf('<');
const lastTagEnd = truncated.lastIndexOf('>');
if (lastTagStart > lastTagEnd) {
return renderedHtml.slice(0, lastTagStart) + "...";
}
return truncated + "...";
} else {
return renderedHtml.slice(0, maxLength) + "...";
}
}
}
/**
* Parses content with support for embedded events
*/
export async function parseContent(content: string): Promise<string> {
if (!content) return "";
return await parseEmbeddedMarkup(content, 0);
}
/**
* Parses repost content and renders it as an embedded event
*/
export async function parseRepostContent(content: string): Promise<string> {
if (!content) return "";
try {
// Try to parse the content as JSON (repost events contain the original event as JSON)
const originalEvent = JSON.parse(content);
// Extract the original event's content
const originalContent = originalEvent.content || "";
const originalAuthor = originalEvent.pubkey || "";
const originalCreatedAt = originalEvent.created_at || 0;
const originalKind = originalEvent.kind || 1;
// Parse the original content with embedded markup support
const parsedOriginalContent = await parseEmbeddedMarkup(originalContent, 0);
// Create an embedded event display with proper structure
const formattedDate = originalCreatedAt ? new Date(originalCreatedAt * 1000).toLocaleDateString() : "Unknown date";
const shortAuthor = originalAuthor ? `${originalAuthor.slice(0, 8)}...${originalAuthor.slice(-4)}` : "Unknown";
return `
<div class="embedded-repost bg-gray-50 dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4 my-2">
<!-- Event header -->
<div class="flex items-center justify-between mb-3 min-w-0">
<div class="flex items-center space-x-2 min-w-0">
<span class="text-xs text-gray-500 dark:text-gray-400 font-mono flex-shrink-0">
Kind ${originalKind}
</span>
<span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0">
(repost)
</span>
<span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0"></span>
<span class="text-xs text-gray-600 dark:text-gray-400 flex-shrink-0">Author:</span>
<span class="text-xs text-gray-700 dark:text-gray-300 font-mono">
${shortAuthor}
</span>
<span class="text-xs text-gray-500 dark:text-gray-400 flex-shrink-0"></span>
<span class="text-xs text-gray-500 dark:text-gray-400">
${formattedDate}
</span>
</div>
<button
class="text-xs text-primary-600 dark:text-primary-500 hover:underline flex-shrink-0"
onclick="window.location.href='/events?id=${originalEvent.id || 'unknown'}'"
>
View full event
</button>
</div>
<!-- Reposted content -->
<div class="text-sm text-gray-800 dark:text-gray-200 leading-relaxed">
${parsedOriginalContent}
</div>
</div>
`;
} catch (error) {
// If JSON parsing fails, fall back to embedded markup
console.warn("Failed to parse repost content as JSON, falling back to embedded markup:", error);
return await parseEmbeddedMarkup(content, 0);
}
}
/**
* Renders quoted content for a message
*/
export async function renderQuotedContent(message: NDKEvent, publicMessages: NDKEvent[]): Promise<string> {
const qTags = message.getMatchingTags("q");
if (qTags.length === 0) return "";
const qTag = qTags[0];
const eventId = qTag[1];
if (eventId) {
// Validate eventId format (should be 64 character hex string)
const isValidEventId = /^[a-fA-F0-9]{64}$/.test(eventId);
// First try to find in local messages
let quotedMessage = publicMessages.find(msg => msg.id === eventId);
// If not found locally, fetch from relays
if (!quotedMessage) {
try {
const ndk: NDK | undefined = get(ndkInstance);
if (ndk) {
const userStoreValue: UserState = get(userStore);
const user = userStoreValue.signedIn && userStoreValue.pubkey ? ndk.getUser({ pubkey: userStoreValue.pubkey }) : null;
const relaySet = await buildCompleteRelaySet(ndk, user);
const allRelays = [...relaySet.inboxRelays, ...relaySet.outboxRelays, ...searchRelays];
if (allRelays.length > 0) {
const ndkRelaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
const fetchedEvent = await ndk.fetchEvent({ ids: [eventId], limit: 1 }, undefined, ndkRelaySet);
quotedMessage = fetchedEvent || undefined;
}
}
} catch (error) {
console.warn(`[renderQuotedContent] Failed to fetch quoted event ${eventId}:`, error);
}
}
if (quotedMessage) {
const quotedContent = quotedMessage.content ? quotedMessage.content.slice(0, 200) : "No content";
const parsedContent = await parseEmbeddedMarkup(quotedContent, 0);
return `<div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded cursor-pointer hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors text-sm text-gray-600 dark:text-gray-300" onclick="window.dispatchEvent(new CustomEvent('jump-to-message', { detail: '${eventId}' }))">${parsedContent}</div>`;
} else {
// Fallback to nevent link - only if eventId is valid
if (isValidEventId) {
try {
const nevent = nip19.neventEncode({ id: eventId });
return `<div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded cursor-pointer hover:bg-gray-300 dark:hover:bg-gray-600 transition-colors text-sm text-gray-600 dark:text-gray-300" onclick="window.location.href='/events?id=${nevent}'">Quoted message not found. Click to view event ${eventId.slice(0, 8)}...</div>`;
} catch (error) {
console.warn(`[renderQuotedContent] Failed to encode nevent for ${eventId}:`, error);
// Fall back to just showing the event ID without a link
return `<div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded text-sm text-gray-600 dark:text-gray-300">Quoted message not found. Event ID: ${eventId.slice(0, 8)}...</div>`;
}
} else {
// Invalid event ID format
return `<div class="block w-fit my-2 px-3 py-2 bg-gray-200 dark:bg-gray-700 border-l-2 border-gray-400 dark:border-gray-500 rounded text-sm text-gray-600 dark:text-gray-300">Invalid quoted message reference</div>`;
}
}
}
return "";
}
/**
* Gets notification type based on event kind
*/
export function getNotificationType(event: NDKEvent): string {
switch (event.kind) {
case 1: return "Reply";
case 1111: return "Custom Reply";
case 9802: return "Highlight";
case 6: return "Repost";
case 16: return "Generic Repost";
case 24: return "Public Message";
default: return `Kind ${event.kind}`;
}
}
/**
 * Fetches author profiles for a list of events.
 *
 * Resolution order per unique author pubkey:
 *   1. Local metadata cache (via `getUserMetadata`).
 *   2. Each configured search relay, in order, stopping at the first hit.
 *   3. The user's complete inbox/outbox relay set as a last resort.
 *
 * Lookups for different pubkeys run in parallel; every failure is logged
 * and swallowed so one bad relay or malformed profile never rejects the
 * whole batch.
 *
 * @param events Events whose authors' profiles should be resolved.
 * @returns Map from author pubkey to the profile fields found. Pubkeys for
 *          which no profile could be located are simply absent.
 */
export async function fetchAuthorProfiles(events: NDKEvent[]): Promise<Map<string, { name?: string; displayName?: string; picture?: string }>> {
  const authorProfiles = new Map<string, { name?: string; displayName?: string; picture?: string }>();

  // Parses a kind-0 event's JSON content and records it for `pubkey`.
  // Shared by the search-relay and fallback paths (previously duplicated
  // inline). A JSON.parse throw propagates to the caller's catch, exactly
  // as the inline code did.
  const storeProfile = (profileEvent: NDKEvent, pubkey: string): void => {
    const profileData = JSON.parse(profileEvent.content);
    authorProfiles.set(pubkey, {
      name: profileData.name,
      // Profiles in the wild use both snake_case and camelCase keys.
      displayName: profileData.display_name || profileData.displayName,
      picture: profileData.picture || profileData.image,
    });
  };

  const uniquePubkeys = new Set<string>();
  events.forEach((event) => {
    if (event.pubkey) uniquePubkeys.add(event.pubkey);
  });

  const profilePromises = Array.from(uniquePubkeys).map(async (pubkey) => {
    try {
      const npub = toNpub(pubkey);
      if (!npub) return;

      // 1. Try cache first.
      const profile = await getUserMetadata(npub, false);
      if (profile && (profile.name || profile.displayName || profile.picture)) {
        authorProfiles.set(pubkey, profile);
        return;
      }

      // 2. Try dedicated search relays sequentially; first hit wins.
      for (const relay of searchRelays) {
        try {
          const ndk: NDK | undefined = get(ndkInstance);
          if (!ndk) break;
          const relaySet = NDKRelaySetFromNDK.fromRelayUrls([relay], ndk);
          const profileEvent = await ndk.fetchEvent(
            { kinds: [0], authors: [pubkey] },
            undefined,
            relaySet,
          );
          if (profileEvent) {
            storeProfile(profileEvent, pubkey);
            return;
          }
        } catch (error) {
          console.warn(`[fetchAuthorProfiles] Failed to fetch profile from ${relay}:`, error);
        }
      }

      // 3. Fall back to the user's full inbox/outbox relay set.
      try {
        const ndk: NDK | undefined = get(ndkInstance);
        if (!ndk) return;
        const userStoreValue: UserState = get(userStore);
        // Only pass a user when signed in; buildCompleteRelaySet accepts null.
        const user = userStoreValue.signedIn && userStoreValue.pubkey ? ndk.getUser({ pubkey: userStoreValue.pubkey }) : null;
        const relaySet = await buildCompleteRelaySet(ndk, user);
        const allRelays = [...relaySet.inboxRelays, ...relaySet.outboxRelays];
        if (allRelays.length > 0) {
          const ndkRelaySet = NDKRelaySetFromNDK.fromRelayUrls(allRelays, ndk);
          const profileEvent = await ndk.fetchEvent(
            { kinds: [0], authors: [pubkey] },
            undefined,
            ndkRelaySet,
          );
          if (profileEvent) {
            storeProfile(profileEvent, pubkey);
          }
        }
      } catch (error) {
        console.warn(`[fetchAuthorProfiles] Failed to fetch profile from all relays:`, error);
      }
    } catch (error) {
      console.warn(`[fetchAuthorProfiles] Error processing profile for ${pubkey}:`, error);
    }
  });

  await Promise.all(profilePromises);
  return authorProfiles;
}

22
src/lib/utils/npubCache.ts

@ -4,7 +4,7 @@ export type NpubMetadata = NostrProfile; @@ -4,7 +4,7 @@ export type NpubMetadata = NostrProfile;
class NpubCache {
private cache: Record<string, NpubMetadata> = {};
private readonly storageKey = 'alexandria_npub_cache';
private readonly storageKey = "alexandria_npub_cache";
private readonly maxAge = 24 * 60 * 60 * 1000; // 24 hours in milliseconds
constructor() {
@ -13,12 +13,15 @@ class NpubCache { @@ -13,12 +13,15 @@ class NpubCache {
private loadFromStorage(): void {
try {
if (typeof window !== 'undefined') {
if (typeof window !== "undefined") {
const stored = localStorage.getItem(this.storageKey);
if (stored) {
const data = JSON.parse(stored) as Record<string, { profile: NpubMetadata; timestamp: number }>;
const data = JSON.parse(stored) as Record<
string,
{ profile: NpubMetadata; timestamp: number }
>;
const now = Date.now();
// Filter out expired entries
for (const [key, entry] of Object.entries(data)) {
if (entry.timestamp && (now - entry.timestamp) < this.maxAge) {
@ -28,21 +31,24 @@ class NpubCache { @@ -28,21 +31,24 @@ class NpubCache {
}
}
} catch (error) {
console.warn('Failed to load npub cache from storage:', error);
console.warn("Failed to load npub cache from storage:", error);
}
}
private saveToStorage(): void {
try {
if (typeof window !== 'undefined') {
const data: Record<string, { profile: NpubMetadata; timestamp: number }> = {};
if (typeof window !== "undefined") {
const data: Record<
string,
{ profile: NpubMetadata; timestamp: number }
> = {};
for (const [key, profile] of Object.entries(this.cache)) {
data[key] = { profile, timestamp: Date.now() };
}
localStorage.setItem(this.storageKey, JSON.stringify(data));
}
} catch (error) {
console.warn('Failed to save npub cache to storage:', error);
console.warn("Failed to save npub cache to storage:", error);
}
}

69
src/lib/utils/profileCache.ts

@ -24,7 +24,7 @@ async function fetchProfile(pubkey: string): Promise<ProfileData | null> { @@ -24,7 +24,7 @@ async function fetchProfile(pubkey: string): Promise<ProfileData | null> {
const profileEvents = await ndk.fetchEvents({
kinds: [0],
authors: [pubkey],
limit: 1
limit: 1,
});
if (profileEvents.size === 0) {
@ -33,7 +33,7 @@ async function fetchProfile(pubkey: string): Promise<ProfileData | null> { @@ -33,7 +33,7 @@ async function fetchProfile(pubkey: string): Promise<ProfileData | null> {
// Get the most recent profile event
const profileEvent = Array.from(profileEvents)[0];
try {
const content = JSON.parse(profileEvent.content);
return content as ProfileData;
@ -77,14 +77,14 @@ export async function getDisplayName(pubkey: string): Promise<string> { @@ -77,14 +77,14 @@ export async function getDisplayName(pubkey: string): Promise<string> {
* @returns Array of profile events
*/
export async function batchFetchProfiles(
pubkeys: string[],
onProgress?: (fetched: number, total: number) => void
pubkeys: string[],
onProgress?: (fetched: number, total: number) => void,
): Promise<NDKEvent[]> {
const allProfileEvents: NDKEvent[] = [];
// Filter out already cached pubkeys
const uncachedPubkeys = pubkeys.filter(pk => !profileCache.has(pk));
const uncachedPubkeys = pubkeys.filter((pk) => !profileCache.has(pk));
if (uncachedPubkeys.length === 0) {
if (onProgress) onProgress(pubkeys.length, pubkeys.length);
return allProfileEvents;
@ -92,21 +92,24 @@ export async function batchFetchProfiles( @@ -92,21 +92,24 @@ export async function batchFetchProfiles(
try {
const ndk = get(ndkInstance);
// Report initial progress
const cachedCount = pubkeys.length - uncachedPubkeys.length;
if (onProgress) onProgress(cachedCount, pubkeys.length);
// Batch fetch in chunks to avoid overwhelming relays
const CHUNK_SIZE = 50;
let fetchedCount = cachedCount;
for (let i = 0; i < uncachedPubkeys.length; i += CHUNK_SIZE) {
const chunk = uncachedPubkeys.slice(i, Math.min(i + CHUNK_SIZE, uncachedPubkeys.length));
const chunk = uncachedPubkeys.slice(
i,
Math.min(i + CHUNK_SIZE, uncachedPubkeys.length),
);
const profileEvents = await ndk.fetchEvents({
kinds: [0],
authors: chunk
authors: chunk,
});
// Process each profile event
@ -120,19 +123,19 @@ export async function batchFetchProfiles( @@ -120,19 +123,19 @@ export async function batchFetchProfiles(
console.error("Failed to parse profile content:", e);
}
});
// Update progress
if (onProgress) {
onProgress(fetchedCount, pubkeys.length);
}
}
// Final progress update
if (onProgress) onProgress(pubkeys.length, pubkeys.length);
} catch (e) {
console.error("Failed to batch fetch profiles:", e);
}
return allProfileEvents;
}
@ -173,29 +176,29 @@ export function clearProfileCache(): void { @@ -173,29 +176,29 @@ export function clearProfileCache(): void {
*/
export function extractPubkeysFromEvents(events: NDKEvent[]): Set<string> {
const pubkeys = new Set<string>();
events.forEach(event => {
events.forEach((event) => {
// Add author pubkey
if (event.pubkey) {
pubkeys.add(event.pubkey);
}
// Add pubkeys from p tags
const pTags = event.getMatchingTags("p");
pTags.forEach(tag => {
pTags.forEach((tag) => {
if (tag[1]) {
pubkeys.add(tag[1]);
}
});
// Extract pubkeys from content (nostr:npub1... format)
const npubPattern = /nostr:npub1[a-z0-9]{58}/g;
const matches = event.content?.match(npubPattern) || [];
matches.forEach(match => {
matches.forEach((match) => {
try {
const npub = match.replace('nostr:', '');
const npub = match.replace("nostr:", "");
const decoded = nip19.decode(npub);
if (decoded.type === 'npub') {
if (decoded.type === "npub") {
pubkeys.add(decoded.data as string);
}
} catch (e) {
@ -203,7 +206,7 @@ export function extractPubkeysFromEvents(events: NDKEvent[]): Set<string> { @@ -203,7 +206,7 @@ export function extractPubkeysFromEvents(events: NDKEvent[]): Set<string> {
}
});
});
return pubkeys;
}
@ -214,17 +217,17 @@ export function extractPubkeysFromEvents(events: NDKEvent[]): Set<string> { @@ -214,17 +217,17 @@ export function extractPubkeysFromEvents(events: NDKEvent[]): Set<string> {
*/
export function replaceContentPubkeys(content: string): string {
if (!content) return content;
// Replace nostr:npub1... references
const npubPattern = /nostr:npub[a-z0-9]{58}/g;
let result = content;
const matches = content.match(npubPattern) || [];
matches.forEach(match => {
matches.forEach((match) => {
try {
const npub = match.replace('nostr:', '');
const npub = match.replace("nostr:", "");
const decoded = nip19.decode(npub);
if (decoded.type === 'npub') {
if (decoded.type === "npub") {
const pubkey = decoded.data as string;
const displayName = getDisplayNameSync(pubkey);
result = result.replace(match, `@${displayName}`);
@ -233,7 +236,7 @@ export function replaceContentPubkeys(content: string): string { @@ -233,7 +236,7 @@ export function replaceContentPubkeys(content: string): string {
// Invalid npub, leave as is
}
});
return result;
}
@ -245,8 +248,8 @@ export function replaceContentPubkeys(content: string): string { @@ -245,8 +248,8 @@ export function replaceContentPubkeys(content: string): string {
export function replacePubkeysWithDisplayNames(text: string): string {
// Match hex pubkeys (64 characters)
const pubkeyRegex = /\b[0-9a-fA-F]{64}\b/g;
return text.replace(pubkeyRegex, (match) => {
return getDisplayNameSync(match);
});
}
}

32
src/lib/utils/profile_search.ts

@ -1,15 +1,15 @@ @@ -1,15 +1,15 @@
import { ndkInstance, activeInboxRelays } from "../ndk.ts";
import { getUserMetadata, getNpubFromNip05 } from "./nostrUtils.ts";
import NDK, { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk";
import { activeInboxRelays, ndkInstance } from "../ndk.ts";
import { getNpubFromNip05, getUserMetadata } from "./nostrUtils.ts";
import NDK, { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk";
import { searchCache } from "./searchCache.ts";
import { searchRelays, communityRelays, secondaryRelays } from "../consts.ts";
import { communityRelays, searchRelays, secondaryRelays } from "../consts.ts";
import { get } from "svelte/store";
import type { NostrProfile, ProfileSearchResult } from "./search_types.ts";
import {
createProfileFromEvent,
fieldMatches,
nip05Matches,
normalizeSearchTerm,
createProfileFromEvent,
} from "./search_utils.ts";
/**
@ -267,12 +267,12 @@ async function quickRelaySearch( @@ -267,12 +267,12 @@ async function quickRelaySearch(
// Use search relays (optimized for profiles) + user's inbox relays + community relays
const userInboxRelays = get(activeInboxRelays);
const quickRelayUrls = [
...searchRelays, // Dedicated profile search relays
...userInboxRelays, // User's personal inbox relays
...communityRelays, // Community relays
...secondaryRelays // Secondary relays as fallback
...searchRelays, // Dedicated profile search relays
...userInboxRelays, // User's personal inbox relays
...communityRelays, // Community relays
...secondaryRelays, // Secondary relays as fallback
];
// Deduplicate relay URLs
const uniqueRelayUrls = [...new Set(quickRelayUrls)];
console.log("Using relays for profile search:", uniqueRelayUrls);
@ -312,8 +312,8 @@ async function quickRelaySearch( @@ -312,8 +312,8 @@ async function quickRelaySearch(
try {
if (!event.content) return;
const profileData = JSON.parse(event.content);
const displayName =
profileData.displayName || profileData.display_name || "";
const displayName = profileData.displayName ||
profileData.display_name || "";
const display_name = profileData.display_name || "";
const name = profileData.name || "";
const nip05 = profileData.nip05 || "";
@ -363,7 +363,9 @@ async function quickRelaySearch( @@ -363,7 +363,9 @@ async function quickRelaySearch(
sub.on("eose", () => {
console.log(
`Relay ${index + 1} (${uniqueRelayUrls[index]}) search completed, processed ${eventCount} events, found ${foundInRelay.length} matches`,
`Relay ${index + 1} (${
uniqueRelayUrls[index]
}) search completed, processed ${eventCount} events, found ${foundInRelay.length} matches`,
);
resolve(foundInRelay);
});
@ -371,7 +373,9 @@ async function quickRelaySearch( @@ -371,7 +373,9 @@ async function quickRelaySearch(
// Short timeout for quick search
setTimeout(() => {
console.log(
`Relay ${index + 1} (${uniqueRelayUrls[index]}) search timed out after 1.5s, processed ${eventCount} events, found ${foundInRelay.length} matches`,
`Relay ${index + 1} (${
uniqueRelayUrls[index]
}) search timed out after 1.5s, processed ${eventCount} events, found ${foundInRelay.length} matches`,
);
sub.stop();
resolve(foundInRelay);

7
src/lib/utils/relayDiagnostics.ts

@ -42,9 +42,8 @@ export async function testRelay(url: string): Promise<RelayDiagnostic> { @@ -42,9 +42,8 @@ export async function testRelay(url: string): Promise<RelayDiagnostic> {
responseTime: Date.now() - startTime,
});
}
}
};
});
}
/**
@ -93,7 +92,9 @@ export function logRelayDiagnostics(diagnostics: RelayDiagnostic[]): void { @@ -93,7 +92,9 @@ export function logRelayDiagnostics(diagnostics: RelayDiagnostic[]): void {
console.log(`✅ Working relays (${working.length}):`);
working.forEach((d) => {
console.log(
` - ${d.url}${d.requiresAuth ? " (requires auth)" : ""}${d.responseTime ? ` (${d.responseTime}ms)` : ""}`,
` - ${d.url}${d.requiresAuth ? " (requires auth)" : ""}${
d.responseTime ? ` (${d.responseTime}ms)` : ""
}`,
);
});

84
src/lib/utils/relay_info_service.ts

@ -6,7 +6,7 @@ @@ -6,7 +6,7 @@
function simplifyUrl(url: string): string {
try {
const urlObj = new URL(url);
return urlObj.hostname + (urlObj.port ? `:${urlObj.port}` : '');
return urlObj.hostname + (urlObj.port ? `:${urlObj.port}` : "");
} catch {
// If URL parsing fails, return the original string
return url;
@ -42,18 +42,23 @@ export interface RelayInfoWithMetadata extends RelayInfo { @@ -42,18 +42,23 @@ export interface RelayInfoWithMetadata extends RelayInfo {
* @param url The relay URL to fetch info for
* @returns Promise resolving to relay info or undefined if failed
*/
export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata | undefined> {
export async function fetchRelayInfo(
url: string,
): Promise<RelayInfoWithMetadata | undefined> {
try {
// Convert WebSocket URL to HTTP URL for NIP-11
const httpUrl = url.replace('ws://', 'http://').replace('wss://', 'https://');
const httpUrl = url.replace("ws://", "http://").replace(
"wss://",
"https://",
);
const response = await fetch(httpUrl, {
headers: {
'Accept': 'application/nostr+json',
'User-Agent': 'Alexandria/1.0'
headers: {
"Accept": "application/nostr+json",
"User-Agent": "Alexandria/1.0",
},
// Add timeout to prevent hanging
signal: AbortSignal.timeout(5000)
signal: AbortSignal.timeout(5000),
});
if (!response.ok) {
@ -62,18 +67,18 @@ export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata @@ -62,18 +67,18 @@ export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata
url,
shortUrl: simplifyUrl(url),
hasNip11: false,
triedNip11: true
triedNip11: true,
};
}
const relayInfo = await response.json() as RelayInfo;
return {
...relayInfo,
url,
shortUrl: simplifyUrl(url),
hasNip11: Object.keys(relayInfo).length > 0,
triedNip11: true
triedNip11: true,
};
} catch (error) {
console.warn(`[RelayInfo] Failed to fetch info for ${url}:`, error);
@ -81,7 +86,7 @@ export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata @@ -81,7 +86,7 @@ export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata
url,
shortUrl: simplifyUrl(url),
hasNip11: false,
triedNip11: true
triedNip11: true,
};
}
}
@ -91,16 +96,18 @@ export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata @@ -91,16 +96,18 @@ export async function fetchRelayInfo(url: string): Promise<RelayInfoWithMetadata
* @param urls Array of relay URLs to fetch info for
* @returns Promise resolving to array of relay info objects
*/
export async function fetchRelayInfos(urls: string[]): Promise<RelayInfoWithMetadata[]> {
export async function fetchRelayInfos(
urls: string[],
): Promise<RelayInfoWithMetadata[]> {
if (urls.length === 0) {
return [];
}
const promises = urls.map(url => fetchRelayInfo(url));
const promises = urls.map((url) => fetchRelayInfo(url));
const results = await Promise.allSettled(promises);
return results
.map(result => result.status === 'fulfilled' ? result.value : undefined)
.map((result) => result.status === "fulfilled" ? result.value : undefined)
.filter((info): info is RelayInfoWithMetadata => info !== undefined);
}
@ -110,34 +117,42 @@ export async function fetchRelayInfos(urls: string[]): Promise<RelayInfoWithMeta @@ -110,34 +117,42 @@ export async function fetchRelayInfos(urls: string[]): Promise<RelayInfoWithMeta
* @param relayInfo Optional relay info
* @returns String describing the relay type
*/
export function getRelayTypeLabel(relayUrl: string, relayInfo?: RelayInfoWithMetadata): string {
export function getRelayTypeLabel(
relayUrl: string,
relayInfo?: RelayInfoWithMetadata,
): string {
// Check if it's a local relay
if (relayUrl.includes('localhost') || relayUrl.includes('127.0.0.1')) {
return 'Local';
if (relayUrl.includes("localhost") || relayUrl.includes("127.0.0.1")) {
return "Local";
}
// Check if it's a community relay
if (relayUrl.includes('nostr.band') || relayUrl.includes('noswhere.com') ||
relayUrl.includes('damus.io') || relayUrl.includes('nostr.wine')) {
return 'Community';
if (
relayUrl.includes("nostr.band") || relayUrl.includes("noswhere.com") ||
relayUrl.includes("damus.io") || relayUrl.includes("nostr.wine")
) {
return "Community";
}
// Check if it's a user's relay (likely inbox/outbox)
if (relayUrl.includes('relay.nsec.app') || relayUrl.includes('relay.snort.social')) {
return 'User';
if (
relayUrl.includes("relay.nsec.app") ||
relayUrl.includes("relay.snort.social")
) {
return "User";
}
// Use relay name if available
if (relayInfo?.name) {
return relayInfo.name;
}
// Fallback to domain
try {
const domain = new URL(relayUrl).hostname;
return domain.replace('www.', '');
return domain.replace("www.", "");
} catch {
return 'Unknown';
return "Unknown";
}
}
@ -147,11 +162,14 @@ export function getRelayTypeLabel(relayUrl: string, relayInfo?: RelayInfoWithMet @@ -147,11 +162,14 @@ export function getRelayTypeLabel(relayUrl: string, relayInfo?: RelayInfoWithMet
* @param relayUrl Relay URL as fallback
* @returns Icon URL or undefined
*/
export function getRelayIcon(relayInfo?: RelayInfoWithMetadata, relayUrl?: string): string | undefined {
export function getRelayIcon(
relayInfo?: RelayInfoWithMetadata,
relayUrl?: string,
): string | undefined {
if (relayInfo?.icon) {
return relayInfo.icon;
}
// Generate favicon URL from relay URL
if (relayUrl) {
try {
@ -161,6 +179,6 @@ export function getRelayIcon(relayInfo?: RelayInfoWithMetadata, relayUrl?: strin @@ -161,6 +179,6 @@ export function getRelayIcon(relayInfo?: RelayInfoWithMetadata, relayUrl?: strin
// Invalid URL, return undefined
}
}
return undefined;
}

380
src/lib/utils/relay_management.ts

@ -1,5 +1,11 @@ @@ -1,5 +1,11 @@
import NDK, { NDKKind, NDKRelay, NDKUser } from "@nostr-dev-kit/ndk";
import { searchRelays, secondaryRelays, anonymousRelays, lowbandwidthRelays, localRelays } from "../consts.ts";
import {
anonymousRelays,
localRelays,
lowbandwidthRelays,
searchRelays,
secondaryRelays,
} from "../consts.ts";
import { getRelaySetForNetworkCondition } from "./network_detection.ts";
import { networkCondition } from "../stores/networkStore.ts";
import { get } from "svelte/store";
@ -11,15 +17,15 @@ import { get } from "svelte/store"; @@ -11,15 +17,15 @@ import { get } from "svelte/store";
*/
export function normalizeRelayUrl(url: string): string {
let normalized = url.toLowerCase().trim();
// Ensure protocol is present
if (!normalized.startsWith('ws://') && !normalized.startsWith('wss://')) {
normalized = 'wss://' + normalized;
if (!normalized.startsWith("ws://") && !normalized.startsWith("wss://")) {
normalized = "wss://" + normalized;
}
// Remove trailing slash
normalized = normalized.replace(/\/$/, '');
normalized = normalized.replace(/\/$/, "");
return normalized;
}
@ -58,7 +64,7 @@ export function testLocalRelayConnection( @@ -58,7 +64,7 @@ export function testLocalRelayConnection(
actualUrl?: string;
}> {
// Only test connections on client-side
if (typeof window === 'undefined') {
if (typeof window === "undefined") {
return Promise.resolve({
connected: false,
requiresAuth: false,
@ -66,7 +72,7 @@ export function testLocalRelayConnection( @@ -66,7 +72,7 @@ export function testLocalRelayConnection(
actualUrl: relayUrl,
});
}
return new Promise((resolve) => {
try {
// Ensure the URL is using ws:// protocol for local relays
@ -193,7 +199,7 @@ export function testRemoteRelayConnection( @@ -193,7 +199,7 @@ export function testRemoteRelayConnection(
actualUrl?: string;
}> {
// Only test connections on client-side
if (typeof window === 'undefined') {
if (typeof window === "undefined") {
return Promise.resolve({
connected: false,
requiresAuth: false,
@ -201,12 +207,14 @@ export function testRemoteRelayConnection( @@ -201,12 +207,14 @@ export function testRemoteRelayConnection(
actualUrl: relayUrl,
});
}
return new Promise((resolve) => {
// Ensure the URL is using wss:// protocol for remote relays
const secureUrl = relayUrl.replace(/^ws:\/\//, "wss://");
console.debug(`[relay_management.ts] Testing remote relay connection: ${secureUrl}`);
console.debug(
`[relay_management.ts] Testing remote relay connection: ${secureUrl}`,
);
// Use the existing NDK instance instead of creating a new one
const relay = new NDKRelay(secureUrl, undefined, ndk);
@ -216,7 +224,9 @@ export function testRemoteRelayConnection( @@ -216,7 +224,9 @@ export function testRemoteRelayConnection(
let actualUrl: string | undefined;
const timeout = setTimeout(() => {
console.debug(`[relay_management.ts] Relay ${secureUrl} connection timeout`);
console.debug(
`[relay_management.ts] Relay ${secureUrl} connection timeout`,
);
relay.disconnect();
resolve({
connected: false,
@ -227,7 +237,9 @@ export function testRemoteRelayConnection( @@ -227,7 +237,9 @@ export function testRemoteRelayConnection(
}, 3000);
relay.on("connect", () => {
console.debug(`[relay_management.ts] Relay ${secureUrl} connected successfully`);
console.debug(
`[relay_management.ts] Relay ${secureUrl} connected successfully`,
);
connected = true;
actualUrl = secureUrl;
clearTimeout(timeout);
@ -248,7 +260,9 @@ export function testRemoteRelayConnection( @@ -248,7 +260,9 @@ export function testRemoteRelayConnection(
relay.on("disconnect", () => {
if (!connected) {
console.debug(`[relay_management.ts] Relay ${secureUrl} disconnected without connecting`);
console.debug(
`[relay_management.ts] Relay ${secureUrl} disconnected without connecting`,
);
error = "Connection failed";
clearTimeout(timeout);
resolve({
@ -280,14 +294,12 @@ export function testRelayConnection( @@ -280,14 +294,12 @@ export function testRelayConnection(
actualUrl?: string;
}> {
// Determine if this is a local or remote relay
if (relayUrl.includes('localhost') || relayUrl.includes('127.0.0.1')) {
if (relayUrl.includes("localhost") || relayUrl.includes("127.0.0.1")) {
return testLocalRelayConnection(relayUrl, ndk);
} else {
return testRemoteRelayConnection(relayUrl, ndk);
}
}
/**
* Tests connection to local relays
@ -295,14 +307,17 @@ export function testRelayConnection( @@ -295,14 +307,17 @@ export function testRelayConnection(
* @param ndk NDK instance
* @returns Promise that resolves to array of working local relay URLs
*/
async function testLocalRelays(localRelayUrls: string[], ndk: NDK): Promise<string[]> {
async function testLocalRelays(
localRelayUrls: string[],
ndk: NDK,
): Promise<string[]> {
try {
const workingRelays: string[] = [];
if (localRelayUrls.length === 0) {
return workingRelays;
}
// Test local relays quietly, without logging failures
await Promise.all(
localRelayUrls.map(async (url) => {
@ -310,17 +325,21 @@ async function testLocalRelays(localRelayUrls: string[], ndk: NDK): Promise<stri @@ -310,17 +325,21 @@ async function testLocalRelays(localRelayUrls: string[], ndk: NDK): Promise<stri
const result = await testLocalRelayConnection(url, ndk);
if (result.connected) {
workingRelays.push(url);
console.debug(`[relay_management.ts] Local relay connected: ${url}`);
console.debug(
`[relay_management.ts] Local relay connected: ${url}`,
);
}
// Don't log failures - local relays are optional
} catch {
// Silently ignore local relay failures - they're optional
}
})
}),
);
if (workingRelays.length > 0) {
console.info(`[relay_management.ts] Found ${workingRelays.length} working local relays`);
console.info(
`[relay_management.ts] Found ${workingRelays.length} working local relays`,
);
}
return workingRelays;
} catch {
@ -339,17 +358,17 @@ export async function discoverLocalRelays(ndk: NDK): Promise<string[]> { @@ -339,17 +358,17 @@ export async function discoverLocalRelays(ndk: NDK): Promise<string[]> {
try {
// If no local relays are configured, return empty array
if (localRelays.length === 0) {
console.debug('[relay_management.ts] No local relays configured');
console.debug("[relay_management.ts] No local relays configured");
return [];
}
// Convert wss:// URLs from consts to ws:// for local testing
const localRelayUrls = localRelays.map((url: string) =>
url.replace(/^wss:\/\//, 'ws://')
const localRelayUrls = localRelays.map((url: string) =>
url.replace(/^wss:\/\//, "ws://")
);
const workingRelays = await testLocalRelays(localRelayUrls, ndk);
// If no local relays are working, return empty array
// The network detection logic will provide fallback relays
return workingRelays;
@ -365,7 +384,10 @@ export async function discoverLocalRelays(ndk: NDK): Promise<string[]> { @@ -365,7 +384,10 @@ export async function discoverLocalRelays(ndk: NDK): Promise<string[]> {
* @param user User to fetch local relays for
* @returns Promise that resolves to array of local relay URLs
*/
export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<string[]> {
export async function getUserLocalRelays(
ndk: NDK,
user: NDKUser,
): Promise<string[]> {
try {
const localRelayEvent = await ndk.fetchEvent(
{
@ -376,7 +398,7 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<strin @@ -376,7 +398,7 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<strin
groupable: false,
skipVerification: false,
skipValidation: false,
}
},
);
if (!localRelayEvent) {
@ -385,14 +407,17 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<strin @@ -385,14 +407,17 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<strin
const localRelays: string[] = [];
localRelayEvent.tags.forEach((tag) => {
if (tag[0] === 'r' && tag[1]) {
if (tag[0] === "r" && tag[1]) {
localRelays.push(tag[1]);
}
});
return localRelays;
} catch (error) {
console.info('[relay_management.ts] Error fetching user local relays:', error);
console.info(
"[relay_management.ts] Error fetching user local relays:",
error,
);
return [];
}
}
@ -403,7 +428,10 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<strin @@ -403,7 +428,10 @@ export async function getUserLocalRelays(ndk: NDK, user: NDKUser): Promise<strin
* @param user User to fetch blocked relays for
* @returns Promise that resolves to array of blocked relay URLs
*/
export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<string[]> {
export async function getUserBlockedRelays(
ndk: NDK,
user: NDKUser,
): Promise<string[]> {
try {
const blockedRelayEvent = await ndk.fetchEvent(
{
@ -414,7 +442,7 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<str @@ -414,7 +442,7 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<str
groupable: false,
skipVerification: false,
skipValidation: false,
}
},
);
if (!blockedRelayEvent) {
@ -423,14 +451,17 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<str @@ -423,14 +451,17 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<str
const blockedRelays: string[] = [];
blockedRelayEvent.tags.forEach((tag) => {
if (tag[0] === 'r' && tag[1]) {
if (tag[0] === "r" && tag[1]) {
blockedRelays.push(tag[1]);
}
});
return blockedRelays;
} catch (error) {
console.info('[relay_management.ts] Error fetching user blocked relays:', error);
console.info(
"[relay_management.ts] Error fetching user blocked relays:",
error,
);
return [];
}
}
@ -441,9 +472,15 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<str @@ -441,9 +472,15 @@ export async function getUserBlockedRelays(ndk: NDK, user: NDKUser): Promise<str
* @param user User to fetch outbox relays for
* @returns Promise that resolves to array of outbox relay URLs
*/
export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<string[]> {
export async function getUserOutboxRelays(
ndk: NDK,
user: NDKUser,
): Promise<string[]> {
try {
console.debug('[relay_management.ts] Fetching outbox relays for user:', user.pubkey);
console.debug(
"[relay_management.ts] Fetching outbox relays for user:",
user.pubkey,
);
const relayList = await ndk.fetchEvent(
{
kinds: [10002],
@ -453,36 +490,47 @@ export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<stri @@ -453,36 +490,47 @@ export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<stri
groupable: false,
skipVerification: false,
skipValidation: false,
}
},
);
if (!relayList) {
console.debug('[relay_management.ts] No relay list found for user');
console.debug("[relay_management.ts] No relay list found for user");
return [];
}
console.debug('[relay_management.ts] Found relay list event:', relayList.id);
console.debug('[relay_management.ts] Relay list tags:', relayList.tags);
console.debug(
"[relay_management.ts] Found relay list event:",
relayList.id,
);
console.debug("[relay_management.ts] Relay list tags:", relayList.tags);
const outboxRelays: string[] = [];
relayList.tags.forEach((tag) => {
console.debug('[relay_management.ts] Processing tag:', tag);
if (tag[0] === 'w' && tag[1]) {
console.debug("[relay_management.ts] Processing tag:", tag);
if (tag[0] === "w" && tag[1]) {
outboxRelays.push(tag[1]);
console.debug('[relay_management.ts] Added outbox relay:', tag[1]);
} else if (tag[0] === 'r' && tag[1]) {
console.debug("[relay_management.ts] Added outbox relay:", tag[1]);
} else if (tag[0] === "r" && tag[1]) {
// Some relay lists use 'r' for both inbox and outbox
outboxRelays.push(tag[1]);
console.debug('[relay_management.ts] Added relay (r tag):', tag[1]);
console.debug("[relay_management.ts] Added relay (r tag):", tag[1]);
} else {
console.debug('[relay_management.ts] Skipping tag:', tag[0], 'value:', tag[1]);
console.debug(
"[relay_management.ts] Skipping tag:",
tag[0],
"value:",
tag[1],
);
}
});
console.debug('[relay_management.ts] Final outbox relays:', outboxRelays);
console.debug("[relay_management.ts] Final outbox relays:", outboxRelays);
return outboxRelays;
} catch (error) {
console.info('[relay_management.ts] Error fetching user outbox relays:', error);
console.info(
"[relay_management.ts] Error fetching user outbox relays:",
error,
);
return [];
}
}
@ -494,45 +542,65 @@ export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<stri @@ -494,45 +542,65 @@ export async function getUserOutboxRelays(ndk: NDK, user: NDKUser): Promise<stri
export async function getExtensionRelays(): Promise<string[]> {
try {
// Check if we're in a browser environment with extension support
if (typeof window === 'undefined' || !globalThis.nostr) {
console.debug('[relay_management.ts] No globalThis.nostr available');
if (typeof window === "undefined" || !globalThis.nostr) {
console.debug("[relay_management.ts] No globalThis.nostr available");
return [];
}
console.debug('[relay_management.ts] Extension available, checking for getRelays()');
console.debug(
"[relay_management.ts] Extension available, checking for getRelays()",
);
const extensionRelays: string[] = [];
// Try to get relays from the extension's API
// Different extensions may expose their relay config differently
if (globalThis.nostr.getRelays) {
console.debug('[relay_management.ts] getRelays() method found, calling it...');
console.debug(
"[relay_management.ts] getRelays() method found, calling it...",
);
try {
const relays = await globalThis.nostr.getRelays();
console.debug('[relay_management.ts] getRelays() returned:', relays);
if (relays && typeof relays === 'object') {
console.debug("[relay_management.ts] getRelays() returned:", relays);
if (relays && typeof relays === "object") {
// Convert relay object to array of URLs
const relayUrls = Object.keys(relays);
extensionRelays.push(...relayUrls);
console.debug('[relay_management.ts] Got relays from extension:', relayUrls);
console.debug(
"[relay_management.ts] Got relays from extension:",
relayUrls,
);
}
} catch (error) {
console.debug('[relay_management.ts] Extension getRelays() failed:', error);
console.debug(
"[relay_management.ts] Extension getRelays() failed:",
error,
);
}
} else {
console.debug('[relay_management.ts] getRelays() method not found on globalThis.nostr');
console.debug(
"[relay_management.ts] getRelays() method not found on globalThis.nostr",
);
}
// If getRelays() didn't work, try alternative methods
if (extensionRelays.length === 0) {
// Some extensions might expose relays through other methods
// This is a fallback for extensions that don't expose getRelays()
console.debug('[relay_management.ts] Extension does not expose relay configuration');
console.debug(
"[relay_management.ts] Extension does not expose relay configuration",
);
}
console.debug('[relay_management.ts] Final extension relays:', extensionRelays);
console.debug(
"[relay_management.ts] Final extension relays:",
extensionRelays,
);
return extensionRelays;
} catch (error) {
console.debug('[relay_management.ts] Error getting extension relays:', error);
console.debug(
"[relay_management.ts] Error getting extension relays:",
error,
);
return [];
}
}
@ -547,36 +615,59 @@ async function testRelaySet(relayUrls: string[], ndk: NDK): Promise<string[]> { @@ -547,36 +615,59 @@ async function testRelaySet(relayUrls: string[], ndk: NDK): Promise<string[]> {
const workingRelays: string[] = [];
const maxConcurrent = 2; // Reduce to 2 relays at a time to avoid overwhelming them
console.debug(`[relay_management.ts] Testing ${relayUrls.length} relays in batches of ${maxConcurrent}`);
console.debug(
`[relay_management.ts] Testing ${relayUrls.length} relays in batches of ${maxConcurrent}`,
);
console.debug(`[relay_management.ts] Relay URLs to test:`, relayUrls);
for (let i = 0; i < relayUrls.length; i += maxConcurrent) {
const batch = relayUrls.slice(i, i + maxConcurrent);
console.debug(`[relay_management.ts] Testing batch ${Math.floor(i/maxConcurrent) + 1}:`, batch);
console.debug(
`[relay_management.ts] Testing batch ${
Math.floor(i / maxConcurrent) + 1
}:`,
batch,
);
const batchPromises = batch.map(async (url) => {
try {
console.debug(`[relay_management.ts] Testing relay: ${url}`);
const result = await testRelayConnection(url, ndk);
console.debug(`[relay_management.ts] Relay ${url} test result:`, result);
console.debug(
`[relay_management.ts] Relay ${url} test result:`,
result,
);
return result.connected ? url : null;
} catch (error) {
console.debug(`[relay_management.ts] Failed to test relay ${url}:`, error);
console.debug(
`[relay_management.ts] Failed to test relay ${url}:`,
error,
);
return null;
}
});
const batchResults = await Promise.allSettled(batchPromises);
const batchWorkingRelays = batchResults
.filter((result): result is PromiseFulfilledResult<string | null> => result.status === 'fulfilled')
.map(result => result.value)
.filter((result): result is PromiseFulfilledResult<string | null> =>
result.status === "fulfilled"
)
.map((result) => result.value)
.filter((url): url is string => url !== null);
console.debug(`[relay_management.ts] Batch ${Math.floor(i/maxConcurrent) + 1} working relays:`, batchWorkingRelays);
console.debug(
`[relay_management.ts] Batch ${
Math.floor(i / maxConcurrent) + 1
} working relays:`,
batchWorkingRelays,
);
workingRelays.push(...batchWorkingRelays);
}
console.debug(`[relay_management.ts] Total working relays after testing:`, workingRelays);
console.debug(
`[relay_management.ts] Total working relays after testing:`,
workingRelays,
);
return workingRelays;
}
@ -588,13 +679,19 @@ async function testRelaySet(relayUrls: string[], ndk: NDK): Promise<string[]> { @@ -588,13 +679,19 @@ async function testRelaySet(relayUrls: string[], ndk: NDK): Promise<string[]> {
*/
export async function buildCompleteRelaySet(
ndk: NDK,
user: NDKUser | null
user: NDKUser | null,
): Promise<{ inboxRelays: string[]; outboxRelays: string[] }> {
console.debug('[relay_management.ts] buildCompleteRelaySet: Starting with user:', user?.pubkey || 'null');
console.debug(
"[relay_management.ts] buildCompleteRelaySet: Starting with user:",
user?.pubkey || "null",
);
// Discover local relays first
const discoveredLocalRelays = await discoverLocalRelays(ndk);
console.debug('[relay_management.ts] buildCompleteRelaySet: Discovered local relays:', discoveredLocalRelays);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: Discovered local relays:",
discoveredLocalRelays,
);
// Get user-specific relays if available
let userOutboxRelays: string[] = [];
@ -603,42 +700,75 @@ export async function buildCompleteRelaySet( @@ -603,42 +700,75 @@ export async function buildCompleteRelaySet(
let extensionRelays: string[] = [];
if (user) {
console.debug('[relay_management.ts] buildCompleteRelaySet: Fetching user-specific relays for:', user.pubkey);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: Fetching user-specific relays for:",
user.pubkey,
);
try {
userOutboxRelays = await getUserOutboxRelays(ndk, user);
console.debug('[relay_management.ts] buildCompleteRelaySet: User outbox relays:', userOutboxRelays);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: User outbox relays:",
userOutboxRelays,
);
} catch (error) {
console.debug('[relay_management.ts] Error fetching user outbox relays:', error);
console.debug(
"[relay_management.ts] Error fetching user outbox relays:",
error,
);
}
try {
userLocalRelays = await getUserLocalRelays(ndk, user);
console.debug('[relay_management.ts] buildCompleteRelaySet: User local relays:', userLocalRelays);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: User local relays:",
userLocalRelays,
);
} catch (error) {
console.debug('[relay_management.ts] Error fetching user local relays:', error);
console.debug(
"[relay_management.ts] Error fetching user local relays:",
error,
);
}
try {
blockedRelays = await getUserBlockedRelays(ndk, user);
console.debug('[relay_management.ts] buildCompleteRelaySet: User blocked relays:', blockedRelays);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: User blocked relays:",
blockedRelays,
);
} catch {
// Silently ignore blocked relay fetch errors
}
try {
extensionRelays = await getExtensionRelays();
console.debug('[relay_management.ts] Extension relays gathered:', extensionRelays);
console.debug(
"[relay_management.ts] Extension relays gathered:",
extensionRelays,
);
} catch (error) {
console.debug('[relay_management.ts] Error fetching extension relays:', error);
console.debug(
"[relay_management.ts] Error fetching extension relays:",
error,
);
}
} else {
console.debug('[relay_management.ts] buildCompleteRelaySet: No user provided, skipping user-specific relays');
console.debug(
"[relay_management.ts] buildCompleteRelaySet: No user provided, skipping user-specific relays",
);
}
// Build initial relay sets and deduplicate
const finalInboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userLocalRelays]);
const finalOutboxRelays = deduplicateRelayUrls([...discoveredLocalRelays, ...userOutboxRelays, ...extensionRelays]);
const finalInboxRelays = deduplicateRelayUrls([
...discoveredLocalRelays,
...userLocalRelays,
]);
const finalOutboxRelays = deduplicateRelayUrls([
...discoveredLocalRelays,
...userOutboxRelays,
...extensionRelays,
]);
// Test relays and filter out non-working ones
let testedInboxRelays: string[] = [];
@ -654,21 +784,27 @@ export async function buildCompleteRelaySet( @@ -654,21 +784,27 @@ export async function buildCompleteRelaySet(
// If no relays passed testing, use remote relays without testing
if (testedInboxRelays.length === 0 && testedOutboxRelays.length === 0) {
const remoteRelays = deduplicateRelayUrls([...secondaryRelays, ...searchRelays]);
const remoteRelays = deduplicateRelayUrls([
...secondaryRelays,
...searchRelays,
]);
return {
inboxRelays: remoteRelays,
outboxRelays: remoteRelays
outboxRelays: remoteRelays,
};
}
// Always include some remote relays as fallback, even when local relays are working
const fallbackRelays = deduplicateRelayUrls([...anonymousRelays, ...secondaryRelays]);
const fallbackRelays = deduplicateRelayUrls([
...anonymousRelays,
...secondaryRelays,
]);
// Use tested relays and add fallback relays
const inboxRelays = testedInboxRelays.length > 0
const inboxRelays = testedInboxRelays.length > 0
? deduplicateRelayUrls([...testedInboxRelays, ...fallbackRelays])
: deduplicateRelayUrls(fallbackRelays);
const outboxRelays = testedOutboxRelays.length > 0
const outboxRelays = testedOutboxRelays.length > 0
? deduplicateRelayUrls([...testedOutboxRelays, ...fallbackRelays])
: deduplicateRelayUrls(fallbackRelays);
@ -678,27 +814,51 @@ export async function buildCompleteRelaySet( @@ -678,27 +814,51 @@ export async function buildCompleteRelaySet(
currentNetworkCondition,
discoveredLocalRelays,
lowbandwidthRelays,
{ inboxRelays, outboxRelays }
{ inboxRelays, outboxRelays },
);
// Filter out blocked relays and deduplicate final sets
const finalRelaySet = {
inboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.inboxRelays.filter((r: string) => !blockedRelays.includes(r))),
outboxRelays: deduplicateRelayUrls(networkOptimizedRelaySet.outboxRelays.filter((r: string) => !blockedRelays.includes(r)))
inboxRelays: deduplicateRelayUrls(
networkOptimizedRelaySet.inboxRelays.filter((r: string) =>
!blockedRelays.includes(r)
),
),
outboxRelays: deduplicateRelayUrls(
networkOptimizedRelaySet.outboxRelays.filter((r: string) =>
!blockedRelays.includes(r)
),
),
};
// Ensure we always have at least some relays
if (finalRelaySet.inboxRelays.length === 0 && finalRelaySet.outboxRelays.length === 0) {
console.warn('[relay_management.ts] No relays available, using anonymous relays as final fallback');
if (
finalRelaySet.inboxRelays.length === 0 &&
finalRelaySet.outboxRelays.length === 0
) {
console.warn(
"[relay_management.ts] No relays available, using anonymous relays as final fallback",
);
return {
inboxRelays: deduplicateRelayUrls(anonymousRelays),
outboxRelays: deduplicateRelayUrls(anonymousRelays)
outboxRelays: deduplicateRelayUrls(anonymousRelays),
};
}
console.debug('[relay_management.ts] buildCompleteRelaySet: Final relay sets - inbox:', finalRelaySet.inboxRelays.length, 'outbox:', finalRelaySet.outboxRelays.length);
console.debug('[relay_management.ts] buildCompleteRelaySet: Final inbox relays:', finalRelaySet.inboxRelays);
console.debug('[relay_management.ts] buildCompleteRelaySet: Final outbox relays:', finalRelaySet.outboxRelays);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: Final relay sets - inbox:",
finalRelaySet.inboxRelays.length,
"outbox:",
finalRelaySet.outboxRelays.length,
);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: Final inbox relays:",
finalRelaySet.inboxRelays,
);
console.debug(
"[relay_management.ts] buildCompleteRelaySet: Final outbox relays:",
finalRelaySet.outboxRelays,
);
return finalRelaySet;
}
}

16
src/lib/utils/search_result_formatter.ts

@ -6,17 +6,19 @@ export class SearchResultFormatter { @@ -6,17 +6,19 @@ export class SearchResultFormatter {
/**
* Formats a result message based on search count and type
*/
formatResultMessage(searchResultCount: number | null, searchResultType: string | null): string {
formatResultMessage(
searchResultCount: number | null,
searchResultType: string | null,
): string {
if (searchResultCount === 0) {
return "Search completed. No results found.";
}
const typeLabel =
searchResultType === "n"
? "profile"
: searchResultType === "nip05"
? "NIP-05 address"
: "event";
const typeLabel = searchResultType === "n"
? "profile"
: searchResultType === "nip05"
? "NIP-05 address"
: "event";
const countLabel = searchResultType === "n" ? "profiles" : "events";
return searchResultCount === 1

14
src/lib/utils/search_utility.ts

@ -13,13 +13,13 @@ export { searchBySubscription } from "./subscription_search"; @@ -13,13 +13,13 @@ export { searchBySubscription } from "./subscription_search";
export { searchEvent, searchNip05 } from "./event_search";
export { checkCommunity } from "./community_checker";
export {
wellKnownUrl,
lnurlpWellKnownUrl,
isValidNip05Address,
normalizeSearchTerm,
fieldMatches,
nip05Matches,
COMMON_DOMAINS,
isEmojiReaction,
createProfileFromEvent,
fieldMatches,
isEmojiReaction,
isValidNip05Address,
lnurlpWellKnownUrl,
nip05Matches,
normalizeSearchTerm,
wellKnownUrl,
} from "./search_utils";

272
src/lib/utils/subscription_search.ts

@ -2,28 +2,28 @@ @@ -2,28 +2,28 @@
import { ndkInstance } from "../ndk.ts";
import { getMatchingTags, getNpubFromNip05 } from "./nostrUtils.ts";
import { nip19 } from "./nostrUtils.ts";
import { NDKRelaySet, NDKEvent } from "@nostr-dev-kit/ndk";
import { NDKEvent, NDKRelaySet } from "@nostr-dev-kit/ndk";
import { searchCache } from "./searchCache.ts";
import { communityRelays, searchRelays } from "../consts.ts";
import { get } from "svelte/store";
import type {
SearchCallbacks,
SearchFilter,
SearchResult,
SearchSubscriptionType,
SearchFilter,
SearchCallbacks,
} from "./search_types.ts";
import {
fieldMatches,
nip05Matches,
COMMON_DOMAINS,
fieldMatches,
isEmojiReaction,
nip05Matches,
} from "./search_utils.ts";
import { TIMEOUTS, SEARCH_LIMITS } from "./search_constants.ts";
import { SEARCH_LIMITS, TIMEOUTS } from "./search_constants.ts";
import { activeInboxRelays, activeOutboxRelays } from "../ndk.ts";
// Helper function to normalize URLs for comparison
const normalizeUrl = (url: string): string => {
return url.replace(/\/$/, ''); // Remove trailing slash
return url.replace(/\/$/, ""); // Remove trailing slash
};
/**
@ -62,7 +62,9 @@ export async function searchBySubscription( @@ -62,7 +62,9 @@ export async function searchBySubscription(
// AI-NOTE: 2025-01-24 - For profile searches, return cached results immediately
// The EventSearch component now handles cache checking before calling this function
if (searchType === "n") {
console.log("subscription_search: Returning cached profile result immediately");
console.log(
"subscription_search: Returning cached profile result immediately",
);
return cachedResult;
} else {
return cachedResult;
@ -147,8 +149,10 @@ export async function searchBySubscription( @@ -147,8 +149,10 @@ export async function searchBySubscription(
// AI-NOTE: 2025-01-08 - For profile searches, return immediately when found
// but still start background search for second-order results
if (searchType === "n") {
console.log("subscription_search: Profile found, returning immediately but starting background second-order search");
console.log(
"subscription_search: Profile found, returning immediately but starting background second-order search",
);
// Start Phase 2 in background for second-order results
searchOtherRelaysInBackground(
searchType,
@ -157,9 +161,11 @@ export async function searchBySubscription( @@ -157,9 +161,11 @@ export async function searchBySubscription(
callbacks,
cleanup,
);
const elapsed = Date.now() - startTime;
console.log(`subscription_search: Profile search completed in ${elapsed}ms`);
console.log(
`subscription_search: Profile search completed in ${elapsed}ms`,
);
return immediateResult;
}
@ -177,7 +183,7 @@ export async function searchBySubscription( @@ -177,7 +183,7 @@ export async function searchBySubscription(
console.log(
"subscription_search: No results from primary relay",
);
// AI-NOTE: 2025-01-08 - For profile searches, if no results found in search relays,
// try all relays as fallback
if (searchType === "n") {
@ -185,20 +191,23 @@ export async function searchBySubscription( @@ -185,20 +191,23 @@ export async function searchBySubscription(
"subscription_search: No profile found in search relays, trying all relays",
);
// Try with all relays as fallback
const allRelaySet = new NDKRelaySet(new Set(Array.from(ndk.pool.relays.values())) as any, ndk);
const allRelaySet = new NDKRelaySet(
new Set(Array.from(ndk.pool.relays.values())) as any,
ndk,
);
try {
const fallbackEvents = await ndk.fetchEvents(
searchFilter.filter,
{ closeOnEose: true },
allRelaySet,
);
console.log(
"subscription_search: Fallback search returned",
fallbackEvents.size,
"events",
);
processPrimaryRelayResults(
fallbackEvents,
searchType,
@ -208,7 +217,7 @@ export async function searchBySubscription( @@ -208,7 +217,7 @@ export async function searchBySubscription(
abortSignal,
cleanup,
);
if (hasResults(searchState, searchType)) {
console.log(
"subscription_search: Found profile in fallback search, returning immediately",
@ -220,21 +229,31 @@ export async function searchBySubscription( @@ -220,21 +229,31 @@ export async function searchBySubscription(
);
searchCache.set(searchType, normalizedSearchTerm, fallbackResult);
const elapsed = Date.now() - startTime;
console.log(`subscription_search: Profile search completed in ${elapsed}ms (fallback)`);
console.log(
`subscription_search: Profile search completed in ${elapsed}ms (fallback)`,
);
return fallbackResult;
}
} catch (fallbackError) {
console.error("subscription_search: Fallback search failed:", fallbackError);
console.error(
"subscription_search: Fallback search failed:",
fallbackError,
);
}
console.log(
"subscription_search: Profile not found in any relays, returning empty result",
);
const emptyResult = createEmptySearchResult(searchType, normalizedSearchTerm);
const emptyResult = createEmptySearchResult(
searchType,
normalizedSearchTerm,
);
// AI-NOTE: 2025-01-08 - Don't cache empty profile results as they may be due to search issues
// rather than the profile not existing
const elapsed = Date.now() - startTime;
console.log(`subscription_search: Profile search completed in ${elapsed}ms (not found)`);
console.log(
`subscription_search: Profile search completed in ${elapsed}ms (not found)`,
);
return emptyResult;
} else {
console.log(
@ -262,13 +281,15 @@ export async function searchBySubscription( @@ -262,13 +281,15 @@ export async function searchBySubscription(
callbacks,
cleanup,
);
// AI-NOTE: 2025-01-08 - Log performance for non-profile searches
if (searchType !== "n") {
const elapsed = Date.now() - startTime;
console.log(`subscription_search: ${searchType} search completed in ${elapsed}ms`);
console.log(
`subscription_search: ${searchType} search completed in ${elapsed}ms`,
);
}
return result;
}
@ -324,7 +345,10 @@ async function createSearchFilter( @@ -324,7 +345,10 @@ async function createSearchFilter(
switch (searchType) {
case "d": {
const dFilter = {
filter: { "#d": [normalizedSearchTerm], limit: SEARCH_LIMITS.GENERAL_CONTENT },
filter: {
"#d": [normalizedSearchTerm],
limit: SEARCH_LIMITS.GENERAL_CONTENT,
},
subscriptionType: "d-tag",
};
console.log("subscription_search: Created d-tag filter:", dFilter);
@ -332,7 +356,10 @@ async function createSearchFilter( @@ -332,7 +356,10 @@ async function createSearchFilter(
}
case "t": {
const tFilter = {
filter: { "#t": [normalizedSearchTerm], limit: SEARCH_LIMITS.GENERAL_CONTENT },
filter: {
"#t": [normalizedSearchTerm],
limit: SEARCH_LIMITS.GENERAL_CONTENT,
},
subscriptionType: "t-tag",
};
console.log("subscription_search: Created t-tag filter:", tFilter);
@ -412,11 +439,14 @@ function createPrimaryRelaySet( @@ -412,11 +439,14 @@ function createPrimaryRelaySet(
): NDKRelaySet {
// Debug: Log all relays in NDK pool
const poolRelays = Array.from(ndk.pool.relays.values());
console.debug('subscription_search: NDK pool relays:', poolRelays.map((r: any) => r.url));
console.debug(
"subscription_search: NDK pool relays:",
poolRelays.map((r: any) => r.url),
);
// AI-NOTE: 2025-01-24 - Use ALL available relays for comprehensive search coverage
// This ensures searches don't fail due to missing relays and provides maximum event discovery
if (searchType === "n") {
// For profile searches, prioritize search relays for speed but include all relays
const searchRelaySet = poolRelays.filter(
@ -426,29 +456,43 @@ function createPrimaryRelaySet( @@ -426,29 +456,43 @@ function createPrimaryRelaySet(
normalizeUrl(relay.url) === normalizeUrl(searchRelay),
),
);
if (searchRelaySet.length > 0) {
console.debug('subscription_search: Profile search - using search relays for speed:', searchRelaySet.map((r: any) => r.url));
console.debug(
"subscription_search: Profile search - using search relays for speed:",
searchRelaySet.map((r: any) => r.url),
);
// Still include all relays for comprehensive coverage
console.debug('subscription_search: Profile search - also including all relays for comprehensive coverage');
console.debug(
"subscription_search: Profile search - also including all relays for comprehensive coverage",
);
return new NDKRelaySet(new Set(poolRelays) as any, ndk);
} else {
// Use all relays if search relays not available
console.debug('subscription_search: Profile search - using all relays:', poolRelays.map((r: any) => r.url));
console.debug(
"subscription_search: Profile search - using all relays:",
poolRelays.map((r: any) => r.url),
);
return new NDKRelaySet(new Set(poolRelays) as any, ndk);
}
} else {
// For all other searches, use ALL available relays for maximum coverage
const activeRelays = [...get(activeInboxRelays), ...get(activeOutboxRelays)];
console.debug('subscription_search: Active relay stores:', {
const activeRelays = [
...get(activeInboxRelays),
...get(activeOutboxRelays),
];
console.debug("subscription_search: Active relay stores:", {
inboxRelays: get(activeInboxRelays),
outboxRelays: get(activeOutboxRelays),
activeRelays
activeRelays,
});
// AI-NOTE: 2025-01-24 - Use all pool relays instead of filtering to active relays only
// This ensures we don't miss events that might be on other relays
console.debug('subscription_search: Using ALL pool relays for comprehensive search coverage:', poolRelays.map((r: any) => r.url));
console.debug(
"subscription_search: Using ALL pool relays for comprehensive search coverage:",
poolRelays.map((r: any) => r.url),
);
return new NDKRelaySet(new Set(poolRelays) as any, ndk);
}
}
@ -620,12 +664,11 @@ function createSearchResult( @@ -620,12 +664,11 @@ function createSearchResult(
normalizedSearchTerm: string,
): SearchResult {
return {
events:
searchType === "n"
? searchState.foundProfiles
: searchType === "t"
? searchState.tTagEvents
: searchState.firstOrderEvents,
events: searchType === "n"
? searchState.foundProfiles
: searchType === "t"
? searchState.tTagEvents
: searchState.firstOrderEvents,
secondOrder: [],
tTagEvents: [],
eventIds: searchState.eventIds,
@ -653,9 +696,11 @@ function searchOtherRelaysInBackground( @@ -653,9 +696,11 @@ function searchOtherRelaysInBackground(
new Set(Array.from(ndk.pool.relays.values())),
ndk,
);
console.debug('subscription_search: Background search using ALL relays:',
Array.from(ndk.pool.relays.values()).map((r: any) => r.url));
console.debug(
"subscription_search: Background search using ALL relays:",
Array.from(ndk.pool.relays.values()).map((r: any) => r.url),
);
// Subscribe to events from other relays
const sub = ndk.subscribe(
@ -758,7 +803,10 @@ function processProfileEoseResults( @@ -758,7 +803,10 @@ function processProfileEoseResults(
) {
const targetPubkey = dedupedProfiles[0]?.pubkey;
if (targetPubkey) {
console.log("subscription_search: Triggering second-order search for npub-specific profile:", targetPubkey);
console.log(
"subscription_search: Triggering second-order search for npub-specific profile:",
targetPubkey,
);
performSecondOrderSearchInBackground(
"n",
dedupedProfiles,
@ -768,13 +816,18 @@ function processProfileEoseResults( @@ -768,13 +816,18 @@ function processProfileEoseResults(
callbacks,
);
} else {
console.log("subscription_search: No targetPubkey found for second-order search");
console.log(
"subscription_search: No targetPubkey found for second-order search",
);
}
} else if (searchFilter.subscriptionType === "profile") {
// For general profile searches, perform second-order search for each found profile
for (const profile of dedupedProfiles) {
if (profile.pubkey) {
console.log("subscription_search: Triggering second-order search for general profile:", profile.pubkey);
console.log(
"subscription_search: Triggering second-order search for general profile:",
profile.pubkey,
);
performSecondOrderSearchInBackground(
"n",
dedupedProfiles,
@ -786,7 +839,10 @@ function processProfileEoseResults( @@ -786,7 +839,10 @@ function processProfileEoseResults(
}
}
} else {
console.log("subscription_search: No second-order search triggered for subscription type:", searchFilter.subscriptionType);
console.log(
"subscription_search: No second-order search triggered for subscription type:",
searchFilter.subscriptionType,
);
}
return {
@ -896,7 +952,12 @@ async function performSecondOrderSearchInBackground( @@ -896,7 +952,12 @@ async function performSecondOrderSearchInBackground(
callbacks?: SearchCallbacks,
) {
try {
console.log("subscription_search: Starting second-order search for", searchType, "with targetPubkey:", targetPubkey);
console.log(
"subscription_search: Starting second-order search for",
searchType,
"with targetPubkey:",
targetPubkey,
);
const ndk = get(ndkInstance);
let allSecondOrderEvents: NDKEvent[] = [];
@ -910,20 +971,30 @@ async function performSecondOrderSearchInBackground( @@ -910,20 +971,30 @@ async function performSecondOrderSearchInBackground(
const searchPromise = (async () => {
if (searchType === "n" && targetPubkey) {
console.log("subscription_search: Searching for events mentioning pubkey:", targetPubkey);
console.log(
"subscription_search: Searching for events mentioning pubkey:",
targetPubkey,
);
// AI-NOTE: 2025-01-24 - Use only active relays for second-order profile search to prevent hanging
const activeRelays = [...get(activeInboxRelays), ...get(activeOutboxRelays)];
const activeRelays = [
...get(activeInboxRelays),
...get(activeOutboxRelays),
];
const availableRelays = activeRelays
.map(url => ndk.pool.relays.get(url))
.map((url) => ndk.pool.relays.get(url))
.filter((relay): relay is any => relay !== undefined);
const relaySet = new NDKRelaySet(
new Set(availableRelays),
ndk
ndk,
);
console.log(
"subscription_search: Using",
activeRelays.length,
"active relays for second-order search",
);
console.log("subscription_search: Using", activeRelays.length, "active relays for second-order search");
// Search for events that mention this pubkey via p-tags
const pTagFilter = { "#p": [targetPubkey], limit: 50 }; // AI-NOTE: 2025-01-24 - Limit results to prevent hanging
const pTagEvents = await ndk.fetchEvents(
@ -931,8 +1002,13 @@ async function performSecondOrderSearchInBackground( @@ -931,8 +1002,13 @@ async function performSecondOrderSearchInBackground(
{ closeOnEose: true },
relaySet,
);
console.log("subscription_search: Found", pTagEvents.size, "events with p-tag for", targetPubkey);
console.log(
"subscription_search: Found",
pTagEvents.size,
"events with p-tag for",
targetPubkey,
);
// AI-NOTE: 2025-01-24 - Also search for events written by this pubkey with limit
const authorFilter = { authors: [targetPubkey], limit: 50 }; // AI-NOTE: 2025-01-24 - Limit results to prevent hanging
const authorEvents = await ndk.fetchEvents(
@ -940,14 +1016,27 @@ async function performSecondOrderSearchInBackground( @@ -940,14 +1016,27 @@ async function performSecondOrderSearchInBackground(
{ closeOnEose: true },
relaySet,
);
console.log("subscription_search: Found", authorEvents.size, "events written by", targetPubkey);
console.log(
"subscription_search: Found",
authorEvents.size,
"events written by",
targetPubkey,
);
// Filter out unwanted events from both sets
const filteredPTagEvents = filterUnwantedEvents(Array.from(pTagEvents));
const filteredAuthorEvents = filterUnwantedEvents(Array.from(authorEvents));
console.log("subscription_search: After filtering unwanted events:", filteredPTagEvents.length, "p-tag events,", filteredAuthorEvents.length, "author events");
const filteredAuthorEvents = filterUnwantedEvents(
Array.from(authorEvents),
);
console.log(
"subscription_search: After filtering unwanted events:",
filteredPTagEvents.length,
"p-tag events,",
filteredAuthorEvents.length,
"author events",
);
// Combine both sets of events
allSecondOrderEvents = [...filteredPTagEvents, ...filteredAuthorEvents];
} else if (searchType === "d") {
@ -959,17 +1048,23 @@ async function performSecondOrderSearchInBackground( @@ -959,17 +1048,23 @@ async function performSecondOrderSearchInBackground(
const [eTagEvents, aTagEvents] = await Promise.all([
eventIds.size > 0
? ndk.fetchEvents(
{ "#e": Array.from(eventIds), limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS },
{ closeOnEose: true },
relaySet,
)
{
"#e": Array.from(eventIds),
limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS,
},
{ closeOnEose: true },
relaySet,
)
: Promise.resolve([]),
addresses.size > 0
? ndk.fetchEvents(
{ "#a": Array.from(addresses), limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS },
{ closeOnEose: true },
relaySet,
)
{
"#a": Array.from(addresses),
limit: SEARCH_LIMITS.SECOND_ORDER_RESULTS,
},
{ closeOnEose: true },
relaySet,
)
: Promise.resolve([]),
]);
// Filter out unwanted events
@ -1003,17 +1098,20 @@ async function performSecondOrderSearchInBackground( @@ -1003,17 +1098,20 @@ async function performSecondOrderSearchInBackground(
.sort((a, b) => (b.created_at || 0) - (a.created_at || 0))
.slice(0, SEARCH_LIMITS.SECOND_ORDER_RESULTS);
console.log("subscription_search: Second-order search completed with", sortedSecondOrder.length, "results");
console.log(
"subscription_search: Second-order search completed with",
sortedSecondOrder.length,
"results",
);
// Update the search results with second-order events
const result: SearchResult = {
events: firstOrderEvents,
secondOrder: sortedSecondOrder,
tTagEvents: [],
eventIds:
searchType === "n"
? new Set(firstOrderEvents.map((p) => p.id))
: eventIds,
eventIds: searchType === "n"
? new Set(firstOrderEvents.map((p) => p.id))
: eventIds,
addresses: searchType === "n" ? new Set() : addresses,
searchType: searchType,
searchTerm: "", // This will be set by the caller
@ -1021,10 +1119,16 @@ async function performSecondOrderSearchInBackground( @@ -1021,10 +1119,16 @@ async function performSecondOrderSearchInBackground(
// Notify UI of updated results
if (callbacks?.onSecondOrderUpdate) {
console.log("subscription_search: Calling onSecondOrderUpdate callback with", sortedSecondOrder.length, "second-order events");
console.log(
"subscription_search: Calling onSecondOrderUpdate callback with",
sortedSecondOrder.length,
"second-order events",
);
callbacks.onSecondOrderUpdate(result);
} else {
console.log("subscription_search: No onSecondOrderUpdate callback available");
console.log(
"subscription_search: No onSecondOrderUpdate callback available",
);
}
})();

117
src/lib/utils/tag_event_fetch.ts

@ -1,7 +1,7 @@ @@ -1,7 +1,7 @@
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import { ndkInstance } from "../ndk";
import { get } from "svelte/store";
import { extractPubkeysFromEvents, batchFetchProfiles } from "./profileCache";
import { batchFetchProfiles, extractPubkeysFromEvents } from "./profileCache";
// Constants for publication event kinds
const INDEX_EVENT_KIND = 30040;
@ -17,12 +17,12 @@ export interface TagExpansionResult { @@ -17,12 +17,12 @@ export interface TagExpansionResult {
/**
* Fetches publications and their content events from relays based on tags
*
*
* This function handles the relay-based fetching portion of tag expansion:
* 1. Fetches publication index events that have any of the specified tags
* 2. Extracts content event references from those publications
* 3. Fetches the referenced content events
*
*
* @param tags Array of tags to search for in publications
* @param existingEventIds Set of existing event IDs to avoid duplicates
* @param baseEvents Array of base events to check for existing content
@ -33,44 +33,46 @@ export async function fetchTaggedEventsFromRelays( @@ -33,44 +33,46 @@ export async function fetchTaggedEventsFromRelays(
tags: string[],
existingEventIds: Set<string>,
baseEvents: NDKEvent[],
debug?: (...args: any[]) => void
debug?: (...args: any[]) => void,
): Promise<TagExpansionResult> {
const log = debug || console.debug;
log("Fetching from relays for tags:", tags);
// Fetch publications that have any of the specified tags
const ndk = get(ndkInstance);
const taggedPublications = await ndk.fetchEvents({
kinds: [INDEX_EVENT_KIND],
"#t": tags, // Match any of these tags
limit: 30 // Reasonable default limit
limit: 30, // Reasonable default limit
});
log("Found tagged publications from relays:", taggedPublications.size);
// Filter to avoid duplicates
const newPublications = Array.from(taggedPublications).filter(
(event: NDKEvent) => !existingEventIds.has(event.id)
(event: NDKEvent) => !existingEventIds.has(event.id),
);
// Extract content event d-tags from new publications
const contentEventDTags = new Set<string>();
const existingContentDTags = new Set(
baseEvents
.filter(e => e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind))
.map(e => e.tagValue("d"))
.filter(d => d !== undefined)
.filter((e) =>
e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind)
)
.map((e) => e.tagValue("d"))
.filter((d) => d !== undefined),
);
newPublications.forEach((event: NDKEvent) => {
const aTags = event.getMatchingTags("a");
aTags.forEach((tag: string[]) => {
// Parse the 'a' tag identifier: kind:pubkey:d-tag
if (tag[1]) {
const parts = tag[1].split(':');
const parts = tag[1].split(":");
if (parts.length >= 3) {
const dTag = parts.slice(2).join(':'); // Handle d-tags with colons
const dTag = parts.slice(2).join(":"); // Handle d-tags with colons
if (!existingContentDTags.has(dTag)) {
contentEventDTags.add(dTag);
}
@ -78,7 +80,7 @@ export async function fetchTaggedEventsFromRelays( @@ -78,7 +80,7 @@ export async function fetchTaggedEventsFromRelays(
}
});
});
// Fetch the content events
let newContentEvents: NDKEvent[] = [];
if (contentEventDTags.size > 0) {
@ -88,21 +90,21 @@ export async function fetchTaggedEventsFromRelays( @@ -88,21 +90,21 @@ export async function fetchTaggedEventsFromRelays(
});
newContentEvents = Array.from(contentEventsSet);
}
return {
publications: newPublications,
contentEvents: newContentEvents
contentEvents: newContentEvents,
};
}
/**
* Searches through already fetched events for publications with specified tags
*
*
* This function handles the local search portion of tag expansion:
* 1. Searches through existing events for publications with matching tags
* 2. Extracts content event references from those publications
* 3. Finds the referenced content events in existing events
*
*
* @param allEvents Array of all fetched events to search through
* @param tags Array of tags to search for in publications
* @param existingEventIds Set of existing event IDs to avoid duplicates
@ -115,42 +117,44 @@ export function findTaggedEventsInFetched( @@ -115,42 +117,44 @@ export function findTaggedEventsInFetched(
tags: string[],
existingEventIds: Set<string>,
baseEvents: NDKEvent[],
debug?: (...args: any[]) => void
debug?: (...args: any[]) => void,
): TagExpansionResult {
const log = debug || console.debug;
log("Searching through already fetched events for tags:", tags);
// Find publications in allEvents that have the specified tags
const taggedPublications = allEvents.filter(event => {
const taggedPublications = allEvents.filter((event) => {
if (event.kind !== INDEX_EVENT_KIND) return false;
if (existingEventIds.has(event.id)) return false; // Skip base events
// Check if event has any of the specified tags
const eventTags = event.getMatchingTags("t").map(tag => tag[1]);
return tags.some(tag => eventTags.includes(tag));
const eventTags = event.getMatchingTags("t").map((tag) => tag[1]);
return tags.some((tag) => eventTags.includes(tag));
});
const newPublications = taggedPublications;
log("Found", newPublications.length, "publications in fetched events");
// For content events, also search in allEvents
const existingContentDTags = new Set(
baseEvents
.filter(e => e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind))
.map(e => e.tagValue("d"))
.filter(d => d !== undefined)
.filter((e) =>
e.kind !== undefined && CONTENT_EVENT_KINDS.includes(e.kind)
)
.map((e) => e.tagValue("d"))
.filter((d) => d !== undefined),
);
const contentEventDTags = new Set<string>();
newPublications.forEach((event: NDKEvent) => {
const aTags = event.getMatchingTags("a");
aTags.forEach((tag: string[]) => {
// Parse the 'a' tag identifier: kind:pubkey:d-tag
if (tag[1]) {
const parts = tag[1].split(':');
const parts = tag[1].split(":");
if (parts.length >= 3) {
const dTag = parts.slice(2).join(':'); // Handle d-tags with colons
const dTag = parts.slice(2).join(":"); // Handle d-tags with colons
if (!existingContentDTags.has(dTag)) {
contentEventDTags.add(dTag);
}
@ -158,23 +162,23 @@ export function findTaggedEventsInFetched( @@ -158,23 +162,23 @@ export function findTaggedEventsInFetched(
}
});
});
// Find content events in allEvents
const newContentEvents = allEvents.filter(event => {
const newContentEvents = allEvents.filter((event) => {
if (!CONTENT_EVENT_KINDS.includes(event.kind || 0)) return false;
const dTag = event.tagValue("d");
return dTag !== undefined && contentEventDTags.has(dTag);
});
return {
publications: newPublications,
contentEvents: newContentEvents
contentEvents: newContentEvents,
};
}
/**
* Fetches profiles for new events and updates progress
*
*
* @param newPublications Array of new publication events
* @param newContentEvents Array of new content events
* @param onProgressUpdate Callback to update progress state
@ -184,23 +188,32 @@ export function findTaggedEventsInFetched( @@ -184,23 +188,32 @@ export function findTaggedEventsInFetched(
export async function fetchProfilesForNewEvents(
newPublications: NDKEvent[],
newContentEvents: NDKEvent[],
onProgressUpdate: (progress: { current: number; total: number } | null) => void,
debug?: (...args: any[]) => void
onProgressUpdate: (
progress: { current: number; total: number } | null,
) => void,
debug?: (...args: any[]) => void,
): Promise<void> {
const log = debug || console.debug;
// Extract pubkeys from new events
const newPubkeys = extractPubkeysFromEvents([...newPublications, ...newContentEvents]);
const newPubkeys = extractPubkeysFromEvents([
...newPublications,
...newContentEvents,
]);
if (newPubkeys.size > 0) {
log("Fetching profiles for", newPubkeys.size, "new pubkeys from tag expansion");
log(
"Fetching profiles for",
newPubkeys.size,
"new pubkeys from tag expansion",
);
onProgressUpdate({ current: 0, total: newPubkeys.size });
await batchFetchProfiles(Array.from(newPubkeys), (fetched, total) => {
onProgressUpdate({ current: fetched, total });
});
onProgressUpdate(null);
}
}
}

91
src/lib/utils/websocket_utils.ts

@ -18,7 +18,7 @@ export interface NostrFilter { @@ -18,7 +18,7 @@ export interface NostrFilter {
ids?: string[];
authors?: string[];
kinds?: number[];
[tag: `#${string}`]: string[] | undefined;
[tag: `#${string}`]: string[] | undefined;
since?: number;
until?: number;
limit?: number;
@ -28,14 +28,16 @@ type ResolveCallback<T> = (value: T | PromiseLike<T>) => void; @@ -28,14 +28,16 @@ type ResolveCallback<T> = (value: T | PromiseLike<T>) => void;
type RejectCallback = (reason?: any) => void;
type EventHandler = (ev: Event) => void;
type MessageEventHandler = (ev: MessageEvent) => void;
type EventHandlerReject = (reject: RejectCallback) => EventHandler;
type EventHandlerResolve<T> = (resolve: ResolveCallback<T>) => (reject: RejectCallback) => MessageEventHandler;
type EventHandlerReject = (reject: RejectCallback) => EventHandler;
type EventHandlerResolve<T> = (
resolve: ResolveCallback<T>,
) => (reject: RejectCallback) => MessageEventHandler;
function handleMessage(
ev: MessageEvent,
subId: string,
resolve: (event: NostrEvent) => void,
reject: (reason: any) => void
reject: (reason: any) => void,
) {
const data = JSON.parse(ev.data);
@ -64,43 +66,48 @@ function handleMessage( @@ -64,43 +66,48 @@ function handleMessage(
function handleError(
ev: Event,
reject: (reason: any) => void
reject: (reason: any) => void,
) {
reject(ev);
}
export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent | null> {
export async function fetchNostrEvent(
filter: NostrFilter,
): Promise<NostrEvent | null> {
// AI-NOTE: Updated to use active relay stores instead of hardcoded relay URL
// This ensures the function uses the user's configured relays and can find events
// across multiple relays rather than being limited to a single hardcoded relay.
// Get available relays from the active relay stores
const inboxRelays = get(activeInboxRelays);
const outboxRelays = get(activeOutboxRelays);
// Combine all available relays, prioritizing inbox relays
let availableRelays = [...inboxRelays, ...outboxRelays];
// AI-NOTE: Use fallback relays when stores are empty (e.g., during SSR)
// This ensures publications can still load even when relay stores haven't been populated
if (availableRelays.length === 0) {
// Import fallback relays from constants
const { searchRelays, secondaryRelays } = await import("../consts.ts");
availableRelays = [...searchRelays, ...secondaryRelays];
if (availableRelays.length === 0) {
availableRelays = ["wss://thecitadel.nostr1.com"];
}
}
// AI-NOTE: 2025-01-24 - Enhanced relay strategy for better event discovery
// Always include search relays in the relay set for comprehensive event discovery
const { searchRelays, secondaryRelays } = await import("../consts.ts");
const allRelays = [...availableRelays, ...searchRelays, ...secondaryRelays];
const uniqueRelays = [...new Set(allRelays)]; // Remove duplicates
console.debug(`[fetchNostrEvent] Trying ${uniqueRelays.length} relays for event discovery:`, uniqueRelays);
console.debug(
`[fetchNostrEvent] Trying ${uniqueRelays.length} relays for event discovery:`,
uniqueRelays,
);
// Try all available relays in parallel and return the first result
const relayPromises = uniqueRelays.map(async (relay) => {
try {
@ -110,16 +117,15 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent | @@ -110,16 +117,15 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent |
// AI-NOTE: Currying is used here to abstract the internal handler logic away from the WebSocket
// handling logic. The message and error handlers themselves can be refactored without affecting
// the WebSocket handling logic.
const curriedMessageHandler: (subId: string) => (resolve: ResolveCallback<NostrEvent>) => (reject: RejectCallback) => MessageEventHandler =
(subId) =>
(resolve) =>
(reject) =>
(ev: MessageEvent) =>
handleMessage(ev, subId, resolve, reject);
const curriedErrorHandler: EventHandlerReject =
(reject) =>
(ev: Event) =>
handleError(ev, reject);
const curriedMessageHandler: (
subId: string,
) => (
resolve: ResolveCallback<NostrEvent>,
) => (reject: RejectCallback) => MessageEventHandler =
(subId) => (resolve) => (reject) => (ev: MessageEvent) =>
handleMessage(ev, subId, resolve, reject);
const curriedErrorHandler: EventHandlerReject = (reject) => (ev: Event) =>
handleError(ev, reject);
// AI-NOTE: These variables store references to partially-applied handlers so that the `finally`
// block receives the correct references to clean up the listeners.
@ -133,20 +139,20 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent | @@ -133,20 +139,20 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent |
ws.addEventListener("message", messageHandler);
ws.addEventListener("error", errorHandler);
})
.withTimeout(2000)
.finally(() => {
ws.removeEventListener("message", messageHandler);
ws.removeEventListener("error", errorHandler);
WebSocketPool.instance.release(ws);
});
.withTimeout(2000)
.finally(() => {
ws.removeEventListener("message", messageHandler);
ws.removeEventListener("error", errorHandler);
WebSocketPool.instance.release(ws);
});
ws.send(JSON.stringify(["REQ", subId, filter]));
const result = await res;
if (result) {
return result;
}
return null;
} catch (err) {
return null;
@ -155,14 +161,14 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent | @@ -155,14 +161,14 @@ export async function fetchNostrEvent(filter: NostrFilter): Promise<NostrEvent |
// Wait for all relay results and find the first successful one
const results = await Promise.allSettled(relayPromises);
// Find the first successful result
for (const result of results) {
if (result.status === 'fulfilled' && result.value) {
if (result.status === "fulfilled" && result.value) {
return result.value;
}
}
return null;
}
@ -191,7 +197,10 @@ export async function fetchEventByDTag(dTag: string): Promise<NostrEvent> { @@ -191,7 +197,10 @@ export async function fetchEventByDTag(dTag: string): Promise<NostrEvent> {
try {
const event = await fetchNostrEvent({ "#d": [dTag], limit: 1 });
if (!event) {
error(404, `Event not found for d-tag: ${dTag}. href="/events?d=${dTag}"`);
error(
404,
`Event not found for d-tag: ${dTag}. href="/events?d=${dTag}"`,
);
}
return event;
} catch (err) {
@ -215,7 +224,10 @@ export async function fetchEventByNaddr(naddr: string): Promise<NostrEvent> { @@ -215,7 +224,10 @@ export async function fetchEventByNaddr(naddr: string): Promise<NostrEvent> {
};
const event = await fetchNostrEvent(filter);
if (!event) {
error(404, `Event not found for naddr: ${naddr}. href="/events?id=${naddr}"`);
error(
404,
`Event not found for naddr: ${naddr}. href="/events?id=${naddr}"`,
);
}
return event;
} catch (err) {
@ -234,7 +246,10 @@ export async function fetchEventByNevent(nevent: string): Promise<NostrEvent> { @@ -234,7 +246,10 @@ export async function fetchEventByNevent(nevent: string): Promise<NostrEvent> {
const decoded = neventDecode(nevent);
const event = await fetchNostrEvent({ ids: [decoded.id], limit: 1 });
if (!event) {
error(404, `Event not found for nevent: ${nevent}. href="/events?id=${nevent}"`);
error(
404,
`Event not found for nevent: ${nevent}. href="/events?id=${nevent}"`,
);
}
return event;
} catch (err) {

141
src/routes/+layout.ts

@ -1,141 +0,0 @@ @@ -1,141 +0,0 @@
import { getPersistedLogin, initNdk, ndkInstance } from "../lib/ndk.ts";
import {
loginWithExtension,
loginWithAmber,
loginWithNpub,
} from "../lib/stores/userStore.ts";
import { loginMethodStorageKey } from "../lib/stores/userStore.ts";
import Pharos, { pharosInstance } from "../lib/parser.ts";
import type { LayoutLoad } from "./$types";
import { get } from "svelte/store";
import { browser } from "$app/environment";
// AI-NOTE: SSR enabled for better SEO and OpenGraph support
export const ssr = true;
/**
* Attempts to restore the user's authentication session from localStorage.
* Handles extension, Amber (NIP-46), and npub login methods.
* Only runs on client-side.
*/
function restoreAuthSession() {
// Only run on client-side
if (!browser) return;
try {
const pubkey = getPersistedLogin();
const loginMethod = localStorage.getItem(loginMethodStorageKey);
const logoutFlag = localStorage.getItem("alexandria/logout/flag");
console.log("Layout load - persisted pubkey:", pubkey);
console.log("Layout load - persisted login method:", loginMethod);
console.log("Layout load - logout flag:", logoutFlag);
console.log("All localStorage keys:", Object.keys(localStorage));
if (pubkey && loginMethod && !logoutFlag) {
if (loginMethod === "extension") {
console.log("Restoring extension login...");
loginWithExtension();
} else if (loginMethod === "amber") {
// Attempt to restore Amber (NIP-46) session from localStorage
const relay = "wss://relay.nsec.app";
const localNsec = localStorage.getItem("amber/nsec");
if (localNsec) {
import("@nostr-dev-kit/ndk").then(
async ({ NDKNip46Signer }) => {
const ndk = get(ndkInstance);
try {
// deno-lint-ignore no-explicit-any
const amberSigner = (NDKNip46Signer as any).nostrconnect(
ndk,
relay,
localNsec,
{
name: "Alexandria",
perms: "sign_event:1;sign_event:4",
},
);
// Try to reconnect (blockUntilReady will resolve if Amber is running and session is valid)
await amberSigner.blockUntilReady();
const user = await amberSigner.user();
await loginWithAmber(amberSigner, user);
console.log("Amber session restored.");
} catch {
// If reconnection fails, automatically fallback to npub-only mode
console.warn(
"Amber session could not be restored. Falling back to npub-only mode.",
);
try {
// Set the flag first, before login
localStorage.setItem("alexandria/amber/fallback", "1");
console.log("Set fallback flag in localStorage");
// Small delay to ensure flag is set
await new Promise((resolve) => setTimeout(resolve, 100));
await loginWithNpub(pubkey);
console.log("Successfully fell back to npub-only mode.");
} catch (fallbackErr) {
console.error(
"Failed to fallback to npub-only mode:",
fallbackErr,
);
}
}
},
);
} else {
// No session data, automatically fallback to npub-only mode
console.log(
"No Amber session data found. Falling back to npub-only mode.",
);
// Set the flag first, before login
localStorage.setItem("alexandria/amber/fallback", "1");
console.log("Set fallback flag in localStorage");
// Small delay to ensure flag is set
setTimeout(async () => {
try {
await loginWithNpub(pubkey);
console.log("Successfully fell back to npub-only mode.");
} catch (fallbackErr) {
console.error(
"Failed to fallback to npub-only mode:",
fallbackErr,
);
}
}, 100);
}
} else if (loginMethod === "npub") {
console.log("Restoring npub login...");
loginWithNpub(pubkey);
}
} else if (logoutFlag) {
console.log("Skipping auto-login due to logout flag");
localStorage.removeItem("alexandria/logout/flag");
}
} catch (e) {
console.warn(
`Failed to restore login: ${e}\n\nContinuing with anonymous session.`,
);
}
}
export const load: LayoutLoad = () => {
// Initialize NDK with new relay management system
const ndk = initNdk();
ndkInstance.set(ndk);
// Only restore auth session on client-side
if (browser) {
restoreAuthSession();
}
const parser = new Pharos(ndk);
pharosInstance.set(parser);
return {
ndk,
parser,
};
};

69
src/routes/events/+page.svelte

@ -1,6 +1,5 @@ @@ -1,6 +1,5 @@
<script lang="ts">
import { Heading, P } from "flowbite-svelte";
import { onMount } from "svelte";
import { page } from "$app/stores";
import { goto } from "$app/navigation";
import type { NDKEvent } from "$lib/utils/nostrUtils";
@ -8,19 +7,18 @@ @@ -8,19 +7,18 @@
import EventDetails from "$lib/components/EventDetails.svelte";
import RelayActions from "$lib/components/RelayActions.svelte";
import CommentBox from "$lib/components/CommentBox.svelte";
import CommentViewer from "$lib/components/CommentViewer.svelte";
import { userStore } from "$lib/stores/userStore";
import CommentViewer from "$lib/components/CommentViewer.svelte";
import { userBadge } from "$lib/snippets/UserSnippets.svelte";
import { getMatchingTags, toNpub, getUserMetadata } from "$lib/utils/nostrUtils";
import EventInput from "$lib/components/EventInput.svelte";
import { userPubkey, isLoggedIn } from "$lib/stores/authStore.Svelte";
import CopyToClipboard from "$lib/components/util/CopyToClipboard.svelte";
import { neventEncode, naddrEncode } from "$lib/utils";
import { activeInboxRelays, activeOutboxRelays, logCurrentRelayConfiguration } from "$lib/ndk";
import { activeInboxRelays } from "$lib/ndk";
import { getEventType } from "$lib/utils/mime";
import ViewPublicationLink from "$lib/components/util/ViewPublicationLink.svelte";
import { checkCommunity } from "$lib/utils/search_utility";
import { parseRepostContent, parseContent } from "$lib/utils/notification_utils";
import EmbeddedEvent from "$lib/components/EmbeddedEvent.svelte";
let loading = $state(false);
let error = $state<string | null>(null);
@ -44,7 +42,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte"; @@ -44,7 +42,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
lud16?: string;
nip05?: string;
} | null>(null);
let user = $state($userStore);
let userRelayPreference = $state(false);
let showSidePanel = $state(false);
let searchInProgress = $state(false);
@ -52,23 +49,11 @@ import CommentViewer from "$lib/components/CommentViewer.svelte"; @@ -52,23 +49,11 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
let communityStatus = $state<Record<string, boolean>>({});
let searchResultsCollapsed = $state(false);
userStore.subscribe((val) => (user = val));
function handleEventFound(newEvent: NDKEvent) {
event = newEvent;
showSidePanel = true;
// AI-NOTE: 2025-01-24 - Preserve search results to allow navigation through them
// Don't clear search results when showing a single event - this allows users to browse through results
// searchResults = [];
// secondOrderResults = [];
// tTagResults = [];
// originalEventIds = new Set();
// originalAddresses = new Set();
// searchType = null;
// searchTerm = null;
// searchInProgress = false;
// secondOrderSearchMessage = null;
if (newEvent.kind === 0) {
try {
profile = JSON.parse(newEvent.content);
@ -209,10 +194,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte"; @@ -209,10 +194,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
// AI-NOTE: 2025-01-24 - Cache profiles for all search results
cacheProfilesForEvents([...results, ...secondOrder, ...tTagEvents]);
// Don't clear the current event - let the user continue viewing it
// event = null;
// profile = null;
}
// AI-NOTE: 2025-01-24 - Function to cache profiles for multiple events
@ -330,10 +311,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte"; @@ -330,10 +311,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
return neventEncode(event, $activeInboxRelays);
}
function getNaddrUrl(event: NDKEvent): string {
return naddrEncode(event, $activeInboxRelays);
}
function isAddressableEvent(event: NDKEvent): boolean {
return getEventType(event.kind || 0) === "addressable";
}
@ -397,26 +374,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte"; @@ -397,26 +374,6 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
communityStatus = { ...communityStatus, ...newCommunityStatus };
}
// AI-NOTE: Refactored to avoid blocking $effect with logging operations
// Reactive effect to log relay configuration when stores change - non-blocking approach
$effect.pre(() => {
const inboxRelays = $activeInboxRelays;
const outboxRelays = $activeOutboxRelays;
// Only log if we have relays (not empty arrays)
if (inboxRelays.length > 0 || outboxRelays.length > 0) {
// Defer logging to avoid blocking the reactive system
requestAnimationFrame(() => {
console.log('🔌 Events Page - Relay Configuration Updated:');
console.log('📥 Inbox Relays:', inboxRelays);
console.log('📤 Outbox Relays:', outboxRelays);
console.log(`📊 Total: ${inboxRelays.length} inbox, ${outboxRelays.length} outbox`);
});
}
});
</script>
<div class="w-full flex justify-center">
@ -617,11 +574,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte"; @@ -617,11 +574,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
<div
class="text-sm text-gray-800 dark:text-gray-200 mt-1 line-clamp-2 break-words"
>
{#await ((result.kind === 6 || result.kind === 16) ? parseRepostContent(result.content) : parseContent(result.content)) then parsedContent}
{@html parsedContent.slice(0, 200)}{parsedContent.length > 200 ? "..." : ""}
{:catch}
{result.content.slice(0, 200)}{result.content.length > 200 ? "..." : ""}
{/await}
<EmbeddedEvent nostrIdentifier={result.id} nestingLevel={0} />
</div>
{/if}
{/if}
@ -784,11 +737,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte"; @@ -784,11 +737,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
<div
class="text-sm text-gray-800 dark:text-gray-200 mt-1 line-clamp-2 break-words"
>
{#await ((result.kind === 6 || result.kind === 16) ? parseRepostContent(result.content) : parseContent(result.content)) then parsedContent}
{@html parsedContent.slice(0, 200)}{parsedContent.length > 200 ? "..." : ""}
{:catch}
{result.content.slice(0, 200)}{result.content.length > 200 ? "..." : ""}
{/await}
<EmbeddedEvent nostrIdentifier={result.id} nestingLevel={0} />
</div>
{/if}
{/if}
@ -938,11 +887,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte"; @@ -938,11 +887,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
<div
class="text-sm text-gray-800 dark:text-gray-200 mt-1 line-clamp-2 break-words"
>
{#await ((result.kind === 6 || result.kind === 16) ? parseRepostContent(result.content) : parseContent(result.content)) then parsedContent}
{@html parsedContent.slice(0, 200)}{parsedContent.length > 200 ? "..." : ""}
{:catch}
{result.content.slice(0, 200)}{result.content.length > 200 ? "..." : ""}
{/await}
<EmbeddedEvent nostrIdentifier={result.id} nestingLevel={0} />
</div>
{/if}
{/if}
@ -997,7 +942,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte"; @@ -997,7 +942,7 @@ import CommentViewer from "$lib/components/CommentViewer.svelte";
{/if}
<div class="min-w-0 overflow-hidden">
<EventDetails {event} {profile} {searchValue} />
<EventDetails {event} {profile} />
</div>
<div class="min-w-0 overflow-hidden">
<RelayActions {event} />

5
src/routes/proxy+layout.ts

@ -1,5 +0,0 @@ @@ -1,5 +0,0 @@
import type { LayoutLoad } from "./$types";
export const load: LayoutLoad = async () => {
return {};
};

4
src/routes/publication/+page.server.ts

@ -5,7 +5,7 @@ import type { PageServerLoad } from "./$types"; @@ -5,7 +5,7 @@ import type { PageServerLoad } from "./$types";
const ROUTES = {
PUBLICATION_BASE: "/publication",
NADDR: "/publication/naddr",
NEVENT: "/publication/nevent",
NEVENT: "/publication/nevent",
ID: "/publication/id",
D_TAG: "/publication/d",
START: "/start",
@ -38,4 +38,4 @@ export const load: PageServerLoad = ({ url }) => { @@ -38,4 +38,4 @@ export const load: PageServerLoad = ({ url }) => {
// If no query parameters, redirect to the start page
redirect(301, ROUTES.START);
};
};

15
src/routes/publication/[type]/[identifier]/+layout.server.ts

@ -3,7 +3,10 @@ import type { LayoutServerLoad } from "./$types"; @@ -3,7 +3,10 @@ import type { LayoutServerLoad } from "./$types";
import type { NostrEvent } from "../../../../lib/utils/websocket_utils.ts";
// AI-NOTE: Server-side event fetching for SEO metadata
async function fetchEventServerSide(type: string, identifier: string): Promise<NostrEvent | null> {
async function fetchEventServerSide(
type: string,
identifier: string,
): Promise<NostrEvent | null> {
// For now, return null to indicate server-side fetch not implemented
// This will fall back to client-side fetching
return null;
@ -16,10 +19,12 @@ export const load: LayoutServerLoad = async ({ params, url }) => { @@ -16,10 +19,12 @@ export const load: LayoutServerLoad = async ({ params, url }) => {
const indexEvent = await fetchEventServerSide(type, identifier);
// Extract metadata for meta tags (use fallbacks if no event found)
const title = indexEvent?.tags.find((tag) => tag[0] === "title")?.[1] || "Alexandria Publication";
const summary = indexEvent?.tags.find((tag) => tag[0] === "summary")?.[1] ||
const title = indexEvent?.tags.find((tag) => tag[0] === "title")?.[1] ||
"Alexandria Publication";
const summary = indexEvent?.tags.find((tag) => tag[0] === "summary")?.[1] ||
"Alexandria is a digital library, utilizing Nostr events for curated publications and wiki pages.";
const image = indexEvent?.tags.find((tag) => tag[0] === "image")?.[1] || "/screenshots/old_books.jpg";
const image = indexEvent?.tags.find((tag) => tag[0] === "image")?.[1] ||
"/screenshots/old_books.jpg";
const currentUrl = `${url.origin}${url.pathname}`;
return {
@ -31,4 +36,4 @@ export const load: LayoutServerLoad = async ({ params, url }) => { @@ -31,4 +36,4 @@ export const load: LayoutServerLoad = async ({ params, url }) => {
currentUrl,
},
};
};
};

52
src/routes/publication/[type]/[identifier]/+page.ts

@ -1,30 +1,40 @@ @@ -1,30 +1,40 @@
import { error } from "@sveltejs/kit";
import type { PageLoad } from "./$types";
import { fetchEventByDTag, fetchEventById, fetchEventByNaddr, fetchEventByNevent } from "../../../../lib/utils/websocket_utils.ts";
import {
fetchEventByDTag,
fetchEventById,
fetchEventByNaddr,
fetchEventByNevent,
} from "../../../../lib/utils/websocket_utils.ts";
import type { NostrEvent } from "../../../../lib/utils/websocket_utils.ts";
export const load: PageLoad = async ({ params, parent }: { params: { type: string; identifier: string }; parent: any }) => {
export const load: PageLoad = async (
{ params, parent }: {
params: { type: string; identifier: string };
parent: any;
},
) => {
const { type, identifier } = params;
// Get layout data (no server-side data since SSR is disabled)
const layoutData = await parent();
// AI-NOTE: Always fetch client-side since server-side fetch returns null for now
let indexEvent: NostrEvent | null = null;
try {
// Handle different identifier types
switch (type) {
case 'id':
case "id":
indexEvent = await fetchEventById(identifier);
break;
case 'd':
case "d":
indexEvent = await fetchEventByDTag(identifier);
break;
case 'naddr':
case "naddr":
indexEvent = await fetchEventByNaddr(identifier);
break;
case 'nevent':
case "nevent":
indexEvent = await fetchEventByNevent(identifier);
break;
default:
@ -33,32 +43,36 @@ export const load: PageLoad = async ({ params, parent }: { params: { type: strin @@ -33,32 +43,36 @@ export const load: PageLoad = async ({ params, parent }: { params: { type: strin
} catch (err) {
throw err;
}
if (!indexEvent) {
// AI-NOTE: Handle case where no relays are available during preloading
// This prevents 404 errors when relay stores haven't been populated yet
// Create appropriate search link based on type
let searchParam = '';
let searchParam = "";
switch (type) {
case 'id':
case "id":
searchParam = `id=${identifier}`;
break;
case 'd':
case "d":
searchParam = `d=${identifier}`;
break;
case 'naddr':
case 'nevent':
case "naddr":
case "nevent":
searchParam = `id=${identifier}`;
break;
default:
searchParam = `q=${identifier}`;
}
error(404, `Event not found for ${type}: ${identifier}. href="/events?${searchParam}"`);
error(
404,
`Event not found for ${type}: ${identifier}. href="/events?${searchParam}"`,
);
}
const publicationType = indexEvent.tags.find((tag) => tag[0] === "type")?.[1] ?? "";
const publicationType =
indexEvent.tags.find((tag) => tag[0] === "type")?.[1] ?? "";
// AI-NOTE: Use proper NDK instance from layout or create one with relays
let ndk = layoutData?.ndk;
@ -75,6 +89,6 @@ export const load: PageLoad = async ({ params, parent }: { params: { type: strin @@ -75,6 +89,6 @@ export const load: PageLoad = async ({ params, parent }: { params: { type: strin
indexEvent,
ndk, // Use minimal NDK instance
};
return result;
};

10
src/routes/visualize/+page.ts

@ -1,9 +1,9 @@ @@ -1,9 +1,9 @@
import type { PageLoad } from './$types';
import type { PageLoad } from "./$types";
export const load: PageLoad = async ({ url }) => {
const eventId = url.searchParams.get('event');
const eventId = url.searchParams.get("event");
return {
eventId
eventId,
};
};
};

8
src/styles/notifications.css

@ -151,7 +151,13 @@ @@ -151,7 +151,13 @@
/* Transition utilities */
.transition-colors {
transition: color 0.15s ease-in-out, background-color 0.15s ease-in-out, border-color 0.15s ease-in-out, text-decoration-color 0.15s ease-in-out, fill 0.15s ease-in-out, stroke 0.15s ease-in-out;
transition:
color 0.15s ease-in-out,
background-color 0.15s ease-in-out,
border-color 0.15s ease-in-out,
text-decoration-color 0.15s ease-in-out,
fill 0.15s ease-in-out,
stroke 0.15s ease-in-out;
}
.transition-all {

20
src/styles/publications.css

@ -100,7 +100,8 @@ @@ -100,7 +100,8 @@
/* blockquote; prose and poetry quotes */
.publication-leather .quoteblock,
.publication-leather .verseblock {
@apply p-4 my-4 border-s-4 rounded border-primary-300 bg-primary-50 dark:border-primary-500 dark:bg-primary-700;
@apply p-4 my-4 border-s-4 rounded border-primary-300 bg-primary-50
dark:border-primary-500 dark:bg-primary-700;
}
.publication-leather .verseblock pre.content {
@ -154,7 +155,8 @@ @@ -154,7 +155,8 @@
}
.publication-leather .admonitionblock.tip {
@apply rounded overflow-hidden border border-success-100 dark:border-success-800;
@apply rounded overflow-hidden border border-success-100
dark:border-success-800;
}
.publication-leather .admonitionblock.tip .icon,
@ -172,7 +174,8 @@ @@ -172,7 +174,8 @@
}
.publication-leather .admonitionblock.important {
@apply rounded overflow-hidden border border-primary-200 dark:border-primary-700;
@apply rounded overflow-hidden border border-primary-200
dark:border-primary-700;
}
.publication-leather .admonitionblock.important .icon,
@ -181,7 +184,8 @@ @@ -181,7 +184,8 @@
}
.publication-leather .admonitionblock.caution {
@apply rounded overflow-hidden border border-warning-200 dark:border-warning-700;
@apply rounded overflow-hidden border border-warning-200
dark:border-warning-700;
}
.publication-leather .admonitionblock.caution .icon,
@ -190,7 +194,8 @@ @@ -190,7 +194,8 @@
}
.publication-leather .admonitionblock.warning {
@apply rounded overflow-hidden border border-danger-200 dark:border-danger-800;
@apply rounded overflow-hidden border border-danger-200
dark:border-danger-800;
}
.publication-leather .admonitionblock.warning .icon,
@ -201,7 +206,7 @@ @@ -201,7 +206,7 @@
/* listingblock, literalblock */
.publication-leather .listingblock,
.publication-leather .literalblock {
@apply p-4 rounded bg-highlight dark:bg-primary-700;
@apply p-4 rounded bg-highlight dark:bg-primary-700;
}
.publication-leather .sidebarblock .title,
@ -254,7 +259,8 @@ @@ -254,7 +259,8 @@
@screen lg {
@media (hover: hover) {
.blog .discreet .card-leather:not(:hover) {
@apply bg-primary-50 dark:bg-primary-1000 opacity-75 transition duration-500 ease-in-out;
@apply bg-primary-50 dark:bg-primary-1000 opacity-75 transition
duration-500 ease-in-out;
}
.blog .discreet .group {
@apply bg-transparent;

6
src/styles/scrollbar.css

@ -1,7 +1,8 @@ @@ -1,7 +1,8 @@
@layer components {
/* Global scrollbar styles */
* {
scrollbar-color: rgba(87, 66, 41, 0.8) transparent; /* Transparent track, default scrollbar thumb */
scrollbar-color: rgba(87, 66, 41, 0.8)
transparent; /* Transparent track, default scrollbar thumb */
}
/* Webkit Browsers (Chrome, Safari, Edge) */
@ -14,7 +15,8 @@ @@ -14,7 +15,8 @@
}
*::-webkit-scrollbar-thumb {
@apply bg-primary-500 dark:bg-primary-600 hover:bg-primary-600 dark:hover:bg-primary-800;
@apply bg-primary-500 dark:bg-primary-600 hover:bg-primary-600
dark:hover:bg-primary-800;
border-radius: 6px; /* Rounded scrollbar */
}
}

28
src/styles/visualize.css

@ -30,7 +30,8 @@ @@ -30,7 +30,8 @@
}
.legend-letter {
@apply absolute inset-0 flex items-center justify-center text-black text-xs font-bold;
@apply absolute inset-0 flex items-center justify-center text-black text-xs
font-bold;
}
.legend-text {
@ -39,7 +40,8 @@ @@ -39,7 +40,8 @@
/* Network visualization styles - specific to visualization */
.network-container {
@apply flex flex-col w-full h-[calc(100vh-138px)] min-h-[400px] max-h-[900px];
@apply flex flex-col w-full h-[calc(100vh-138px)] min-h-[400px]
max-h-[900px];
}
.network-svg-container {
@ -48,11 +50,15 @@ @@ -48,11 +50,15 @@
.network-svg {
@apply w-full sm:h-[100%] border;
@apply border border-primary-200 has-[:hover]:border-primary-700 dark:bg-primary-1000 dark:border-primary-800 dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500 rounded;
@apply border border-primary-200 has-[:hover]:border-primary-700
dark:bg-primary-1000 dark:border-primary-800
dark:has-[:hover]:bg-primary-950 dark:has-[:hover]:border-primary-500
rounded;
}
.network-error {
@apply w-full p-4 bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200 rounded-lg mb-4;
@apply w-full p-4 bg-red-100 dark:bg-red-900 text-red-800 dark:text-red-200
rounded-lg mb-4;
}
.network-error-title {
@ -78,8 +84,9 @@ @@ -78,8 +84,9 @@
/* Tooltip styles - specific to visualization tooltips */
.tooltip-close-btn {
@apply absolute top-2 right-2 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700 dark:hover:bg-gray-600
rounded-full p-1 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200;
@apply absolute top-2 right-2 bg-gray-200 hover:bg-gray-300 dark:bg-gray-700
dark:hover:bg-gray-600 rounded-full p-1 text-gray-500 hover:text-gray-700
dark:text-gray-400 dark:hover:text-gray-200;
}
.tooltip-content {
@ -91,7 +98,8 @@ @@ -91,7 +98,8 @@
}
.tooltip-title-link {
@apply text-gray-800 hover:text-blue-600 dark:text-gray-200 dark:hover:text-blue-400;
@apply text-gray-800 hover:text-blue-600 dark:text-gray-200
dark:hover:text-blue-400;
}
.tooltip-metadata {
@ -99,11 +107,13 @@ @@ -99,11 +107,13 @@
}
.tooltip-summary {
@apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto max-h-40;
@apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto
max-h-40;
}
.tooltip-content-preview {
@apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto max-h-40;
@apply mt-2 text-xs bg-gray-100 dark:bg-gray-900 p-2 rounded overflow-auto
max-h-40;
}
.tooltip-help-text {

85
test_data/LaTeXtestfile.md

@ -1,12 +1,24 @@ @@ -1,12 +1,24 @@
# This is a testfile for writing mathematic formulas in NostrMarkup
This document covers the rendering of formulas in TeX/LaTeX and AsciiMath notation, or some combination of those within the same page. It is meant to be rendered by clients utilizing MathJax.
If you want the entire document to be rendered as mathematics, place the entire thing in a backtick-codeblock, but know that this makes the document slower to load, it is harder to format the prose, and the result is less legible. It also doesn't increase portability, as it's easy to export markup as LaTeX files, or as PDFs, with the formulas rendered.
The general idea, is that anything placed within `single backticks` is inline code, and inline-code will all be scanned for typical mathematics statements and rendered with best-effort. (For more precise rendering, use Asciidoc.) We will not render text that is not marked as inline code, as mathematical formulas, as that is prose.
If you want the TeX to be blended into the surrounding text, wrap the text within single `$`. Otherwise, use double `$$` symbols, for display math, and it will appear on its own line.
This document covers the rendering of formulas in TeX/LaTeX and AsciiMath
notation, or some combination of those within the same page. It is meant to be
rendered by clients utilizing MathJax.
If you want the entire document to be rendered as mathematics, place the entire
thing in a backtick-codeblock, but know that this makes the document slower to
load, it is harder to format the prose, and the result is less legible. It also
doesn't increase portability, as it's easy to export markup as LaTeX files, or
as PDFs, with the formulas rendered.
The general idea, is that anything placed within `single backticks` is inline
code, and inline-code will all be scanned for typical mathematics statements and
rendered with best-effort. (For more precise rendering, use Asciidoc.) We will
not render text that is not marked as inline code, as mathematical formulas, as
that is prose.
If you want the TeX to be blended into the surrounding text, wrap the text
within single `$`. Otherwise, use double `$$` symbols, for display math, and it
will appear on its own line.
## TeX Examples
@ -16,36 +28,25 @@ Same equation, in the display mode: `$$\sqrt{x}$$` @@ -16,36 +28,25 @@ Same equation, in the display mode: `$$\sqrt{x}$$`
Something more complex, inline: `$\mathbb{N} = \{ a \in \mathbb{Z} : a > 0 \}$`
Something complex, in display mode: `$$P \left( A=2 \, \middle| \, \dfrac{A^2}{B}>4 \right)$$`
Something complex, in display mode:
`$$P \left( A=2 \, \middle| \, \dfrac{A^2}{B}>4 \right)$$`
Another example of `$$\prod_{i=1}^{n} x_i - 1$$` inline formulas.
Function example:
`$$
f(x)=
\begin{cases}
1/d_{ij} & \quad \text{when $d_{ij} \leq 160$}\\
0 & \quad \text{otherwise}
\end{cases}
Function example: `$$ f(x)= \begin{cases} 1/d_{ij} & \quad \text{when
$d_{ij} \leq 160$}\\ 0 & \quad \text{otherwise} \end{cases}
$$
`
$$ `
And a matrix:
`
$$
And a matrix: ` $$
M =
\begin{bmatrix}
\frac{5}{6} & \frac{1}{6} & 0 \\[0.3em]
\frac{5}{6} & 0 & \frac{1}{6} \\[0.3em]
0 & \frac{5}{6} & \frac{1}{6}
\end{bmatrix}
M = \begin{bmatrix} \frac{5}{6} & \frac{1}{6} & 0 \\[0.3em] \frac{5}{6} & 0 &
\frac{1}{6} \\[0.3em] 0 & \frac{5}{6} & \frac{1}{6} \end{bmatrix}
$$
`
$$ `
LaTeX ypesetting won't be rendered. Use NostrMarkup delimeter tables for this sort of thing.
LaTeX ypesetting won't be rendered. Use NostrMarkup delimeter tables for this
sort of thing.
`\\begin{tabular}{|c|c|c|l|r|}
\\hline
@ -69,13 +70,17 @@ We also recognize common LaTeX statements: @@ -69,13 +70,17 @@ We also recognize common LaTeX statements:
Greek letters are a snap: `$\Psi$`, `$\psi$`, `$\Phi$`, `$\phi$`.
Equations within text are easy--- A well known Maxwell thermodynamic relation is `$\left.{\partial T \over \partial P}\right|_{s} = \left.{\partial v \over \partial s}\right|_{P}$`.
Equations within text are easy--- A well known Maxwell thermodynamic relation is
`$\left.{\partial T \over \partial P}\right|_{s} = \left.{\partial v \over \partial s}\right|_{P}$`.
You can also set aside equations like so: `\begin{eqnarray} du &=& T\ ds -P\ dv, \qquad \mbox{first law.}\label{fl}\\ ds &\ge& {\delta q \over T}.\qquad \qquad \mbox{second law.} \label{sl} \end {eqnarray}`
You can also set aside equations like so:
`\begin{eqnarray} du &=& T\ ds -P\ dv, \qquad \mbox{first law.}\label{fl}\\ ds &\ge& {\delta q \over T}.\qquad \qquad \mbox{second law.} \label{sl} \end {eqnarray}`
## And some good ole Asciimath
Asciimath doesn't use `$` or `$$` delimiters, but we are using it to make mathy stuff easier to find. If you want it inline, include it inline. If you want it on a separate line, put a hard-return before and after.
Asciimath doesn't use `$` or `$$` delimiters, but we are using it to make mathy
stuff easier to find. If you want it inline, include it inline. If you want it
on a separate line, put a hard-return before and after.
Inline text example here `$E=mc^2$` and another `$1/(x+1)$`; very simple.
@ -109,19 +114,23 @@ Using the quadratic formula, the roots of `$x^2-6x+4=0$` are @@ -109,19 +114,23 @@ Using the quadratic formula, the roots of `$x^2-6x+4=0$` are
Advanced alignment and matrices looks like this:
A `$3xx3$` matrix, `$$((1,2,3),(4,5,6),(7,8,9))$$` and a `$2xx1$` matrix, or vector, `$$((1),(0))$$`.
A `$3xx3$` matrix, `$$((1,2,3),(4,5,6),(7,8,9))$$` and a `$2xx1$` matrix, or
vector, `$$((1),(0))$$`.
The outer brackets determine the delimiters e.g. `$|(a,b),(c,d)|=ad-bc$`.
A general `$m xx n$` matrix `$$((a_(11), cdots , a_(1n)),(vdots, ddots, vdots),(a_(m1), cdots , a_(mn)))$$`
A general `$m xx n$` matrix
`$$((a_(11), cdots , a_(1n)),(vdots, ddots, vdots),(a_(m1), cdots , a_(mn)))$$`
## Mixed Examples
Here are some examples mixing LaTeX and AsciiMath:
- LaTeX inline: `$\frac{1}{2}$` vs AsciiMath inline: `$1/2$`
- LaTeX display: `$$\sum_{i=1}^n x_i$$` vs AsciiMath display: `$$sum_(i=1)^n x_i$$`
- LaTeX matrix: `$$\begin{pmatrix} a & b \\ c & d \end{pmatrix}$$` vs AsciiMath matrix: `$$((a,b),(c,d))$$`
- LaTeX display: `$$\sum_{i=1}^n x_i$$` vs AsciiMath display:
`$$sum_(i=1)^n x_i$$`
- LaTeX matrix: `$$\begin{pmatrix} a & b \\ c & d \end{pmatrix}$$` vs AsciiMath
matrix: `$$((a,b),(c,d))$$`
## Edge Cases
@ -134,9 +143,9 @@ Here are some examples mixing LaTeX and AsciiMath: @@ -134,9 +143,9 @@ Here are some examples mixing LaTeX and AsciiMath:
- CSS with dollar signs: `color: $primary-color`
This document should demonstrate that:
1. LaTeX is processed within inline code blocks with proper delimiters
2. AsciiMath is processed within inline code blocks with proper delimiters
3. Regular code blocks remain unchanged
4. Mixed content is handled correctly
5. Edge cases are handled gracefully
$$
5. Edge cases are handled gracefully $$

26
tests/e2e/my_notes_layout.pw.spec.ts

@ -1,4 +1,4 @@ @@ -1,4 +1,4 @@
import { test, expect, type Page } from '@playwright/test';
import { expect, type Page, test } from "@playwright/test";
// Utility to check for horizontal scroll bar
async function hasHorizontalScroll(page: Page, selector: string) {
@ -9,16 +9,16 @@ async function hasHorizontalScroll(page: Page, selector: string) { @@ -9,16 +9,16 @@ async function hasHorizontalScroll(page: Page, selector: string) {
}, selector);
}
test.describe('My Notes Layout', () => {
test.describe("My Notes Layout", () => {
test.beforeEach(async ({ page }) => {
await page.goto('/my-notes');
await page.goto("/my-notes");
await page.waitForSelector('h1:text("My Notes")');
});
test('no horizontal scroll bar for all tag type and tag filter combinations', async ({ page }) => {
test("no horizontal scroll bar for all tag type and tag filter combinations", async ({ page }) => {
// Helper to check scroll for current state
async function assertNoScroll() {
const hasScroll = await hasHorizontalScroll(page, 'main, body, html');
const hasScroll = await hasHorizontalScroll(page, "main, body, html");
expect(hasScroll).toBeFalsy();
}
@ -26,9 +26,11 @@ test.describe('My Notes Layout', () => { @@ -26,9 +26,11 @@ test.describe('My Notes Layout', () => {
await assertNoScroll();
// Get all tag type buttons
const tagTypeButtons = await page.locator('aside button').all();
const tagTypeButtons = await page.locator("aside button").all();
// Only consider tag type buttons (first N)
const tagTypeCount = await page.locator('aside > div.flex.flex-wrap.gap-2.mb-6 > button').count();
const tagTypeCount = await page.locator(
"aside > div.flex.flex-wrap.gap-2.mb-6 > button",
).count();
// For each single tag type
for (let i = 0; i < tagTypeCount; i++) {
// Click tag type button
@ -36,7 +38,9 @@ test.describe('My Notes Layout', () => { @@ -36,7 +38,9 @@ test.describe('My Notes Layout', () => {
await page.waitForTimeout(100); // Wait for UI update
await assertNoScroll();
// Get tag filter buttons (after tag type buttons)
const tagFilterButtons = await page.locator('aside > div.flex.flex-wrap.gap-2.mb-4 > button').all();
const tagFilterButtons = await page.locator(
"aside > div.flex.flex-wrap.gap-2.mb-4 > button",
).all();
// Try all single tag filter selections
for (let j = 0; j < tagFilterButtons.length; j++) {
await tagFilterButtons[j].click();
@ -72,7 +76,9 @@ test.describe('My Notes Layout', () => { @@ -72,7 +76,9 @@ test.describe('My Notes Layout', () => {
await page.waitForTimeout(100);
await assertNoScroll();
// Get tag filter buttons for this combination
const tagFilterButtons = await page.locator('aside > div.flex.flex-wrap.gap-2.mb-4 > button').all();
const tagFilterButtons = await page.locator(
"aside > div.flex.flex-wrap.gap-2.mb-4 > button",
).all();
// Try all single tag filter selections
for (let k = 0; k < tagFilterButtons.length; k++) {
await tagFilterButtons[k].click();
@ -100,4 +106,4 @@ test.describe('My Notes Layout', () => { @@ -100,4 +106,4 @@ test.describe('My Notes Layout', () => {
}
}
});
});
});

275
tests/unit/ZettelEditor.test.ts

@ -1,37 +1,45 @@ @@ -1,37 +1,45 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { AsciiDocMetadata } from "../../src/lib/utils/asciidoc_metadata";
// Mock all Svelte components and dependencies
vi.mock("flowbite-svelte", () => ({
Textarea: vi.fn().mockImplementation((props) => {
return {
$$render: () => `<textarea data-testid="textarea" class="${props.class || ''}" rows="${props.rows || 12}" ${props.disabled ? 'disabled' : ''} placeholder="${props.placeholder || ''}"></textarea>`,
$$bind: { value: props.bind, oninput: props.oninput }
$$render: () =>
`<textarea data-testid="textarea" class="${props.class || ""}" rows="${
props.rows || 12
}" ${props.disabled ? "disabled" : ""} placeholder="${
props.placeholder || ""
}"></textarea>`,
$$bind: { value: props.bind, oninput: props.oninput },
};
}),
Button: vi.fn().mockImplementation((props) => {
return {
$$render: () => `<button data-testid="preview-button" class="${props.class || ''}" ${props.disabled ? 'disabled' : ''} onclick="${props.onclick || ''}">${props.children || ''}</button>`,
$$bind: { onclick: props.onclick }
$$render: () =>
`<button data-testid="preview-button" class="${props.class || ""}" ${
props.disabled ? "disabled" : ""
} onclick="${props.onclick || ""}">${props.children || ""}</button>`,
$$bind: { onclick: props.onclick },
};
})
}),
}));
vi.mock("flowbite-svelte-icons", () => ({
EyeOutline: vi.fn().mockImplementation(() => ({
$$render: () => `<svg data-testid="eye-icon"></svg>`
}))
$$render: () => `<svg data-testid="eye-icon"></svg>`,
})),
}));
vi.mock("asciidoctor", () => ({
default: vi.fn(() => ({
convert: vi.fn((content, options) => {
// Mock AsciiDoctor conversion - return simple HTML
return content.replace(/^==\s+(.+)$/gm, '<h2>$1</h2>')
.replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>')
.replace(/\*(.+?)\*/g, '<em>$1</em>');
})
}))
return content.replace(/^==\s+(.+)$/gm, "<h2>$1</h2>")
.replace(/\*\*(.+?)\*\*/g, "<strong>$1</strong>")
.replace(/\*(.+?)\*/g, "<em>$1</em>");
}),
})),
}));
// Mock sessionStorage
@ -41,21 +49,21 @@ const mockSessionStorage = { @@ -41,21 +49,21 @@ const mockSessionStorage = {
removeItem: vi.fn(),
clear: vi.fn(),
};
Object.defineProperty(global, 'sessionStorage', {
Object.defineProperty(global, "sessionStorage", {
value: mockSessionStorage,
writable: true
writable: true,
});
// Mock window object for DOM manipulation
Object.defineProperty(global, 'window', {
Object.defineProperty(global, "window", {
value: {
sessionStorage: mockSessionStorage,
document: {
querySelector: vi.fn(),
createElement: vi.fn(),
}
},
},
writable: true
writable: true,
});
// Mock DOM methods
@ -64,14 +72,14 @@ const mockCreateElement = vi.fn(); @@ -64,14 +72,14 @@ const mockCreateElement = vi.fn();
const mockAddEventListener = vi.fn();
const mockRemoveEventListener = vi.fn();
Object.defineProperty(global, 'document', {
Object.defineProperty(global, "document", {
value: {
querySelector: mockQuerySelector,
createElement: mockCreateElement,
addEventListener: mockAddEventListener,
removeEventListener: mockRemoveEventListener,
},
writable: true
writable: true,
});
describe("ZettelEditor Component Logic", () => {
@ -90,8 +98,9 @@ describe("ZettelEditor Component Logic", () => { @@ -90,8 +98,9 @@ describe("ZettelEditor Component Logic", () => {
describe("Publication Format Detection Logic", () => {
it("should detect document header format", () => {
const contentWithDocumentHeader = "= Document Title\n\n== Section 1\nContent";
const contentWithDocumentHeader =
"= Document Title\n\n== Section 1\nContent";
// Test the regex pattern used in the component
const hasDocumentHeader = contentWithDocumentHeader.match(/^=\s+/m);
expect(hasDocumentHeader).toBeTruthy();
@ -99,12 +108,12 @@ describe("ZettelEditor Component Logic", () => { @@ -99,12 +108,12 @@ describe("ZettelEditor Component Logic", () => {
it("should detect index card format", () => {
const contentWithIndexCard = "index card\n\n== Section 1\nContent";
// Test the logic used in the component
const lines = contentWithIndexCard.split(/\r?\n/);
let hasIndexCard = false;
for (const line of lines) {
if (line.trim().toLowerCase() === 'index card') {
if (line.trim().toLowerCase() === "index card") {
hasIndexCard = true;
break;
}
@ -113,8 +122,9 @@ describe("ZettelEditor Component Logic", () => { @@ -113,8 +122,9 @@ describe("ZettelEditor Component Logic", () => {
});
it("should not detect publication format for normal section content", () => {
const normalContent = "== Section 1\nContent\n\n== Section 2\nMore content";
const normalContent =
"== Section 1\nContent\n\n== Section 2\nMore content";
// Test the logic used in the component
const lines = normalContent.split(/\r?\n/);
let hasPublicationHeader = false;
@ -123,7 +133,7 @@ describe("ZettelEditor Component Logic", () => { @@ -123,7 +133,7 @@ describe("ZettelEditor Component Logic", () => {
hasPublicationHeader = true;
break;
}
if (line.trim().toLowerCase() === 'index card') {
if (line.trim().toLowerCase() === "index card") {
hasPublicationHeader = true;
break;
}
@ -135,26 +145,30 @@ describe("ZettelEditor Component Logic", () => { @@ -135,26 +145,30 @@ describe("ZettelEditor Component Logic", () => {
describe("Content Parsing Logic", () => {
it("should parse sections with document header", () => {
const content = "== Section 1\n:author: Test Author\n\nContent 1";
// Test the parsing logic
const hasDocumentHeader = content.match(/^=\s+/m);
expect(hasDocumentHeader).toBeFalsy(); // This content doesn't have a document header
// Test section splitting logic
const sectionStrings = content.split(/(?=^==\s+)/gm).filter((section: string) => section.trim());
const sectionStrings = content.split(/(?=^==\s+)/gm).filter((
section: string,
) => section.trim());
expect(sectionStrings).toHaveLength(1);
expect(sectionStrings[0]).toContain("== Section 1");
});
it("should parse sections without document header", () => {
const content = "== Section 1\nContent 1";
// Test the parsing logic
const hasDocumentHeader = content.match(/^=\s+/m);
expect(hasDocumentHeader).toBeFalsy();
// Test section splitting logic
const sectionStrings = content.split(/(?=^==\s+)/gm).filter((section: string) => section.trim());
const sectionStrings = content.split(/(?=^==\s+)/gm).filter((
section: string,
) => section.trim());
expect(sectionStrings).toHaveLength(1);
expect(sectionStrings[0]).toContain("== Section 1");
});
@ -168,49 +182,70 @@ describe("ZettelEditor Component Logic", () => { @@ -168,49 +182,70 @@ describe("ZettelEditor Component Logic", () => {
describe("Content Conversion Logic", () => {
it("should convert document title to section title", () => {
const contentWithDocumentHeader = "= Document Title\n\n== Section 1\nContent";
const contentWithDocumentHeader =
"= Document Title\n\n== Section 1\nContent";
// Test the conversion logic
let convertedContent = contentWithDocumentHeader.replace(/^=\s+(.+)$/gm, '== $1');
convertedContent = convertedContent.replace(/^index card$/gim, '');
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, '\n\n');
let convertedContent = contentWithDocumentHeader.replace(
/^=\s+(.+)$/gm,
"== $1",
);
convertedContent = convertedContent.replace(/^index card$/gim, "");
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, "\n\n");
expect(finalContent).toBe("== Document Title\n\n== Section 1\nContent");
});
it("should remove index card line", () => {
const contentWithIndexCard = "index card\n\n== Section 1\nContent";
// Test the conversion logic
let convertedContent = contentWithIndexCard.replace(/^=\s+(.+)$/gm, '== $1');
convertedContent = convertedContent.replace(/^index card$/gim, '');
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, '\n\n');
let convertedContent = contentWithIndexCard.replace(
/^=\s+(.+)$/gm,
"== $1",
);
convertedContent = convertedContent.replace(/^index card$/gim, "");
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, "\n\n");
expect(finalContent).toBe("\n\n== Section 1\nContent");
});
it("should clean up double newlines", () => {
const contentWithExtraNewlines = "= Document Title\n\n\n== Section 1\nContent";
const contentWithExtraNewlines =
"= Document Title\n\n\n== Section 1\nContent";
// Test the conversion logic
let convertedContent = contentWithExtraNewlines.replace(/^=\s+(.+)$/gm, '== $1');
convertedContent = convertedContent.replace(/^index card$/gim, '');
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, '\n\n');
let convertedContent = contentWithExtraNewlines.replace(
/^=\s+(.+)$/gm,
"== $1",
);
convertedContent = convertedContent.replace(/^index card$/gim, "");
const finalContent = convertedContent.replace(/\n\s*\n\s*\n/g, "\n\n");
expect(finalContent).toBe("== Document Title\n\n== Section 1\nContent");
});
});
describe("SessionStorage Integration", () => {
it("should store content in sessionStorage when switching to publication editor", () => {
const contentWithDocumentHeader = "= Document Title\n\n== Section 1\nContent";
const contentWithDocumentHeader =
"= Document Title\n\n== Section 1\nContent";
// Test the sessionStorage logic
mockSessionStorage.setItem('zettelEditorContent', contentWithDocumentHeader);
mockSessionStorage.setItem('zettelEditorSource', 'publication-format');
expect(mockSessionStorage.setItem).toHaveBeenCalledWith('zettelEditorContent', contentWithDocumentHeader);
expect(mockSessionStorage.setItem).toHaveBeenCalledWith('zettelEditorSource', 'publication-format');
mockSessionStorage.setItem(
"zettelEditorContent",
contentWithDocumentHeader,
);
mockSessionStorage.setItem("zettelEditorSource", "publication-format");
expect(mockSessionStorage.setItem).toHaveBeenCalledWith(
"zettelEditorContent",
contentWithDocumentHeader,
);
expect(mockSessionStorage.setItem).toHaveBeenCalledWith(
"zettelEditorSource",
"publication-format",
);
});
});
@ -219,7 +254,7 @@ describe("ZettelEditor Component Logic", () => { @@ -219,7 +254,7 @@ describe("ZettelEditor Component Logic", () => {
const sections = [{ title: "Section 1", content: "Content 1", tags: [] }];
const eventCount = sections.length;
const eventText = `${eventCount} event${eventCount !== 1 ? "s" : ""}`;
expect(eventCount).toBe(1);
expect(eventText).toBe("1 event");
});
@ -227,11 +262,11 @@ describe("ZettelEditor Component Logic", () => { @@ -227,11 +262,11 @@ describe("ZettelEditor Component Logic", () => {
it("should calculate correct event count for multiple sections", () => {
const sections = [
{ title: "Section 1", content: "Content 1", tags: [] },
{ title: "Section 2", content: "Content 2", tags: [] }
{ title: "Section 2", content: "Content 2", tags: [] },
];
const eventCount = sections.length;
const eventText = `${eventCount} event${eventCount !== 1 ? "s" : ""}`;
expect(eventCount).toBe(2);
expect(eventText).toBe("2 events");
});
@ -240,11 +275,17 @@ describe("ZettelEditor Component Logic", () => { @@ -240,11 +275,17 @@ describe("ZettelEditor Component Logic", () => {
describe("Tag Processing Logic", () => {
it("should process tags correctly", () => {
// Mock the metadataToTags function
const mockMetadataToTags = vi.fn().mockReturnValue([["author", "Test Author"]]);
const mockMetadata = { title: "Section 1", author: "Test Author" } as AsciiDocMetadata;
const mockMetadataToTags = vi.fn().mockReturnValue([[
"author",
"Test Author",
]]);
const mockMetadata = {
title: "Section 1",
author: "Test Author",
} as AsciiDocMetadata;
const tags = mockMetadataToTags(mockMetadata);
expect(tags).toEqual([["author", "Test Author"]]);
expect(mockMetadataToTags).toHaveBeenCalledWith(mockMetadata);
});
@ -252,10 +293,10 @@ describe("ZettelEditor Component Logic", () => { @@ -252,10 +293,10 @@ describe("ZettelEditor Component Logic", () => {
it("should handle empty tags", () => {
// Mock the metadataToTags function
const mockMetadataToTags = vi.fn().mockReturnValue([]);
const mockMetadata = { title: "Section 1" } as AsciiDocMetadata;
const tags = mockMetadataToTags(mockMetadata);
expect(tags).toEqual([]);
});
});
@ -264,11 +305,11 @@ describe("ZettelEditor Component Logic", () => { @@ -264,11 +305,11 @@ describe("ZettelEditor Component Logic", () => {
it("should process AsciiDoc content correctly", () => {
// Mock the asciidoctor conversion
const mockConvert = vi.fn((content, options) => {
return content.replace(/^==\s+(.+)$/gm, '<h2>$1</h2>')
.replace(/\*\*(.+?)\*\*/g, '<strong>$1</strong>')
.replace(/\*(.+?)\*/g, '<em>$1</em>');
return content.replace(/^==\s+(.+)$/gm, "<h2>$1</h2>")
.replace(/\*\*(.+?)\*\*/g, "<strong>$1</strong>")
.replace(/\*(.+?)\*/g, "<em>$1</em>");
});
const content = "== Test Section\n\nThis is **bold** and *italic* text.";
const processedContent = mockConvert(content, {
standalone: false,
@ -278,10 +319,10 @@ describe("ZettelEditor Component Logic", () => { @@ -278,10 +319,10 @@ describe("ZettelEditor Component Logic", () => {
sectids: true,
},
});
expect(processedContent).toContain('<h2>Test Section</h2>');
expect(processedContent).toContain('<strong>bold</strong>');
expect(processedContent).toContain('<em>italic</em>');
expect(processedContent).toContain("<h2>Test Section</h2>");
expect(processedContent).toContain("<strong>bold</strong>");
expect(processedContent).toContain("<em>italic</em>");
});
});
@ -291,9 +332,9 @@ describe("ZettelEditor Component Logic", () => { @@ -291,9 +332,9 @@ describe("ZettelEditor Component Logic", () => {
const mockParseFunction = vi.fn().mockImplementation(() => {
throw new Error("Parsing error");
});
const content = "== Section 1\nContent 1";
// Should not throw error when called
expect(() => {
try {
@ -321,12 +362,12 @@ describe("ZettelEditor Component Logic", () => { @@ -321,12 +362,12 @@ describe("ZettelEditor Component Logic", () => {
onContentChange: vi.fn(),
onPreviewToggle: vi.fn(),
};
expect(expectedProps).toHaveProperty('content');
expect(expectedProps).toHaveProperty('placeholder');
expect(expectedProps).toHaveProperty('showPreview');
expect(expectedProps).toHaveProperty('onContentChange');
expect(expectedProps).toHaveProperty('onPreviewToggle');
expect(expectedProps).toHaveProperty("content");
expect(expectedProps).toHaveProperty("placeholder");
expect(expectedProps).toHaveProperty("showPreview");
expect(expectedProps).toHaveProperty("onContentChange");
expect(expectedProps).toHaveProperty("onPreviewToggle");
});
});
@ -334,12 +375,12 @@ describe("ZettelEditor Component Logic", () => { @@ -334,12 +375,12 @@ describe("ZettelEditor Component Logic", () => {
it("should integrate with ZettelParser utilities", () => {
// Mock the parseAsciiDocSections function
const mockParseAsciiDocSections = vi.fn().mockReturnValue([
{ title: "Section 1", content: "Content 1", tags: [] }
{ title: "Section 1", content: "Content 1", tags: [] },
]);
const content = "== Section 1\nContent 1";
const sections = mockParseAsciiDocSections(content, 2);
expect(sections).toHaveLength(1);
expect(sections[0].title).toBe("Section 1");
});
@ -348,21 +389,21 @@ describe("ZettelEditor Component Logic", () => { @@ -348,21 +389,21 @@ describe("ZettelEditor Component Logic", () => {
// Mock the utility functions
const mockExtractDocumentMetadata = vi.fn().mockReturnValue({
metadata: { title: "Document Title" } as AsciiDocMetadata,
content: "Document content"
content: "Document content",
});
const mockExtractSectionMetadata = vi.fn().mockReturnValue({
metadata: { title: "Section Title" } as AsciiDocMetadata,
content: "Section content",
title: "Section Title"
title: "Section Title",
});
const documentContent = "= Document Title\nDocument content";
const sectionContent = "== Section Title\nSection content";
const documentResult = mockExtractDocumentMetadata(documentContent);
const sectionResult = mockExtractSectionMetadata(sectionContent);
expect(documentResult.metadata.title).toBe("Document Title");
expect(sectionResult.title).toBe("Section Title");
});
@ -370,27 +411,35 @@ describe("ZettelEditor Component Logic", () => { @@ -370,27 +411,35 @@ describe("ZettelEditor Component Logic", () => {
describe("Content Validation", () => {
it("should validate content structure", () => {
const validContent = "== Section 1\nContent here\n\n== Section 2\nMore content";
const validContent =
"== Section 1\nContent here\n\n== Section 2\nMore content";
const invalidContent = "Just some text without sections";
// Test section detection
const validSections = validContent.split(/(?=^==\s+)/gm).filter((section: string) => section.trim());
const invalidSections = invalidContent.split(/(?=^==\s+)/gm).filter((section: string) => section.trim());
const validSections = validContent.split(/(?=^==\s+)/gm).filter((
section: string,
) => section.trim());
const invalidSections = invalidContent.split(/(?=^==\s+)/gm).filter((
section: string,
) => section.trim());
expect(validSections.length).toBeGreaterThan(0);
// The invalid content will have one section (the entire content) since it doesn't start with ==
expect(invalidSections.length).toBe(1);
});
it("should handle mixed content types", () => {
const mixedContent = "= Document Title\n\n== Section 1\nContent\n\n== Section 2\nMore content";
const mixedContent =
"= Document Title\n\n== Section 1\nContent\n\n== Section 2\nMore content";
// Test document header detection
const hasDocumentHeader = mixedContent.match(/^=\s+/m);
expect(hasDocumentHeader).toBeTruthy();
// Test section extraction
const sections = mixedContent.split(/(?=^==\s+)/gm).filter((section: string) => section.trim());
const sections = mixedContent.split(/(?=^==\s+)/gm).filter((
section: string,
) => section.trim());
expect(sections.length).toBeGreaterThan(0);
});
});
@ -398,13 +447,13 @@ describe("ZettelEditor Component Logic", () => { @@ -398,13 +447,13 @@ describe("ZettelEditor Component Logic", () => {
describe("String Manipulation", () => {
it("should handle string replacements correctly", () => {
const originalContent = "= Title\n\n== Section\nContent";
// Test various string manipulations
const convertedContent = originalContent
.replace(/^=\s+(.+)$/gm, '== $1')
.replace(/^index card$/gim, '')
.replace(/\n\s*\n\s*\n/g, '\n\n');
.replace(/^=\s+(.+)$/gm, "== $1")
.replace(/^index card$/gim, "")
.replace(/\n\s*\n\s*\n/g, "\n\n");
expect(convertedContent).toBe("== Title\n\n== Section\nContent");
});
@ -414,16 +463,16 @@ describe("ZettelEditor Component Logic", () => { @@ -414,16 +463,16 @@ describe("ZettelEditor Component Logic", () => {
"index card\n\n== Section\nContent", // Index card
"= Title\nindex card\n== Section\nContent", // Both
];
edgeCases.forEach(content => {
edgeCases.forEach((content) => {
const converted = content
.replace(/^=\s+(.+)$/gm, '== $1')
.replace(/^index card$/gim, '')
.replace(/\n\s*\n\s*\n/g, '\n\n');
.replace(/^=\s+(.+)$/gm, "== $1")
.replace(/^index card$/gim, "")
.replace(/\n\s*\n\s*\n/g, "\n\n");
expect(converted).toBeDefined();
expect(typeof converted).toBe('string');
expect(typeof converted).toBe("string");
});
});
});
});
});

337
tests/unit/eventInput30040.test.ts

@ -1,6 +1,12 @@ @@ -1,6 +1,12 @@
import { describe, it, expect, vi, beforeEach } from "vitest";
import { build30040EventSet, validate30040EventSet } from "../../src/lib/utils/event_input_utils";
import { extractDocumentMetadata, parseAsciiDocWithMetadata } from "../../src/lib/utils/asciidoc_metadata";
import { beforeEach, describe, expect, it, vi } from "vitest";
import {
build30040EventSet,
validate30040EventSet,
} from "../../src/lib/utils/event_input_utils";
import {
extractDocumentMetadata,
parseAsciiDocWithMetadata,
} from "../../src/lib/utils/asciidoc_metadata";
// Mock NDK and other dependencies
vi.mock("@nostr-dev-kit/ndk", () => ({
@ -60,16 +66,29 @@ This is the content of the second section.`; @@ -60,16 +66,29 @@ This is the content of the second section.`;
const tags: [string, string][] = [["type", "article"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event
expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "test-document-with-preamble"]);
expect(indexEvent.tags).toContainEqual(["title", "Test Document with Preamble"]);
expect(indexEvent.tags).toContainEqual([
"d",
"test-document-with-preamble",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Test Document with Preamble",
]);
expect(indexEvent.tags).toContainEqual(["author", "John Doe"]);
expect(indexEvent.tags).toContainEqual(["version", "1.0"]);
expect(indexEvent.tags).toContainEqual(["summary", "This is a test document with preamble"]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is a test document with preamble",
]);
expect(indexEvent.tags).toContainEqual(["t", "test"]);
expect(indexEvent.tags).toContainEqual(["t", "preamble"]);
expect(indexEvent.tags).toContainEqual(["t", "asciidoc"]);
@ -80,22 +99,47 @@ This is the content of the second section.`; @@ -80,22 +99,47 @@ This is the content of the second section.`;
// First section
expect(sectionEvents[0].kind).toBe(30041);
expect(sectionEvents[0].content).toBe("This is the content of the first section.");
expect(sectionEvents[0].tags).toContainEqual(["d", "test-document-with-preamble-first-section"]);
expect(sectionEvents[0].content).toBe(
"This is the content of the first section.",
);
expect(sectionEvents[0].tags).toContainEqual([
"d",
"test-document-with-preamble-first-section",
]);
expect(sectionEvents[0].tags).toContainEqual(["title", "First Section"]);
expect(sectionEvents[0].tags).toContainEqual(["author", "Section Author"]);
expect(sectionEvents[0].tags).toContainEqual(["summary", "This is the first section"]);
expect(sectionEvents[0].tags).toContainEqual([
"author",
"Section Author",
]);
expect(sectionEvents[0].tags).toContainEqual([
"summary",
"This is the first section",
]);
// Second section
expect(sectionEvents[1].kind).toBe(30041);
expect(sectionEvents[1].content).toBe("This is the content of the second section.");
expect(sectionEvents[1].tags).toContainEqual(["d", "test-document-with-preamble-second-section"]);
expect(sectionEvents[1].content).toBe(
"This is the content of the second section.",
);
expect(sectionEvents[1].tags).toContainEqual([
"d",
"test-document-with-preamble-second-section",
]);
expect(sectionEvents[1].tags).toContainEqual(["title", "Second Section"]);
expect(sectionEvents[1].tags).toContainEqual(["summary", "This is the second section"]);
expect(sectionEvents[1].tags).toContainEqual([
"summary",
"This is the second section",
]);
// Test a-tags in index event
expect(indexEvent.tags).toContainEqual(["a", "30041:test-pubkey:test-document-with-preamble-first-section"]);
expect(indexEvent.tags).toContainEqual(["a", "30041:test-pubkey:test-document-with-preamble-second-section"]);
expect(indexEvent.tags).toContainEqual([
"a",
"30041:test-pubkey:test-document-with-preamble-first-section",
]);
expect(indexEvent.tags).toContainEqual([
"a",
"30041:test-pubkey:test-document-with-preamble-second-section",
]);
});
});
@ -118,32 +162,64 @@ This is the content of the second section.`; @@ -118,32 +162,64 @@ This is the content of the second section.`;
const tags: [string, string][] = [["type", "article"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event
expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "test-document-without-preamble"]);
expect(indexEvent.tags).toContainEqual(["title", "Test Document without Preamble"]);
expect(indexEvent.tags).toContainEqual(["summary", "This is a test document without preamble"]);
expect(indexEvent.tags).toContainEqual([
"d",
"test-document-without-preamble",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Test Document without Preamble",
]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is a test document without preamble",
]);
// Test section events
expect(sectionEvents).toHaveLength(2);
// First section
expect(sectionEvents[0].kind).toBe(30041);
expect(sectionEvents[0].content).toBe("This is the content of the first section.");
expect(sectionEvents[0].tags).toContainEqual(["d", "test-document-without-preamble-first-section"]);
expect(sectionEvents[0].content).toBe(
"This is the content of the first section.",
);
expect(sectionEvents[0].tags).toContainEqual([
"d",
"test-document-without-preamble-first-section",
]);
expect(sectionEvents[0].tags).toContainEqual(["title", "First Section"]);
expect(sectionEvents[0].tags).toContainEqual(["author", "Section Author"]);
expect(sectionEvents[0].tags).toContainEqual(["summary", "This is the first section"]);
expect(sectionEvents[0].tags).toContainEqual([
"author",
"Section Author",
]);
expect(sectionEvents[0].tags).toContainEqual([
"summary",
"This is the first section",
]);
// Second section
expect(sectionEvents[1].kind).toBe(30041);
expect(sectionEvents[1].content).toBe("This is the content of the second section.");
expect(sectionEvents[1].tags).toContainEqual(["d", "test-document-without-preamble-second-section"]);
expect(sectionEvents[1].content).toBe(
"This is the content of the second section.",
);
expect(sectionEvents[1].tags).toContainEqual([
"d",
"test-document-without-preamble-second-section",
]);
expect(sectionEvents[1].tags).toContainEqual(["title", "Second Section"]);
expect(sectionEvents[1].tags).toContainEqual(["summary", "This is the second section"]);
expect(sectionEvents[1].tags).toContainEqual([
"summary",
"This is the second section",
]);
});
});
@ -163,14 +239,27 @@ This is the preamble content. @@ -163,14 +239,27 @@ This is the preamble content.
const tags: [string, string][] = [["type", "skeleton"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event
expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "skeleton-document-with-preamble"]);
expect(indexEvent.tags).toContainEqual(["title", "Skeleton Document with Preamble"]);
expect(indexEvent.tags).toContainEqual(["summary", "This is a skeleton document with preamble"]);
expect(indexEvent.tags).toContainEqual([
"d",
"skeleton-document-with-preamble",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Skeleton Document with Preamble",
]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is a skeleton document with preamble",
]);
// Test section events
expect(sectionEvents).toHaveLength(3);
@ -179,8 +268,14 @@ This is the preamble content. @@ -179,8 +268,14 @@ This is the preamble content.
sectionEvents.forEach((section, index) => {
expect(section.kind).toBe(30041);
expect(section.content).toBe("");
expect(section.tags).toContainEqual(["d", `skeleton-document-with-preamble-empty-section-${index + 1}`]);
expect(section.tags).toContainEqual(["title", `Empty Section ${index + 1}`]);
expect(section.tags).toContainEqual([
"d",
`skeleton-document-with-preamble-empty-section-${index + 1}`,
]);
expect(section.tags).toContainEqual([
"title",
`Empty Section ${index + 1}`,
]);
});
});
});
@ -199,14 +294,27 @@ This is the preamble content. @@ -199,14 +294,27 @@ This is the preamble content.
const tags: [string, string][] = [["type", "skeleton"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event
expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "skeleton-document-without-preamble"]);
expect(indexEvent.tags).toContainEqual(["title", "Skeleton Document without Preamble"]);
expect(indexEvent.tags).toContainEqual(["summary", "This is a skeleton document without preamble"]);
expect(indexEvent.tags).toContainEqual([
"d",
"skeleton-document-without-preamble",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Skeleton Document without Preamble",
]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is a skeleton document without preamble",
]);
// Test section events
expect(sectionEvents).toHaveLength(3);
@ -215,8 +323,14 @@ This is the preamble content. @@ -215,8 +323,14 @@ This is the preamble content.
sectionEvents.forEach((section, index) => {
expect(section.kind).toBe(30041);
expect(section.content).toBe("");
expect(section.tags).toContainEqual(["d", `skeleton-document-without-preamble-empty-section-${index + 1}`]);
expect(section.tags).toContainEqual(["title", `Empty Section ${index + 1}`]);
expect(section.tags).toContainEqual([
"d",
`skeleton-document-without-preamble-empty-section-${index + 1}`,
]);
expect(section.tags).toContainEqual([
"title",
`Empty Section ${index + 1}`,
]);
});
});
});
@ -228,7 +342,11 @@ index card`; @@ -228,7 +342,11 @@ index card`;
const tags: [string, string][] = [["type", "index-card"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event
expect(indexEvent.kind).toBe(30040);
@ -249,14 +367,27 @@ index card`; @@ -249,14 +367,27 @@ index card`;
const tags: [string, string][] = [["type", "index-card"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event
expect(indexEvent.kind).toBe(30040);
expect(indexEvent.content).toBe("");
expect(indexEvent.tags).toContainEqual(["d", "test-index-card-with-metadata"]);
expect(indexEvent.tags).toContainEqual(["title", "Test Index Card with Metadata"]);
expect(indexEvent.tags).toContainEqual(["summary", "This is an index card with metadata"]);
expect(indexEvent.tags).toContainEqual([
"d",
"test-index-card-with-metadata",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Test Index Card with Metadata",
]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is an index card with metadata",
]);
expect(indexEvent.tags).toContainEqual(["t", "index"]);
expect(indexEvent.tags).toContainEqual(["t", "card"]);
expect(indexEvent.tags).toContainEqual(["t", "metadata"]);
@ -303,23 +434,45 @@ This is the section content.`; @@ -303,23 +434,45 @@ This is the section content.`;
const tags: [string, string][] = [["type", "complex"]];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
// Test index event metadata
expect(indexEvent.kind).toBe(30040);
expect(indexEvent.tags).toContainEqual(["d", "complex-metadata-document"]);
expect(indexEvent.tags).toContainEqual(["title", "Complex Metadata Document"]);
expect(indexEvent.tags).toContainEqual([
"d",
"complex-metadata-document",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Complex Metadata Document",
]);
expect(indexEvent.tags).toContainEqual(["author", "Jane Smith"]); // Should use header line author
expect(indexEvent.tags).toContainEqual(["author", "Override Author"]); // Additional author from attribute
expect(indexEvent.tags).toContainEqual(["author", "Third Author"]); // Additional author from attribute
expect(indexEvent.tags).toContainEqual(["version", "2.0"]); // Should use revision line version
expect(indexEvent.tags).toContainEqual(["summary", "This is a complex document with all metadata types Alternative description field"]);
expect(indexEvent.tags).toContainEqual([
"summary",
"This is a complex document with all metadata types Alternative description field",
]);
expect(indexEvent.tags).toContainEqual(["published_on", "2024-03-01"]);
expect(indexEvent.tags).toContainEqual(["published_by", "Alexandria Complex"]);
expect(indexEvent.tags).toContainEqual([
"published_by",
"Alexandria Complex",
]);
expect(indexEvent.tags).toContainEqual(["type", "book"]);
expect(indexEvent.tags).toContainEqual(["image", "https://example.com/cover.jpg"]);
expect(indexEvent.tags).toContainEqual([
"image",
"https://example.com/cover.jpg",
]);
expect(indexEvent.tags).toContainEqual(["i", "978-0-123456-78-9"]);
expect(indexEvent.tags).toContainEqual(["source", "https://github.com/alexandria/complex"]);
expect(indexEvent.tags).toContainEqual([
"source",
"https://github.com/alexandria/complex",
]);
expect(indexEvent.tags).toContainEqual(["auto-update", "yes"]);
expect(indexEvent.tags).toContainEqual(["t", "complex"]);
expect(indexEvent.tags).toContainEqual(["t", "metadata"]);
@ -332,13 +485,31 @@ This is the section content.`; @@ -332,13 +485,31 @@ This is the section content.`;
expect(sectionEvents).toHaveLength(1);
expect(sectionEvents[0].kind).toBe(30041);
expect(sectionEvents[0].content).toBe("This is the section content.");
expect(sectionEvents[0].tags).toContainEqual(["d", "complex-metadata-document-section-with-complex-metadata"]);
expect(sectionEvents[0].tags).toContainEqual(["title", "Section with Complex Metadata"]);
expect(sectionEvents[0].tags).toContainEqual(["author", "Section Author"]);
expect(sectionEvents[0].tags).toContainEqual(["author", "Section Co-Author"]);
expect(sectionEvents[0].tags).toContainEqual(["summary", "This section has complex metadata Alternative description for section"]);
expect(sectionEvents[0].tags).toContainEqual([
"d",
"complex-metadata-document-section-with-complex-metadata",
]);
expect(sectionEvents[0].tags).toContainEqual([
"title",
"Section with Complex Metadata",
]);
expect(sectionEvents[0].tags).toContainEqual([
"author",
"Section Author",
]);
expect(sectionEvents[0].tags).toContainEqual([
"author",
"Section Co-Author",
]);
expect(sectionEvents[0].tags).toContainEqual([
"summary",
"This section has complex metadata Alternative description for section",
]);
expect(sectionEvents[0].tags).toContainEqual(["type", "chapter"]);
expect(sectionEvents[0].tags).toContainEqual(["image", "https://example.com/section-image.jpg"]);
expect(sectionEvents[0].tags).toContainEqual([
"image",
"https://example.com/section-image.jpg",
]);
expect(sectionEvents[0].tags).toContainEqual(["t", "section"]);
expect(sectionEvents[0].tags).toContainEqual(["t", "complex"]);
expect(sectionEvents[0].tags).toContainEqual(["t", "metadata"]);
@ -387,7 +558,9 @@ index card`; @@ -387,7 +558,9 @@ index card`;
const validation = validate30040EventSet(content);
expect(validation.valid).toBe(false);
expect(validation.reason).toContain("30040 events must have a document title");
expect(validation.reason).toContain(
"30040 events must have a document title",
);
});
});
@ -400,11 +573,21 @@ This is just preamble content.`; @@ -400,11 +573,21 @@ This is just preamble content.`;
const tags: [string, string][] = [];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
expect(indexEvent.kind).toBe(30040);
expect(indexEvent.tags).toContainEqual(["d", "document-with-no-sections"]);
expect(indexEvent.tags).toContainEqual(["title", "Document with No Sections"]);
expect(indexEvent.tags).toContainEqual([
"d",
"document-with-no-sections",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Document with No Sections",
]);
expect(sectionEvents).toHaveLength(0);
});
@ -418,16 +601,27 @@ Content here.`; @@ -418,16 +601,27 @@ Content here.`;
const tags: [string, string][] = [];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
expect(indexEvent.kind).toBe(30040);
expect(indexEvent.tags).toContainEqual(["d", "document-with-special-characters-test-more"]);
expect(indexEvent.tags).toContainEqual(["title", "Document with Special Characters: Test & More!"]);
expect(indexEvent.tags).toContainEqual([
"d",
"document-with-special-characters-test-more",
]);
expect(indexEvent.tags).toContainEqual([
"title",
"Document with Special Characters: Test & More!",
]);
expect(sectionEvents).toHaveLength(1);
});
it("should handle document with very long title", () => {
const content = `= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality
const content =
`= This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality
:summary: This document has a very long title
== Section 1
@ -436,11 +630,18 @@ Content here.`; @@ -436,11 +630,18 @@ Content here.`;
const tags: [string, string][] = [];
const { indexEvent, sectionEvents } = build30040EventSet(content, tags, baseEvent);
const { indexEvent, sectionEvents } = build30040EventSet(
content,
tags,
baseEvent,
);
expect(indexEvent.kind).toBe(30040);
expect(indexEvent.tags).toContainEqual(["title", "This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality"]);
expect(indexEvent.tags).toContainEqual([
"title",
"This is a very long document title that should be handled properly by the system and should not cause any issues with the d-tag generation or any other functionality",
]);
expect(sectionEvents).toHaveLength(1);
});
});
});
});

2
tests/unit/latexRendering.test.ts

@ -1,4 +1,4 @@ @@ -1,4 +1,4 @@
import { describe, it, expect } from "vitest";
import { describe, expect, it } from "vitest";
import { parseAdvancedmarkup } from "../../src/lib/utils/markup/advancedMarkupParser";
import { readFileSync } from "fs";
import { join } from "path";

124
tests/unit/metadataExtraction.test.ts

@ -1,10 +1,10 @@ @@ -1,10 +1,10 @@
import { describe, it, expect } from "vitest";
import {
extractDocumentMetadata,
extractSectionMetadata,
parseAsciiDocWithMetadata,
import { describe, expect, it } from "vitest";
import {
extractDocumentMetadata,
extractSectionMetadata,
extractSmartMetadata,
metadataToTags,
extractSmartMetadata
parseAsciiDocWithMetadata,
} from "../../src/lib/utils/asciidoc_metadata.ts";
describe("AsciiDoc Metadata Extraction", () => {
@ -39,13 +39,15 @@ This is the content of the second section.`; @@ -39,13 +39,15 @@ This is the content of the second section.`;
it("extractDocumentMetadata should extract document metadata correctly", () => {
const { metadata, content } = extractDocumentMetadata(testContent);
expect(metadata.title).toBe("Test Document with Metadata");
expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]);
expect(metadata.version).toBe("1.0");
expect(metadata.publicationDate).toBe("2024-01-15");
expect(metadata.publishedBy).toBe("Alexandria Test");
expect(metadata.summary).toBe("This is a test document for metadata extraction");
expect(metadata.summary).toBe(
"This is a test document for metadata extraction",
);
expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]);
expect(metadata.type).toBe("article");
expect(metadata.tags).toEqual(["test", "metadata", "asciidoc"]);
@ -53,7 +55,7 @@ This is the content of the second section.`; @@ -53,7 +55,7 @@ This is the content of the second section.`;
expect(metadata.isbn).toBe("978-0-123456-78-9");
expect(metadata.source).toBe("https://github.com/alexandria/test");
expect(metadata.autoUpdate).toBe("yes");
// Content should not include the header metadata
expect(content).toContain("This is the preamble content");
expect(content).toContain("== First Section");
@ -70,7 +72,7 @@ This is the content of the second section.`; @@ -70,7 +72,7 @@ This is the content of the second section.`;
This is the content of the first section.`;
const { metadata, content, title } = extractSectionMetadata(sectionContent);
expect(title).toBe("First Section");
expect(metadata.authors).toEqual(["Section Author"]);
expect(metadata.summary).toBe("This is the first section");
@ -86,7 +88,7 @@ Stella @@ -86,7 +88,7 @@ Stella
Some context text`;
const { metadata, content, title } = extractSectionMetadata(sectionContent);
expect(title).toBe("Section Header1");
expect(metadata.authors).toEqual(["Stella"]);
expect(metadata.summary).toBe("Some summary");
@ -102,7 +104,7 @@ Stella @@ -102,7 +104,7 @@ Stella
Some context text`;
const { metadata, content, title } = extractSectionMetadata(sectionContent);
expect(title).toBe("Section Header1");
expect(metadata.authors).toEqual(["Stella", "John Doe"]);
expect(metadata.summary).toBe("Some summary");
@ -118,22 +120,26 @@ This is not an author line @@ -118,22 +120,26 @@ This is not an author line
Some context text`;
const { metadata, content, title } = extractSectionMetadata(sectionContent);
expect(title).toBe("Section Header1");
expect(metadata.authors).toEqual(["Stella"]);
expect(metadata.summary).toBe("Some summary");
expect(content.trim()).toBe("This is not an author line\nSome context text");
expect(content.trim()).toBe(
"This is not an author line\nSome context text",
);
});
it("parseAsciiDocWithMetadata should parse complete document", () => {
const parsed = parseAsciiDocWithMetadata(testContent);
expect(parsed.metadata.title).toBe("Test Document with Metadata");
expect(parsed.sections).toHaveLength(2);
expect(parsed.sections[0].title).toBe("First Section");
expect(parsed.sections[1].title).toBe("Second Section");
expect(parsed.sections[0].metadata.authors).toEqual(["Section Author"]);
expect(parsed.sections[1].metadata.summary).toBe("This is the second section");
expect(parsed.sections[1].metadata.summary).toBe(
"This is the second section",
);
});
it("metadataToTags should convert metadata to Nostr tags", () => {
@ -142,11 +148,11 @@ Some context text`; @@ -142,11 +148,11 @@ Some context text`;
authors: ["Author 1", "Author 2"],
version: "1.0",
summary: "Test summary",
tags: ["tag1", "tag2"]
tags: ["tag1", "tag2"],
};
const tags = metadataToTags(metadata);
expect(tags).toContainEqual(["title", "Test Title"]);
expect(tags).toContainEqual(["author", "Author 1"]);
expect(tags).toContainEqual(["author", "Author 2"]);
@ -161,16 +167,16 @@ Some context text`; @@ -161,16 +167,16 @@ Some context text`;
index card`;
const { metadata, content } = extractDocumentMetadata(indexCardContent);
expect(metadata.title).toBe("Test Index Card");
expect(content.trim()).toBe("index card");
});
it("should handle empty content gracefully", () => {
const emptyContent = "";
const { metadata, content } = extractDocumentMetadata(emptyContent);
expect(metadata.title).toBeUndefined();
expect(content).toBe("");
});
@ -182,7 +188,7 @@ index card`; @@ -182,7 +188,7 @@ index card`;
Some content here.`;
const { metadata } = extractDocumentMetadata(contentWithKeywords);
expect(metadata.tags).toEqual(["keyword1", "keyword2", "keyword3"]);
});
@ -194,7 +200,7 @@ Some content here.`; @@ -194,7 +200,7 @@ Some content here.`;
Some content here.`;
const { metadata } = extractDocumentMetadata(contentWithBoth);
// Both tags and keywords are valid, both should be accumulated
expect(metadata.tags).toEqual(["tag1", "tag2", "keyword1", "keyword2"]);
});
@ -206,7 +212,7 @@ Some content here.`; @@ -206,7 +212,7 @@ Some content here.`;
Content here.`;
const { metadata } = extractDocumentMetadata(contentWithTags);
expect(metadata.tags).toEqual(["tag1", "tag2", "tag3"]);
});
@ -221,15 +227,19 @@ Content here.`; @@ -221,15 +227,19 @@ Content here.`;
Content here.`;
const { metadata: summaryMetadata } = extractDocumentMetadata(contentWithSummary);
const { metadata: descriptionMetadata } = extractDocumentMetadata(contentWithDescription);
const { metadata: summaryMetadata } = extractDocumentMetadata(
contentWithSummary,
);
const { metadata: descriptionMetadata } = extractDocumentMetadata(
contentWithDescription,
);
expect(summaryMetadata.summary).toBe("This is a summary");
expect(descriptionMetadata.summary).toBe("This is a description");
});
describe('Smart metadata extraction', () => {
it('should handle section-only content correctly', () => {
describe("Smart metadata extraction", () => {
it("should handle section-only content correctly", () => {
const sectionOnlyContent = `== First Section
:author: Section Author
:description: This is the first section
@ -244,20 +254,20 @@ This is the content of the first section. @@ -244,20 +254,20 @@ This is the content of the first section.
This is the content of the second section.`;
const { metadata, content } = extractSmartMetadata(sectionOnlyContent);
// Should extract title from first section
expect(metadata.title).toBe('First Section');
expect(metadata.title).toBe("First Section");
// Should not have document-level metadata since there's no document header
expect(metadata.authors).toBeUndefined();
expect(metadata.version).toBeUndefined();
expect(metadata.publicationDate).toBeUndefined();
// Content should be preserved
expect(content).toBe(sectionOnlyContent);
});
it('should handle minimal document header (just title) correctly', () => {
it("should handle minimal document header (just title) correctly", () => {
const minimalDocumentHeader = `= Test Document
== First Section
@ -273,22 +283,22 @@ This is the content of the first section. @@ -273,22 +283,22 @@ This is the content of the first section.
This is the content of the second section.`;
const { metadata, content } = extractSmartMetadata(minimalDocumentHeader);
// Should extract title from document header
expect(metadata.title).toBe('Test Document');
expect(metadata.title).toBe("Test Document");
// Should not have document-level metadata since there's no other metadata
expect(metadata.authors).toBeUndefined();
// Note: version might be set from section attributes like :type: chapter
expect(metadata.publicationDate).toBeUndefined();
// Content should preserve the title line for 30040 events
expect(content).toContain('= Test Document');
expect(content).toContain('== First Section');
expect(content).toContain('== Second Section');
expect(content).toContain("= Test Document");
expect(content).toContain("== First Section");
expect(content).toContain("== Second Section");
});
it('should handle document with full header correctly', () => {
it("should handle document with full header correctly", () => {
const documentWithHeader = `= Test Document
John Doe <john@example.com>
1.0, 2024-01-15: Alexandria Test
@ -302,21 +312,21 @@ John Doe <john@example.com> @@ -302,21 +312,21 @@ John Doe <john@example.com>
This is the content.`;
const { metadata, content } = extractSmartMetadata(documentWithHeader);
// Should extract document-level metadata
expect(metadata.title).toBe('Test Document');
expect(metadata.authors).toEqual(['John Doe', 'Jane Smith']);
expect(metadata.version).toBe('1.0');
expect(metadata.publishedBy).toBe('Alexandria Test');
expect(metadata.publicationDate).toBe('2024-01-15');
expect(metadata.summary).toBe('This is a test document');
expect(metadata.title).toBe("Test Document");
expect(metadata.authors).toEqual(["John Doe", "Jane Smith"]);
expect(metadata.version).toBe("1.0");
expect(metadata.publishedBy).toBe("Alexandria Test");
expect(metadata.publicationDate).toBe("2024-01-15");
expect(metadata.summary).toBe("This is a test document");
// Content should be cleaned
expect(content).not.toContain('= Test Document');
expect(content).not.toContain('John Doe <john@example.com>');
expect(content).not.toContain('1.0, 2024-01-15: Alexandria Test');
expect(content).not.toContain(':summary: This is a test document');
expect(content).not.toContain(':author: Jane Smith');
expect(content).not.toContain("= Test Document");
expect(content).not.toContain("John Doe <john@example.com>");
expect(content).not.toContain("1.0, 2024-01-15: Alexandria Test");
expect(content).not.toContain(":summary: This is a test document");
expect(content).not.toContain(":author: Jane Smith");
});
});
});
});

132
tests/unit/nostr_identifiers.test.ts

@ -1,106 +1,112 @@ @@ -1,106 +1,112 @@
import { describe, it, expect } from 'vitest';
import {
isEventId,
isCoordinate,
parseCoordinate,
import { describe, expect, it } from "vitest";
import {
createCoordinate,
isNostrIdentifier
} from '../../src/lib/utils/nostr_identifiers';
isCoordinate,
isEventId,
isNostrIdentifier,
parseCoordinate,
} from "../../src/lib/utils/nostr_identifiers";
describe('Nostr Identifier Validation', () => {
describe('isEventId', () => {
it('should validate correct hex event IDs', () => {
const validId = 'a'.repeat(64);
describe("Nostr Identifier Validation", () => {
describe("isEventId", () => {
it("should validate correct hex event IDs", () => {
const validId = "a".repeat(64);
expect(isEventId(validId)).toBe(true);
const validIdWithMixedCase = 'A'.repeat(32) + 'f'.repeat(32);
const validIdWithMixedCase = "A".repeat(32) + "f".repeat(32);
expect(isEventId(validIdWithMixedCase)).toBe(true);
});
it('should reject invalid event IDs', () => {
expect(isEventId('')).toBe(false);
expect(isEventId('abc')).toBe(false);
expect(isEventId('a'.repeat(63))).toBe(false); // too short
expect(isEventId('a'.repeat(65))).toBe(false); // too long
expect(isEventId('g'.repeat(64))).toBe(false); // invalid hex char
it("should reject invalid event IDs", () => {
expect(isEventId("")).toBe(false);
expect(isEventId("abc")).toBe(false);
expect(isEventId("a".repeat(63))).toBe(false); // too short
expect(isEventId("a".repeat(65))).toBe(false); // too long
expect(isEventId("g".repeat(64))).toBe(false); // invalid hex char
});
});
describe('isCoordinate', () => {
it('should validate correct coordinates', () => {
const validCoordinate = `30040:${'a'.repeat(64)}:chapter-1`;
describe("isCoordinate", () => {
it("should validate correct coordinates", () => {
const validCoordinate = `30040:${"a".repeat(64)}:chapter-1`;
expect(isCoordinate(validCoordinate)).toBe(true);
const coordinateWithColonsInDTag = `30041:${'b'.repeat(64)}:chapter:with:colons`;
const coordinateWithColonsInDTag = `30041:${
"b".repeat(64)
}:chapter:with:colons`;
expect(isCoordinate(coordinateWithColonsInDTag)).toBe(true);
});
it('should reject invalid coordinates', () => {
expect(isCoordinate('')).toBe(false);
expect(isCoordinate('abc')).toBe(false);
expect(isCoordinate('30040:abc:chapter-1')).toBe(false); // invalid pubkey
expect(isCoordinate('30040:abc')).toBe(false); // missing d-tag
expect(isCoordinate('abc:def:ghi')).toBe(false); // invalid kind
expect(isCoordinate('-1:abc:def')).toBe(false); // negative kind
it("should reject invalid coordinates", () => {
expect(isCoordinate("")).toBe(false);
expect(isCoordinate("abc")).toBe(false);
expect(isCoordinate("30040:abc:chapter-1")).toBe(false); // invalid pubkey
expect(isCoordinate("30040:abc")).toBe(false); // missing d-tag
expect(isCoordinate("abc:def:ghi")).toBe(false); // invalid kind
expect(isCoordinate("-1:abc:def")).toBe(false); // negative kind
});
});
describe('parseCoordinate', () => {
it('should parse valid coordinates correctly', () => {
const coordinate = `30040:${'a'.repeat(64)}:chapter-1`;
describe("parseCoordinate", () => {
it("should parse valid coordinates correctly", () => {
const coordinate = `30040:${"a".repeat(64)}:chapter-1`;
const parsed = parseCoordinate(coordinate);
expect(parsed).toEqual({
kind: 30040,
pubkey: 'a'.repeat(64),
dTag: 'chapter-1'
pubkey: "a".repeat(64),
dTag: "chapter-1",
});
});
it('should handle d-tags with colons', () => {
const coordinate = `30041:${'b'.repeat(64)}:chapter:with:colons`;
it("should handle d-tags with colons", () => {
const coordinate = `30041:${"b".repeat(64)}:chapter:with:colons`;
const parsed = parseCoordinate(coordinate);
expect(parsed).toEqual({
kind: 30041,
pubkey: 'b'.repeat(64),
dTag: 'chapter:with:colons'
pubkey: "b".repeat(64),
dTag: "chapter:with:colons",
});
});
it('should return null for invalid coordinates', () => {
expect(parseCoordinate('')).toBeNull();
expect(parseCoordinate('abc')).toBeNull();
expect(parseCoordinate('30040:abc:chapter-1')).toBeNull();
it("should return null for invalid coordinates", () => {
expect(parseCoordinate("")).toBeNull();
expect(parseCoordinate("abc")).toBeNull();
expect(parseCoordinate("30040:abc:chapter-1")).toBeNull();
});
});
describe('createCoordinate', () => {
it('should create valid coordinates', () => {
const coordinate = createCoordinate(30040, 'a'.repeat(64), 'chapter-1');
expect(coordinate).toBe(`30040:${'a'.repeat(64)}:chapter-1`);
describe("createCoordinate", () => {
it("should create valid coordinates", () => {
const coordinate = createCoordinate(30040, "a".repeat(64), "chapter-1");
expect(coordinate).toBe(`30040:${"a".repeat(64)}:chapter-1`);
});
it('should handle d-tags with colons', () => {
const coordinate = createCoordinate(30041, 'b'.repeat(64), 'chapter:with:colons');
expect(coordinate).toBe(`30041:${'b'.repeat(64)}:chapter:with:colons`);
it("should handle d-tags with colons", () => {
const coordinate = createCoordinate(
30041,
"b".repeat(64),
"chapter:with:colons",
);
expect(coordinate).toBe(`30041:${"b".repeat(64)}:chapter:with:colons`);
});
});
describe('isNostrIdentifier', () => {
it('should accept valid event IDs', () => {
expect(isNostrIdentifier('a'.repeat(64))).toBe(true);
describe("isNostrIdentifier", () => {
it("should accept valid event IDs", () => {
expect(isNostrIdentifier("a".repeat(64))).toBe(true);
});
it('should accept valid coordinates', () => {
const coordinate = `30040:${'a'.repeat(64)}:chapter-1`;
it("should accept valid coordinates", () => {
const coordinate = `30040:${"a".repeat(64)}:chapter-1`;
expect(isNostrIdentifier(coordinate)).toBe(true);
});
it('should reject invalid identifiers', () => {
expect(isNostrIdentifier('')).toBe(false);
expect(isNostrIdentifier('abc')).toBe(false);
expect(isNostrIdentifier('30040:abc:chapter-1')).toBe(false);
it("should reject invalid identifiers", () => {
expect(isNostrIdentifier("")).toBe(false);
expect(isNostrIdentifier("abc")).toBe(false);
expect(isNostrIdentifier("30040:abc:chapter-1")).toBe(false);
});
});
});
});

742
tests/unit/relayDeduplication.test.ts

@ -1,11 +1,11 @@ @@ -1,11 +1,11 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import type { NDKEvent } from '@nostr-dev-kit/ndk';
import {
deduplicateContentEvents,
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import {
deduplicateAndCombineEvents,
deduplicateContentEvents,
getEventCoordinate,
isReplaceableEvent,
getEventCoordinate
} from '../../src/lib/utils/eventDeduplication';
} from "../../src/lib/utils/eventDeduplication";
// Mock NDKEvent for testing
class MockNDKEvent {
@ -16,162 +16,264 @@ class MockNDKEvent { @@ -16,162 +16,264 @@ class MockNDKEvent {
content: string;
tags: string[][];
constructor(id: string, kind: number, pubkey: string, created_at: number, dTag: string, content: string = '') {
constructor(
id: string,
kind: number,
pubkey: string,
created_at: number,
dTag: string,
content: string = "",
) {
this.id = id;
this.kind = kind;
this.pubkey = pubkey;
this.created_at = created_at;
this.content = content;
this.tags = [['d', dTag]];
this.tags = [["d", dTag]];
}
tagValue(tagName: string): string | undefined {
const tag = this.tags.find(t => t[0] === tagName);
const tag = this.tags.find((t) => t[0] === tagName);
return tag ? tag[1] : undefined;
}
}
describe('Relay Deduplication Behavior Tests', () => {
describe("Relay Deduplication Behavior Tests", () => {
let mockEvents: MockNDKEvent[];
beforeEach(() => {
// Create test events with different timestamps
mockEvents = [
// Older version of a publication content event
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old content'),
new MockNDKEvent(
"event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Old content",
),
// Newer version of the same publication content event
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'Updated content'),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"Updated content",
),
// Different publication content event
new MockNDKEvent('event3', 30041, 'pubkey1', 1500, 'chapter-2', 'Different content'),
new MockNDKEvent(
"event3",
30041,
"pubkey1",
1500,
"chapter-2",
"Different content",
),
// Publication index event (should not be deduplicated)
new MockNDKEvent('event4', 30040, 'pubkey1', 1200, 'book-1', 'Index content'),
new MockNDKEvent(
"event4",
30040,
"pubkey1",
1200,
"book-1",
"Index content",
),
// Regular text note (should not be deduplicated)
new MockNDKEvent('event5', 1, 'pubkey1', 1300, '', 'Regular note'),
new MockNDKEvent("event5", 1, "pubkey1", 1300, "", "Regular note"),
];
});
describe('Addressable Event Deduplication', () => {
it('should keep only the most recent version of addressable events by coordinate', () => {
describe("Addressable Event Deduplication", () => {
it("should keep only the most recent version of addressable events by coordinate", () => {
// Test the deduplication logic for content events
const eventSets = [new Set(mockEvents.filter(e => e.kind === 30041) as NDKEvent[])];
const eventSets = [
new Set(mockEvents.filter((e) => e.kind === 30041) as NDKEvent[]),
];
const result = deduplicateContentEvents(eventSets);
// Should have 2 unique coordinates: chapter-1 and chapter-2
expect(result.size).toBe(2);
// Should keep the newer version of chapter-1
const chapter1Event = result.get('30041:pubkey1:chapter-1');
expect(chapter1Event?.id).toBe('event2');
expect(chapter1Event?.content).toBe('Updated content');
const chapter1Event = result.get("30041:pubkey1:chapter-1");
expect(chapter1Event?.id).toBe("event2");
expect(chapter1Event?.content).toBe("Updated content");
// Should keep chapter-2
const chapter2Event = result.get('30041:pubkey1:chapter-2');
expect(chapter2Event?.id).toBe('event3');
const chapter2Event = result.get("30041:pubkey1:chapter-2");
expect(chapter2Event?.id).toBe("event3");
});
it('should handle events with missing d-tags gracefully', () => {
const eventWithoutDTag = new MockNDKEvent('event6', 30041, 'pubkey1', 1400, '', 'No d-tag');
it("should handle events with missing d-tags gracefully", () => {
const eventWithoutDTag = new MockNDKEvent(
"event6",
30041,
"pubkey1",
1400,
"",
"No d-tag",
);
eventWithoutDTag.tags = []; // Remove d-tag
const eventSets = [new Set([eventWithoutDTag] as NDKEvent[])];
const result = deduplicateContentEvents(eventSets);
// Should not include events without d-tags
expect(result.size).toBe(0);
});
it('should handle events with missing timestamps', () => {
const eventWithoutTimestamp = new MockNDKEvent('event7', 30041, 'pubkey1', 0, 'chapter-3', 'No timestamp');
const eventWithTimestamp = new MockNDKEvent('event8', 30041, 'pubkey1', 1500, 'chapter-3', 'With timestamp');
const eventSets = [new Set([eventWithoutTimestamp, eventWithTimestamp] as NDKEvent[])];
it("should handle events with missing timestamps", () => {
const eventWithoutTimestamp = new MockNDKEvent(
"event7",
30041,
"pubkey1",
0,
"chapter-3",
"No timestamp",
);
const eventWithTimestamp = new MockNDKEvent(
"event8",
30041,
"pubkey1",
1500,
"chapter-3",
"With timestamp",
);
const eventSets = [
new Set([eventWithoutTimestamp, eventWithTimestamp] as NDKEvent[]),
];
const result = deduplicateContentEvents(eventSets);
// Should prefer the event with timestamp
const chapter3Event = result.get('30041:pubkey1:chapter-3');
expect(chapter3Event?.id).toBe('event8');
const chapter3Event = result.get("30041:pubkey1:chapter-3");
expect(chapter3Event?.id).toBe("event8");
});
});
describe('Mixed Event Type Deduplication', () => {
it('should only deduplicate addressable events (kinds 30000-39999)', () => {
describe("Mixed Event Type Deduplication", () => {
it("should only deduplicate addressable events (kinds 30000-39999)", () => {
const result = deduplicateAndCombineEvents(
[mockEvents[4]] as NDKEvent[], // Regular text note
new Set([mockEvents[3]] as NDKEvent[]), // Publication index
new Set([mockEvents[0], mockEvents[1], mockEvents[2]] as NDKEvent[]) // Content events
new Set([mockEvents[0], mockEvents[1], mockEvents[2]] as NDKEvent[]), // Content events
);
// Should have 4 events total:
// - 1 regular text note (not deduplicated)
// - 1 publication index (not deduplicated)
// - 2 unique content events (deduplicated from 3)
expect(result.length).toBe(4);
// Verify the content events were deduplicated
const contentEvents = result.filter(e => e.kind === 30041);
const contentEvents = result.filter((e) => e.kind === 30041);
expect(contentEvents.length).toBe(2);
// Verify the newer version was kept
const newerEvent = contentEvents.find(e => e.id === 'event2');
const newerEvent = contentEvents.find((e) => e.id === "event2");
expect(newerEvent).toBeDefined();
});
it('should handle non-addressable events correctly', () => {
it("should handle non-addressable events correctly", () => {
const regularEvents = [
new MockNDKEvent('note1', 1, 'pubkey1', 1000, '', 'Note 1'),
new MockNDKEvent('note2', 1, 'pubkey1', 2000, '', 'Note 2'),
new MockNDKEvent('profile1', 0, 'pubkey1', 1500, '', 'Profile 1'),
new MockNDKEvent("note1", 1, "pubkey1", 1000, "", "Note 1"),
new MockNDKEvent("note2", 1, "pubkey1", 2000, "", "Note 2"),
new MockNDKEvent("profile1", 0, "pubkey1", 1500, "", "Profile 1"),
];
const result = deduplicateAndCombineEvents(
regularEvents as NDKEvent[],
new Set(),
new Set()
new Set(),
);
// All regular events should be included (no deduplication)
expect(result.length).toBe(3);
});
});
describe('Coordinate System Validation', () => {
it('should correctly identify event coordinates', () => {
const event = new MockNDKEvent('test', 30041, 'pubkey123', 1000, 'test-chapter');
describe("Coordinate System Validation", () => {
it("should correctly identify event coordinates", () => {
const event = new MockNDKEvent(
"test",
30041,
"pubkey123",
1000,
"test-chapter",
);
const coordinate = getEventCoordinate(event as NDKEvent);
expect(coordinate).toBe('30041:pubkey123:test-chapter');
expect(coordinate).toBe("30041:pubkey123:test-chapter");
});
it('should handle d-tags with colons correctly', () => {
const event = new MockNDKEvent('test', 30041, 'pubkey123', 1000, 'chapter:with:colons');
it("should handle d-tags with colons correctly", () => {
const event = new MockNDKEvent(
"test",
30041,
"pubkey123",
1000,
"chapter:with:colons",
);
const coordinate = getEventCoordinate(event as NDKEvent);
expect(coordinate).toBe('30041:pubkey123:chapter:with:colons');
expect(coordinate).toBe("30041:pubkey123:chapter:with:colons");
});
it('should return null for non-replaceable events', () => {
const event = new MockNDKEvent('test', 1, 'pubkey123', 1000, '');
it("should return null for non-replaceable events", () => {
const event = new MockNDKEvent("test", 1, "pubkey123", 1000, "");
const coordinate = getEventCoordinate(event as NDKEvent);
expect(coordinate).toBeNull();
});
});
describe('Replaceable Event Detection', () => {
it('should correctly identify replaceable events', () => {
const addressableEvent = new MockNDKEvent('test', 30041, 'pubkey123', 1000, 'test');
const regularEvent = new MockNDKEvent('test', 1, 'pubkey123', 1000, '');
describe("Replaceable Event Detection", () => {
it("should correctly identify replaceable events", () => {
const addressableEvent = new MockNDKEvent(
"test",
30041,
"pubkey123",
1000,
"test",
);
const regularEvent = new MockNDKEvent("test", 1, "pubkey123", 1000, "");
expect(isReplaceableEvent(addressableEvent as NDKEvent)).toBe(true);
expect(isReplaceableEvent(regularEvent as NDKEvent)).toBe(false);
});
it('should handle edge cases of replaceable event ranges', () => {
const event29999 = new MockNDKEvent('test', 29999, 'pubkey123', 1000, 'test');
const event30000 = new MockNDKEvent('test', 30000, 'pubkey123', 1000, 'test');
const event39999 = new MockNDKEvent('test', 39999, 'pubkey123', 1000, 'test');
const event40000 = new MockNDKEvent('test', 40000, 'pubkey123', 1000, 'test');
it("should handle edge cases of replaceable event ranges", () => {
const event29999 = new MockNDKEvent(
"test",
29999,
"pubkey123",
1000,
"test",
);
const event30000 = new MockNDKEvent(
"test",
30000,
"pubkey123",
1000,
"test",
);
const event39999 = new MockNDKEvent(
"test",
39999,
"pubkey123",
1000,
"test",
);
const event40000 = new MockNDKEvent(
"test",
40000,
"pubkey123",
1000,
"test",
);
expect(isReplaceableEvent(event29999 as NDKEvent)).toBe(false);
expect(isReplaceableEvent(event30000 as NDKEvent)).toBe(true);
expect(isReplaceableEvent(event39999 as NDKEvent)).toBe(true);
@ -179,279 +281,429 @@ describe('Relay Deduplication Behavior Tests', () => { @@ -179,279 +281,429 @@ describe('Relay Deduplication Behavior Tests', () => {
});
});
describe('Edge Cases', () => {
it('should handle empty event sets', () => {
describe("Edge Cases", () => {
it("should handle empty event sets", () => {
const result = deduplicateContentEvents([]);
expect(result.size).toBe(0);
});
it('should handle events with null/undefined values', () => {
it("should handle events with null/undefined values", () => {
const invalidEvent = {
id: undefined,
kind: 30041,
pubkey: 'pubkey1',
pubkey: "pubkey1",
created_at: 1000,
tagValue: () => undefined, // Return undefined for d-tag
} as unknown as NDKEvent;
const eventSets = [new Set([invalidEvent])];
const result = deduplicateContentEvents(eventSets);
// Should handle gracefully without crashing
expect(result.size).toBe(0);
});
it('should handle events from different authors with same d-tag', () => {
const event1 = new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'same-chapter', 'Author 1');
const event2 = new MockNDKEvent('event2', 30041, 'pubkey2', 1000, 'same-chapter', 'Author 2');
it("should handle events from different authors with same d-tag", () => {
const event1 = new MockNDKEvent(
"event1",
30041,
"pubkey1",
1000,
"same-chapter",
"Author 1",
);
const event2 = new MockNDKEvent(
"event2",
30041,
"pubkey2",
1000,
"same-chapter",
"Author 2",
);
const eventSets = [new Set([event1, event2] as NDKEvent[])];
const result = deduplicateContentEvents(eventSets);
// Should have 2 events (different coordinates due to different authors)
expect(result.size).toBe(2);
expect(result.has('30041:pubkey1:same-chapter')).toBe(true);
expect(result.has('30041:pubkey2:same-chapter')).toBe(true);
expect(result.has("30041:pubkey1:same-chapter")).toBe(true);
expect(result.has("30041:pubkey2:same-chapter")).toBe(true);
});
});
});
describe('Relay Behavior Simulation', () => {
it('should simulate what happens when relays return duplicate events', () => {
describe("Relay Behavior Simulation", () => {
it("should simulate what happens when relays return duplicate events", () => {
// Simulate a relay that returns multiple versions of the same event
const relayEvents = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old version'),
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'New version'),
new MockNDKEvent('event3', 30041, 'pubkey1', 1500, 'chapter-1', 'Middle version'),
new MockNDKEvent(
"event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Old version",
),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"New version",
),
new MockNDKEvent(
"event3",
30041,
"pubkey1",
1500,
"chapter-1",
"Middle version",
),
];
// This simulates what a "bad" relay might return
const eventSets = [new Set(relayEvents as NDKEvent[])];
const result = deduplicateContentEvents(eventSets);
// Should only keep the newest version
expect(result.size).toBe(1);
const keptEvent = result.get('30041:pubkey1:chapter-1');
expect(keptEvent?.id).toBe('event2');
expect(keptEvent?.content).toBe('New version');
const keptEvent = result.get("30041:pubkey1:chapter-1");
expect(keptEvent?.id).toBe("event2");
expect(keptEvent?.content).toBe("New version");
});
it('should simulate multiple relays returning different versions', () => {
it("should simulate multiple relays returning different versions", () => {
// Simulate multiple relays returning different versions
const relay1Events = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Relay 1 version'),
new MockNDKEvent(
"event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Relay 1 version",
),
];
const relay2Events = [
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'Relay 2 version'),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"Relay 2 version",
),
];
const eventSets = [
new Set(relay1Events as NDKEvent[]),
new Set(relay2Events as NDKEvent[]),
];
const eventSets = [new Set(relay1Events as NDKEvent[]), new Set(relay2Events as NDKEvent[])];
const result = deduplicateContentEvents(eventSets);
// Should keep the newest version from any relay
expect(result.size).toBe(1);
const keptEvent = result.get('30041:pubkey1:chapter-1');
expect(keptEvent?.id).toBe('event2');
expect(keptEvent?.content).toBe('Relay 2 version');
const keptEvent = result.get("30041:pubkey1:chapter-1");
expect(keptEvent?.id).toBe("event2");
expect(keptEvent?.content).toBe("Relay 2 version");
});
});
describe('Real Relay Deduplication Tests', () => {
describe("Real Relay Deduplication Tests", () => {
// These tests actually query real relays to see if they deduplicate
// Note: These are integration tests and may be flaky due to network conditions
it('should detect if relays are returning duplicate replaceable events', async () => {
// This test queries real relays to see if they return duplicates
// We'll use a known author who has published multiple versions of content
// Known author with multiple publication content events
const testAuthor = 'npub1z4m7gkva6yxgvdyclc7zp0qt69x9zgn8lu8sllg06wx6432h77qs0k97ks';
// Query for publication content events (kind 30041) from this author
// We expect relays to return only the most recent version of each d-tag
// This is a placeholder - in a real test, we would:
// 1. Query multiple relays for the same author's 30041 events
// 2. Check if any relay returns multiple events with the same d-tag
// 3. Verify that if duplicates exist, our deduplication logic handles them
console.log('Note: This test would require actual relay queries to verify deduplication behavior');
console.log('To run this test properly, we would need to:');
console.log('1. Query real relays for replaceable events');
console.log('2. Check if relays return duplicates');
console.log('3. Verify our deduplication logic works on real data');
// For now, we'll just assert that our logic is ready to handle real data
expect(true).toBe(true);
}, 30000); // 30 second timeout for network requests
it('should verify that our deduplication logic works on real relay data', async () => {
// This test would:
// 1. Fetch real events from relays
// 2. Apply our deduplication logic
// 3. Verify that the results are correct
console.log('Note: This test would require actual relay queries');
console.log('To implement this test, we would need to:');
console.log('1. Set up NDK with real relays');
console.log('2. Fetch events for a known author with multiple versions');
console.log('3. Apply deduplication and verify results');
expect(true).toBe(true);
}, 30000);
it(
"should detect if relays are returning duplicate replaceable events",
async () => {
// This test queries real relays to see if they return duplicates
// We'll use a known author who has published multiple versions of content
// Known author with multiple publication content events
const testAuthor =
"npub1z4m7gkva6yxgvdyclc7zp0qt69x9zgn8lu8sllg06wx6432h77qs0k97ks";
// Query for publication content events (kind 30041) from this author
// We expect relays to return only the most recent version of each d-tag
// This is a placeholder - in a real test, we would:
// 1. Query multiple relays for the same author's 30041 events
// 2. Check if any relay returns multiple events with the same d-tag
// 3. Verify that if duplicates exist, our deduplication logic handles them
console.log(
"Note: This test would require actual relay queries to verify deduplication behavior",
);
console.log("To run this test properly, we would need to:");
console.log("1. Query real relays for replaceable events");
console.log("2. Check if relays return duplicates");
console.log("3. Verify our deduplication logic works on real data");
// For now, we'll just assert that our logic is ready to handle real data
expect(true).toBe(true);
},
30000,
); // 30 second timeout for network requests
it(
"should verify that our deduplication logic works on real relay data",
async () => {
// This test would:
// 1. Fetch real events from relays
// 2. Apply our deduplication logic
// 3. Verify that the results are correct
console.log("Note: This test would require actual relay queries");
console.log("To implement this test, we would need to:");
console.log("1. Set up NDK with real relays");
console.log("2. Fetch events for a known author with multiple versions");
console.log("3. Apply deduplication and verify results");
expect(true).toBe(true);
},
30000,
);
});
describe('Practical Relay Behavior Analysis', () => {
it('should document what we know about relay deduplication behavior', () => {
describe("Practical Relay Behavior Analysis", () => {
it("should document what we know about relay deduplication behavior", () => {
// This test documents our current understanding of relay behavior
// based on the code analysis and the comment from onedev
console.log('\n=== RELAY DEDUPLICATION BEHAVIOR ANALYSIS ===');
console.log('\nBased on the code analysis and the comment from onedev:');
console.log('\n1. THEORETICAL BEHAVIOR:');
console.log(' - Relays SHOULD handle deduplication for replaceable events');
console.log(' - Only the most recent version of each coordinate should be stored');
console.log(' - Client-side deduplication should only be needed for cached/local events');
console.log('\n2. REALITY CHECK:');
console.log(' - Not all relays implement deduplication correctly');
console.log(' - Some relays may return multiple versions of the same event');
console.log(' - Network conditions and relay availability can cause inconsistencies');
console.log('\n3. ALEXANDRIA\'S APPROACH:');
console.log(' - Implements client-side deduplication as a safety net');
console.log(' - Uses coordinate system (kind:pubkey:d-tag) for addressable events');
console.log(' - Keeps the most recent version based on created_at timestamp');
console.log(' - Only applies to replaceable events (kinds 30000-39999)');
console.log('\n4. WHY KEEP THE DEDUPLICATION:');
console.log(' - Defensive programming against imperfect relay implementations');
console.log(' - Handles multiple relay sources with different data');
console.log(' - Works with cached events that might be outdated');
console.log(' - Ensures consistent user experience regardless of relay behavior');
console.log('\n5. TESTING STRATEGY:');
console.log(' - Unit tests verify our deduplication logic works correctly');
console.log(' - Integration tests would verify relay behavior (when network allows)');
console.log(' - Monitoring can help determine if relays improve over time');
console.log("\n=== RELAY DEDUPLICATION BEHAVIOR ANALYSIS ===");
console.log("\nBased on the code analysis and the comment from onedev:");
console.log("\n1. THEORETICAL BEHAVIOR:");
console.log(
" - Relays SHOULD handle deduplication for replaceable events",
);
console.log(
" - Only the most recent version of each coordinate should be stored",
);
console.log(
" - Client-side deduplication should only be needed for cached/local events",
);
console.log("\n2. REALITY CHECK:");
console.log(" - Not all relays implement deduplication correctly");
console.log(
" - Some relays may return multiple versions of the same event",
);
console.log(
" - Network conditions and relay availability can cause inconsistencies",
);
console.log("\n3. ALEXANDRIA'S APPROACH:");
console.log(" - Implements client-side deduplication as a safety net");
console.log(
" - Uses coordinate system (kind:pubkey:d-tag) for addressable events",
);
console.log(
" - Keeps the most recent version based on created_at timestamp",
);
console.log(" - Only applies to replaceable events (kinds 30000-39999)");
console.log("\n4. WHY KEEP THE DEDUPLICATION:");
console.log(
" - Defensive programming against imperfect relay implementations",
);
console.log(" - Handles multiple relay sources with different data");
console.log(" - Works with cached events that might be outdated");
console.log(
" - Ensures consistent user experience regardless of relay behavior",
);
console.log("\n5. TESTING STRATEGY:");
console.log(
" - Unit tests verify our deduplication logic works correctly",
);
console.log(
" - Integration tests would verify relay behavior (when network allows)",
);
console.log(
" - Monitoring can help determine if relays improve over time",
);
// This test documents our understanding rather than asserting specific behavior
expect(true).toBe(true);
});
it('should provide recommendations for when to remove deduplication', () => {
console.log('\n=== RECOMMENDATIONS FOR REMOVING DEDUPLICATION ===');
console.log('\nThe deduplication logic should be kept until:');
console.log('\n1. RELAY STANDARDS:');
console.log(' - NIP-33 (replaceable events) is widely implemented by relays');
console.log(' - Relays consistently return only the most recent version');
console.log(' - No major relay implementations return duplicates');
console.log('\n2. TESTING EVIDENCE:');
console.log(' - Real-world testing shows relays don\'t return duplicates');
console.log(' - Multiple relay operators confirm deduplication behavior');
console.log(' - No user reports of duplicate content issues');
console.log('\n3. MONITORING:');
console.log(' - Add logging to track when deduplication is actually used');
console.log(' - Monitor relay behavior over time');
console.log(' - Collect metrics on duplicate events found');
console.log('\n4. GRADUAL REMOVAL:');
console.log(' - Make deduplication configurable (on/off)');
console.log(' - Test with deduplication disabled in controlled environments');
console.log(' - Monitor for issues before removing completely');
console.log('\n5. FALLBACK STRATEGY:');
console.log(' - Keep deduplication as a fallback option');
console.log(' - Allow users to enable it if they experience issues');
console.log(' - Maintain the code for potential future use');
it("should provide recommendations for when to remove deduplication", () => {
console.log("\n=== RECOMMENDATIONS FOR REMOVING DEDUPLICATION ===");
console.log("\nThe deduplication logic should be kept until:");
console.log("\n1. RELAY STANDARDS:");
console.log(
" - NIP-33 (replaceable events) is widely implemented by relays",
);
console.log(" - Relays consistently return only the most recent version");
console.log(" - No major relay implementations return duplicates");
console.log("\n2. TESTING EVIDENCE:");
console.log(" - Real-world testing shows relays don't return duplicates");
console.log(" - Multiple relay operators confirm deduplication behavior");
console.log(" - No user reports of duplicate content issues");
console.log("\n3. MONITORING:");
console.log(
" - Add logging to track when deduplication is actually used",
);
console.log(" - Monitor relay behavior over time");
console.log(" - Collect metrics on duplicate events found");
console.log("\n4. GRADUAL REMOVAL:");
console.log(" - Make deduplication configurable (on/off)");
console.log(
" - Test with deduplication disabled in controlled environments",
);
console.log(" - Monitor for issues before removing completely");
console.log("\n5. FALLBACK STRATEGY:");
console.log(" - Keep deduplication as a fallback option");
console.log(" - Allow users to enable it if they experience issues");
console.log(" - Maintain the code for potential future use");
expect(true).toBe(true);
});
});
describe('Logging and Monitoring Tests', () => {
it('should verify that logging works when duplicates are found', () => {
describe("Logging and Monitoring Tests", () => {
it("should verify that logging works when duplicates are found", () => {
// Mock console.log to capture output
const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
// Create events with duplicates
const duplicateEvents = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old version'),
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'New version'),
new MockNDKEvent('event3', 30041, 'pubkey1', 1500, 'chapter-1', 'Middle version'),
new MockNDKEvent(
"event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Old version",
),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"New version",
),
new MockNDKEvent(
"event3",
30041,
"pubkey1",
1500,
"chapter-1",
"Middle version",
),
];
const eventSets = [new Set(duplicateEvents as NDKEvent[])];
const result = deduplicateContentEvents(eventSets);
// Verify the deduplication worked
expect(result.size).toBe(1);
// Verify that logging was called
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('[eventDeduplication] Found 2 duplicate events out of 3 total events')
expect.stringContaining(
"[eventDeduplication] Found 2 duplicate events out of 3 total events",
),
);
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('[eventDeduplication] Reduced to 1 unique coordinates')
expect.stringContaining(
"[eventDeduplication] Reduced to 1 unique coordinates",
),
);
// Restore console.log
consoleSpy.mockRestore();
});
it('should verify that logging works when no duplicates are found', () => {
it("should verify that logging works when no duplicates are found", () => {
// Mock console.log to capture output
const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
// Create events without duplicates
const uniqueEvents = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Content 1'),
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-2', 'Content 2'),
new MockNDKEvent(
"event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Content 1",
),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-2",
"Content 2",
),
];
const eventSets = [new Set(uniqueEvents as NDKEvent[])];
const result = deduplicateContentEvents(eventSets);
// Verify no deduplication was needed
expect(result.size).toBe(2);
// Verify that logging was called with "no duplicates" message
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('[eventDeduplication] No duplicates found in 2 events')
expect.stringContaining(
"[eventDeduplication] No duplicates found in 2 events",
),
);
// Restore console.log
consoleSpy.mockRestore();
});
it('should verify that deduplicateAndCombineEvents logging works', () => {
it("should verify that deduplicateAndCombineEvents logging works", () => {
// Mock console.log to capture output
const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {});
const consoleSpy = vi.spyOn(console, "log").mockImplementation(() => {});
// Create events with duplicates
const duplicateEvents = [
new MockNDKEvent('event1', 30041, 'pubkey1', 1000, 'chapter-1', 'Old version'),
new MockNDKEvent('event2', 30041, 'pubkey1', 2000, 'chapter-1', 'New version'),
new MockNDKEvent(
"event1",
30041,
"pubkey1",
1000,
"chapter-1",
"Old version",
),
new MockNDKEvent(
"event2",
30041,
"pubkey1",
2000,
"chapter-1",
"New version",
),
];
const result = deduplicateAndCombineEvents(
[] as NDKEvent[],
new Set(),
new Set(duplicateEvents as NDKEvent[])
new Set(duplicateEvents as NDKEvent[]),
);
// Verify the deduplication worked
expect(result.length).toBe(1);
// Verify that logging was called
expect(consoleSpy).toHaveBeenCalledWith(
expect.stringContaining('[eventDeduplication] deduplicateAndCombineEvents: Found 1 duplicate coordinates')
expect.stringContaining(
"[eventDeduplication] deduplicateAndCombineEvents: Found 1 duplicate coordinates",
),
);
// Restore console.log
consoleSpy.mockRestore();
});
});
});

353
tests/unit/tagExpansion.test.ts

@ -1,11 +1,11 @@ @@ -1,11 +1,11 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import type { NDKEvent } from '@nostr-dev-kit/ndk';
import {
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { NDKEvent } from "@nostr-dev-kit/ndk";
import {
fetchProfilesForNewEvents,
fetchTaggedEventsFromRelays,
findTaggedEventsInFetched,
fetchProfilesForNewEvents,
type TagExpansionResult
} from '../../src/lib/utils/tag_event_fetch';
type TagExpansionResult,
} from "../../src/lib/utils/tag_event_fetch";
// Mock NDKEvent for testing
class MockNDKEvent {
@ -16,7 +16,14 @@ class MockNDKEvent { @@ -16,7 +16,14 @@ class MockNDKEvent {
content: string;
tags: string[][];
constructor(id: string, kind: number, pubkey: string, created_at: number, content: string = '', tags: string[][] = []) {
constructor(
id: string,
kind: number,
pubkey: string,
created_at: number,
content: string = "",
tags: string[][] = [],
) {
this.id = id;
this.kind = kind;
this.pubkey = pubkey;
@ -26,151 +33,192 @@ class MockNDKEvent { @@ -26,151 +33,192 @@ class MockNDKEvent {
}
tagValue(tagName: string): string | undefined {
const tag = this.tags.find(t => t[0] === tagName);
const tag = this.tags.find((t) => t[0] === tagName);
return tag ? tag[1] : undefined;
}
getMatchingTags(tagName: string): string[][] {
return this.tags.filter(tag => tag[0] === tagName);
return this.tags.filter((tag) => tag[0] === tagName);
}
}
// Mock NDK instance
const mockNDK = {
fetchEvents: vi.fn()
fetchEvents: vi.fn(),
};
// Mock the ndkInstance store
vi.mock('../../src/lib/ndk', () => ({
vi.mock("../../src/lib/ndk", () => ({
ndkInstance: {
subscribe: vi.fn((fn) => {
fn(mockNDK);
return { unsubscribe: vi.fn() };
})
}
}),
},
}));
// Mock the profile cache utilities
vi.mock('../../src/lib/utils/profileCache', () => ({
vi.mock("../../src/lib/utils/profileCache", () => ({
extractPubkeysFromEvents: vi.fn((events: NDKEvent[]) => {
const pubkeys = new Set<string>();
events.forEach(event => {
events.forEach((event) => {
if (event.pubkey) pubkeys.add(event.pubkey);
});
return pubkeys;
}),
batchFetchProfiles: vi.fn(async (pubkeys: string[], onProgress: (fetched: number, total: number) => void) => {
// Simulate progress updates
onProgress(0, pubkeys.length);
onProgress(pubkeys.length, pubkeys.length);
return [];
})
batchFetchProfiles: vi.fn(
async (
pubkeys: string[],
onProgress: (fetched: number, total: number) => void,
) => {
// Simulate progress updates
onProgress(0, pubkeys.length);
onProgress(pubkeys.length, pubkeys.length);
return [];
},
),
}));
describe('Tag Expansion Tests', () => {
describe("Tag Expansion Tests", () => {
let mockPublications: MockNDKEvent[];
let mockContentEvents: MockNDKEvent[];
let mockAllEvents: MockNDKEvent[];
beforeEach(() => {
vi.clearAllMocks();
// Create test publication index events (kind 30040)
mockPublications = [
new MockNDKEvent('pub1', 30040, 'author1', 1000, 'Book 1', [
['t', 'bitcoin'],
['t', 'cryptocurrency'],
['a', '30041:author1:chapter-1'],
['a', '30041:author1:chapter-2']
new MockNDKEvent("pub1", 30040, "author1", 1000, "Book 1", [
["t", "bitcoin"],
["t", "cryptocurrency"],
["a", "30041:author1:chapter-1"],
["a", "30041:author1:chapter-2"],
]),
new MockNDKEvent("pub2", 30040, "author2", 1100, "Book 2", [
["t", "bitcoin"],
["t", "blockchain"],
["a", "30041:author2:chapter-1"],
]),
new MockNDKEvent('pub2', 30040, 'author2', 1100, 'Book 2', [
['t', 'bitcoin'],
['t', 'blockchain'],
['a', '30041:author2:chapter-1']
new MockNDKEvent("pub3", 30040, "author3", 1200, "Book 3", [
["t", "ethereum"],
["a", "30041:author3:chapter-1"],
]),
new MockNDKEvent('pub3', 30040, 'author3', 1200, 'Book 3', [
['t', 'ethereum'],
['a', '30041:author3:chapter-1']
])
];
// Create test content events (kind 30041)
mockContentEvents = [
new MockNDKEvent('content1', 30041, 'author1', 1000, 'Chapter 1 content', [['d', 'chapter-1']]),
new MockNDKEvent('content2', 30041, 'author1', 1100, 'Chapter 2 content', [['d', 'chapter-2']]),
new MockNDKEvent('content3', 30041, 'author2', 1200, 'Author 2 Chapter 1', [['d', 'chapter-1']]),
new MockNDKEvent('content4', 30041, 'author3', 1300, 'Author 3 Chapter 1', [['d', 'chapter-1']])
new MockNDKEvent(
"content1",
30041,
"author1",
1000,
"Chapter 1 content",
[["d", "chapter-1"]],
),
new MockNDKEvent(
"content2",
30041,
"author1",
1100,
"Chapter 2 content",
[["d", "chapter-2"]],
),
new MockNDKEvent(
"content3",
30041,
"author2",
1200,
"Author 2 Chapter 1",
[["d", "chapter-1"]],
),
new MockNDKEvent(
"content4",
30041,
"author3",
1300,
"Author 3 Chapter 1",
[["d", "chapter-1"]],
),
];
// Combine all events for testing
mockAllEvents = [...mockPublications, ...mockContentEvents];
});
describe('fetchTaggedEventsFromRelays', () => {
it('should fetch publications with matching tags from relays', async () => {
describe("fetchTaggedEventsFromRelays", () => {
it("should fetch publications with matching tags from relays", async () => {
// Mock the NDK fetch to return publications with 'bitcoin' tag
const bitcoinPublications = mockPublications.filter(pub =>
pub.tags.some(tag => tag[0] === 't' && tag[1] === 'bitcoin')
const bitcoinPublications = mockPublications.filter((pub) =>
pub.tags.some((tag) => tag[0] === "t" && tag[1] === "bitcoin")
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(bitcoinPublications as NDKEvent[]),
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(mockContentEvents as NDKEvent[]),
);
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(bitcoinPublications as NDKEvent[]));
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockContentEvents as NDKEvent[]));
const existingEventIds = new Set<string>(['existing-event']);
const existingEventIds = new Set<string>(["existing-event"]);
const baseEvents: NDKEvent[] = [];
const debug = vi.fn();
const result = await fetchTaggedEventsFromRelays(
['bitcoin'],
["bitcoin"],
existingEventIds,
baseEvents,
debug
debug,
);
// Should fetch publications with bitcoin tag
expect(mockNDK.fetchEvents).toHaveBeenCalledWith({
kinds: [30040],
"#t": ['bitcoin'],
limit: 30
"#t": ["bitcoin"],
limit: 30,
});
// Should return the matching publications
expect(result.publications).toHaveLength(2);
expect(result.publications.map(p => p.id)).toContain('pub1');
expect(result.publications.map(p => p.id)).toContain('pub2');
expect(result.publications.map((p) => p.id)).toContain("pub1");
expect(result.publications.map((p) => p.id)).toContain("pub2");
// Should fetch content events for the publications
expect(mockNDK.fetchEvents).toHaveBeenCalledWith({
kinds: [30041, 30818],
"#d": ['chapter-1', 'chapter-2']
"#d": ["chapter-1", "chapter-2"],
});
});
it('should filter out existing events to avoid duplicates', async () => {
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockPublications as NDKEvent[]));
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockContentEvents as NDKEvent[]));
it("should filter out existing events to avoid duplicates", async () => {
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(mockPublications as NDKEvent[]),
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(mockContentEvents as NDKEvent[]),
);
const existingEventIds = new Set<string>(['pub1']); // pub1 already exists
const existingEventIds = new Set<string>(["pub1"]); // pub1 already exists
const baseEvents: NDKEvent[] = [];
const debug = vi.fn();
const result = await fetchTaggedEventsFromRelays(
['bitcoin'],
["bitcoin"],
existingEventIds,
baseEvents,
debug
debug,
);
// Should exclude pub1 since it already exists
expect(result.publications).toHaveLength(2);
expect(result.publications.map(p => p.id)).not.toContain('pub1');
expect(result.publications.map(p => p.id)).toContain('pub2');
expect(result.publications.map(p => p.id)).toContain('pub3');
expect(result.publications.map((p) => p.id)).not.toContain("pub1");
expect(result.publications.map((p) => p.id)).toContain("pub2");
expect(result.publications.map((p) => p.id)).toContain("pub3");
});
it('should handle empty tag array gracefully', async () => {
it("should handle empty tag array gracefully", async () => {
// Mock empty result for empty tags
mockNDK.fetchEvents.mockResolvedValueOnce(new Set());
const existingEventIds = new Set<string>();
const baseEvents: NDKEvent[] = [];
const debug = vi.fn();
@ -179,7 +227,7 @@ describe('Tag Expansion Tests', () => { @@ -179,7 +227,7 @@ describe('Tag Expansion Tests', () => {
[],
existingEventIds,
baseEvents,
debug
debug,
);
expect(result.publications).toHaveLength(0);
@ -187,95 +235,101 @@ describe('Tag Expansion Tests', () => { @@ -187,95 +235,101 @@ describe('Tag Expansion Tests', () => {
});
});
describe('findTaggedEventsInFetched', () => {
it('should find publications with matching tags in already fetched events', () => {
const existingEventIds = new Set<string>(['existing-event']);
describe("findTaggedEventsInFetched", () => {
it("should find publications with matching tags in already fetched events", () => {
const existingEventIds = new Set<string>(["existing-event"]);
const baseEvents: NDKEvent[] = [];
const debug = vi.fn();
const result = findTaggedEventsInFetched(
mockAllEvents as NDKEvent[],
['bitcoin'],
["bitcoin"],
existingEventIds,
baseEvents,
debug
debug,
);
// Should find publications with bitcoin tag
expect(result.publications).toHaveLength(2);
expect(result.publications.map(p => p.id)).toContain('pub1');
expect(result.publications.map(p => p.id)).toContain('pub2');
expect(result.publications.map((p) => p.id)).toContain("pub1");
expect(result.publications.map((p) => p.id)).toContain("pub2");
// Should find content events for those publications
expect(result.contentEvents).toHaveLength(4);
expect(result.contentEvents.map(c => c.id)).toContain('content1');
expect(result.contentEvents.map(c => c.id)).toContain('content2');
expect(result.contentEvents.map(c => c.id)).toContain('content3');
expect(result.contentEvents.map(c => c.id)).toContain('content4');
expect(result.contentEvents.map((c) => c.id)).toContain("content1");
expect(result.contentEvents.map((c) => c.id)).toContain("content2");
expect(result.contentEvents.map((c) => c.id)).toContain("content3");
expect(result.contentEvents.map((c) => c.id)).toContain("content4");
});
it('should exclude base events from search results', () => {
const existingEventIds = new Set<string>(['pub1']); // pub1 is a base event
it("should exclude base events from search results", () => {
const existingEventIds = new Set<string>(["pub1"]); // pub1 is a base event
const baseEvents: NDKEvent[] = [];
const debug = vi.fn();
const result = findTaggedEventsInFetched(
mockAllEvents as NDKEvent[],
['bitcoin'],
["bitcoin"],
existingEventIds,
baseEvents,
debug
debug,
);
// Should exclude pub1 since it's a base event
expect(result.publications).toHaveLength(1);
expect(result.publications.map(p => p.id)).not.toContain('pub1');
expect(result.publications.map(p => p.id)).toContain('pub2');
expect(result.publications.map((p) => p.id)).not.toContain("pub1");
expect(result.publications.map((p) => p.id)).toContain("pub2");
});
it('should handle multiple tags (OR logic)', () => {
it("should handle multiple tags (OR logic)", () => {
const existingEventIds = new Set<string>();
const baseEvents: NDKEvent[] = [];
const debug = vi.fn();
const result = findTaggedEventsInFetched(
mockAllEvents as NDKEvent[],
['bitcoin', 'ethereum'],
["bitcoin", "ethereum"],
existingEventIds,
baseEvents,
debug
debug,
);
// Should find publications with either bitcoin OR ethereum tags
expect(result.publications).toHaveLength(3);
expect(result.publications.map(p => p.id)).toContain('pub1'); // bitcoin
expect(result.publications.map(p => p.id)).toContain('pub2'); // bitcoin
expect(result.publications.map(p => p.id)).toContain('pub3'); // ethereum
expect(result.publications.map((p) => p.id)).toContain("pub1"); // bitcoin
expect(result.publications.map((p) => p.id)).toContain("pub2"); // bitcoin
expect(result.publications.map((p) => p.id)).toContain("pub3"); // ethereum
});
it('should handle events without tags gracefully', () => {
const eventWithoutTags = new MockNDKEvent('no-tags', 30040, 'author4', 1000, 'No tags');
it("should handle events without tags gracefully", () => {
const eventWithoutTags = new MockNDKEvent(
"no-tags",
30040,
"author4",
1000,
"No tags",
);
const allEventsWithNoTags = [...mockAllEvents, eventWithoutTags];
const existingEventIds = new Set<string>();
const baseEvents: NDKEvent[] = [];
const debug = vi.fn();
const result = findTaggedEventsInFetched(
allEventsWithNoTags as NDKEvent[],
['bitcoin'],
["bitcoin"],
existingEventIds,
baseEvents,
debug
debug,
);
// Should not include events without tags
expect(result.publications.map(p => p.id)).not.toContain('no-tags');
expect(result.publications.map((p) => p.id)).not.toContain("no-tags");
});
});
describe('fetchProfilesForNewEvents', () => {
it('should extract pubkeys and fetch profiles for new events', async () => {
describe("fetchProfilesForNewEvents", () => {
it("should extract pubkeys and fetch profiles for new events", async () => {
const onProgressUpdate = vi.fn();
const debug = vi.fn();
@ -283,7 +337,7 @@ describe('Tag Expansion Tests', () => { @@ -283,7 +337,7 @@ describe('Tag Expansion Tests', () => {
mockPublications as NDKEvent[],
mockContentEvents as NDKEvent[],
onProgressUpdate,
debug
debug,
);
// Should call progress update with initial state
@ -296,7 +350,7 @@ describe('Tag Expansion Tests', () => { @@ -296,7 +350,7 @@ describe('Tag Expansion Tests', () => {
expect(onProgressUpdate).toHaveBeenCalledWith(null);
});
it('should handle empty event arrays gracefully', async () => {
it("should handle empty event arrays gracefully", async () => {
const onProgressUpdate = vi.fn();
const debug = vi.fn();
@ -304,7 +358,7 @@ describe('Tag Expansion Tests', () => { @@ -304,7 +358,7 @@ describe('Tag Expansion Tests', () => {
[],
[],
onProgressUpdate,
debug
debug,
);
// Should not call progress update for empty arrays
@ -312,27 +366,31 @@ describe('Tag Expansion Tests', () => { @@ -312,27 +366,31 @@ describe('Tag Expansion Tests', () => {
});
});
describe('Tag Expansion Integration', () => {
it('should demonstrate the complete tag expansion flow', async () => {
describe("Tag Expansion Integration", () => {
it("should demonstrate the complete tag expansion flow", async () => {
// This test simulates the complete flow from the visualize page
// Step 1: Mock relay fetch for 'bitcoin' tag
const bitcoinPublications = mockPublications.filter(pub =>
pub.tags.some(tag => tag[0] === 't' && tag[1] === 'bitcoin')
const bitcoinPublications = mockPublications.filter((pub) =>
pub.tags.some((tag) => tag[0] === "t" && tag[1] === "bitcoin")
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(bitcoinPublications as NDKEvent[]),
);
mockNDK.fetchEvents.mockResolvedValueOnce(
new Set(mockContentEvents as NDKEvent[]),
);
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(bitcoinPublications as NDKEvent[]));
mockNDK.fetchEvents.mockResolvedValueOnce(new Set(mockContentEvents as NDKEvent[]));
const existingEventIds = new Set<string>(['base-event']);
const existingEventIds = new Set<string>(["base-event"]);
const baseEvents: NDKEvent[] = [];
const debug = vi.fn();
// Step 2: Fetch from relays
const relayResult = await fetchTaggedEventsFromRelays(
['bitcoin'],
["bitcoin"],
existingEventIds,
baseEvents,
debug
debug,
);
expect(relayResult.publications).toHaveLength(2);
@ -341,10 +399,10 @@ describe('Tag Expansion Tests', () => { @@ -341,10 +399,10 @@ describe('Tag Expansion Tests', () => {
// Step 3: Search in fetched events
const searchResult = findTaggedEventsInFetched(
mockAllEvents as NDKEvent[],
['bitcoin'],
["bitcoin"],
existingEventIds,
baseEvents,
debug
debug,
);
expect(searchResult.publications).toHaveLength(2);
@ -356,20 +414,27 @@ describe('Tag Expansion Tests', () => { @@ -356,20 +414,27 @@ describe('Tag Expansion Tests', () => {
relayResult.publications,
relayResult.contentEvents,
onProgressUpdate,
debug
debug,
);
expect(onProgressUpdate).toHaveBeenCalledWith(null);
});
});
describe('Edge Cases and Error Handling', () => {
it('should handle malformed a-tags gracefully', () => {
const malformedPublication = new MockNDKEvent('malformed', 30040, 'author1', 1000, 'Malformed', [
['t', 'bitcoin'],
['a', 'invalid-tag-format'], // Missing parts
['a', '30041:author1:chapter-1'] // Valid format
]);
describe("Edge Cases and Error Handling", () => {
it("should handle malformed a-tags gracefully", () => {
const malformedPublication = new MockNDKEvent(
"malformed",
30040,
"author1",
1000,
"Malformed",
[
["t", "bitcoin"],
["a", "invalid-tag-format"], // Missing parts
["a", "30041:author1:chapter-1"], // Valid format
],
);
const allEventsWithMalformed = [...mockAllEvents, malformedPublication];
const existingEventIds = new Set<string>();
@ -378,10 +443,10 @@ describe('Tag Expansion Tests', () => { @@ -378,10 +443,10 @@ describe('Tag Expansion Tests', () => {
const result = findTaggedEventsInFetched(
allEventsWithMalformed as NDKEvent[],
['bitcoin'],
["bitcoin"],
existingEventIds,
baseEvents,
debug
debug,
);
// Should still work and include the publication with valid a-tags
@ -389,32 +454,50 @@ describe('Tag Expansion Tests', () => { @@ -389,32 +454,50 @@ describe('Tag Expansion Tests', () => {
expect(result.contentEvents.length).toBeGreaterThan(0);
});
it('should handle events with d-tags containing colons', () => {
const publicationWithColonDTag = new MockNDKEvent('colon-pub', 30040, 'author1', 1000, 'Colon d-tag', [
['t', 'bitcoin'],
['a', '30041:author1:chapter:with:colons']
]);
it("should handle events with d-tags containing colons", () => {
const publicationWithColonDTag = new MockNDKEvent(
"colon-pub",
30040,
"author1",
1000,
"Colon d-tag",
[
["t", "bitcoin"],
["a", "30041:author1:chapter:with:colons"],
],
);
const contentWithColonDTag = new MockNDKEvent('colon-content', 30041, 'author1', 1100, 'Content with colon d-tag', [
['d', 'chapter:with:colons']
]);
const contentWithColonDTag = new MockNDKEvent(
"colon-content",
30041,
"author1",
1100,
"Content with colon d-tag",
[
["d", "chapter:with:colons"],
],
);
const allEventsWithColons = [...mockAllEvents, publicationWithColonDTag, contentWithColonDTag];
const allEventsWithColons = [
...mockAllEvents,
publicationWithColonDTag,
contentWithColonDTag,
];
const existingEventIds = new Set<string>();
const baseEvents: NDKEvent[] = [];
const debug = vi.fn();
const result = findTaggedEventsInFetched(
allEventsWithColons as NDKEvent[],
['bitcoin'],
["bitcoin"],
existingEventIds,
baseEvents,
debug
debug,
);
// Should handle d-tags with colons correctly
expect(result.publications).toHaveLength(3);
expect(result.contentEvents.map(c => c.id)).toContain('colon-content');
expect(result.contentEvents.map((c) => c.id)).toContain("colon-content");
});
});
});
});

8
vite.config.ts

@ -43,18 +43,20 @@ export default defineConfig({ @@ -43,18 +43,20 @@ export default defineConfig({
// Expose the app version as a global variable
"import.meta.env.APP_VERSION": JSON.stringify(getAppVersionString()),
// Enable debug logging for relays when needed
"process.env.DEBUG_RELAYS": JSON.stringify(process.env.DEBUG_RELAYS || "false"),
"process.env.DEBUG_RELAYS": JSON.stringify(
process.env.DEBUG_RELAYS || "false",
),
},
optimizeDeps: {
esbuildOptions: {
define: {
global: 'globalThis',
global: "globalThis",
},
},
},
server: {
fs: {
allow: ['..'],
allow: [".."],
},
hmr: {
overlay: false, // Disable HMR overlay to prevent ESM URL scheme errors

Loading…
Cancel
Save