21 changed files with 602 additions and 59 deletions
@ -0,0 +1,4 @@ |
|||||||
|
{ |
||||||
|
"python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python", |
||||||
|
"python.analysis.extraPaths": ["${workspaceFolder}/src"] |
||||||
|
} |
||||||
@ -0,0 +1,110 @@ |
|||||||
|
"""Offline Markdown → HTML using vendored ``marked`` (QuickJS) with ``nh3`` sanitization.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import html |
||||||
|
import json |
||||||
|
import logging |
||||||
|
import re |
||||||
|
from pathlib import Path |
||||||
|
|
||||||
|
import nh3 |
||||||
|
|
||||||
|
log = logging.getLogger(__name__) |
||||||
|
|
||||||
|
_MARKED_PATH = Path(__file__).resolve().parents[1] / "ui" / "assets" / "vendor" / "marked.min.js" |
||||||
|
_qjs_ctx = None |
||||||
|
_marked_load_failed = False |
||||||
|
|
||||||
|
|
||||||
|
def _try_load_marked_ctx():
    """One-shot attempt to build a QuickJS context with ``marked`` evaluated.

    Returns the ready context, or None when the vendored script is missing,
    the ``quickjs`` binding is not installed, or evaluation fails.
    """
    if not _MARKED_PATH.is_file():
        log.warning("Vendored marked not found: %s", _MARKED_PATH)
        return None
    try:
        import quickjs
    except ImportError:
        log.info("quickjs-ng not installed; Markdown uses Python fallback renderer")
        return None
    try:
        ctx = quickjs.Context()
        ctx.eval(_MARKED_PATH.read_text(encoding="utf-8"))
    except Exception as e:  # noqa: BLE001
        log.warning("Could not initialize marked in QuickJS: %s", e)
        return None
    return ctx


def _marked_quickjs_ctx():
    """Singleton QuickJS context with ``marked`` loaded, or None if unavailable."""
    global _qjs_ctx, _marked_load_failed
    # Latch failures so we never retry (and re-log) on every render call.
    if _marked_load_failed:
        return None
    if _qjs_ctx is None:
        _qjs_ctx = _try_load_marked_ctx()
        if _qjs_ctx is None:
            _marked_load_failed = True
    return _qjs_ctx
||||||
|
|
||||||
|
|
||||||
|
def _render_marked_js(md: str) -> str | None:
    """Render *md* via the vendored ``marked`` in QuickJS; None on any failure."""
    ctx = _marked_quickjs_ctx()
    if ctx is None:
        return None
    # JSON-encode the source so it is a valid, escaped JS string literal.
    payload = json.dumps(md or "")
    expr = f"marked.parse({payload}, {{gfm: true, breaks: true}})"
    try:
        return str(ctx.eval(expr))
    except Exception as e:  # noqa: BLE001
        log.debug("marked.parse failed: %s", e)
        return None
||||||
|
|
||||||
|
|
||||||
|
def _render_markdown_fallback(md: str) -> str:
    """Pure-Python fallback renderer using the ``markdown`` package.

    Extensions approximate the GFM behavior of the primary ``marked`` path
    (fenced code, tables, newline-as-break, saner list handling).
    """
    import markdown

    extensions = ["fenced_code", "tables", "nl2br", "sane_lists"]
    return markdown.markdown(md or "", extensions=extensions, output_format="html")
||||||
|
|
||||||
|
|
||||||
|
def markdown_html_fragment(md: str) -> str:
    """Sanitized HTML fragment (body inner HTML) for embedding in templates."""
    # Prefer the offline JS renderer; drop to the Python one only when it is
    # unavailable or errored (None), never merely because it returned "".
    rendered = _render_marked_js(md)
    raw = _render_markdown_fallback(md) if rendered is None else rendered
    return nh3.clean(raw)
||||||
|
|
||||||
|
|
||||||
|
def markdown_plain_summary(md: str, *, max_len: int = 100) -> str:
    """
    Plain-text one-line preview for list widgets: same pipeline as ``markdown_html_fragment``,
    then strip tags and collapse whitespace (no Markdown noise in the UI chrome).
    """
    # Cap source length so list views (search, notifications, threads) do not parse huge notes.
    src = (md or "")[:1200]
    fragment = markdown_html_fragment(src)
    # Strip tags first, then decode entities, then collapse all whitespace runs.
    without_tags = re.sub(r"<[^>]+>", " ", fragment)
    plain = " ".join(html.unescape(without_tags).split())
    if len(plain) > max_len:
        plain = plain[: max_len - 1] + "…"
    return plain
||||||
|
|
||||||
|
|
||||||
|
# Inline stylesheet embedded into full preview documents. Kept to the simple
# CSS subset that QTextBrowser's rich-text engine can actually apply.
_PREVIEW_CSS = """<style>
body{font-family:system-ui,-apple-system,"Segoe UI",Roboto,sans-serif;font-size:15px;margin:0;padding:12px;line-height:1.45;color:#1a1a1a;}
pre,code{font-family:ui-monospace,"Cascadia Code","Consolas",monospace;font-size:13px;}
pre{background:#f4f4f4;padding:10px;border-radius:6px;overflow-x:auto;}
blockquote{border-left:3px solid #bbb;margin:8px 0;padding:4px 0 4px 12px;color:#444;}
table{border-collapse:collapse;margin:8px 0;width:100%;}
th,td{border:1px solid #ccc;padding:6px;}
img{max-width:100%;height:auto;}
</style>"""
||||||
|
|
||||||
|
|
||||||
|
def markdown_html_document(md: str) -> str:
    """Full HTML document for ``QTextBrowser`` preview panes."""
    inner = markdown_html_fragment(md)
    head = f'<!DOCTYPE html><html><head><meta charset="utf-8">{_PREVIEW_CSS}</head>'
    return f"{head}<body>{inner}</body></html>"
||||||
@ -0,0 +1,125 @@ |
|||||||
|
"""NIP-65 kind 10002 relay lists: parse tags and resolve read/write URLs for an account.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
from dataclasses import dataclass |
||||||
|
from typing import Any |
||||||
|
|
||||||
|
from imwald.core.database import Database |
||||||
|
from imwald.core.relay_policy import DEFAULT_READ_RELAYS, DEFAULT_WRITE_RELAYS |
||||||
|
|
||||||
|
_MAX_RELAYS_PER_BUCKET = 24 |
||||||
|
|
||||||
|
|
||||||
|
def _is_ws_relay_url(url: str) -> bool: |
||||||
|
u = url.strip() |
||||||
|
return u.startswith("wss://") or u.startswith("ws://") |
||||||
|
|
||||||
|
|
||||||
|
def _dedupe_preserve(urls: list[str]) -> list[str]: |
||||||
|
seen: set[str] = set() |
||||||
|
out: list[str] = [] |
||||||
|
for u in urls: |
||||||
|
if u not in seen: |
||||||
|
seen.add(u) |
||||||
|
out.append(u) |
||||||
|
return out |
||||||
|
|
||||||
|
|
||||||
|
def parse_kind10002_tags(tags: Any) -> tuple[list[str], list[str]]:
    """
    Parse NIP-65 `r` tags into (read_urls, write_urls).

    - ``["r", url]`` or unknown third value → both read and write.
    - ``["r", url, "read"|"write"]`` → that direction only.
    - Legacy imwald/jumble-style pairs: ``["r", url], ["read","true"]`` / ``["write","true"]``.

    Each returned list is de-duplicated (first occurrence wins) and capped at
    ``_MAX_RELAYS_PER_BUCKET`` unique URLs. Non-list input yields ``([], [])``.
    """
    if not isinstance(tags, list):
        return [], []

    truthy = ("true", "1", "yes")
    read: list[str] = []
    write: list[str] = []
    i = 0
    # Manual index loop: the legacy form needs a one-tag lookahead/consume.
    while i < len(tags):
        t = tags[i]
        i += 1
        if not isinstance(t, list) or len(t) < 2:
            continue
        if str(t[0]) != "r":
            continue
        url = str(t[1]).strip()
        if not _is_ws_relay_url(url):
            continue

        mode = "both"
        if len(t) >= 3:
            marker = str(t[2]).lower()
            if marker == "read":
                mode = "read"
            elif marker == "write":
                mode = "write"
        elif i < len(tags):
            # Legacy: bare ["r", url] followed by a ["read"/"write", truthy]
            # marker tag that scopes the preceding relay; consume it if seen.
            nxt = tags[i]
            if nxt and isinstance(nxt, list) and len(nxt) >= 2:
                name = str(nxt[0]).lower()
                val = str(nxt[1]).lower()
                if name == "read" and val in truthy:
                    mode = "read"
                    i += 1
                elif name == "write" and val in truthy:
                    mode = "write"
                    i += 1

        if mode in ("both", "read"):
            read.append(url)
        if mode in ("both", "write"):
            write.append(url)

    # Fix: de-dupe BEFORE capping. The previous order (slice, then de-dupe)
    # let duplicate URLs consume the per-bucket budget and under-fill a bucket.
    return (
        _dedupe_preserve(read)[:_MAX_RELAYS_PER_BUCKET],
        _dedupe_preserve(write)[:_MAX_RELAYS_PER_BUCKET],
    )
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ResolvedRelays:
    """Read/write websocket URLs after NIP-65 + fallbacks for empty halves."""

    # Relay URLs the account reads (subscribes) from; wss:// or ws://.
    read_urls: list[str]
    # Relay URLs the account writes (publishes) to; wss:// or ws://.
    write_urls: list[str]
    # True when a kind 10002 event existed for the account — even if its tags
    # were unusable and defaults had to be substituted.
    had_kind10002: bool
||||||
|
|
||||||
|
|
||||||
|
def resolve_for_account(db: Database, pubkey: str | None) -> ResolvedRelays:
    """
    Load the latest kind 10002 for ``pubkey``. If missing or no usable ``r`` tags,
    use default read and write relays. If the list exists but one side is empty,
    fill that side from defaults.

    Parameters:
        db: open database handle used to look up the stored relay-list event.
        pubkey: 64-char hex pubkey; anything else falls straight to defaults.
    """

    def _defaults(had: bool) -> ResolvedRelays:
        # Fresh list copies so callers can mutate without touching the
        # module-level default tuples/lists.
        return ResolvedRelays(
            read_urls=list(DEFAULT_READ_RELAYS),
            write_urls=list(DEFAULT_WRITE_RELAYS),
            had_kind10002=had,
        )

    if not pubkey or len(pubkey.strip()) != 64:
        return _defaults(False)

    ev = db.get_latest_kind10002_event(pubkey.strip().lower())
    if not ev:
        return _defaults(False)

    parsed_read, parsed_write = parse_kind10002_tags(ev.get("tags") or [])
    # When BOTH sides parse empty, the per-side `or`-fallbacks below already
    # substitute defaults for each side — the separate both-empty branch the
    # original carried was redundant and has been removed.
    read_urls = parsed_read or list(DEFAULT_READ_RELAYS)
    write_urls = parsed_write or list(DEFAULT_WRITE_RELAYS)
    return ResolvedRelays(
        read_urls=_dedupe_preserve(read_urls),
        write_urls=_dedupe_preserve(write_urls),
        had_kind10002=True,
    )
||||||
File diff suppressed because one or more lines are too long
@ -0,0 +1,57 @@ |
|||||||
|
"""Split Markdown composer: monospace source + offline rendered preview (marked via QuickJS).""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
from PySide6.QtCore import Qt, QTimer |
||||||
|
from PySide6.QtGui import QFont |
||||||
|
from PySide6.QtWidgets import QPlainTextEdit, QSizePolicy, QSplitter, QTextBrowser, QVBoxLayout, QWidget |
||||||
|
|
||||||
|
from imwald.core.md_render import markdown_html_document |
||||||
|
|
||||||
|
|
||||||
|
class MarkdownBodyEditor(QWidget):
    """Plain-text Markdown editor with live rendered preview (local ``marked`` + nh3)."""

    def __init__(self, parent: QWidget | None = None) -> None:
        super().__init__(parent)

        # Left pane: monospace Markdown source editor.
        self._source = QPlainTextEdit()
        self._source.setPlaceholderText("Markdown source — preview updates as you type")
        editor_font = QFont("monospace")
        if not editor_font.exactMatch():
            editor_font = QFont("Courier New")
        self._source.setFont(editor_font)
        self._source.setMinimumHeight(260)
        self._source.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Expanding)

        # Right pane: rendered, sanitized HTML preview.
        self._preview = QTextBrowser()
        self._preview.setOpenExternalLinks(True)
        self._preview.setMinimumHeight(260)
        self._preview.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Expanding)

        # Even horizontal split between source and preview.
        self._split = QSplitter(Qt.Orientation.Horizontal)
        self._split.addWidget(self._source)
        self._split.addWidget(self._preview)
        self._split.setStretchFactor(0, 1)
        self._split.setStretchFactor(1, 1)

        layout = QVBoxLayout(self)
        layout.setContentsMargins(0, 0, 0, 0)
        layout.addWidget(self._split)

        # Debounce re-rendering so the preview does not churn on every keystroke.
        self._debounce = QTimer(self)
        self._debounce.setSingleShot(True)
        self._debounce.setInterval(280)
        self._debounce.timeout.connect(self._update_preview)
        self._source.textChanged.connect(lambda: self._debounce.start())

        self._update_preview()

    def _update_preview(self) -> None:
        # Full document (with embedded CSS) so QTextBrowser styles code/tables.
        self._preview.setHtml(markdown_html_document(self._source.toPlainText()))

    def setPlainText(self, text: str) -> None:
        """Qt-style setter so this widget is a drop-in for QPlainTextEdit."""
        self._source.setPlainText(text)
        self._debounce.start()

    def toPlainText(self) -> str:
        """Current Markdown source text."""
        return self._source.toPlainText()
||||||
@ -0,0 +1,25 @@ |
|||||||
|
from imwald.core.nostr_crypto import build_signed_event, verify_nostr_event |
||||||
|
|
||||||
|
|
||||||
|
def _sk() -> bytes: |
||||||
|
return bytes.fromhex("3501454135014541350145413501453fefb02227e449e57cf4d3a3ce05378683") |
||||||
|
|
||||||
|
|
||||||
|
def test_verify_accepts_signed_roundtrip() -> None:
    """A freshly built and signed event must pass verification."""
    event = build_signed_event(
        _sk(), created_at=1700000000, kind=1, tags=[["client", "imwald"]], content="hello"
    )
    assert verify_nostr_event(event) is True
||||||
|
|
||||||
|
|
||||||
|
def test_verify_rejects_bad_id() -> None:
    """Replacing the event id with garbage must fail verification."""
    event = build_signed_event(_sk(), created_at=1700000000, kind=1, tags=[], content="x")
    event["id"] = "f" * 64
    assert verify_nostr_event(event) is False
||||||
|
|
||||||
|
|
||||||
|
def test_verify_rejects_tampered_content() -> None:
    """Mutating content after signing must invalidate the signature/id."""
    event = build_signed_event(_sk(), created_at=1700000000, kind=1, tags=[], content="x")
    event["content"] = "y"
    assert verify_nostr_event(event) is False
||||||
@ -0,0 +1,17 @@ |
|||||||
|
from imwald.core.md_render import markdown_html_fragment, markdown_plain_summary |
||||||
|
|
||||||
|
|
||||||
|
def test_plain_summary_strips_markdown_noise() -> None:
    """Summaries keep the words but drop heading/emphasis markers."""
    summary = markdown_plain_summary("# Title\n\nHello **world**", max_len=80)
    assert "Title" in summary
    assert "world" in summary
    assert "**" not in summary
    assert "#" not in summary
||||||
|
|
||||||
|
|
||||||
|
def test_markdown_renders_strong() -> None:
    """Bold Markdown becomes <strong> (or <b>) in the fragment."""
    # Local renamed from `html` to avoid shadowing the stdlib module name.
    rendered = markdown_html_fragment("Hello **world**")
    assert "<strong>world</strong>" in rendered or "<b>world</b>" in rendered
||||||
|
|
||||||
|
|
||||||
|
def test_markdown_fenced_code() -> None:
    """Fenced blocks render as <pre><code> regardless of backend."""
    rendered = markdown_html_fragment("```\n1 + 1\n```")
    assert "<pre>" in rendered
    assert "<code>" in rendered
||||||
@ -0,0 +1,57 @@ |
|||||||
|
import tempfile |
||||||
|
from pathlib import Path |
||||||
|
|
||||||
|
from imwald.core.database import Database |
||||||
|
from imwald.core.nostr_crypto import build_signed_event |
||||||
|
from imwald.core.relay_list import parse_kind10002_tags, resolve_for_account |
||||||
|
from imwald.core.relay_policy import DEFAULT_READ_RELAYS, DEFAULT_WRITE_RELAYS |
||||||
|
|
||||||
|
|
||||||
|
def _sk() -> bytes: |
||||||
|
return bytes.fromhex("3501454135014541350145413501453fefb02227e449e57cf4d3a3ce05378683") |
||||||
|
|
||||||
|
|
||||||
|
def test_parse_nip65_triple_tags() -> None:
    """Three-element `r` tags scope a relay; two-element tags mean both."""
    reads, writes = parse_kind10002_tags(
        [
            ["r", "wss://read.only", "read"],
            ["r", "wss://write.only", "write"],
            ["r", "wss://both.relay"],
        ]
    )
    assert "wss://read.only" in reads
    assert "wss://read.only" not in writes
    assert "wss://write.only" in writes
    assert "wss://write.only" not in reads
    assert "wss://both.relay" in reads
    assert "wss://both.relay" in writes
||||||
|
|
||||||
|
|
||||||
|
def test_parse_legacy_read_write_pairs() -> None:
    """Legacy ["r", url] + ["read"/"write", "true"] marker pairs are honored."""
    legacy = [["r", "wss://a"], ["read", "true"], ["r", "wss://b"], ["write", "true"]]
    assert parse_kind10002_tags(legacy) == (["wss://a"], ["wss://b"])
||||||
|
|
||||||
|
|
||||||
|
def test_resolve_defaults_without_kind10002() -> None:
    """With no stored relay list, both sides come from the defaults."""
    with tempfile.TemporaryDirectory() as td:
        db = Database(Path(td) / "x.sqlite")
        db.connect()
        pubkey = build_signed_event(_sk(), created_at=1, kind=0, tags=[], content="{}")["pubkey"]
        resolved = resolve_for_account(db, pubkey)
        assert resolved.read_urls == list(DEFAULT_READ_RELAYS)
        assert resolved.write_urls == list(DEFAULT_WRITE_RELAYS)
        assert resolved.had_kind10002 is False
||||||
|
|
||||||
|
|
||||||
|
def test_resolve_uses_stored_kind10002() -> None:
    """A stored kind 10002 overrides the default relay lists."""
    with tempfile.TemporaryDirectory() as td:
        db = Database(Path(td) / "y.sqlite")
        db.connect()
        secret = _sk()
        pubkey = build_signed_event(secret, created_at=1, kind=0, tags=[], content="{}")["pubkey"]
        relay_tags = [["r", "wss://custom.read", "read"], ["r", "wss://custom.write", "write"]]
        relay_event = build_signed_event(secret, created_at=2, kind=10002, tags=relay_tags, content="")
        db.upsert_event(relay_event)
        resolved = resolve_for_account(db, pubkey)
        assert resolved.read_urls == ["wss://custom.read"]
        assert resolved.write_urls == ["wss://custom.write"]
        assert resolved.had_kind10002 is True
||||||
Loading…
Reference in new issue