Browse Source

bug-fixes

master
Silberengel 2 weeks ago
parent
commit
51a1f826ca
  1. 160
      src/imwald/core/database.py
  2. 76
      src/imwald/core/nip11_relay_info.py
  3. 135
      src/imwald/core/nostr_engine.py
  4. 3
      src/imwald/core/relay_manager.py
  5. 65
      src/imwald/ui/feed_page.py
  6. 36
      src/imwald/ui/main_window.py
  7. 320
      src/imwald/ui/relay_status_panel.py
  8. 45
      tests/test_author_meta_db.py
  9. 68
      tests/test_thread_links.py

160
src/imwald/core/database.py

@ -7,7 +7,7 @@ import sqlite3
import time import time
from contextlib import contextmanager from contextlib import contextmanager
from pathlib import Path from pathlib import Path
from collections.abc import Generator, Iterable from collections.abc import Generator, Iterable, Sequence
from typing import Any, TypedDict, cast from typing import Any, TypedDict, cast
from imwald.core.nip30_emoji import nip30_emoji_urls_from_tags, parse_kind30030_a_coordinate from imwald.core.nip30_emoji import nip30_emoji_urls_from_tags, parse_kind30030_a_coordinate
@ -31,8 +31,73 @@ class StoredEventRow(TypedDict):
deleted: bool deleted: bool
source_relay: str | None source_relay: str | None
# Kind-1 text notes plus thread kinds that tag the root via ``e`` (show in feed thread column). # Thread panel + "reply count": kinds Jumble treats as thread/backlink-shaped when linked via
THREAD_REPLY_KINDS: tuple[int, ...] = (1, 16, 1111, 1244) # ``e``/``E``/``a``/``A``/``q`` to the root (hex id and, for addressable roots, ``kind:pubkey:d``).
# Tag names that can link a reply/backlink event to its thread root.
THREAD_LINK_TAG_NAMES: tuple[str, ...] = ("e", "E", "A", "q")[:2] + ("a", "A", "q") if False else ("e", "E", "a", "A", "q")

# Kinds treated as thread/backlink-shaped (Jumble-compatible) when they link
# the root via one of THREAD_LINK_TAG_NAMES.
THREAD_REPLY_KINDS: tuple[int, ...] = (
    1,      # short note
    6,      # nip-18 kind-1 repost
    8,      # badge award (backlink)
    11,     # forum thread
    16,     # generic repost
    1111,   # nip-22 comment
    1244,   # voice comment
    1984,   # report
    1985,   # label
    9802,   # nip-84 highlight
    10001,  # pin list
    10003,  # bookmark list
    30001,  # generic lists
    30003,  # bookmark sets
    30004,  # curation sets
    30023,  # long-form
    30041,  # publication content
    30817,  # wiki markdown
    30818,  # wiki article
    9735,   # zap receipt
)
def _is_addressable_kind(kind: int) -> bool:
return 30000 <= kind < 40000
def thread_root_link_targets(root: dict[str, Any]) -> list[str]:
    """
    Values that may appear on ``e``/``E``/``a``/``A``/``q`` tags pointing at this root.

    Always includes the event hex id; for addressable roots (NIP-33) also
    ``kind:pubkey:d`` built from the root's first ``d`` tag.
    """

    def _hex64(s: str) -> bool:
        # 64 lowercase hex characters (event id / pubkey shape).
        return len(s) == 64 and all(ch in "0123456789abcdef" for ch in s)

    targets: list[str] = []

    eid = str(root.get("id") or "").strip().lower()
    if _hex64(eid):
        targets.append(eid)

    try:
        kind = int(root.get("kind") or -1)
    except (TypeError, ValueError):
        kind = -1
    pubkey = str(root.get("pubkey") or "").strip().lower()
    tags_raw = root.get("tags")

    # Addressable root (NIP-33): also expose the kind:pubkey:d coordinate.
    if 30000 <= kind < 40000 and _hex64(pubkey) and isinstance(tags_raw, list):
        for tag_obj in cast(list[object], tags_raw):
            if not isinstance(tag_obj, list):
                continue
            row = cast(list[object], tag_obj)
            if len(row) < 2:
                continue
            if str(row[0]).lower() != "d":
                continue
            d_val = str(row[1] or "")
            if d_val:
                targets.append(f"{kind}:{pubkey}:{d_val}")
            break  # only the first ``d`` tag counts

    # De-duplicate while preserving order.
    return list(dict.fromkeys(targets))
DDL = """ DDL = """
PRAGMA journal_mode=WAL; PRAGMA journal_mode=WAL;
@ -503,19 +568,29 @@ class Database:
out.add(pk) out.add(pk)
return out return out
def list_replies_to(self, event_id: str, limit: int = 80) -> list[dict[str, Any]]: def list_replies_to(self, root_event: dict[str, Any], limit: int = 80) -> list[dict[str, Any]]:
"""Notes whose ``kind`` is in ``THREAD_REPLY_KINDS`` and tag this event (``e``); excludes reactions (7), etc.""" """Events in ``THREAD_REPLY_KINDS`` that link the root via ``e``/``E``/``a``/``A``/``q`` (Jumble-style)."""
targets = thread_root_link_targets(root_event)
if not targets:
return []
kind_ph = ",".join("?" * len(THREAD_REPLY_KINDS)) kind_ph = ",".join("?" * len(THREAD_REPLY_KINDS))
tag_ph = ",".join("?" * len(THREAD_LINK_TAG_NAMES))
tgt_ph = ",".join("?" * len(targets))
cur = self.conn().execute( cur = self.conn().execute(
f""" f"""
SELECT e.id, e.pubkey, e.created_at, e.kind, e.content, e.sig, e.tags_json SELECT e.id, e.pubkey, e.created_at, e.kind, e.content, e.sig, e.tags_json
FROM events e FROM events e
JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND t.value = ?
WHERE e.deleted = 0 AND e.kind IN ({kind_ph}) WHERE e.deleted = 0 AND e.kind IN ({kind_ph})
ORDER BY e.created_at ASC AND EXISTS (
SELECT 1 FROM tags t
WHERE t.event_id = e.id
AND t.name IN ({tag_ph})
AND lower(t.value) IN ({tgt_ph})
)
ORDER BY e.created_at ASC, e.id ASC
LIMIT ? LIMIT ?
""", """,
(event_id, *THREAD_REPLY_KINDS, limit), (*THREAD_REPLY_KINDS, *THREAD_LINK_TAG_NAMES, *targets, limit),
) )
return [ return [
{ {
@ -635,6 +710,37 @@ class Database:
out[pk] = {"content": row["content"] or "", "created_at": int(row["created_at"])} out[pk] = {"content": row["content"] or "", "created_at": int(row["created_at"])}
return out return out
def max_created_at_for_author_kinds(self, pubkey: str, kinds: Sequence[int]) -> int:
    """Largest ``created_at`` we store for this author among ``kinds`` (for relay ``since``)."""
    if not kinds:
        return 0
    author = pubkey.strip().lower()
    # Reject anything that is not a 64-char lowercase hex pubkey.
    if len(author) != 64 or not all(ch in "0123456789abcdef" for ch in author):
        return 0
    marks = ",".join("?" * len(kinds))
    row = self.conn().execute(
        f"""
        SELECT COALESCE(MAX(created_at), 0) AS m FROM events
        WHERE deleted = 0 AND lower(pubkey) = lower(?) AND kind IN ({marks})
        """,
        (author, *kinds),
    ).fetchone()
    if row is None or row["m"] is None:
        return 0
    return int(row["m"])
def distinct_pubkeys_recent(self, limit: int = 400) -> list[str]:
    """Distinct author pubkeys, most recently active first (for metadata backfill)."""
    cursor = self.conn().execute(
        """
        SELECT lower(pubkey) AS pk FROM events
        WHERE deleted = 0
        GROUP BY lower(pubkey)
        ORDER BY MAX(created_at) DESC
        LIMIT ?
        """,
        (limit,),
    )
    authors: list[str] = []
    for rec in cursor:
        authors.append(str(rec["pk"]))
    return authors
def _nip30_d_tag_from_tags_json(self, tags_json: str) -> str: def _nip30_d_tag_from_tags_json(self, tags_json: str) -> str:
try: try:
raw = json.loads(tags_json or "[]") raw = json.loads(tags_json or "[]")
@ -743,13 +849,15 @@ class Database:
merge(cast(list[list[str]], raw)) merge(cast(list[list[str]], raw))
return merged return merged
def event_engagement_stats(self, event_id: str) -> dict[str, Any]: def event_engagement_stats(self, root_event: dict[str, Any]) -> dict[str, Any]:
"""Counts from local DB: zaps (9735), reactions (7), boosts (6), quotes (``q`` on kind 1).""" """Counts from local DB: zaps (9735), reactions (7), boosts (6), quotes (``q`` on kind 1)."""
event_id = str(root_event.get("id") or "")
link_targets = thread_root_link_targets(root_event)
c = self.conn() c = self.conn()
zaps = c.execute( zaps = c.execute(
""" """
SELECT COUNT(*) AS n FROM events e SELECT COUNT(*) AS n FROM events e
JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND t.value = ? JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND lower(t.value) = lower(?)
WHERE e.deleted = 0 AND e.kind = 9735 WHERE e.deleted = 0 AND e.kind = 9735
""", """,
(event_id,), (event_id,),
@ -757,23 +865,27 @@ class Database:
boosts = c.execute( boosts = c.execute(
""" """
SELECT COUNT(*) AS n FROM events e SELECT COUNT(*) AS n FROM events e
JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND t.value = ? JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND lower(t.value) = lower(?)
WHERE e.deleted = 0 AND e.kind = 6 WHERE e.deleted = 0 AND e.kind = 6
""", """,
(event_id,), (event_id,),
).fetchone()["n"] ).fetchone()["n"]
if not link_targets:
quotes = 0
else:
qp = ",".join("?" * len(link_targets))
quotes = c.execute( quotes = c.execute(
""" f"""
SELECT COUNT(*) AS n FROM events e SELECT COUNT(*) AS n FROM events e
JOIN tags t ON t.event_id = e.id AND t.name = 'q' AND t.value = ? JOIN tags t ON t.event_id = e.id AND t.name = 'q'
WHERE e.deleted = 0 AND e.kind = 1 WHERE e.deleted = 0 AND e.kind = 1 AND lower(t.value) IN ({qp})
""", """,
(event_id,), link_targets,
).fetchone()["n"] ).fetchone()["n"]
reactions_total = c.execute( reactions_total = c.execute(
""" """
SELECT COUNT(*) AS n FROM events e SELECT COUNT(*) AS n FROM events e
JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND t.value = ? JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND lower(t.value) = lower(?)
WHERE e.deleted = 0 AND e.kind = 7 WHERE e.deleted = 0 AND e.kind = 7
""", """,
(event_id,), (event_id,),
@ -782,7 +894,7 @@ class Database:
""" """
SELECT COALESCE(NULLIF(TRIM(e.content), ''), '+') AS emoji, COUNT(*) AS c SELECT COALESCE(NULLIF(TRIM(e.content), ''), '+') AS emoji, COUNT(*) AS c
FROM events e FROM events e
JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND t.value = ? JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND lower(t.value) = lower(?)
WHERE e.deleted = 0 AND e.kind = 7 WHERE e.deleted = 0 AND e.kind = 7
GROUP BY 1 GROUP BY 1
ORDER BY c DESC, emoji ASC ORDER BY c DESC, emoji ASC
@ -793,14 +905,24 @@ class Database:
reaction_breakdown: list[tuple[str, int]] = [ reaction_breakdown: list[tuple[str, int]] = [
(str(row["emoji"]), int(row["c"])) for row in cur_rx (str(row["emoji"]), int(row["c"])) for row in cur_rx
] ]
if not link_targets:
replies = 0
else:
kind_ph = ",".join("?" * len(THREAD_REPLY_KINDS)) kind_ph = ",".join("?" * len(THREAD_REPLY_KINDS))
tag_ph = ",".join("?" * len(THREAD_LINK_TAG_NAMES))
tgt_ph2 = ",".join("?" * len(link_targets))
replies = c.execute( replies = c.execute(
f""" f"""
SELECT COUNT(*) AS n FROM events e SELECT COUNT(*) AS n FROM events e
JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND t.value = ?
WHERE e.deleted = 0 AND e.kind IN ({kind_ph}) WHERE e.deleted = 0 AND e.kind IN ({kind_ph})
AND EXISTS (
SELECT 1 FROM tags t
WHERE t.event_id = e.id
AND t.name IN ({tag_ph})
AND lower(t.value) IN ({tgt_ph2})
)
""", """,
(event_id, *THREAD_REPLY_KINDS), (*THREAD_REPLY_KINDS, *THREAD_LINK_TAG_NAMES, *link_targets),
).fetchone()["n"] ).fetchone()["n"]
return { return {
"zaps": int(zaps), "zaps": int(zaps),

76
src/imwald/core/nip11_relay_info.py

@ -0,0 +1,76 @@
"""NIP-11 relay information document (HTTP JSON) for display names and icons."""
from __future__ import annotations
import json
import logging
import re
from typing import Any, cast
from urllib.error import HTTPError, URLError
from urllib.parse import urljoin
from urllib.request import Request, urlopen
log = logging.getLogger(__name__)
def ws_to_http_base(ws_url: str) -> str:
    """``wss://host/path`` → ``https://host/path`` (NIP-11 is HTTP on the same host)."""
    u = ws_url.strip()
    lowered = u.lower()  # compare the scheme case-insensitively; keep original casing otherwise
    if lowered.startswith("ws://"):
        return "http://" + u[len("ws://"):]
    if lowered.startswith("wss://"):
        return "https://" + u[len("wss://"):]
    if lowered.startswith(("https://", "http://")):
        return u
    # No recognized scheme: assume TLS.
    return "https://" + u
def fetch_nip11(ws_url: str, *, timeout: float = 10.0) -> dict[str, Any] | None:
    """
    GET NIP-11 JSON from the relay's HTTP URL with ``Accept: application/nostr+json``.

    Returns parsed object or ``None`` on failure (network error, bad JSON, non-object).
    """
    http_url = ws_to_http_base(ws_url).rstrip("/") + "/"
    request = Request(
        http_url,
        headers={"Accept": "application/nostr+json, application/json"},
        method="GET",
    )
    try:
        with urlopen(request, timeout=timeout) as resp:  # noqa: S310 — intentional relay fetch
            payload = resp.read()
    except (HTTPError, URLError, TimeoutError, OSError) as exc:
        log.info("NIP-11 HTTP fetch failed for %s: %s", ws_url, exc)
        return None
    try:
        parsed = json.loads(payload.decode("utf-8"))
    except (UnicodeDecodeError, json.JSONDecodeError) as exc:
        log.info("NIP-11 invalid JSON for %s: %s", ws_url, exc)
        return None
    # Relays are supposed to return a JSON object; anything else is malformed.
    return cast(dict[str, Any], parsed) if isinstance(parsed, dict) else None
def relay_display_name(nip11: dict[str, Any] | None, ws_url: str) -> str:
if nip11:
name = nip11.get("name")
if isinstance(name, str) and name.strip():
return name.strip()
host = re.sub(r"^wss?://", "", ws_url.strip(), flags=re.I).split("/")[0]
return host or ws_url
def absolute_icon_url(ws_url: str, icon_field: str | None) -> str | None:
    """Resolve NIP-11 ``icon`` (often relative) against the relay HTTP origin."""
    if not icon_field:
        return None
    icon = icon_field.strip()
    if not icon:
        return None
    if icon.startswith(("http://", "https://")):
        # Already absolute — use as-is.
        return icon
    origin = ws_to_http_base(ws_url).rstrip("/") + "/"
    return urljoin(origin, icon.lstrip("/"))

135
src/imwald/core/nostr_engine.py

@ -3,16 +3,18 @@
from __future__ import annotations from __future__ import annotations
import asyncio import asyncio
import contextlib
import json import json
import logging import logging
import threading import threading
import time import time
from collections.abc import Iterable
from typing import Any, cast from typing import Any, cast
from PySide6.QtCore import QObject, Signal from PySide6.QtCore import QObject, Signal
from imwald.core.accounts_store import StoredAccount, unlock_secret from imwald.core.accounts_store import StoredAccount, unlock_secret
from imwald.core.database import Database from imwald.core.database import THREAD_REPLY_KINDS, Database
from imwald.core.nostr_crypto import build_signed_event, verify_nostr_event from imwald.core.nostr_crypto import build_signed_event, verify_nostr_event
from imwald.core.relay_list import resolve_for_account from imwald.core.relay_list import resolve_for_account
from imwald.core.nostr_publish import publish_to_relays_sync from imwald.core.nostr_publish import publish_to_relays_sync
@ -30,12 +32,17 @@ from imwald.core.relay_policy import (
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
# Per-author backfill: profile + lists + NIP-30 inventory (Jumble-style).
AUTHOR_METADATA_KINDS: tuple[int, ...] = (0, 10015, 30000, 10030, 30030)
_AUTHOR_META_SUB_ID = "imwald-ameta"
class NostrEngine(QObject): class NostrEngine(QObject):
"""Relay worker thread emits `event_ingested` — connect on UI thread to write SQLite.""" """Relay worker thread emits `event_ingested` — connect on UI thread to write SQLite."""
event_ingested = Signal(str, object) event_ingested = Signal(str, object)
relay_status = Signal(str) relay_status = Signal(str)
relay_snapshot = Signal(object)
def __init__(self, db: Database) -> None: def __init__(self, db: Database) -> None:
super().__init__() super().__init__()
@ -45,6 +52,9 @@ class NostrEngine(QObject):
self._loop: asyncio.AbstractEventLoop | None = None self._loop: asyncio.AbstractEventLoop | None = None
self._manager: RelayManager | None = None self._manager: RelayManager | None = None
self._app_stop: asyncio.Event | None = None self._app_stop: asyncio.Event | None = None
self._read_urls_snapshot: list[str] = []
self._author_meta_pending: set[str] = set()
self._author_meta_timer: asyncio.TimerHandle | None = None
def start_relays( def start_relays(
self, self,
@ -69,10 +79,16 @@ class NostrEngine(QObject):
async def amain() -> None: async def amain() -> None:
app_stop = asyncio.Event() app_stop = asyncio.Event()
self._app_stop = app_stop self._app_stop = app_stop
self._read_urls_snapshot = list(urls)
mgr = RelayManager(on_event=on_ev, on_notice=on_notice) mgr = RelayManager(on_event=on_ev, on_notice=on_notice)
self._manager = mgr self._manager = mgr
# Kind 0 metadata, 6 reposts, 7 reactions, 9735 zap receipts — for feed UI + engagement counts. # Feed ingest kinds + thread/backlink kinds (see ``THREAD_REPLY_KINDS`` in ``database``).
kinds_main = [0, 1, 6, 7, 16, 1111, 1244, 9735, 20, 21, 30023, 9802, 11] kinds_main = sorted(
set(
[0, 1, 6, 7, 20, 21, 30023, 9735, 11]
+ list(THREAD_REPLY_KINDS)
)
)
for u in urls: for u in urls:
mgr.register(u) mgr.register(u)
kinds = list(WISP_TRENDING_FEED_KINDS) if is_wisp_trending_relay_url(u) else kinds_main kinds = list(WISP_TRENDING_FEED_KINDS) if is_wisp_trending_relay_url(u) else kinds_main
@ -86,7 +102,7 @@ class NostrEngine(QObject):
mgr.request_subscribe( mgr.request_subscribe(
AGGR_THREAD_RELAY, AGGR_THREAD_RELAY,
"imwald-aggr", "imwald-aggr",
[{"kinds": [1, 16, 1111, 1244], "limit": 120}], [{"kinds": sorted(set([1, 11, 16, 1111, 1244] + list(THREAD_REPLY_KINDS))), "limit": 160}],
) )
if len(k3000_owner) == 64 and all(c in "0123456789abcdef" for c in k3000_owner): if len(k3000_owner) == 64 and all(c in "0123456789abcdef" for c in k3000_owner):
for u in urls: for u in urls:
@ -96,9 +112,38 @@ class NostrEngine(QObject):
sid, sid,
[{"kinds": [30000], "authors": [k3000_owner], "limit": 150}], [{"kinds": [30000], "authors": [k3000_owner], "limit": 150}],
) )
log.info(
"relay worker: %d relay(s) registered, calling connect_all()",
len(mgr.all_relays()),
)
await mgr.connect_all() await mgr.connect_all()
log.info("relay worker: connect_all() returned (per-relay runners active)")
async def relay_pulse() -> None:
while not app_stop.is_set():
snap = [
{"url": r.url, "state": r.state.value, "error": r.last_error}
for r in mgr.all_relays()
]
self.relay_snapshot.emit(snap)
try:
await asyncio.wait_for(app_stop.wait(), timeout=1.1)
except asyncio.TimeoutError:
continue
pulse_task = asyncio.create_task(relay_pulse())
try:
await app_stop.wait() await app_stop.wait()
finally:
pulse_task.cancel()
with contextlib.suppress(asyncio.CancelledError):
await pulse_task
await mgr.stop() await mgr.stop()
self._read_urls_snapshot = []
self._author_meta_pending.clear()
if self._author_meta_timer is not None:
self._author_meta_timer.cancel()
self._author_meta_timer = None
loop = asyncio.new_event_loop() loop = asyncio.new_event_loop()
self._loop = loop self._loop = loop
@ -113,14 +158,92 @@ class NostrEngine(QObject):
self._thread = threading.Thread(target=runner, name="nostr-relay", daemon=True) self._thread = threading.Thread(target=runner, name="nostr-relay", daemon=True)
self._thread.start() self._thread.start()
log.info("relay worker thread started (%d read URL(s))", len(urls))
def stop_relays(self) -> None: def stop_relays(self) -> None:
if self._loop and self._app_stop and self._loop.is_running(): loop = self._loop
self._loop.call_soon_threadsafe(self._app_stop.set) stop_ev = self._app_stop
if loop and loop.is_running() and stop_ev:
def _stop() -> None:
if self._author_meta_timer is not None:
self._author_meta_timer.cancel()
self._author_meta_timer = None
self._author_meta_pending.clear()
stop_ev.set()
loop.call_soon_threadsafe(_stop)
if self._thread: if self._thread:
self._thread.join(timeout=5.0) self._thread.join(timeout=5.0)
self._thread = None self._thread = None
def enqueue_author_metadata(self, pubkey: str) -> None:
"""Queue relay REQ for this author's profile + list + emoji metadata (debounced, coalesced)."""
pk = pubkey.strip().lower()
if len(pk) != 64 or any(c not in "0123456789abcdef" for c in pk):
return
loop = self._loop
if not loop or not loop.is_running():
return
loop.call_soon_threadsafe(self._author_meta_bump, pk)
def enqueue_author_metadata_many(self, pubkeys: Iterable[str]) -> None:
for p in pubkeys:
self.enqueue_author_metadata(p)
def _author_meta_bump(self, pk: str) -> None:
"""Must run on the asyncio relay loop."""
loop = self._loop
if not loop:
return
self._author_meta_pending.add(pk)
if self._author_meta_timer is not None:
self._author_meta_timer.cancel()
self._author_meta_timer = loop.call_later(0.85, self._author_meta_flush_sync)
def _author_meta_flush_sync(self) -> None:
self._author_meta_timer = None
if self._app_stop is not None and self._app_stop.is_set():
self._author_meta_pending.clear()
return
loop = self._loop
if not loop or not loop.is_running():
return
loop.create_task(self._author_meta_flush_async())
async def _author_meta_flush_async(self) -> None:
if self._app_stop is not None and self._app_stop.is_set():
return
mgr = self._manager
if not mgr or not self._read_urls_snapshot:
return
kinds = AUTHOR_METADATA_KINDS
batch: list[str] = []
while self._author_meta_pending and len(batch) < 12:
batch.append(self._author_meta_pending.pop())
if not batch:
return
filters: list[dict[str, Any]] = []
for auth_pk in batch:
since = self.db.max_created_at_for_author_kinds(auth_pk, kinds)
fd: dict[str, Any] = {
"authors": [auth_pk],
"kinds": list(kinds),
"limit": 500,
}
if since > 0:
fd["since"] = since
filters.append(fd)
for url in self._read_urls_snapshot:
mgr.request_subscribe(url, _AUTHOR_META_SUB_ID, filters)
if (
self._author_meta_pending
and self._loop
and self._app_stop is not None
and not self._app_stop.is_set()
):
self._author_meta_timer = self._loop.call_later(0.12, self._author_meta_flush_sync)
@staticmethod @staticmethod
def apply_ingest_to_db(db: Database, ev: dict[str, Any], source_relay: str | None = None) -> None: def apply_ingest_to_db(db: Database, ev: dict[str, Any], source_relay: str | None = None) -> None:
if "id" not in ev: if "id" not in ev:

3
src/imwald/core/relay_manager.py

@ -136,6 +136,7 @@ class RelayManager:
if sub["relay"] == url: if sub["relay"] == url:
await self._send_req(url, sub["sub_id"], sub["filters"]) await self._send_req(url, sub["sub_id"], sub["filters"])
attempt = 0 attempt = 0
log.info("relay %s connected (websocket open)", url)
async for raw in ws: async for raw in ws:
if self._shutdown.is_set(): if self._shutdown.is_set():
break break
@ -164,6 +165,8 @@ class RelayManager:
delay = min(60.0, 1.5**attempt) + random.random() delay = min(60.0, 1.5**attempt) + random.random()
r.backoff_until = time.monotonic() + delay r.backoff_until = time.monotonic() + delay
try: try:
if not self._shutdown.is_set():
log.info("relay %s will retry in %.1fs (attempt %d)", url, delay, attempt)
await asyncio.wait_for(self._shutdown.wait(), timeout=delay) await asyncio.wait_for(self._shutdown.wait(), timeout=delay)
break break
except TimeoutError: except TimeoutError:

65
src/imwald/ui/feed_page.py

@ -147,6 +147,8 @@ class FeedPage(QWidget):
self._my_pubkey: str | None = None self._my_pubkey: str | None = None
self._following: set[str] = set() self._following: set[str] = set()
self._list30000_pubkeys: set[str] = set() self._list30000_pubkeys: set[str] = set()
self._rendered_op_id: str | None = None
self._rendered_reply_sig: tuple[str, ...] | None = None
self._engagement = QFrame() self._engagement = QFrame()
self._engagement.setObjectName("EngagementBar") self._engagement.setObjectName("EngagementBar")
@ -190,6 +192,10 @@ class FeedPage(QWidget):
self._thread_scroll.setWidget(self._thread_host) self._thread_scroll.setWidget(self._thread_host)
self._thread_scroll.setHorizontalScrollBarPolicy(Qt.ScrollBarPolicy.ScrollBarAlwaysOff) self._thread_scroll.setHorizontalScrollBarPolicy(Qt.ScrollBarPolicy.ScrollBarAlwaysOff)
self._thread_scroll.viewport().installEventFilter(self) self._thread_scroll.viewport().installEventFilter(self)
self._refit_timer = QTimer(self)
self._refit_timer.setSingleShot(True)
self._refit_timer.setInterval(48)
self._refit_timer.timeout.connect(self._refit_thread_reply_sizes)
prev = QPushButton("◀ Previous") prev = QPushButton("◀ Previous")
next_ = QPushButton("Next ▶") next_ = QPushButton("Next ▶")
@ -213,7 +219,6 @@ class FeedPage(QWidget):
left = QVBoxLayout() left = QVBoxLayout()
left.setSpacing(8) left.setSpacing(8)
left.addWidget(self._engagement)
left.addWidget(self._op_card, stretch=1) left.addWidget(self._op_card, stretch=1)
left.addLayout(nav) left.addLayout(nav)
@ -222,6 +227,7 @@ class FeedPage(QWidget):
right = QVBoxLayout() right = QVBoxLayout()
right.setSpacing(4) right.setSpacing(4)
right.addWidget(self._thread_title) right.addWidget(self._thread_title)
right.addWidget(self._engagement)
right.addWidget(self._thread_scroll, stretch=1) right.addWidget(self._thread_scroll, stretch=1)
rw = QWidget() rw = QWidget()
@ -258,7 +264,7 @@ class FeedPage(QWidget):
and obj is thread_scroll.viewport() and obj is thread_scroll.viewport()
and event.type() == QEvent.Type.Resize and event.type() == QEvent.Type.Resize
): ):
self._refit_thread_reply_sizes() self._refit_timer.start()
return super().eventFilter(obj, event) return super().eventFilter(obj, event)
def _refit_thread_reply_sizes(self) -> None: def _refit_thread_reply_sizes(self) -> None:
@ -302,8 +308,35 @@ class FeedPage(QWidget):
self._show_current() self._show_current()
def refresh_tail(self) -> None: def refresh_tail(self) -> None:
"""After ingest: rebuild ranked queue (picks up trending + read relays).""" """After ingest: merge new feed rows while keeping the current OP selected (stable pager)."""
self.reload_queue() if not self._queue:
return
cur_id = self._queue[self._index % len(self._queue)]["id"]
if len(self._queue) == 1:
ev = self._db.get_event(str(cur_id))
if ev:
self._queue = [cast(dict[str, Any], ev)]
self._show_current()
return
hide = self._db.get_setting("hide_nsfw", "1") == "1"
raw = self._db.feed_candidates(
FEED_KINDS,
hide_nsfw=hide,
limit=500,
viewer_pubkey=self._feed_viewer_key(),
exclude_viewed=True,
)
ranked = self._ranker.rank_feed(raw, self._my_pubkey, self._following, self._list30000_pubkeys)
self._queue = ranked
found = False
for i, ev in enumerate(self._queue):
if ev["id"] == cur_id:
self._index = i
found = True
break
if not found:
self._index = 0
self._show_current()
def show_event(self, event_id: str) -> None: def show_event(self, event_id: str) -> None:
ev = self._db.get_event(event_id) ev = self._db.get_event(event_id)
@ -324,9 +357,13 @@ class FeedPage(QWidget):
w = item.widget() w = item.widget()
if w is not None: if w is not None:
w.deleteLater() w.deleteLater()
else:
del item
def _show_current(self) -> None: def _show_current(self) -> None:
if not self._queue: if not self._queue:
self._rendered_op_id = None
self._rendered_reply_sig = None
self._op.setPlainText("No events in local database yet — wait for relay sync.") self._op.setPlainText("No events in local database yet — wait for relay sync.")
self._clear_thread_rows() self._clear_thread_rows()
self._why.setText("") self._why.setText("")
@ -334,6 +371,8 @@ class FeedPage(QWidget):
return return
ev = self._queue[self._index % len(self._queue)] ev = self._queue[self._index % len(self._queue)]
if ev.get("deleted"): if ev.get("deleted"):
self._rendered_op_id = None
self._rendered_reply_sig = None
raw = html.escape(ev.get("content") or "") raw = html.escape(ev.get("content") or "")
self._op.setHtml( self._op.setHtml(
f"<body style=\"color:{TEXT};background:transparent\">" f"<body style=\"color:{TEXT};background:transparent\">"
@ -354,13 +393,22 @@ class FeedPage(QWidget):
self._why.setText(f"score={score:.2f}") self._why.setText(f"score={score:.2f}")
self._why.setToolTip(json.dumps(why, ensure_ascii=False, indent=2)) self._why.setToolTip(json.dumps(why, ensure_ascii=False, indent=2))
stats = self._db.event_engagement_stats(ev["id"]) stats = self._db.event_engagement_stats(ev)
op_pk = str(ev["pubkey"]) op_pk = str(ev["pubkey"])
author_nip30 = self._db.get_author_nip30_emoji_urls(op_pk) author_nip30 = self._db.get_author_nip30_emoji_urls(op_pk)
self._engagement_label.setText( self._engagement_label.setText(
_format_engagement_html(stats, reaction_nip30_urls=author_nip30) _format_engagement_html(stats, reaction_nip30_urls=author_nip30)
) )
root_id = str(ev["id"])
replies = self._db.list_replies_to(ev)
reply_sig = tuple(str(r["id"]) for r in replies)
if root_id == self._rendered_op_id and reply_sig == self._rendered_reply_sig:
return
self._rendered_op_id = root_id
self._rendered_reply_sig = reply_sig
pk = op_pk pk = op_pk
prof_row = self._db.get_latest_kind0_profile(pk) prof_row = self._db.get_latest_kind0_profile(pk)
parsed = parse_kind0_profile(prof_row["content"] if prof_row else "") parsed = parse_kind0_profile(prof_row["content"] if prof_row else "")
@ -408,8 +456,9 @@ class FeedPage(QWidget):
) )
self._op.setHtml(body) self._op.setHtml(body)
self._thread_scroll.setUpdatesEnabled(False)
try:
self._clear_thread_rows() self._clear_thread_rows()
replies = self._db.list_replies_to(ev["id"])
pubkeys = [str(r["pubkey"]) for r in replies] pubkeys = [str(r["pubkey"]) for r in replies]
profiles = self._db.get_latest_kind0_profiles(pubkeys) profiles = self._db.get_latest_kind0_profiles(pubkeys)
for r in replies: for r in replies:
@ -461,7 +510,9 @@ class FeedPage(QWidget):
vl.addWidget(body_te) vl.addWidget(body_te)
self._thread_layout.addWidget(card) self._thread_layout.addWidget(card)
self._thread_layout.addStretch(1) self._thread_layout.addStretch(1)
QTimer.singleShot(0, self._refit_thread_reply_sizes) finally:
self._thread_scroll.setUpdatesEnabled(True)
self._refit_timer.start()
def _prev(self) -> None: def _prev(self) -> None:
if self._queue: if self._queue:

36
src/imwald/ui/main_window.py

@ -16,6 +16,7 @@ from PySide6.QtWidgets import (
QListWidgetItem, QListWidgetItem,
QMainWindow, QMainWindow,
QMessageBox, QMessageBox,
QSplitter,
QStackedWidget, QStackedWidget,
QToolBar, QToolBar,
QVBoxLayout, QVBoxLayout,
@ -24,7 +25,7 @@ from PySide6.QtWidgets import (
from imwald.core.accounts_store import StoredAccount, load_accounts from imwald.core.accounts_store import StoredAccount, load_accounts
from imwald.core.database import Database from imwald.core.database import Database
from imwald.core.nostr_engine import NostrEngine from imwald.core.nostr_engine import AUTHOR_METADATA_KINDS, NostrEngine
from imwald.core.md_render import markdown_plain_summary from imwald.core.md_render import markdown_plain_summary
from imwald.core.relay_list import resolve_for_account from imwald.core.relay_list import resolve_for_account
from imwald.core.relay_policy import augment_feed_with_trending from imwald.core.relay_policy import augment_feed_with_trending
@ -33,6 +34,7 @@ from imwald.ui.db_admin_page import DbAdminPage
from imwald.ui.feed_page import FeedPage from imwald.ui.feed_page import FeedPage
from imwald.ui.notifications_page import NotificationsPage from imwald.ui.notifications_page import NotificationsPage
from imwald.ui.onboarding_wizard import run_onboarding_wizard from imwald.ui.onboarding_wizard import run_onboarding_wizard
from imwald.ui.relay_status_panel import RelayStatusPanel
from imwald.ui.search_page import SearchPage from imwald.ui.search_page import SearchPage
@ -58,7 +60,14 @@ class MainWindow(QMainWindow):
self._stack.addWidget(self._notif) # 2 self._stack.addWidget(self._notif) # 2
self._stack.addWidget(self._dbadm) # 3 self._stack.addWidget(self._dbadm) # 3
self.setCentralWidget(self._stack) self._relay_panel = RelayStatusPanel(engine)
split = QSplitter(Qt.Orientation.Horizontal)
split.addWidget(self._stack)
split.addWidget(self._relay_panel)
split.setStretchFactor(0, 1)
split.setStretchFactor(1, 0)
split.setSizes([920, 280])
self.setCentralWidget(split)
self._acct_combo = QComboBox() self._acct_combo = QComboBox()
self._acct_combo.setMinimumWidth(220) self._acct_combo.setMinimumWidth(220)
@ -78,6 +87,11 @@ class MainWindow(QMainWindow):
self._ingest_ui_timer.setInterval(450) self._ingest_ui_timer.setInterval(450)
self._ingest_ui_timer.timeout.connect(self._flush_ingest_ui_refresh) self._ingest_ui_timer.timeout.connect(self._flush_ingest_ui_refresh)
self._author_bootstrap_timer = QTimer(self)
self._author_bootstrap_timer.setSingleShot(True)
self._author_bootstrap_timer.setInterval(2800)
self._author_bootstrap_timer.timeout.connect(self._bootstrap_author_metadata_queue)
self._acct_combo.currentIndexChanged.connect(self._on_account_changed) self._acct_combo.currentIndexChanged.connect(self._on_account_changed)
self._on_account_changed() self._on_account_changed()
@ -141,6 +155,11 @@ class MainWindow(QMainWindow):
user_write_urls=resolved.write_urls, user_write_urls=resolved.write_urls,
list30000_owner=self.list_owner_pubkey_for_relays(), list30000_owner=self.list_owner_pubkey_for_relays(),
) )
self._author_bootstrap_timer.start()
def _bootstrap_author_metadata_queue(self) -> None:
"""Backfill profile/list/emoji metadata for authors already in the local DB."""
self._engine.enqueue_author_metadata_many(self._db.distinct_pubkeys_recent(450))
def _flush_ingest_ui_refresh(self) -> None: def _flush_ingest_ui_refresh(self) -> None:
if self._stack.currentWidget() is self._feed: if self._stack.currentWidget() is self._feed:
@ -194,6 +213,7 @@ class MainWindow(QMainWindow):
def _wire_engine(self) -> None: def _wire_engine(self) -> None:
self._engine.event_ingested.connect(self._on_event_ingested) self._engine.event_ingested.connect(self._on_event_ingested)
self._engine.relay_status.connect(self._relay_status_message) self._engine.relay_status.connect(self._relay_status_message)
self._engine.relay_status.connect(self._relay_panel.log_line.emit)
def _relay_status_message(self, s: str) -> None: def _relay_status_message(self, s: str) -> None:
self.statusBar().showMessage(s, 8000) self.statusBar().showMessage(s, 8000)
@ -201,7 +221,16 @@ class MainWindow(QMainWindow):
def _on_event_ingested(self, relay_url: str, ev: object) -> None: def _on_event_ingested(self, relay_url: str, ev: object) -> None:
if not isinstance(ev, dict): if not isinstance(ev, dict):
return return
NostrEngine.apply_ingest_to_db(self._db, cast(dict[str, Any], ev), relay_url) evd = cast(dict[str, Any], ev)
NostrEngine.apply_ingest_to_db(self._db, evd, relay_url)
pk = evd.get("pubkey")
if isinstance(pk, str):
try:
ek = int(evd.get("kind", -1))
except (TypeError, ValueError):
ek = -1
if ek not in AUTHOR_METADATA_KINDS:
self._engine.enqueue_author_metadata(pk)
self._ingest_ui_timer.start() self._ingest_ui_timer.start()
def _wire_pages(self) -> None: def _wire_pages(self) -> None:
@ -316,5 +345,6 @@ class MainWindow(QMainWindow):
return acc, self._password_for(pk) return acc, self._password_for(pk)
def closeEvent(self, event: QCloseEvent) -> None: # noqa: N802 def closeEvent(self, event: QCloseEvent) -> None: # noqa: N802
self._relay_panel.shutdown_logging()
self._engine.stop_relays() self._engine.stop_relays()
super().closeEvent(event) super().closeEvent(event)

320
src/imwald/ui/relay_status_panel.py

@ -0,0 +1,320 @@
"""Right-side relay health (NIP-11 names/icons) + rolling client log."""
from __future__ import annotations
import logging
from typing import Any, Callable, cast
from PySide6.QtCore import QObject, QRunnable, Qt, QThreadPool, Signal
from PySide6.QtGui import QFont, QPixmap, QTextCursor
from PySide6.QtWidgets import (
QFrame,
QHBoxLayout,
QLabel,
QScrollArea,
QSizePolicy,
QSplitter,
QTextEdit,
QVBoxLayout,
QWidget,
)
from imwald.core.nip11_relay_info import absolute_icon_url, fetch_nip11, relay_display_name
from imwald.core.nostr_engine import NostrEngine
from imwald.ui.theme import BG_CARD, BG_FIELD, BORDER, TEXT, TEXT_DIM, TEXT_MUTED
_LOG_MAX_CHARS = 120_000
log = logging.getLogger(__name__)
class QtLogHandler(logging.Handler):
    """Logging handler that forwards formatted records to a Qt signal emitter.

    The sink callable is expected to be a ``Signal.emit``; when the emission
    crosses threads Qt delivers it as a queued call, which makes appending to
    the UI log safe from worker threads.
    """

    def __init__(self, sink_emit: Callable[[str], None]) -> None:
        super().__init__()
        self._sink_emit = sink_emit

    def emit(self, record: logging.LogRecord) -> None:
        try:
            formatted = self.format(record)
            self._sink_emit(formatted)
        except RuntimeError:
            # The receiving QObject may already have been destroyed (shutdown).
            pass
class _Nip11FetchSignals(QObject):
    """Signal carrier for :class:`_Nip11Runnable`.

    ``QRunnable`` is not a ``QObject`` and therefore cannot declare signals
    itself, so the completion signal lives on this shared helper object.
    """

    # Emitted exactly once per fetch attempt.
    done = Signal(str, object, object)  # ws_url, nip11 dict|None, pixmap: QPixmap|None
class _Nip11Runnable(QRunnable):
    """Background worker: fetch a relay's NIP-11 document and (optionally) its icon.

    The result is delivered through ``sigs.done`` so the UI thread can update
    the matching relay row; the signal is emitted exactly once per run, even
    when the document or icon is unavailable.
    """

    # Cap on the icon payload; the URL comes from relay-supplied metadata and
    # is untrusted, so never read an unbounded body into memory.
    _ICON_MAX_BYTES = 1_000_000

    def __init__(self, ws_url: str, sigs: _Nip11FetchSignals) -> None:
        super().__init__()
        self._ws_url = ws_url
        self._sigs = sigs

    def run(self) -> None:
        lg = logging.getLogger(__name__)
        lg.info("NIP-11 fetching %s", self._ws_url)
        nip = fetch_nip11(self._ws_url)
        pm: QPixmap | None = None
        if nip:
            dn = relay_display_name(nip, self._ws_url)
            lg.info("NIP-11 metadata for %s: %r", self._ws_url, dn)
            iu = absolute_icon_url(self._ws_url, cast(str | None, nip.get("icon")))
            if iu:
                pm = self._fetch_icon(iu, lg)
            else:
                lg.info("NIP-11 no icon URL for %s", self._ws_url)
        else:
            lg.info("NIP-11 no document for %s", self._ws_url)
        self._sigs.done.emit(self._ws_url, nip, pm)

    def _fetch_icon(self, icon_url: str, lg: logging.Logger) -> QPixmap | None:
        """Download and decode the relay icon; return ``None`` on any failure."""
        from http.client import HTTPException
        from urllib.request import urlopen

        try:
            with urlopen(icon_url, timeout=8) as resp:  # noqa: S310
                data = resp.read(self._ICON_MAX_BYTES)
            p = QPixmap()
            if p.loadFromData(data) and not p.isNull():
                lg.info("NIP-11 icon ok for %s", self._ws_url)
                return p.scaled(36, 36, Qt.AspectRatioMode.KeepAspectRatio, Qt.TransformationMode.SmoothTransformation)
            lg.info("NIP-11 icon decode failed for %s", self._ws_url)
        except (OSError, ValueError, HTTPException) as e:
            # OSError covers network/socket failures, but urlopen also raises
            # ValueError for malformed/unsupported URL schemes and
            # http.client.HTTPException subclasses (e.g. BadStatusLine) that do
            # NOT derive from OSError.  Catching OSError alone let those escape
            # QRunnable.run() and skip the done-signal for this relay.
            lg.info("NIP-11 icon download failed for %s: %s", self._ws_url, e)
        return None
class _RelayRow(QFrame):
    """One relay entry in the status panel: 40x40 icon + name + state line."""

    def __init__(self, ws_url: str, parent: QWidget | None = None) -> None:
        super().__init__(parent)
        self._url = ws_url
        self.setSizePolicy(QSizePolicy.Policy.Preferred, QSizePolicy.Policy.Minimum)
        # Object name scopes the stylesheet so child labels keep their own styles.
        self.setObjectName("RelayRow")
        self.setStyleSheet(
            f"#RelayRow {{ background-color: {BG_CARD}; border: 1px solid {BORDER}; border-radius: 8px; }}"
        )
        self._icon = QLabel()
        self._icon.setFixedSize(40, 40)
        self._icon.setAlignment(Qt.AlignmentFlag.AlignCenter)
        self._icon.setStyleSheet(f"color: {TEXT_MUTED}; font-size: 11px;")
        self._icon.setText("")
        # Placeholder name: bare host from the ws URL, capped at 42 chars;
        # replaced by the NIP-11 display name once the fetch completes.
        self._name = QLabel(ws_url.replace("wss://", "").replace("ws://", "").split("/")[0][:42])
        self._name.setWordWrap(False)
        self._name.setStyleSheet(f"color: {TEXT}; font-weight: 600; font-size: 13px;")
        self._state = QLabel("")
        self._state.setStyleSheet(f"color: {TEXT_DIM}; font-size: 12px;")
        self._state.setWordWrap(False)
        txt = QVBoxLayout()
        txt.setSpacing(2)
        txt.addWidget(self._name)
        txt.addWidget(self._state)
        row = QHBoxLayout(self)
        row.setContentsMargins(8, 6, 8, 6)
        row.setSpacing(8)
        row.addWidget(self._icon)
        row.addLayout(txt, stretch=1)

    def set_snapshot(self, state: str, err: str | None) -> None:
        """Update the state line, health styling, and tooltip from a relay snapshot."""
        err_t = (err or "").strip()
        # NOTE(review): state and error are concatenated with no visible
        # separator (the inner f-string is redundant) — a delimiter may have
        # been intended here; confirm against the rendered UI.
        self._state.setText(f"{state}{f'{err_t}' if err_t else ''}")
        healthy = state == "connected"
        # NOTE(review): `opacity` is not a supported Qt Style Sheet property on
        # plain widgets, so the dimming of unhealthy rows is likely a no-op;
        # QGraphicsOpacityEffect would be needed for a real fade — confirm.
        self.setStyleSheet(
            f"#RelayRow {{ background-color: {BG_CARD}; border: 1px solid {BORDER}; "
            f"border-radius: 8px; opacity: {'1' if healthy else '0.45'}; }}"
        )
        self.setToolTip(self._url if not err_t else f"{self._url}\n{err_t}")

    def set_nip11(self, name: str, pm: QPixmap | None) -> None:
        """Apply the fetched NIP-11 display name and icon (or a host-initials fallback)."""
        self._name.setText(name[:80])
        if pm is not None and not pm.isNull():
            self._icon.setPixmap(pm)
            self._icon.setText("")
        else:
            # No usable icon: clear any pixmap and show the first two host
            # letters (uppercased) as a text badge instead.
            self._icon.clear()
            self._icon.setPixmap(QPixmap())
            host = self._url.replace("wss://", "").replace("ws://", "").split("/")[0]
            self._icon.setText((host[:2] or "?").upper())
class RelayStatusPanel(QWidget):
    """Relays (top) + log (bottom).

    The relay list is rebuilt from ``engine.relay_snapshot`` pulses; NIP-11
    metadata (name/icon) is fetched once per relay on a small thread pool.
    The bottom pane mirrors the ``imwald`` logger via :class:`QtLogHandler`.
    """

    # Cross-thread log entry point: emitting from a worker thread is delivered
    # to the UI thread as a queued connection.
    log_line = Signal(str)

    def __init__(self, engine: NostrEngine, parent: QWidget | None = None) -> None:
        super().__init__(parent)
        self._engine = engine
        self._rows: dict[str, _RelayRow] = {}  # ws_url -> visible row widget
        self._relay_order: tuple[str, ...] = ()  # last roster ordering received
        self._last_relay_states: dict[str, tuple[str, str | None]] = {}  # for change-only logging
        self._nip11_started: set[str] = set()  # relays whose NIP-11 fetch was already queued
        self._nip11_sigs = _Nip11FetchSignals(self)
        self._pool = QThreadPool(self)
        self._pool.setMaxThreadCount(3)  # bound concurrent NIP-11 fetches
        root = QVBoxLayout(self)
        root.setContentsMargins(6, 6, 6, 6)
        root.setSpacing(6)
        title = QLabel("Relays")
        title.setStyleSheet(f"color: {TEXT_MUTED}; font-size: 12px; font-weight: 600;")
        root.addWidget(title)
        # Host widget for the relay rows; a trailing stretch keeps rows pinned to the top.
        self._relay_host = QWidget()
        self._relay_lay = QVBoxLayout(self._relay_host)
        self._relay_lay.setContentsMargins(0, 0, 0, 0)
        self._relay_lay.setSpacing(6)
        self._relay_lay.setAlignment(Qt.AlignmentFlag.AlignTop)
        self._relay_lay.addStretch(1)
        self._pin_relay_layout_stretch()
        relay_scroll = QScrollArea()
        relay_scroll.setWidgetResizable(True)
        relay_scroll.setWidget(self._relay_host)
        relay_scroll.setHorizontalScrollBarPolicy(Qt.ScrollBarPolicy.ScrollBarAlwaysOff)
        relay_scroll.setFrameShape(QFrame.Shape.NoFrame)
        relay_scroll.setStyleSheet(f"QScrollArea {{ background: transparent; border: none; }}")
        relay_scroll.setSizePolicy(QSizePolicy.Policy.Preferred, QSizePolicy.Policy.Preferred)
        # Read-only rolling client log in a monospace font.
        self._log = QTextEdit()
        self._log.setReadOnly(True)
        self._log.setPlaceholderText("Client log…")
        mono = QFont("monospace")
        if not mono.exactMatch():
            mono = QFont("Courier New")  # fallback when no generic "monospace" family resolves
        self._log.setFont(mono)
        self._log.setStyleSheet(
            f"QTextEdit {{ background-color: {BG_FIELD}; color: {TEXT_DIM}; "
            f"border: 1px solid {BORDER}; border-radius: 8px; padding: 6px; font-size: 12px; }}"
        )
        self._log.setSizePolicy(QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Expanding)
        # Vertical splitter: relay list on top (fixed-ish), log below (grows).
        split = QSplitter(Qt.Orientation.Vertical)
        split.setChildrenCollapsible(False)
        split.addWidget(relay_scroll)
        split.addWidget(self._log)
        split.setStretchFactor(0, 0)
        split.setStretchFactor(1, 1)
        split.setSizes([200, 360])
        root.addWidget(split, stretch=1)
        self.setMaximumWidth(360)
        self.setMinimumWidth(200)
        self.log_line.connect(self._append_log)
        self._nip11_sigs.done.connect(self._on_nip11_ready_slot)
        engine.relay_snapshot.connect(self._on_relay_snapshot)
        # Mirror the package logger into the log pane (removed again in shutdown_logging).
        self._log_handler = QtLogHandler(self.log_line.emit)
        self._log_handler.setLevel(logging.DEBUG)
        self._log_handler.setFormatter(
            logging.Formatter("%(asctime)s %(levelname)s %(name)s: %(message)s", "%H:%M:%S")
        )
        imwald_log = logging.getLogger("imwald")
        imwald_log.addHandler(self._log_handler)
        imwald_log.setLevel(logging.DEBUG)
        # Markdown renderer debug is noisy; keep it at INFO while the panel shows DEBUG elsewhere.
        logging.getLogger("imwald.core.md_render").setLevel(logging.INFO)

    def _append_log(self, line: str) -> None:
        """Append one line to the log view, trimming the document past the cap."""
        cur = self._log.textCursor()
        cur.movePosition(QTextCursor.MoveOperation.End)
        self._log.setTextCursor(cur)
        self._log.insertPlainText(line + "\n")
        doc = self._log.document()
        if doc.characterCount() > _LOG_MAX_CHARS:
            # Keep only the newest tail; wholesale re-set is acceptable since
            # the view is read-only.
            doc.setPlainText(doc.toPlainText()[-_LOG_MAX_CHARS:])

    def _on_relay_snapshot(self, rows: object) -> None:
        """Reconcile the row widgets with the engine's relay snapshot.

        Fast path: same roster and same ordering -> only refresh state text.
        Otherwise rows are created/removed/reordered, and a NIP-11 fetch is
        queued once per newly-seen relay URL.
        """
        if not isinstance(rows, list):
            return
        rows_list = cast(list[Any], rows)
        urls: list[str] = []
        states: dict[str, tuple[str, str | None]] = {}
        for raw in rows_list:
            if not isinstance(raw, dict):
                continue
            item = cast(dict[str, Any], raw)
            u = str(item.get("url") or "")
            if not u:
                continue
            urls.append(u)
            err_o = item.get("error")
            err_s = str(err_o).strip() if err_o is not None else None
            states[u] = (str(item.get("state") or "?"), err_s or None)
        key = tuple(urls)
        if key == self._relay_order and set(urls) == set(self._rows.keys()):
            # Unchanged roster/order: cheap per-row state refresh only.
            for u in urls:
                row = self._rows.get(u)
                if row is None:
                    continue
                st = states.get(u, ("?", None))
                row.set_snapshot(st[0], st[1])
            self._log_relay_transitions(states)
            return
        # Drop rows for relays no longer in the snapshot.
        for u in list(self._rows.keys()):
            if u not in urls:
                w = self._rows.pop(u)
                w.deleteLater()
        # Create rows for new relays and kick off their one-time NIP-11 fetch.
        for u in urls:
            if u not in self._rows:
                row = _RelayRow(u, self._relay_host)
                self._rows[u] = row
                # Insert before the trailing stretch item.
                self._relay_lay.insertWidget(self._relay_lay.count() - 1, row)
                if u not in self._nip11_started:
                    self._nip11_started.add(u)
                    self._pool.start(_Nip11Runnable(u, self._nip11_sigs))
        if key != self._relay_order:
            # Reorder widgets to match the snapshot ordering.
            for i, u in enumerate(urls):
                row = self._rows.get(u)
                if row is None:
                    continue
                self._relay_lay.removeWidget(row)
                self._relay_lay.insertWidget(i, row)
        self._relay_order = key
        self._pin_relay_layout_stretch()
        for u in urls:
            row = self._rows.get(u)
            if row is None:
                continue
            st = states.get(u, ("?", None))
            row.set_snapshot(st[0], st[1])
        self._log_relay_transitions(states)

    def _log_relay_transitions(self, states: dict[str, tuple[str, str | None]]) -> None:
        """INFO only when roster membership or per-relay state/error changes (not every pulse)."""
        for u in list(self._last_relay_states):
            if u not in states:
                log.info("relay roster: removed %s", u)
                del self._last_relay_states[u]
        for u, cur in states.items():
            prev = self._last_relay_states.get(u)
            if prev == cur:
                continue
            self._last_relay_states[u] = cur
            st, err = cur
            extra = f" ({err})" if err else ""
            # NOTE(review): url and state are joined with no separator in this
            # format string — a delimiter may have been intended; confirm.
            log.info("relay state %s%s%s", u, st, extra)

    def _pin_relay_layout_stretch(self) -> None:
        """Keep all row items at stretch 0 and only the trailing spacer at stretch 1."""
        lay = self._relay_lay
        n = lay.count()
        if n <= 0:
            return
        for i in range(n - 1):
            lay.setStretch(i, 0)
        lay.setStretch(n - 1, 1)

    def _on_nip11_ready_slot(self, ws_url: str, nip: object, pm: object) -> None:
        """Apply a finished NIP-11 fetch to its row (row may be gone by now)."""
        row = self._rows.get(ws_url)
        if not row:
            return
        nip_d = cast(dict[str, Any] | None, nip if isinstance(nip, dict) else None)
        name = relay_display_name(nip_d, ws_url)
        qpm: QPixmap | None = pm if isinstance(pm, QPixmap) else None
        row.set_nip11(name, qpm)

    def shutdown_logging(self) -> None:
        """Detach the panel's log handler and reset logger levels before teardown."""
        imwald_log = logging.getLogger("imwald")
        imwald_log.removeHandler(self._log_handler)
        imwald_log.setLevel(logging.NOTSET)
        logging.getLogger("imwald.core.md_render").setLevel(logging.NOTSET)

45
tests/test_author_meta_db.py

@ -0,0 +1,45 @@
"""Database helpers for per-author metadata relay ``since`` cursors."""
import tempfile
from hashlib import sha256
from pathlib import Path
from imwald.core.database import Database
from imwald.core.nostr_crypto import build_signed_event, pubkey_hex_from_secret
def _sk() -> bytes:
return bytes.fromhex("3501454135014541350145413501453fefb02227e449e57cf4d3a3ce05378683")
def test_max_created_at_for_author_kinds() -> None:
    """The cursor helper returns the newest created_at among the requested kinds only."""
    secret = _sk()
    author = pubkey_hex_from_secret(secret)
    profile = build_signed_event(secret, created_at=10, kind=0, tags=[], content="{}")
    note = build_signed_event(secret, created_at=50, kind=1, tags=[], content="hi")
    with tempfile.TemporaryDirectory() as tmp:
        db = Database(Path(tmp) / "m.sqlite")
        db.connect()
        for ev in (profile, note):
            db.upsert_event(ev)
        # kind 1 excluded -> only the kind-0 timestamp counts.
        assert db.max_created_at_for_author_kinds(author, (0, 30000)) == 10
        # kind 1 included -> its newer timestamp wins.
        assert db.max_created_at_for_author_kinds(author, (0, 1)) == 50
def test_distinct_pubkeys_recent_orders_by_max_created() -> None:
    """Authors come back deduplicated, lowercase, newest activity first."""
    secret_a = _sk()
    secret_b = sha256(b"author-b").digest()
    author_a = pubkey_hex_from_secret(secret_a)
    author_b = pubkey_hex_from_secret(secret_b)
    events = [
        build_signed_event(secret_a, created_at=100, kind=1, tags=[], content="a"),
        build_signed_event(secret_b, created_at=50, kind=1, tags=[], content="old"),
        build_signed_event(secret_b, created_at=300, kind=1, tags=[], content="new"),
    ]
    with tempfile.TemporaryDirectory() as tmp:
        db = Database(Path(tmp) / "d.sqlite")
        db.connect()
        for ev in events:
            db.upsert_event(ev)
        ordered = db.distinct_pubkeys_recent(10)
        # author_b's newest event (300) outranks author_a's single event (100).
        assert ordered[0] == author_b.lower()
        assert ordered[1] == author_a.lower()

68
tests/test_thread_links.py

@ -0,0 +1,68 @@
"""Thread root matching via ``e``/``E``/``a``/``A``/``q`` (Jumble-style)."""
import tempfile
from pathlib import Path
from typing import Any, cast
from imwald.core.database import Database, thread_root_link_targets
from imwald.core.nostr_crypto import build_signed_event, pubkey_hex_from_secret
def _sk() -> bytes:
return bytes.fromhex("3501454135014541350145413501453fefb02227e449e57cf4d3a3ce05378683")
def test_thread_root_link_targets_kind1() -> None:
    """A plain kind-1 root is addressed by its hex event id only."""
    event_id = "a" * 64
    root = cast(dict[str, Any], {"id": event_id, "kind": 1, "pubkey": "b" * 64, "tags": []})
    assert thread_root_link_targets(root) == [event_id]
def test_thread_root_link_targets_addressable_adds_a_coordinate() -> None:
    """An addressable root is reachable both by id and by its kind:pubkey:d coordinate."""
    author = "c" * 64
    root = cast(
        dict[str, Any],
        {"id": "d" * 64, "kind": 30023, "pubkey": author, "tags": [["d", "slug-x"]]},
    )
    targets = thread_root_link_targets(root)
    assert "d" * 64 in targets
    assert f"30023:{author}:slug-x" in targets
def test_list_replies_to_matches_q_tag() -> None:
    """A quote linking the root via a ``q`` tag counts as a thread reply."""
    secret = _sk()
    root = build_signed_event(secret, created_at=1, kind=1, tags=[], content="root")
    quote = build_signed_event(
        secret,
        created_at=2,
        kind=1,
        tags=[["q", root["id"]]],
        content="quote",
    )
    with tempfile.TemporaryDirectory() as tmp:
        db = Database(Path(tmp) / "t.sqlite")
        db.connect()
        for ev in (root, quote):
            db.upsert_event(ev)
        replies = db.list_replies_to(root, limit=20)
        assert [r["id"] for r in replies] == [quote["id"]]
def test_list_replies_to_matches_uppercase_e() -> None:
    """A NIP-22 comment linking the root via uppercase ``E`` still matches."""
    secret = _sk()
    root = build_signed_event(secret, created_at=1, kind=1, tags=[], content="root")
    comment_tags = [["E", root["id"], "", "1" * 64], ["p", pubkey_hex_from_secret(secret)]]
    comment = build_signed_event(
        secret,
        created_at=2,
        kind=1111,
        tags=comment_tags,
        content="c",
    )
    with tempfile.TemporaryDirectory() as tmp:
        db = Database(Path(tmp) / "t2.sqlite")
        db.connect()
        for ev in (root, comment):
            db.upsert_event(ev)
        replies = db.list_replies_to(root, limit=20)
        assert [r["id"] for r in replies] == [comment["id"]]
Loading…
Cancel
Save