30 changed files with 2733 additions and 3 deletions
@ -0,0 +1,12 @@ |
|||||||
|
.venv/ |
||||||
|
__pycache__/ |
||||||
|
*.py[cod] |
||||||
|
*.egg-info/ |
||||||
|
dist/ |
||||||
|
build/ |
||||||
|
.coverage |
||||||
|
htmlcov/ |
||||||
|
.mypy_cache/ |
||||||
|
.pytest_cache/ |
||||||
|
*.sqlite |
||||||
|
*.db |
||||||
@ -0,0 +1,20 @@ |
|||||||
|
# Flatpak manifest sketch — fill app-id, finish-args, and icon before publishing. |
||||||
|
app-id: org.imwald.Imwald |
||||||
|
runtime: org.kde.Platform |
||||||
|
runtime-version: '6.7' |
||||||
|
sdk: org.kde.Sdk |
||||||
|
command: imwald |
||||||
|
finish-args: |
||||||
|
- --share=network |
||||||
|
- --share=ipc |
||||||
|
- --socket=fallback-x11 |
||||||
|
- --socket=wayland |
||||||
|
- --device=dri |
||||||
|
modules: |
||||||
|
- name: imwald |
||||||
|
buildsystem: simple |
||||||
|
build-commands: |
||||||
|
- pip3 install --prefix=/app --no-build-isolation . |
||||||
|
sources: |
||||||
|
- type: dir |
||||||
|
path: .. |
||||||
@ -0,0 +1,33 @@ |
|||||||
|
[build-system] |
||||||
|
requires = ["setuptools>=61"] |
||||||
|
build-backend = "setuptools.build_meta" |
||||||
|
|
||||||
|
[project] |
||||||
|
name = "imwald" |
||||||
|
version = "0.1.0" |
||||||
|
description = "Linux-native Nostr client (Qt / PySide6)" |
||||||
|
readme = "README.md" |
||||||
|
requires-python = ">=3.11" |
||||||
|
dependencies = [ |
||||||
|
"PySide6>=6.6", |
||||||
|
"cryptography>=42", |
||||||
|
"bech32>=1.2", |
||||||
|
"coincurve>=20", |
||||||
|
"PyNaCl>=1.5", |
||||||
|
"websockets>=12", |
||||||
|
] |
||||||
|
|
||||||
|
[project.optional-dependencies] |
||||||
|
dev = ["pytest>=8"] |
||||||
|
|
||||||
|
[project.scripts] |
||||||
|
imwald = "imwald.__main__:main" |
||||||
|
|
||||||
|
[tool.setuptools.packages.find] |
||||||
|
where = ["src"] |
||||||
|
|
||||||
|
[tool.setuptools.package-dir] |
||||||
|
"" = "src" |
||||||
|
|
||||||
|
[tool.pytest.ini_options] |
||||||
|
pythonpath = ["src"] |
||||||
@ -0,0 +1,4 @@ |
|||||||
|
# Package entry point: `python -m imwald` delegates to imwald.app.main().
from imwald.app import main


if __name__ == "__main__":
    main()
||||||
@ -0,0 +1,32 @@ |
|||||||
|
"""QApplication bootstrap.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import logging |
||||||
|
import sys |
||||||
|
|
||||||
|
from PySide6.QtWidgets import QApplication |
||||||
|
|
||||||
|
from imwald.config import db_path |
||||||
|
from imwald.core.database import Database |
||||||
|
from imwald.core.nostr_engine import NostrEngine |
||||||
|
from imwald.ui.main_window import MainWindow |
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Boot the Qt application: open the DB, wire up the Nostr engine and
    main window, run the event loop, then tear everything down in order.

    Exits the process via SystemExit carrying Qt's return code.
    """
    logging.basicConfig(level=logging.INFO, format="%(levelname)s %(name)s: %(message)s")

    qt_app = QApplication(sys.argv)
    qt_app.setApplicationName("imwald")
    qt_app.setOrganizationName("imwald")

    database = Database(db_path())
    database.connect()

    engine = NostrEngine(database)
    window = MainWindow(db=database, engine=engine)
    window.show()
    # Relays start only after the window exists so the engine can scope the
    # kind-30000 list subscription to the window's current list owner.
    engine.start_relays(list30000_owner=window.list_owner_pubkey_for_relays())

    exit_code = qt_app.exec()

    # Orderly shutdown: stop network activity before closing the database.
    engine.stop_relays()
    database.close()
    raise SystemExit(exit_code)
||||||
@ -0,0 +1,33 @@ |
|||||||
|
"""XDG paths and app constants.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import os |
||||||
|
from pathlib import Path |
||||||
|
|
||||||
|
|
||||||
|
def _xdg_data_home() -> Path:
    """Resolve the XDG data home, defaulting to ``~/.local/share``."""
    override = os.environ.get("XDG_DATA_HOME")
    # An unset or empty variable falls through to the spec default.
    return Path(override) if override else Path.home() / ".local" / "share"
||||||
|
|
||||||
|
|
||||||
|
def data_dir() -> Path:
    """Return the per-user imwald data directory, creating it if needed."""
    directory = _xdg_data_home() / "imwald"
    directory.mkdir(parents=True, exist_ok=True)
    return directory
||||||
|
|
||||||
|
|
||||||
|
def db_path() -> Path:
    """Location of the SQLite database file inside the data directory."""
    return data_dir() / "imwald.sqlite"
||||||
|
|
||||||
|
|
||||||
|
def media_cache_dir() -> Path:
    """Return the media cache directory under the data dir, creating it if needed."""
    cache = data_dir() / "media_cache"
    cache.mkdir(parents=True, exist_ok=True)
    return cache
||||||
|
|
||||||
|
|
||||||
|
def accounts_json_path() -> Path:
    """Location of the JSON file that stores saved accounts."""
    return data_dir() / "accounts.json"
||||||
@ -0,0 +1 @@ |
|||||||
|
# Core: database, nostr, relays, ranking. |
||||||
@ -0,0 +1,97 @@ |
|||||||
|
"""Stored accounts — Jumble-compatible fields (nsec / ncryptsec / pubkey).""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import json |
||||||
|
from dataclasses import asdict, dataclass |
||||||
|
from pathlib import Path |
||||||
|
from typing import Any |
||||||
|
|
||||||
|
from coincurve import PrivateKey |
||||||
|
|
||||||
|
from imwald.config import accounts_json_path |
||||||
|
|
||||||
|
from .nip19 import decode_nsec, encode_nsec |
||||||
|
from .nip49 import decrypt_ncryptsec, encrypt_to_ncryptsec |
||||||
|
from .nostr_crypto import pubkey_hex_from_secret |
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class StoredAccount:
    """One saved identity, serialized Jumble-style (camelCase ``signerType``)."""

    pubkey: str
    signer_type: str  # 'nsec' | 'ncryptsec'
    nsec: str | None = None
    ncryptsec: str | None = None
    label: str | None = None

    def to_json(self) -> dict[str, Any]:
        """Serialize to the on-disk dict; empty optional fields are omitted."""
        payload: dict[str, Any] = {"pubkey": self.pubkey, "signerType": self.signer_type}
        for key, value in (("nsec", self.nsec), ("ncryptsec", self.ncryptsec), ("label", self.label)):
            if value:
                payload[key] = value
        return payload

    @classmethod
    def from_json(cls, d: dict[str, Any]) -> StoredAccount:
        """Build from a dict, accepting both camelCase and snake_case signer keys.

        The pubkey is normalized to lowercase hex.
        """
        signer = d.get("signerType") or d.get("signer_type") or "nsec"
        return cls(
            pubkey=str(d["pubkey"]).lower(),
            signer_type=str(signer),
            nsec=d.get("nsec"),
            ncryptsec=d.get("ncryptsec"),
            label=d.get("label"),
        )
||||||
|
|
||||||
|
|
||||||
|
def load_accounts(path: Path | None = None) -> list[StoredAccount]:
    """Read stored accounts from ``path`` (default: the XDG accounts file).

    A missing file or a top-level value that is not a JSON list yields an
    empty list; non-dict list entries are skipped.
    """
    target = path or accounts_json_path()
    if not target.exists():
        return []
    raw = json.loads(target.read_text(encoding="utf-8"))
    if not isinstance(raw, list):
        return []
    return [StoredAccount.from_json(entry) for entry in raw if isinstance(entry, dict)]
||||||
|
|
||||||
|
|
||||||
|
def save_accounts(accounts: list[StoredAccount], path: Path | None = None) -> None:
    """Write all accounts to ``path`` as pretty JSON with a trailing newline."""
    target = path or accounts_json_path()
    body = json.dumps([account.to_json() for account in accounts], indent=2, ensure_ascii=False)
    target.write_text(body + "\n", encoding="utf-8")
||||||
|
|
||||||
|
|
||||||
|
def add_account_nsec_hex(secret_hex: str, password: str | None, label: str | None = None) -> StoredAccount:
    """Build a StoredAccount from an ``nsec1...`` or raw-hex secret.

    With a password the secret is stored NIP-49 encrypted (``ncryptsec``);
    without one it is stored as a plain ``nsec`` string.
    """
    text = secret_hex.strip()
    secret = decode_nsec(text) if text.startswith("nsec") else bytes.fromhex(text)
    pubkey = pubkey_hex_from_secret(secret)
    if password:
        return StoredAccount(
            pubkey=pubkey,
            signer_type="ncryptsec",
            ncryptsec=encrypt_to_ncryptsec(secret, password),
            label=label,
        )
    return StoredAccount(pubkey=pubkey, signer_type="nsec", nsec=encode_nsec(secret), label=label)
||||||
|
|
||||||
|
|
||||||
|
def unlock_secret(account: StoredAccount, password: str | None) -> bytes:
    """Recover the 32-byte secret key for ``account``.

    Handles plain ``nsec`` storage (bech32 or 64-char hex) and NIP-49
    ``ncryptsec`` storage (requires ``password``).

    Raises:
        ValueError: for malformed stored material, a missing password, or an
            account that carries no usable secret.
    """
    if account.signer_type == "nsec" and account.nsec:
        stored = account.nsec.strip()
        if stored.startswith("nsec"):
            return decode_nsec(stored)
        if len(stored) != 64:
            raise ValueError("invalid stored nsec")
        return bytes.fromhex(stored)
    if account.signer_type == "ncryptsec" and account.ncryptsec:
        if not password:
            raise ValueError("password required for ncryptsec")
        return decrypt_ncryptsec(account.ncryptsec, password)
    raise ValueError("cannot unlock account")
||||||
|
|
||||||
|
|
||||||
|
def generate_new_identity() -> tuple[bytes, str]:
    """Create a fresh random keypair: (32-byte secret, lowercase hex pubkey)."""
    secret = PrivateKey().secret
    return secret, pubkey_hex_from_secret(secret)
||||||
@ -0,0 +1,538 @@ |
|||||||
|
"""SQLite WAL store — single source of truth for imwald.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import json |
||||||
|
import sqlite3 |
||||||
|
import time |
||||||
|
from contextlib import contextmanager |
||||||
|
from pathlib import Path |
||||||
|
from typing import Any, Generator, Iterable |
||||||
|
|
||||||
|
# Bumped when tables/columns change; older on-disk DBs are patched in place
# by Database._migrate_schema rather than recreated.
SCHEMA_VERSION = 2

# Executed verbatim on every connect(). Every statement is idempotent
# (PRAGMA / CREATE ... IF NOT EXISTS), so re-running against an existing
# database is safe.
DDL = """
PRAGMA journal_mode=WAL;
PRAGMA foreign_keys=ON;

CREATE TABLE IF NOT EXISTS meta (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS events (
  id TEXT PRIMARY KEY,
  pubkey TEXT NOT NULL,
  created_at INTEGER NOT NULL,
  kind INTEGER NOT NULL,
  content TEXT NOT NULL,
  sig TEXT NOT NULL,
  tags_json TEXT NOT NULL,
  raw_json TEXT,
  deleted INTEGER NOT NULL DEFAULT 0,
  received_at INTEGER NOT NULL,
  source_relay TEXT
);
CREATE INDEX IF NOT EXISTS idx_events_pubkey ON events(pubkey);
CREATE INDEX IF NOT EXISTS idx_events_kind ON events(kind);
CREATE INDEX IF NOT EXISTS idx_events_created ON events(created_at DESC);

CREATE TABLE IF NOT EXISTS tags (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  event_id TEXT NOT NULL REFERENCES events(id) ON DELETE CASCADE,
  name TEXT NOT NULL,
  value TEXT NOT NULL,
  pos INTEGER NOT NULL,
  UNIQUE(event_id, pos)
);
CREATE INDEX IF NOT EXISTS idx_tags_name_val ON tags(name, value);

CREATE TABLE IF NOT EXISTS event_relations (
  event_id TEXT NOT NULL REFERENCES events(id) ON DELETE CASCADE,
  relation TEXT NOT NULL,
  target TEXT NOT NULL,
  PRIMARY KEY(event_id, relation, target)
);

CREATE TABLE IF NOT EXISTS relays (
  url TEXT PRIMARY KEY,
  read INTEGER NOT NULL DEFAULT 1,
  write INTEGER NOT NULL DEFAULT 1,
  last_connected_at INTEGER,
  last_error TEXT,
  state TEXT NOT NULL DEFAULT 'disconnected'
);

CREATE TABLE IF NOT EXISTS sync_cursors (
  relay_url TEXT NOT NULL,
  subscription_key TEXT NOT NULL,
  until INTEGER,
  since INTEGER,
  PRIMARY KEY(relay_url, subscription_key)
);

CREATE TABLE IF NOT EXISTS media_objects (
  sha256 TEXT PRIMARY KEY,
  path TEXT NOT NULL,
  bytes INTEGER NOT NULL,
  created_at INTEGER NOT NULL
);

CREATE TABLE IF NOT EXISTS media_event_links (
  sha256 TEXT NOT NULL REFERENCES media_objects(sha256) ON DELETE CASCADE,
  event_id TEXT NOT NULL REFERENCES events(id) ON DELETE CASCADE,
  tag_name TEXT,
  PRIMARY KEY(sha256, event_id, tag_name)
);

CREATE TABLE IF NOT EXISTS user_votes (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  event_id TEXT NOT NULL REFERENCES events(id) ON DELETE CASCADE,
  voter_pubkey TEXT NOT NULL,
  vote INTEGER NOT NULL,
  created_at INTEGER NOT NULL,
  UNIQUE(event_id, voter_pubkey)
);

CREATE TABLE IF NOT EXISTS rank_features (
  event_id TEXT PRIMARY KEY REFERENCES events(id) ON DELETE CASCADE,
  score REAL NOT NULL,
  why_json TEXT
);

CREATE TABLE IF NOT EXISTS accounts (
  pubkey TEXT PRIMARY KEY,
  label TEXT,
  signer_type TEXT NOT NULL,
  nsec TEXT,
  ncryptsec TEXT,
  sort_order INTEGER NOT NULL DEFAULT 0
);

CREATE TABLE IF NOT EXISTS settings (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS translation_cache (
  key TEXT PRIMARY KEY,
  lang TEXT NOT NULL,
  text TEXT NOT NULL,
  created_at INTEGER NOT NULL
);

CREATE TABLE IF NOT EXISTS notifications (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  recipient_pubkey TEXT NOT NULL,
  source_event_id TEXT NOT NULL,
  kind TEXT NOT NULL,
  read INTEGER NOT NULL DEFAULT 0,
  created_at INTEGER NOT NULL,
  UNIQUE(recipient_pubkey, source_event_id, kind)
);
CREATE INDEX IF NOT EXISTS idx_notif_recipient ON notifications(recipient_pubkey, read);

CREATE TABLE IF NOT EXISTS feed_views (
  viewer_pubkey TEXT NOT NULL,
  event_id TEXT NOT NULL,
  seen_at INTEGER NOT NULL,
  PRIMARY KEY(viewer_pubkey, event_id)
);
CREATE INDEX IF NOT EXISTS idx_feed_views_event ON feed_views(event_id);
"""
||||||
|
|
||||||
|
|
||||||
|
class Database:
    """Thin wrapper over a single SQLite connection (WAL mode).

    One connection is shared across threads (``check_same_thread=False``);
    writers go through :meth:`write_lock`, which commits on success and rolls
    back on exception.
    NOTE(review): despite the name, write_lock() does not acquire a mutex —
    concurrent writers on other threads are not mutually excluded here;
    confirm all writes happen on one thread.
    """

    def __init__(self, path: Path) -> None:
        # Create the parent directory up front so connect() can open the file.
        self.path = path
        path.parent.mkdir(parents=True, exist_ok=True)
        self._conn: sqlite3.Connection | None = None

    def connect(self) -> None:
        """Open the connection, apply the idempotent DDL, run light migrations."""
        self._conn = sqlite3.connect(str(self.path), check_same_thread=False)
        self._conn.row_factory = sqlite3.Row
        self._conn.executescript(DDL)
        cur = self._conn.execute("SELECT value FROM meta WHERE key='schema_version'")
        row = cur.fetchone()
        if row is None:
            # First run: stamp the current schema version.
            # NOTE(review): an existing row with an older version is never
            # updated here — verify _migrate_schema covers every schema change.
            self._conn.execute(
                "INSERT INTO meta(key,value) VALUES ('schema_version', ?)",
                (str(SCHEMA_VERSION),),
            )
            self._conn.commit()
        self._migrate_schema()

    def close(self) -> None:
        """Close and forget the connection; safe to call when not connected."""
        if self._conn:
            self._conn.close()
            self._conn = None

    @contextmanager
    def write_lock(self) -> Generator[sqlite3.Connection, None, None]:
        """Yield the connection for a write; commit on success, rollback on error.

        Raises:
            RuntimeError: if connect() has not been called.
        """
        if not self._conn:
            raise RuntimeError("database not connected")
        try:
            yield self._conn
            self._conn.commit()
        except Exception:
            self._conn.rollback()
            raise

    def conn(self) -> sqlite3.Connection:
        """Return the live connection for read queries.

        Raises:
            RuntimeError: if connect() has not been called.
        """
        if not self._conn:
            raise RuntimeError("database not connected")
        return self._conn

    def _migrate_schema(self) -> None:
        """Light migrations for existing DBs created before new columns/tables."""
        c = self._conn
        if not c:
            return
        # PRAGMA table_info rows: (cid, name, type, notnull, dflt_value, pk).
        cols = {str(r[1]) for r in c.execute("PRAGMA table_info(events)")}
        if "source_relay" not in cols:
            c.execute("ALTER TABLE events ADD COLUMN source_relay TEXT")
        # feed_views may predate the DDL; both statements are idempotent.
        c.execute(
            """
            CREATE TABLE IF NOT EXISTS feed_views (
              viewer_pubkey TEXT NOT NULL,
              event_id TEXT NOT NULL,
              seen_at INTEGER NOT NULL,
              PRIMARY KEY(viewer_pubkey, event_id)
            )
            """
        )
        c.execute("CREATE INDEX IF NOT EXISTS idx_feed_views_event ON feed_views(event_id)")
        c.commit()

    def upsert_event(
        self,
        ev: dict[str, Any],
        *,
        deleted: bool = False,
        source_relay: str | None = None,
    ) -> None:
        """Insert or replace event; expand tags into tags table.

        Also rebuilds event_relations rows for 'e'/'p'/'a' tags. On conflict
        the previous source_relay is kept when the new one is None.
        """
        eid = ev["id"]
        tags = ev.get("tags") or []
        tags_json = json.dumps(tags, ensure_ascii=False)
        raw = json.dumps(ev, ensure_ascii=False)
        with self.write_lock() as c:
            # Clear derived rows first so the re-insert below is authoritative.
            c.execute("DELETE FROM tags WHERE event_id=?", (eid,))
            c.execute("DELETE FROM event_relations WHERE event_id=?", (eid,))
            c.execute(
                """
                INSERT INTO events(id,pubkey,created_at,kind,content,sig,tags_json,raw_json,deleted,received_at,source_relay)
                VALUES(?,?,?,?,?,?,?,?,?,?,?)
                ON CONFLICT(id) DO UPDATE SET
                  pubkey=excluded.pubkey,
                  created_at=excluded.created_at,
                  kind=excluded.kind,
                  content=excluded.content,
                  sig=excluded.sig,
                  tags_json=excluded.tags_json,
                  raw_json=excluded.raw_json,
                  deleted=excluded.deleted,
                  received_at=excluded.received_at,
                  source_relay=COALESCE(excluded.source_relay, events.source_relay)
                """,
                (
                    eid,
                    ev["pubkey"],
                    int(ev["created_at"]),
                    int(ev["kind"]),
                    ev.get("content") or "",
                    ev["sig"],
                    tags_json,
                    raw,
                    1 if deleted else 0,
                    int(time.time()),
                    source_relay,
                ),
            )
            for pos, t in enumerate(tags):
                if not t:
                    continue
                name = str(t[0])
                val = str(t[1]) if len(t) > 1 else ""
                c.execute(
                    "INSERT INTO tags(event_id,name,value,pos) VALUES(?,?,?,?)",
                    (eid, name, val, pos),
                )
                # Only reference tags become relations (reply/mention/address).
                if name in ("e", "p", "a"):
                    c.execute(
                        "INSERT OR IGNORE INTO event_relations(event_id,relation,target) VALUES(?,?,?)",
                        (eid, name, val),
                    )

    def tombstone_event(self, event_id: str) -> None:
        """Soft-delete: mark the event deleted but keep the row."""
        with self.write_lock() as c:
            c.execute(
                "UPDATE events SET deleted=1, received_at=? WHERE id=?",
                (int(time.time()), event_id),
            )

    def purge_event_local(self, event_id: str) -> None:
        """Hard-delete the row; dependent rows go via ON DELETE CASCADE."""
        with self.write_lock() as c:
            c.execute("DELETE FROM events WHERE id=?", (event_id,))

    def get_event(self, event_id: str) -> dict[str, Any] | None:
        """Fetch one event as a plain dict (tags decoded), or None if absent."""
        cur = self.conn().execute(
            "SELECT id,pubkey,created_at,kind,content,sig,tags_json,deleted,source_relay FROM events WHERE id=?",
            (event_id,),
        )
        row = cur.fetchone()
        if not row:
            return None
        return {
            "id": row["id"],
            "pubkey": row["pubkey"],
            "created_at": row["created_at"],
            "kind": row["kind"],
            "content": row["content"],
            "sig": row["sig"],
            "tags": json.loads(row["tags_json"] or "[]"),
            "deleted": bool(row["deleted"]),
            "source_relay": row["source_relay"],
        }

    def feed_candidates(
        self,
        kinds: Iterable[int],
        *,
        hide_nsfw: bool = True,
        limit: int = 200,
        viewer_pubkey: str | None = None,
        exclude_viewed: bool = True,
    ) -> list[dict[str, Any]]:
        """Newest-first undeleted events of the given kinds.

        Optionally filters out nsfw/content-warning tagged events and events
        already recorded in feed_views for ``viewer_pubkey``.
        NOTE(review): an empty ``kinds`` produces ``kind IN ()`` which is a
        SQLite syntax error — confirm callers always pass at least one kind.
        """
        kind_list = list(kinds)
        placeholders = ",".join("?" * len(kind_list))
        sql = f"""
            SELECT id,pubkey,created_at,kind,content,sig,tags_json,deleted,source_relay FROM events
            WHERE deleted=0 AND kind IN ({placeholders})
        """
        args: list[Any] = list(kind_list)
        if hide_nsfw:
            sql += (
                " AND id NOT IN (SELECT event_id FROM tags WHERE name='t' AND lower(value)='nsfw') "
                "AND id NOT IN (SELECT event_id FROM tags WHERE name='content-warning')"
            )
        if exclude_viewed and viewer_pubkey:
            sql += " AND id NOT IN (SELECT event_id FROM feed_views WHERE viewer_pubkey=?)"
            args.append(viewer_pubkey.lower())
        sql += " ORDER BY created_at DESC LIMIT ?"
        args.append(limit)
        out: list[dict[str, Any]] = []
        for row in self.conn().execute(sql, args):
            out.append(
                {
                    "id": row["id"],
                    "pubkey": row["pubkey"],
                    "created_at": row["created_at"],
                    "kind": row["kind"],
                    "content": row["content"],
                    "sig": row["sig"],
                    "tags": json.loads(row["tags_json"] or "[]"),
                    "source_relay": row["source_relay"],
                }
            )
        return out

    def mark_feed_viewed(self, viewer_pubkey: str, event_id: str) -> None:
        """Remember this event was shown in the feed so we do not surface it again."""
        with self.write_lock() as c:
            c.execute(
                """
                INSERT INTO feed_views(viewer_pubkey, event_id, seen_at)
                VALUES(?,?,?)
                ON CONFLICT(viewer_pubkey, event_id) DO UPDATE SET seen_at=excluded.seen_at
                """,
                (viewer_pubkey.lower(), event_id, int(time.time())),
            )

    def is_feed_viewed(self, viewer_pubkey: str, event_id: str) -> bool:
        """True if mark_feed_viewed() was recorded for this viewer/event pair."""
        cur = self.conn().execute(
            "SELECT 1 FROM feed_views WHERE viewer_pubkey=? AND event_id=? LIMIT 1",
            (viewer_pubkey.lower(), event_id),
        )
        return cur.fetchone() is not None

    def list_following_pubkeys(self, my_pubkey: str) -> set[str]:
        """Pubkeys from the latest kind-3 contact list for `my_pubkey` (NIP-02 JSON + `p` tags)."""
        cur = self.conn().execute(
            """
            SELECT content, tags_json FROM events
            WHERE deleted=0 AND kind=3 AND pubkey=? ORDER BY created_at DESC LIMIT 1
            """,
            (my_pubkey.lower(),),
        )
        row = cur.fetchone()
        if not row:
            return set()
        out: set[str] = set()
        content = row["content"] or ""
        # Legacy variant: content itself is a JSON list of pubkeys/objects.
        try:
            data = json.loads(content)
            if isinstance(data, list):
                for x in data:
                    if isinstance(x, str) and len(x) == 64:
                        out.add(x.lower())
                    elif isinstance(x, dict) and "pubkey" in x:
                        pk = str(x["pubkey"])
                        if len(pk) == 64:
                            out.add(pk.lower())
        except json.JSONDecodeError:
            pass
        # Standard variant: contacts carried as `p` tags.
        try:
            tags = json.loads(row["tags_json"] or "[]")
            for t in tags:
                if t and t[0] == "p" and len(t) > 1 and len(t[1]) == 64:
                    out.add(str(t[1]).lower())
        except json.JSONDecodeError:
            pass
        return out

    def list_kind30000_list_pubkeys(self, my_pubkey: str) -> set[str]:
        """Hex pubkeys listed on this user's kind 30000 (NIP-51 people lists) via `p` tags."""
        cur = self.conn().execute(
            """
            SELECT DISTINCT lower(t.value) AS pk
            FROM tags t
            INNER JOIN events e ON e.id = t.event_id
            WHERE e.deleted = 0 AND e.kind = 30000 AND lower(e.pubkey) = lower(?)
              AND t.name = 'p' AND length(t.value) = 64
            """,
            (my_pubkey,),
        )
        out: set[str] = set()
        for row in cur:
            pk = str(row["pk"])
            # Defensive: keep only well-formed lowercase hex values.
            if len(pk) == 64 and all(c in "0123456789abcdef" for c in pk):
                out.add(pk)
        return out

    def list_replies_to(self, event_id: str, limit: int = 80) -> list[dict[str, Any]]:
        """Undeleted events that `e`-tag ``event_id``, oldest first."""
        cur = self.conn().execute(
            """
            SELECT e.id, e.pubkey, e.created_at, e.kind, e.content, e.sig, e.tags_json
            FROM events e
            JOIN tags t ON t.event_id = e.id AND t.name = 'e' AND t.value = ?
            WHERE e.deleted = 0
            ORDER BY e.created_at ASC
            LIMIT ?
            """,
            (event_id, limit),
        )
        return [
            {
                "id": r["id"],
                "pubkey": r["pubkey"],
                "created_at": r["created_at"],
                "kind": r["kind"],
                "content": r["content"],
                "sig": r["sig"],
                "tags": json.loads(r["tags_json"] or "[]"),
            }
            for r in cur
        ]

    def search_local(self, query: str, limit: int = 100) -> list[dict[str, Any]]:
        """Substring search over content/id/pubkey of undeleted events, newest first.

        NOTE(review): LIKE wildcards in ``query`` ('%', '_') are not escaped,
        so they act as wildcards — confirm this is intended.
        """
        q = f"%{query}%"
        cur = self.conn().execute(
            """
            SELECT id,pubkey,created_at,kind,content,sig,tags_json FROM events
            WHERE deleted=0 AND (content LIKE ? OR id LIKE ? OR pubkey LIKE ?)
            ORDER BY created_at DESC LIMIT ?
            """,
            (q, q, q, limit),
        )
        rows = []
        for row in cur:
            rows.append(
                {
                    "id": row["id"],
                    "pubkey": row["pubkey"],
                    "created_at": row["created_at"],
                    "kind": row["kind"],
                    "content": row["content"],
                    "sig": row["sig"],
                    "tags": json.loads(row["tags_json"] or "[]"),
                }
            )
        return rows

    def list_my_events(self, pubkey: str, limit: int = 200) -> list[dict[str, Any]]:
        """Undeleted events authored by ``pubkey``, newest first."""
        cur = self.conn().execute(
            """
            SELECT id,pubkey,created_at,kind,content,sig,tags_json FROM events
            WHERE deleted=0 AND pubkey=? ORDER BY created_at DESC LIMIT ?
            """,
            (pubkey, limit),
        )
        return [
            {
                "id": r["id"],
                "pubkey": r["pubkey"],
                "created_at": r["created_at"],
                "kind": r["kind"],
                "content": r["content"],
                "sig": r["sig"],
                "tags": json.loads(r["tags_json"] or "[]"),
            }
            for r in cur
        ]

    def set_vote(self, event_id: str, voter_pubkey: str, vote: int) -> None:
        """Record (or overwrite) this voter's vote on an event."""
        with self.write_lock() as c:
            c.execute(
                """
                INSERT INTO user_votes(event_id,voter_pubkey,vote,created_at)
                VALUES(?,?,?,?)
                ON CONFLICT(event_id,voter_pubkey) DO UPDATE SET vote=excluded.vote, created_at=excluded.created_at
                """,
                (event_id, voter_pubkey, vote, int(time.time())),
            )

    def get_vote(self, event_id: str, voter_pubkey: str) -> int | None:
        """Return this voter's recorded vote for the event, or None."""
        cur = self.conn().execute(
            "SELECT vote FROM user_votes WHERE event_id=? AND voter_pubkey=?",
            (event_id, voter_pubkey),
        )
        row = cur.fetchone()
        return int(row["vote"]) if row else None

    def get_setting(self, key: str, default: str | None = None) -> str | None:
        """Fetch a settings value, falling back to ``default`` when unset."""
        cur = self.conn().execute("SELECT value FROM settings WHERE key=?", (key,))
        row = cur.fetchone()
        if row:
            return str(row[0])
        return default

    def set_setting(self, key: str, value: str) -> None:
        """Upsert a settings key/value pair."""
        with self.write_lock() as c:
            c.execute(
                "INSERT INTO settings(key,value) VALUES(?,?) ON CONFLICT(key) DO UPDATE SET value=excluded.value",
                (key, value),
            )

    def list_tables(self) -> list[str]:
        """User-visible table names (excluding SQLite internals), sorted."""
        cur = self.conn().execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name"
        )
        return [r[0] for r in cur.fetchall()]

    def table_columns(self, table: str) -> list[str]:
        """Column names of ``table`` via PRAGMA table_info."""
        cur = self.conn().execute(f'PRAGMA table_info("{table}")')
        return [str(r[1]) for r in cur.fetchall()]

    def select_safe(self, sql: str, params: tuple[Any, ...] = ()) -> tuple[list[str], list[sqlite3.Row]]:
        """Run a read-only query; returns (column names, rows).

        Raises:
            ValueError: if the statement is not a SELECT or contains a
                blacklisted keyword.

        NOTE(review): the token scan is heuristic — it matches only
        space-delimited keywords, so e.g. "delete(" or newline-delimited
        tokens slip through; sqlite3.execute runs a single statement, which
        limits the blast radius, but treat this as best-effort only.
        """
        s = sql.strip().lower()
        if not s.startswith("select"):
            raise ValueError("only SELECT queries are allowed")
        forbidden = (" insert ", " update ", " delete ", " drop ", " attach ", " pragma ")
        padded = f" {s} "
        for f in forbidden:
            if f in padded:
                raise ValueError(f"forbidden token in query: {f.strip()}")
        cur = self.conn().execute(sql, params)
        cols = [d[0] for d in cur.description] if cur.description else []
        return cols, cur.fetchall()
@ -0,0 +1,27 @@ |
|||||||
|
"""Minimal NIP-19: decode nsec / npub hex payload.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import bech32 |
||||||
|
|
||||||
|
|
||||||
|
def decode_nsec(nsec: str) -> bytes:
    """Decode a bech32 ``nsec1...`` string to its 32-byte secret key.

    Raises:
        ValueError: if the HRP is not 'nsec', the checksum/charset is bad,
            the 5-bit payload has invalid padding, or the result is not
            exactly 32 bytes.
    """
    hrp, data = bech32.bech32_decode(nsec.strip())
    if hrp != "nsec" or data is None:
        raise ValueError("invalid nsec")
    conv = bech32.convertbits(data, 5, 8, False)
    # convertbits returns None on invalid padding; the original bytes(None)
    # would raise TypeError instead of a clean ValueError. Also enforce the
    # 32-byte key length so truncated/overlong payloads are rejected.
    if conv is None or len(conv) != 32:
        raise ValueError("invalid nsec payload")
    return bytes(conv)
||||||
|
|
||||||
|
|
||||||
|
def encode_nsec(secret: bytes) -> str:
    """Encode a 32-byte secret key as a bech32 ``nsec1...`` string.

    Raises:
        ValueError: if the secret is not exactly 32 bytes.
    """
    if len(secret) != 32:
        raise ValueError("secret must be 32 bytes")
    five_bit = bech32.convertbits(list(secret), 8, 5, True)
    return bech32.bech32_encode("nsec", five_bit)
||||||
|
|
||||||
|
|
||||||
|
def encode_npub(pubkey_hex: str) -> str:
    """Encode a 64-char hex public key as a bech32 ``npub1...`` string.

    Raises:
        ValueError: if the hex does not decode to exactly 32 bytes.
    """
    key_bytes = bytes.fromhex(pubkey_hex.strip())
    if len(key_bytes) != 32:
        raise ValueError("pubkey must be 32 bytes hex")
    five_bit = bech32.convertbits(list(key_bytes), 8, 5, True)
    return bech32.bech32_encode("npub", five_bit)
||||||
@ -0,0 +1,87 @@ |
|||||||
|
"""NIP-49 ncryptsec encrypt/decrypt — compatible with Jumble (nostr-tools nip49).""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import hashlib |
||||||
|
import os |
||||||
|
import unicodedata |
||||||
|
from typing import Final |
||||||
|
|
||||||
|
import bech32 |
||||||
|
from nacl.bindings import crypto_aead_xchacha20poly1305_ietf_decrypt, crypto_aead_xchacha20poly1305_ietf_encrypt |
||||||
|
|
||||||
|
VERSION: Final = 0x02  # NIP-49 payload version byte (first byte of the payload)
KEY_SECURITY_MEDIUM: Final = 0x01  # key-security byte; fed to the AEAD as associated data
_DEFAULT_LOG_N: Final = 18  # scrypt cost exponent: N = 2**18
_SCRYPT_R: Final = 8  # scrypt block size parameter
_SCRYPT_P: Final = 1  # scrypt parallelization parameter
||||||
|
|
||||||
|
|
||||||
|
def _normalize_password(password: str) -> bytes:
    """NFKC-normalize the password and encode it as UTF-8 (NIP-49 requirement)."""
    normalized = unicodedata.normalize("NFKC", password)
    return normalized.encode("utf-8")
||||||
|
|
||||||
|
|
||||||
|
# Bech32 charset per BIP-173. A module-local decoder is required here: the
# `bech32` package's bech32_decode enforces BIP-173's 90-character limit,
# while a NIP-49 ncryptsec string (91-byte payload) is ~162 characters, so
# the original bech32.bech32_decode call always returned (None, None) and
# decryption could never succeed.
_BECH32_CHARSET: Final = "qpzry9x8gf2tvdw0s3jn54khce6mua7"


def _bech32_polymod(values: list[int]) -> int:
    """BIP-173 checksum polymod over a list of 5-bit values."""
    generator = (0x3B6A57B2, 0x26508E6D, 0x1EA119FA, 0x3D4233DD, 0x2A1462B3)
    chk = 1
    for value in values:
        top = chk >> 25
        chk = ((chk & 0x1FFFFFF) << 5) ^ value
        for i in range(5):
            if (top >> i) & 1:
                chk ^= generator[i]
    return chk


def _hrp_expand(hrp: str) -> list[int]:
    """Expand the human-readable part for checksum computation (BIP-173)."""
    return [ord(c) >> 5 for c in hrp] + [0] + [ord(c) & 31 for c in hrp]


def _decode_ncryptsec(ncryptsec: str) -> bytes:
    """Decode an ``ncryptsec1...`` bech32 string to its raw payload bytes.

    Length-unlimited bech32 decode (classic bech32 checksum constant 1),
    since NIP-49 strings exceed the BIP-173 90-character limit enforced by
    the `bech32` package.

    Raises:
        ValueError: on bad HRP, mixed case, invalid charset, bad checksum,
            or invalid 5-to-8-bit padding.
    """
    s = ncryptsec.strip()
    # Mixed-case strings are invalid per BIP-173.
    if s.lower() != s and s.upper() != s:
        raise ValueError("invalid ncryptsec hrp or data")
    s = s.lower()
    sep = s.rfind("1")
    if sep < 1:
        raise ValueError("invalid ncryptsec hrp or data")
    hrp, data_part = s[:sep], s[sep + 1:]
    if hrp != "ncryptsec" or len(data_part) < 6:
        raise ValueError("invalid ncryptsec hrp or data")
    try:
        values = [_BECH32_CHARSET.index(c) for c in data_part]
    except ValueError:
        raise ValueError("invalid ncryptsec hrp or data") from None
    if _bech32_polymod(_hrp_expand(hrp) + values) != 1:
        raise ValueError("invalid ncryptsec hrp or data")
    # Regroup the 5-bit values (minus the 6 checksum values) into bytes,
    # rejecting non-zero or oversized padding (same rules as
    # bech32.convertbits(..., 5, 8, pad=False)).
    acc = 0
    bits = 0
    out = bytearray()
    for value in values[:-6]:
        acc = (acc << 5) | value
        bits += 5
        if bits >= 8:
            bits -= 8
            out.append((acc >> bits) & 0xFF)
    if bits >= 5 or (acc & ((1 << bits) - 1)):
        raise ValueError("invalid ncryptsec hrp or data")
    return bytes(out)
||||||
|
|
||||||
|
|
||||||
|
def _encode_ncryptsec(payload: bytes) -> str:
    """Encode raw payload bytes as a bech32 ``ncryptsec1...`` string."""
    five_bit = bech32.convertbits(list(payload), 8, 5, True)
    return bech32.bech32_encode("ncryptsec", five_bit)
||||||
|
|
||||||
|
|
||||||
|
def decrypt_ncryptsec(ncryptsec: str, password: str) -> bytes:
    """Return 32-byte secret key from ncryptsec bech32 string.

    NIP-49 payload layout (91 bytes total):
    version(1) | log_n(1) | salt(16) | nonce(24) | key-security byte used
    as AEAD associated data(1) | ciphertext(48 = 32-byte key + 16-byte tag).

    Raises:
        ValueError: on wrong payload length, unsupported version, or a
            decrypted key that is not 32 bytes.
    """
    raw = _decode_ncryptsec(ncryptsec.strip())
    if len(raw) != 91:
        raise ValueError(f"invalid ncryptsec payload length {len(raw)}")
    version = raw[0]
    if version != VERSION:
        raise ValueError(f"unsupported ncryptsec version {version:#x}")
    # Fixed offsets per the payload layout above.
    log_n = raw[1]
    salt = raw[2:18]
    nonce = raw[18:42]
    aad = raw[42:43]
    ciphertext = raw[43:]
    n = 1 << log_n
    # OpenSSL scrypt enforces a max memory bound; raise for large log_n (e.g. 20 ~= 1 GiB).
    maxmem = max(256 * 1024 * 1024, 128 * n * _SCRYPT_R)
    key = hashlib.scrypt(
        _normalize_password(password),
        salt=salt,
        n=n,
        r=_SCRYPT_R,
        p=_SCRYPT_P,
        maxmem=maxmem,
        dklen=32,
    )
    # XChaCha20-Poly1305 open; a wrong password surfaces as the AEAD tag
    # check failing inside libsodium (raises, does not return garbage).
    plain = crypto_aead_xchacha20poly1305_ietf_decrypt(ciphertext, aad, nonce, key)
    if len(plain) != 32:
        raise ValueError("invalid decrypted key length")
    return plain
||||||
|
|
||||||
|
|
||||||
|
def encrypt_to_ncryptsec(secret_key: bytes, password: str, log_n: int = _DEFAULT_LOG_N) -> str:
    """Encrypt a 32-byte secret key to an ncryptsec string (NIP-49).

    Payload: version(1) | log_n(1) | salt(16) | nonce(24) | key-security
    byte as AAD(1) | XChaCha20-Poly1305 ciphertext.

    Raises:
        ValueError: wrong secret key length or out-of-range ``log_n``.
    """
    if len(secret_key) != 32:
        raise ValueError("secret key must be 32 bytes")
    # log_n is stored in a single payload byte and drives scrypt memory
    # (128 * 2**log_n * r bytes); keep it in a sane range, mirroring
    # decrypt_ncryptsec.
    if not 1 <= log_n <= 22:
        raise ValueError(f"unreasonable scrypt log_n {log_n}")
    salt = os.urandom(16)
    nonce = os.urandom(24)
    # Key-security byte (NIP-49) is authenticated as AAD, not encrypted.
    aad = bytes([KEY_SECURITY_MEDIUM])
    n = 1 << log_n
    # OpenSSL scrypt enforces a max memory bound; lift it for large log_n.
    maxmem = max(256 * 1024 * 1024, 128 * n * _SCRYPT_R)
    sym = hashlib.scrypt(
        _normalize_password(password),
        salt=salt,
        n=n,
        r=_SCRYPT_R,
        p=_SCRYPT_P,
        maxmem=maxmem,
        dklen=32,
    )
    ciphertext = crypto_aead_xchacha20poly1305_ietf_encrypt(secret_key, aad, nonce, sym)
    payload = bytes([VERSION, log_n]) + salt + nonce + aad + ciphertext
    return _encode_ncryptsec(payload)
||||||
@ -0,0 +1,57 @@ |
|||||||
|
"""Nostr event ids (NIP-01) and Schnorr signatures (BIP-340) via coincurve.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import json |
||||||
|
from hashlib import sha256 |
||||||
|
from typing import Any |
||||||
|
|
||||||
|
from coincurve import PrivateKey |
||||||
|
|
||||||
|
|
||||||
|
def serialize_event_for_id(pubkey: str, created_at: int, kind: int, tags: list, content: str) -> str:
    """Canonical NIP-01 serialization: compact JSON of [0, pubkey, created_at, kind, tags, content]."""
    payload = [0, pubkey, created_at, kind, tags, content]
    # Compact separators and raw UTF-8 are required for a stable event id.
    return json.dumps(payload, ensure_ascii=False, separators=(",", ":"))
||||||
|
|
||||||
|
|
||||||
|
def event_id_hex(pubkey: str, created_at: int, kind: int, tags: list, content: str) -> str:
    """SHA-256 hex digest of the NIP-01 canonical serialization — the event id."""
    serialized = serialize_event_for_id(pubkey, created_at, kind, tags, content)
    return sha256(serialized.encode("utf-8")).hexdigest()
||||||
|
|
||||||
|
|
||||||
|
def pubkey_hex_from_secret(secret: bytes) -> str:
    """Hex-encoded x-only (32-byte) secp256k1 public key for `secret` (NIP-01 pubkey)."""
    uncompressed = PrivateKey(secret).public_key.format(compressed=False)
    # Drop the 0x04 prefix and the Y coordinate; keep the 32-byte X only.
    return uncompressed[1:33].hex()
||||||
|
|
||||||
|
|
||||||
|
def sign_event_id(secret: bytes, event_id_hex_str: str) -> str:
    """Schnorr-sign (BIP-340) the 32-byte binary event id; return the 128-char hex signature."""
    signer = PrivateKey(secret)
    digest = bytes.fromhex(event_id_hex_str)
    if len(digest) != 32:
        raise ValueError("event id must be 32 bytes hex")
    return signer.sign_schnorr(digest).hex()
||||||
|
|
||||||
|
|
||||||
|
def build_signed_event(
    secret: bytes,
    *,
    created_at: int,
    kind: int,
    tags: list,
    content: str,
) -> dict[str, Any]:
    """Assemble a complete, signed NIP-01 event dict from a secret key and fields."""
    author = pubkey_hex_from_secret(secret)
    event_id = event_id_hex(author, created_at, kind, tags, content)
    return {
        "id": event_id,
        "pubkey": author,
        "created_at": created_at,
        "kind": kind,
        "tags": tags,
        "content": content,
        "sig": sign_event_id(secret, event_id),
    }
||||||
@ -0,0 +1,174 @@ |
|||||||
|
"""Coordinates relay ingest (signals), ranking, and publish.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import asyncio |
||||||
|
import json |
||||||
|
import logging |
||||||
|
import threading |
||||||
|
import time |
||||||
|
from typing import Any |
||||||
|
|
||||||
|
from PySide6.QtCore import QObject, Signal |
||||||
|
|
||||||
|
from imwald.core.accounts_store import StoredAccount, unlock_secret |
||||||
|
from imwald.core.database import Database |
||||||
|
from imwald.core.nostr_crypto import build_signed_event |
||||||
|
from imwald.core.nostr_publish import publish_to_relays_sync |
||||||
|
from imwald.core.ranker import Ranker |
||||||
|
from imwald.core.relay_manager import RelayManager |
||||||
|
from imwald.core.relay_policy import (
    AGGR_THREAD_RELAY,
    DEFAULT_READ_RELAYS,
    DEFAULT_WRITE_RELAYS,
    WISP_TRENDING_FEED_KINDS,
    default_feed_read_relays,
    is_wisp_trending_relay_url,
    use_aggr_for_threads,
)
||||||
|
|
||||||
|
log = logging.getLogger(__name__) |
||||||
|
|
||||||
|
|
||||||
|
class NostrEngine(QObject):
    """Relay worker thread emits `event_ingested` — connect on UI thread to write SQLite."""

    # (source relay url, raw event dict) for every EVENT received.
    event_ingested = Signal(str, object)
    # Human-readable relay status / NOTICE lines for the UI.
    relay_status = Signal(str)

    def __init__(self, db: Database) -> None:
        super().__init__()
        self.db = db
        self.ranker = Ranker(db)
        self._thread: threading.Thread | None = None
        self._loop: asyncio.AbstractEventLoop | None = None
        self._manager: RelayManager | None = None
        self._app_stop: asyncio.Event | None = None

    def start_relays(
        self,
        read_urls: list[str] | None = None,
        *,
        list30000_owner: str | None = None,
    ) -> None:
        """Start the background relay thread (no-op while one is already alive).

        Subscribes the main feed kinds on each read relay (trending kinds on
        the wisp trending relay), optionally the aggr thread relay, and —
        when `list30000_owner` is a 64-char hex pubkey — that owner's
        kind-30000 lists (NIP-51).
        """
        if self._thread and self._thread.is_alive():
            return
        urls = list(read_urls or default_feed_read_relays())
        k3000_owner = (list30000_owner or "").strip().lower()

        def runner() -> None:
            async def on_ev(url: str, ev: dict[str, Any]) -> None:
                # Cross-thread hop: Qt queues the signal to the UI thread.
                self.event_ingested.emit(url, ev)

            async def on_notice(url: str, text: str) -> None:
                self.relay_status.emit(f"{url} NOTICE {text}")

            async def amain() -> None:
                app_stop = asyncio.Event()
                self._app_stop = app_stop
                mgr = RelayManager(on_event=on_ev, on_notice=on_notice)
                self._manager = mgr
                kinds_main = [1, 20, 21, 30023, 9802, 11]
                for u in urls:
                    mgr.register(u)
                    kinds = list(WISP_TRENDING_FEED_KINDS) if is_wisp_trending_relay_url(u) else kinds_main
                    mgr.request_subscribe(
                        u,
                        f"imwald-{abs(hash(u)) % 10**8}",
                        [{"kinds": kinds, "limit": 220}],
                    )
                if use_aggr_for_threads(set(DEFAULT_WRITE_RELAYS)):
                    mgr.register(AGGR_THREAD_RELAY)
                    mgr.request_subscribe(
                        AGGR_THREAD_RELAY,
                        "imwald-aggr",
                        [{"kinds": [1], "limit": 120}],
                    )
                if len(k3000_owner) == 64 and all(c in "0123456789abcdef" for c in k3000_owner):
                    for u in urls:
                        sid = f"imwald-k30000-{abs(hash(f'{u}:{k3000_owner}')) % 10**9}"
                        mgr.request_subscribe(
                            u,
                            sid,
                            [{"kinds": [30000], "authors": [k3000_owner], "limit": 150}],
                        )
                await mgr.connect_all()
                # Block here until stop_relays() sets the event, then shut down.
                await app_stop.wait()
                await mgr.stop()

            loop = asyncio.new_event_loop()
            self._loop = loop
            asyncio.set_event_loop(loop)
            try:
                loop.run_until_complete(amain())
            finally:
                self._loop = None
                self._manager = None
                self._app_stop = None
                loop.close()

        self._thread = threading.Thread(target=runner, name="nostr-relay", daemon=True)
        self._thread.start()

    def stop_relays(self) -> None:
        """Signal the worker loop to stop and join the thread (5 s max)."""
        if self._loop and self._app_stop and self._loop.is_running():
            # Event lives on the worker loop; set it thread-safely.
            self._loop.call_soon_threadsafe(self._app_stop.set)
        if self._thread:
            self._thread.join(timeout=5.0)
            self._thread = None

    @staticmethod
    def apply_ingest_to_db(db: Database, ev: dict[str, Any], source_relay: str | None = None) -> None:
        """Persist one ingested event; kind-5 (NIP-09) tombstones its 'e'-tag targets first."""
        if not isinstance(ev, dict) or "id" not in ev:
            return
        if ev.get("kind") == 5:
            for t in ev.get("tags") or []:
                if t and t[0] == "e" and len(t) > 1:
                    db.tombstone_event(t[1])
        db.upsert_event(ev, source_relay=source_relay)

    def publish_kind0_and_lists(
        self,
        account: StoredAccount,
        password: str | None,
        *,
        name: str,
        about: str,
        interest_tags: list[str],
        languages: list[str],
    ) -> None:
        """Publish kind 0 profile, kind 10002 relay list (NIP-65), and kind 10015 interests.

        All three events go to the default write relays and are mirrored into
        the local database.
        """
        sec = unlock_secret(account, password)
        now = int(time.time())
        kind0 = build_signed_event(
            sec,
            created_at=now,
            kind=0,
            tags=[["client", "imwald"]],
            content=json.dumps({"name": name, "about": about}, ensure_ascii=False),
        )
        # NIP-65: one "r" tag per relay; the read/write marker is the tag's
        # third element, not a separate tag.
        tags_10002: list[list[str]] = [["client", "imwald"]]
        for r in DEFAULT_READ_RELAYS:
            tags_10002.append(["r", r, "read"])
        for r in DEFAULT_WRITE_RELAYS:
            tags_10002.append(["r", r, "write"])
        ev10002 = build_signed_event(sec, created_at=now, kind=10002, tags=tags_10002, content="")
        tags_10015: list[list[str]] = [["client", "imwald"]]
        for t in interest_tags[:50]:
            # "t" tags carry the hashtag without a leading '#'.
            tags_10015.append(["t", t.removeprefix("#")])
        for lang in languages[:3]:
            tags_10015.append(["l", lang])
        ev10015 = build_signed_event(sec, created_at=now, kind=10015, tags=tags_10015, content="")
        write_urls = list(DEFAULT_WRITE_RELAYS)
        for ev in (kind0, ev10002, ev10015):
            publish_to_relays_sync(write_urls, ev)
            self.db.upsert_event(ev)

    def publish_nip09_deletion(self, account: StoredAccount, password: str | None, target_event_id: str) -> None:
        """Publish a kind-5 (NIP-09) deletion request for a kind-1 event and store it locally."""
        sec = unlock_secret(account, password)
        now = int(time.time())
        # "k" declares the kind of the deleted event (kind-1 notes here).
        tags = [["e", target_event_id], ["k", "1"]]
        ev = build_signed_event(sec, created_at=now, kind=5, tags=tags, content="")
        publish_to_relays_sync(list(DEFAULT_WRITE_RELAYS), ev)
        self.db.upsert_event(ev)
||||||
@ -0,0 +1,44 @@ |
|||||||
|
"""Publish signed events to write relays (WebSocket).""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import asyncio |
||||||
|
import json |
||||||
|
import logging |
||||||
|
from typing import Any |
||||||
|
|
||||||
|
import websockets |
||||||
|
|
||||||
|
log = logging.getLogger(__name__) |
||||||
|
|
||||||
|
|
||||||
|
async def publish_to_relays(urls: list[str], event: dict[str, Any], timeout: float = 12.0) -> dict[str, bool]:
    """Send ["EVENT", event] to each relay concurrently; return url -> accepted map.

    A relay counts as success only when its first reply is a NIP-20 style
    ["OK", id, true, ...] message within `timeout` seconds.
    """
    outcome: dict[str, bool] = {}

    async def attempt(url: str) -> None:
        target = url.strip()
        if target.startswith(("http://", "https://")):
            log.warning("HTTP relay publish not implemented: %s", target)
            outcome[url] = False
            return
        ws_url = target if target.startswith(("ws://", "wss://")) else "wss://" + target
        try:
            async with websockets.connect(ws_url, ping_interval=20, open_timeout=timeout) as ws:
                await ws.send(json.dumps(["EVENT", event]))
                reply = json.loads(await asyncio.wait_for(ws.recv(), timeout=timeout))
                outcome[url] = (
                    isinstance(reply, list)
                    and len(reply) >= 4
                    and reply[0] == "OK"
                    and bool(reply[2])
                )
        except Exception as e:  # noqa: BLE001
            log.info("publish fail %s: %s", url, e)
            outcome[url] = False

    await asyncio.gather(*(attempt(u) for u in urls))
    return outcome
||||||
|
|
||||||
|
|
||||||
|
def publish_to_relays_sync(urls: list[str], event: dict[str, Any]) -> dict[str, bool]:
    """Blocking wrapper: run publish_to_relays on a fresh event loop."""
    results = asyncio.run(publish_to_relays(urls, event))
    return results
||||||
@ -0,0 +1,39 @@ |
|||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
from dataclasses import dataclass, field |
||||||
|
from typing import Any |
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class NostrEvent:
    """In-memory NIP-01 event, typically hydrated from a database row."""

    id: str
    pubkey: str
    created_at: int
    kind: int
    tags: list[list[str]]
    content: str
    sig: str

    @classmethod
    def from_row(cls, row: dict[str, Any]) -> "NostrEvent":
        """Build an event from a DB row; tolerates NULL tags_json/content."""
        import json

        return cls(
            id=row["id"],
            pubkey=row["pubkey"],
            created_at=int(row["created_at"]),
            kind=int(row["kind"]),
            tags=json.loads(row["tags_json"] or "[]"),
            content=row["content"] or "",
            sig=row["sig"],
        )

    def to_dict_unsigned(self) -> dict[str, Any]:
        """Dict with the signable fields only (no id/sig/created-by-db columns)."""
        return {
            "pubkey": self.pubkey,
            "created_at": self.created_at,
            "kind": self.kind,
            "tags": self.tags,
            "content": self.content,
        }
||||||
@ -0,0 +1,109 @@ |
|||||||
|
"""Local feed ranking: follows first, trending slice, votes, graph stubs.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import json |
||||||
|
from typing import Any |
||||||
|
|
||||||
|
from imwald.core.relay_policy import is_wisp_trending_relay_url |
||||||
|
|
||||||
|
from .database import Database |
||||||
|
|
||||||
|
|
||||||
|
# Followed authors should dominate the feed vs algorithmic/trending slices.
WEIGHT_FOLLOW_AUTHOR = 50_000.0
# Kind 30000 list members (NIP-51) — meaningful boost, but below follows.
WEIGHT_KIND30000_LIST_AUTHOR = 8_000.0
# Events that arrived via the wisp trending relay slice.
WEIGHT_TRENDING_RELAY = 120.0
# Per-interaction weights — reserved for DB-derived counts (see the note
# emitted by Ranker.score_event); only WEIGHT_BOOST is applied today.
WEIGHT_REPLY = 5.0
WEIGHT_QUOTE = 5.0
WEIGHT_BOOST = 5.0
WEIGHT_ZAP = 3.0
# Multiplier for the user's own locally stored vote value.
WEIGHT_LOCAL_VOTE = 1.0
|
|
||||||
|
|
||||||
|
def _tags_contain_repost(tags: list) -> bool: |
||||||
|
for t in tags: |
||||||
|
if not t: |
||||||
|
continue |
||||||
|
if t[0] == "kind" and len(t) > 1 and t[1] == "6": |
||||||
|
return True |
||||||
|
if t[0] == "q": |
||||||
|
return True |
||||||
|
return False |
||||||
|
|
||||||
|
|
||||||
|
class Ranker:
    """Scores and orders feed events using follows, NIP-51 lists, trending source, and local votes."""

    def __init__(self, db: Database) -> None:
        self._db = db

    def score_event(
        self,
        ev: dict[str, Any],
        *,
        my_pubkey: str | None,
        following: set[str],
        list30000_pubkeys: set[str],
    ) -> tuple[float, dict[str, Any]]:
        """Return (score, explanation dict) for one event dict."""
        explanation: dict[str, Any] = {}
        total = 0.0
        author = (ev.get("pubkey") or "").lower()
        follows = {p.lower() for p in following}
        listed = {p.lower() for p in list30000_pubkeys}
        if my_pubkey:
            vote = self._db.get_vote(ev["id"], my_pubkey)
            if vote is not None:
                contribution = WEIGHT_LOCAL_VOTE * vote
                total += contribution
                explanation["local_vote"] = {"vote": vote, "contribution": contribution}
        # Follow boost wins over list boost; they never stack.
        if author and author in follows:
            total += WEIGHT_FOLLOW_AUTHOR
            explanation["follow_author"] = WEIGHT_FOLLOW_AUTHOR
        elif author and author in listed:
            total += WEIGHT_KIND30000_LIST_AUTHOR
            explanation["kind30000_list"] = WEIGHT_KIND30000_LIST_AUTHOR
        source = ev.get("source_relay") or ""
        if source and is_wisp_trending_relay_url(source):
            total += WEIGHT_TRENDING_RELAY
            explanation["trending_relay"] = WEIGHT_TRENDING_RELAY
        if _tags_contain_repost(ev.get("tags") or []):
            total += WEIGHT_BOOST
            explanation["repost_or_quote_hint"] = WEIGHT_BOOST
        explanation["note"] = "Extend with reply/boost/zap counts from DB event_relations + kind 9735."
        return total, explanation

    def persist_score(self, event_id: str, score: float, why: dict[str, Any]) -> None:
        """Upsert the computed score and explanation JSON into rank_features."""
        payload = (event_id, score, json.dumps(why, ensure_ascii=False))
        with self._db.write_lock() as cur:
            cur.execute(
                """
                INSERT INTO rank_features(event_id, score, why_json)
                VALUES(?,?,?)
                ON CONFLICT(event_id) DO UPDATE SET score=excluded.score, why_json=excluded.why_json
                """,
                payload,
            )

    def rank_feed(
        self,
        events: list[dict[str, Any]],
        my_pubkey: str | None,
        following: set[str],
        list30000_pubkeys: set[str],
    ) -> list[dict[str, Any]]:
        """Score every event, persist the scores, and return events best-first."""
        follows = {p.lower() for p in following}
        listed = {p.lower() for p in list30000_pubkeys}
        triples: list[tuple[float, dict[str, Any], dict[str, Any]]] = []
        for ev in events:
            score, why = self.score_event(
                ev,
                my_pubkey=my_pubkey,
                following=follows,
                list30000_pubkeys=listed,
            )
            triples.append((score, ev, why))
        # Highest score first (follow boost + trending + votes), then recency.
        triples.sort(key=lambda item: (item[0], item[1]["created_at"]), reverse=True)
        for score, ev, why in triples:
            self.persist_score(ev["id"], score, why)
        return [ev for _, ev, _ in triples]
||||||
@ -0,0 +1,175 @@ |
|||||||
|
"""Per-relay connection state, backoff, and transparent status (Jumble-style).""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import asyncio |
||||||
|
import contextlib |
||||||
|
import json |
||||||
|
import logging |
||||||
|
import random |
||||||
|
import time |
||||||
|
from dataclasses import dataclass, field |
||||||
|
from enum import Enum |
||||||
|
from typing import Any, Callable, Coroutine |
||||||
|
|
||||||
|
import websockets |
||||||
|
from websockets.client import WebSocketClientProtocol |
||||||
|
|
||||||
|
log = logging.getLogger(__name__) |
||||||
|
|
||||||
|
|
||||||
|
class RelayState(str, Enum):
    """Connection lifecycle states for one relay (str-valued for easy display)."""

    DISCONNECTED = "disconnected"
    CONNECTING = "connecting"
    CONNECTED = "connected"
    # Waiting out a reconnect delay before the next attempt.
    BACKOFF = "backoff"
    ERROR = "error"
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class RelayConn:
    """Per-relay connection bookkeeping; underscored fields hold the live socket and reader task."""

    url: str
    state: RelayState = RelayState.DISCONNECTED
    last_error: str | None = None
    last_connected_at: float | None = None
    backoff_until: float = 0.0
    # Live objects are excluded from repr() to keep status logs readable.
    _ws: WebSocketClientProtocol | None = field(default=None, repr=False)
    _task: asyncio.Task[None] | None = field(default=None, repr=False)

    def status_line(self) -> str:
        """One-line human-readable status, e.g. 'wss://x: connected' or '…: error (reason)'."""
        suffix = f" ({self.last_error})" if self.last_error else ""
        return f"{self.url}: {self.state.value}{suffix}"
||||||
|
|
||||||
|
|
||||||
|
def _normalize_ws_url(url: str) -> str: |
||||||
|
u = url.strip() |
||||||
|
if u.startswith("http://"): |
||||||
|
return "ws://" + u[len("http://") :] |
||||||
|
if u.startswith("https://"): |
||||||
|
return "wss://" + u[len("https://") :] |
||||||
|
return u |
||||||
|
|
||||||
|
|
||||||
|
class RelayManager:
    """Owns asyncio tasks; call from a dedicated thread running `asyncio.run`.

    Each registered relay gets one long-lived reader task (`_run_relay`) that
    reconnects with capped exponential backoff until `stop()` is called.
    Subscriptions are remembered per relay and re-sent on every reconnect.
    """

    def __init__(
        self,
        on_event: Callable[[str, dict[str, Any]], Coroutine[Any, Any, None]],
        on_notice: Callable[[str, str], Coroutine[Any, Any, None]] | None = None,
    ) -> None:
        # Awaited for every EVENT: (relay_url, event_dict).
        self._on_event = on_event
        # Optional; awaited for every NOTICE: (relay_url, text).
        self._on_notice = on_notice
        # normalized ws(s):// url -> connection state.
        self._relays: dict[str, RelayConn] = {}
        # "relay_url:sub_id" -> {"relay", "sub_id", "filters"}; replayed on reconnect.
        self._subs: dict[str, dict[str, Any]] = {}
        self._shutdown = asyncio.Event()

    def register(self, url: str) -> RelayConn:
        """Ensure a RelayConn exists for `url` (normalized); does not connect."""
        url = _normalize_ws_url(url)
        if url not in self._relays:
            self._relays[url] = RelayConn(url=url)
        return self._relays[url]

    def all_relays(self) -> list[RelayConn]:
        """Snapshot of all known relay connection states (for status UI)."""
        return list(self._relays.values())

    async def connect_all(self) -> None:
        """Spawn a reader task for every registered relay not already running."""
        for url in list(self._relays.keys()):
            await self._ensure_connected(url)

    async def stop(self) -> None:
        """Cancel all reader tasks and close all sockets; idempotent."""
        self._shutdown.set()
        for r in self._relays.values():
            if r._task:
                r._task.cancel()
                with contextlib.suppress(asyncio.CancelledError):
                    await r._task
                r._task = None
            if r._ws:
                await r._ws.close()
                r._ws = None

    def request_subscribe(self, relay_url: str, sub_id: str, filters: list[dict[str, Any]]) -> None:
        """Remember a subscription; send REQ now if that relay is already connected.

        Safe to call before connecting — the stored subscription is replayed
        whenever `_run_relay` (re)establishes the socket.
        """
        relay_url = _normalize_ws_url(relay_url)
        self._subs[f"{relay_url}:{sub_id}"] = {"relay": relay_url, "sub_id": sub_id, "filters": filters}
        if relay_url in self._relays and self._relays[relay_url]._ws:
            # Fire-and-forget; must be called with a running event loop.
            asyncio.create_task(self._send_req(relay_url, sub_id, filters))

    async def _send_req(self, relay_url: str, sub_id: str, filters: list[dict[str, Any]]) -> None:
        """Send a NIP-01 ["REQ", sub_id, *filters] frame if the relay socket is up."""
        r = self._relays.get(relay_url)
        if not r or not r._ws:
            return
        msg = json.dumps(["REQ", sub_id, *filters])
        await r._ws.send(msg)

    async def _ensure_connected(self, url: str) -> None:
        """Start the reader task for `url` unless connected, backing off, or already running."""
        r = self._relays[url]
        now = time.monotonic()
        if r.state == RelayState.BACKOFF and now < r.backoff_until:
            return
        if r._ws and r.state == RelayState.CONNECTED:
            return
        if r._task and not r._task.done():
            return
        r._task = asyncio.create_task(self._run_relay(url))

    async def _run_relay(self, url: str) -> None:
        """Reader loop for one relay: connect, replay subs, dispatch frames, back off, repeat.

        Runs until `_shutdown` is set. Backoff delay grows 1.5^attempt up to
        60 s (plus jitter) and resets after a successful connection.
        """
        r = self._relays[url]
        attempt = 0
        while not self._shutdown.is_set():
            r.state = RelayState.CONNECTING
            r.last_error = None
            try:
                log.info("connecting %s", url)
                async with websockets.connect(
                    url,
                    ping_interval=20,
                    ping_timeout=20,
                    close_timeout=5,
                    max_size=2**22,  # 4 MiB frame cap
                ) as ws:
                    r._ws = ws
                    r.state = RelayState.CONNECTED
                    r.last_connected_at = time.time()
                    # re-send subscriptions for this relay
                    for key, sub in self._subs.items():
                        if sub["relay"] == url:
                            await self._send_req(url, sub["sub_id"], sub["filters"])
                    attempt = 0
                    async for raw in ws:
                        if self._shutdown.is_set():
                            break
                        try:
                            msg = json.loads(raw)
                        except json.JSONDecodeError:
                            continue
                        if not isinstance(msg, list) or not msg:
                            continue
                        typ = msg[0]
                        if typ == "EVENT" and len(msg) >= 3:
                            # ["EVENT", sub_id, event] — forward the event dict only.
                            await self._on_event(url, msg[2])
                        elif typ == "NOTICE" and len(msg) >= 2 and self._on_notice:
                            await self._on_notice(url, str(msg[1]))
                        elif typ == "OK":
                            # Publish acks are handled by the publisher path; ignore here.
                            pass
            except Exception as e:  # noqa: BLE001
                r.last_error = repr(e)
                r.state = RelayState.ERROR
                log.warning("relay %s error: %s", url, e)
            finally:
                r._ws = None
                r.state = RelayState.BACKOFF
                attempt += 1
                delay = min(60.0, 1.5**attempt) + random.random()
                r.backoff_until = time.monotonic() + delay
                try:
                    # Sleep out the backoff, but wake immediately on shutdown.
                    await asyncio.wait_for(self._shutdown.wait(), timeout=delay)
                    break
                except TimeoutError:
                    continue

    async def run_forever(self, urls: list[str]) -> None:
        """Convenience entry point: register + connect `urls`, then wait for stop()."""
        for u in urls:
            self.register(u)
        await self.connect_all()
        await self._shutdown.wait()
||||||
@ -0,0 +1,48 @@ |
|||||||
|
"""Default relay lists (kind 10002) and aggr.nostr.land thread rule.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
# Read relays (default kind 10002 `r` read=true) |
||||||
|
# Hard-coded defaults; the user's own NIP-65 list (kind 10002) is published
# from these by the engine.
DEFAULT_READ_RELAYS: tuple[str, ...] = (
    "wss://theforest.nostr1.com",
    "wss://nostr.land",
    "wss://nostr21.com",
    "wss://christpill.nostr1.com",
)

# Write relays (default kind 10002 `r` write=true)
DEFAULT_WRITE_RELAYS: tuple[str, ...] = (
    "wss://thecitadel.nostr1.com",
    "wss://freelay.sovbit.host",
    "wss://relay.damus.io",
    "wss://relay.primal.net",
)

# Thread / reply aggregation when nostr.land is a write relay in user's 10002
AGGR_THREAD_RELAY = "wss://aggr.nostr.land"

# Jumble / Wisp “trending on Nostr” slice — nostrarchives feeds (see jumble `wisp-trending-relay.ts`).
WISP_TRENDING_NOTES_RELAY = "wss://feeds.nostrarchives.com/notes/trending/reactions/today"

# Kinds Jumble subscribes on that trending WebSocket (`WISP_TRENDING_FEED_KINDS`).
WISP_TRENDING_FEED_KINDS: tuple[int, ...] = (1, 6, 1068, 6969, 30023, 20, 21, 22)
||||||
|
|
||||||
|
|
||||||
|
def use_aggr_for_threads(user_write_urls: set[str]) -> bool:
    """True when nostr.land is among the user's write relays (threads then go via aggr.nostr.land)."""
    return bool({"wss://nostr.land", "ws://nostr.land"} & user_write_urls)
||||||
|
|
||||||
|
|
||||||
|
def default_feed_read_relays() -> list[str]:
    """Read set for the main + trending mix (deduped, order-preserving)."""
    candidates = (*DEFAULT_READ_RELAYS, WISP_TRENDING_NOTES_RELAY)
    # dict.fromkeys preserves first-seen order while removing duplicates.
    return list(dict.fromkeys(candidates))
||||||
|
|
||||||
|
|
||||||
|
def is_wisp_trending_relay_url(url: str) -> bool:
    """Detect the nostrarchives 'trending notes' feed relay regardless of ws/wss scheme."""
    canonical = url.lower()
    for scheme in ("ws://", "wss://"):
        canonical = canonical.replace(scheme, "https://")
    return "feeds.nostrarchives.com" in canonical and "/notes/trending/" in canonical
||||||
@ -0,0 +1,117 @@ |
|||||||
|
"""Advanced composer: kind, tags, content; Edit clones without id/sig/created_at.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import json |
||||||
|
import time |
||||||
|
from typing import Any |
||||||
|
|
||||||
|
from PySide6.QtCore import Qt |
||||||
|
from PySide6.QtWidgets import ( |
||||||
|
QComboBox, |
||||||
|
QDialog, |
||||||
|
QDialogButtonBox, |
||||||
|
QFormLayout, |
||||||
|
QHBoxLayout, |
||||||
|
QLabel, |
||||||
|
QLineEdit, |
||||||
|
QMessageBox, |
||||||
|
QPlainTextEdit, |
||||||
|
QSpinBox, |
||||||
|
QVBoxLayout, |
||||||
|
) |
||||||
|
|
||||||
|
from imwald.core.accounts_store import StoredAccount, unlock_secret |
||||||
|
from imwald.core.nostr_crypto import build_signed_event |
||||||
|
from imwald.core.nostr_publish import publish_to_relays_sync |
||||||
|
from imwald.core.relay_policy import DEFAULT_WRITE_RELAYS |
||||||
|
|
||||||
|
# Kinds offered in the composer's quick-pick combo (same set the feed subscribes).
FEED_KINDS = [1, 20, 21, 30023, 9802, 11]
# Common tag names surfaced as hints under the tags field.
TAG_SUGGESTIONS = ["t", "client", "e", "p", "relay", "imeta"]
||||||
|
|
||||||
|
|
||||||
|
class ComposerDialog(QDialog):
    """Advanced event composer: kind + JSON tags + content, signed and published on Save.

    In edit mode (`edit_from`) the dialog is pre-filled from an existing
    event's kind/tags/content and publishes a fresh event (new id/sig).
    """

    def __init__(
        self,
        parent=None,
        *,
        edit_from: dict[str, Any] | None = None,
        account: StoredAccount,
        password: str | None = None,
    ) -> None:
        super().__init__(parent)
        self.setWindowTitle("New event" if edit_from is None else "Edit event (clone)")
        self._account = account
        self._password = password
        self._edit_from = edit_from
        # Set to the signed event dict after a successful publish; callers read it.
        self.last_published: dict | None = None

        # Kind entry: free spinbox plus a combo of common feed kinds.
        self._kind = QSpinBox()
        self._kind.setRange(0, 99999)
        self._kind_combo = QComboBox()
        self._kind_combo.addItem("Custom (use spinbox)", -1)
        for k in FEED_KINDS:
            self._kind_combo.addItem(f"Kind {k}", k)
        self._kind_combo.currentIndexChanged.connect(self._on_kind_combo)

        # Tags are entered as raw JSON; validated in _publish.
        self._tags = QLineEdit()
        self._tags.setPlaceholderText('JSON array of tags, e.g. [["t","nostr"]]')
        self._content = QPlainTextEdit()
        self._hint = QLabel("Suggestions: " + ", ".join(f'["{t}","…"]' for t in TAG_SUGGESTIONS[:4]))

        buttons = QDialogButtonBox(QDialogButtonBox.Save | QDialogButtonBox.Cancel)
        # Save triggers sign + publish; only on success does the dialog accept.
        buttons.accepted.connect(self._publish)
        buttons.rejected.connect(self.reject)

        form = QFormLayout()
        row = QHBoxLayout()
        row.addWidget(self._kind_combo)
        row.addWidget(self._kind)
        form.addRow("Kind", row)
        form.addRow("Tags (JSON)", self._tags)
        form.addRow(self._hint)
        form.addRow("Content", self._content)

        root = QVBoxLayout(self)
        root.addLayout(form)
        root.addWidget(buttons)

        # Pre-fill from the cloned event, or sensible defaults for a new note.
        if edit_from:
            self._kind.setValue(int(edit_from.get("kind", 1)))
            self._tags.setText(json.dumps(edit_from.get("tags") or [], ensure_ascii=False))
            self._content.setPlainText(edit_from.get("content") or "")
        else:
            self._kind.setValue(1)
            self._tags.setText(json.dumps([["client", "imwald"]], ensure_ascii=False))

    def _on_kind_combo(self, _idx: int) -> None:
        """Mirror a concrete combo selection into the spinbox (-1 = custom, leave as-is)."""
        data = self._kind_combo.currentData()
        if data is not None and int(data) >= 0:
            self._kind.setValue(int(data))

    def _publish(self) -> None:
        """Validate tags JSON, unlock the key, sign, publish, then accept the dialog.

        Shows a warning box and keeps the dialog open on invalid tags or a
        failed key unlock.
        """
        try:
            tags = json.loads(self._tags.text() or "[]")
            if not isinstance(tags, list):
                raise ValueError("tags must be a JSON array")
        except Exception as e:  # noqa: BLE001
            QMessageBox.warning(self, "Invalid tags", str(e))
            return
        kind = int(self._kind.value())
        content = self._content.toPlainText()
        try:
            sec = unlock_secret(self._account, self._password)
        except Exception as e:  # noqa: BLE001
            QMessageBox.warning(self, "Unlock failed", str(e))
            return
        now = int(time.time())
        ev = build_signed_event(sec, created_at=now, kind=kind, tags=tags, content=content)
        # NOTE(review): blocking network call on the UI thread — confirm acceptable.
        publish_to_relays_sync(list(DEFAULT_WRITE_RELAYS), ev)
        self.last_published = ev
        self.accept()
||||||
|
|
||||||
|
|
||||||
|
def open_composer_for_edit(parent, ev: dict[str, Any], account: StoredAccount, password: str | None) -> None:
    """Open the composer pre-filled from `ev` — only kind/tags/content are carried over (no id/sig/created_at)."""
    seed = {key: ev[key] for key in ("kind", "tags", "content") if key in ev}
    dialog = ComposerDialog(parent, edit_from=seed, account=account, password=password)
    dialog.exec()
||||||
@ -0,0 +1,181 @@ |
|||||||
|
"""Local database admin: SELECT-only SQL, CRUD on local tables, event row actions.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
from PySide6.QtCore import Qt, Signal |
||||||
|
from PySide6.QtWidgets import ( |
||||||
|
QComboBox, |
||||||
|
QHBoxLayout, |
||||||
|
QLabel, |
||||||
|
QMessageBox, |
||||||
|
QPlainTextEdit, |
||||||
|
QPushButton, |
||||||
|
QTableWidget, |
||||||
|
QTableWidgetItem, |
||||||
|
QVBoxLayout, |
||||||
|
QWidget, |
||||||
|
) |
||||||
|
|
||||||
|
from imwald.core.accounts_store import StoredAccount |
||||||
|
from imwald.core.database import Database |
||||||
|
|
||||||
|
|
||||||
|
class DbAdminPage(QWidget):
    """Local database admin page.

    Browse local tables, run SELECT-only SQL, and act on rows of the
    ``events`` table: open in the event viewer, purge locally, or request a
    NIP-09 deletion (only for events signed by one of the saved keys).
    """

    # Emitted with an event id when the user opens a row in the viewer.
    open_event = Signal(str)
    # Emitted with (event_id, signing_pubkey hex) to request a NIP-09 deletion.
    request_nip09 = Signal(str, str)

    def __init__(self, db: Database, accounts: list[StoredAccount], parent=None) -> None:
        super().__init__(parent)
        self._db = db
        self._accounts = accounts
        self._table = QComboBox()
        self._table.currentTextChanged.connect(self._load_table)
        self._grid = QTableWidget()
        self._sql = QPlainTextEdit()
        self._sql.setPlaceholderText("SELECT … only")
        run = QPushButton("Run SELECT")
        run.clicked.connect(self._run_sql)
        open_btn = QPushButton("Open in event viewer")
        open_btn.clicked.connect(self._open_selected_event)
        purge_btn = QPushButton("Purge from local DB")
        purge_btn.clicked.connect(self._purge_selected_event)
        self._nip_btn = QPushButton("Publish NIP-09 deletion")
        self._nip_btn.clicked.connect(self._nip09)
        # Hidden until an events row signed by a saved key is selected.
        self._nip_btn.setVisible(False)
        row = QHBoxLayout()
        row.addWidget(open_btn)
        row.addWidget(purge_btn)
        row.addWidget(self._nip_btn)
        lay = QVBoxLayout(self)
        lay.addWidget(
            QLabel(
                "Browse tables. Events: no SQL UPDATE/INSERT; NIP-09 only if pubkey matches a saved key; "
                "purges are local-only."
            )
        )
        lay.addWidget(self._table)
        lay.addWidget(self._grid)
        self._grid.itemSelectionChanged.connect(self._on_sel)
        lay.addLayout(row)
        lay.addWidget(QLabel("SQL (SELECT only):"))
        lay.addWidget(self._sql)
        lay.addWidget(run)
        self._reload_table_list()

    def set_accounts(self, accounts: list[StoredAccount]) -> None:
        """Replace the saved-accounts list used for NIP-09 gating."""
        self._accounts = accounts

    def _reload_table_list(self) -> None:
        """Refill the table combo from the DB and load the current table."""
        self._table.blockSignals(True)
        self._table.clear()
        for name in self._db.list_tables():
            self._table.addItem(name)
        self._table.blockSignals(False)
        if self._table.count():
            self._load_table(self._table.currentText())

    def _fill_grid(self, cols, rows) -> None:
        """Populate the grid from a (columns, rows) query result.

        Shared by table browsing and ad-hoc SELECTs so the rendering of
        NULL vs. stringified values stays identical in both paths.
        """
        self._grid.clear()
        self._grid.setColumnCount(len(cols))
        self._grid.setHorizontalHeaderLabels(cols)
        self._grid.setRowCount(len(rows))
        for ri, row in enumerate(rows):
            for ci in range(len(cols)):
                v = row[ci]
                self._grid.setItem(ri, ci, QTableWidgetItem("" if v is None else str(v)))

    def _load_table(self, name: str) -> None:
        """Show up to 500 rows of *name*; remember the table for row actions."""
        if not name:
            return
        cols, rows = self._db.select_safe(f'SELECT * FROM "{name}" LIMIT 500', ())
        self._fill_grid(cols, rows)
        self._grid.setProperty("current_table", name)
        self._on_sel()

    def _current_cell(self, column: str) -> str | None:
        """Value of *column* in the selected events-table row, or None.

        Returns None when the grid is not showing the ``events`` table,
        the column is absent, or no row is selected.
        """
        if self._grid.property("current_table") != "events":
            return None
        cols = [self._grid.horizontalHeaderItem(i).text() for i in range(self._grid.columnCount())]
        try:
            ci = cols.index(column)
        except ValueError:
            return None
        r = self._grid.currentRow()
        if r < 0:
            return None
        it = self._grid.item(r, ci)
        return it.text() if it else None

    def _current_event_id(self) -> str | None:
        """Selected event's id, or None."""
        return self._current_cell("id")

    def _current_event_pubkey(self) -> str | None:
        """Selected event's author pubkey, or None."""
        return self._current_cell("pubkey")

    def _run_sql(self) -> None:
        """Run the typed query through the SELECT-only gate and show results."""
        q = self._sql.toPlainText().strip()
        if not q:
            return
        try:
            cols, rows = self._db.select_safe(q, ())
        except Exception as e:  # noqa: BLE001
            QMessageBox.warning(self, "SQL", str(e))
            return
        # Ad-hoc results are not tied to a table, so row actions are disabled.
        self._grid.setProperty("current_table", None)
        self._fill_grid(cols, rows)
        self._nip_btn.setVisible(False)

    def _nip_allowed_for_selection(self) -> bool:
        """True when the selected row's pubkey belongs to a saved account."""
        pk = self._current_event_pubkey()
        if not pk:
            return False
        return pk.lower() in {a.pubkey.lower() for a in self._accounts}

    def _on_sel(self) -> None:
        """Show the NIP-09 button only for deletable events rows."""
        name = self._grid.property("current_table")
        self._nip_btn.setVisible(name == "events" and self._nip_allowed_for_selection())

    def _open_selected_event(self) -> None:
        """Emit the selected event id for the viewer, or prompt for a selection."""
        eid = self._current_event_id()
        if eid:
            self.open_event.emit(eid)
        else:
            QMessageBox.information(self, "Open", "Select a row in the events table (id column).")

    def _purge_selected_event(self) -> None:
        """Remove the selected event from the local DB only (never relays)."""
        eid = self._current_event_id()
        if not eid:
            QMessageBox.information(self, "Purge", "Select an events row.")
            return
        if QMessageBox.question(self, "Purge", f"Remove {eid[:16]}… from local DB?") != QMessageBox.StandardButton.Yes:
            return
        self._db.purge_event_local(eid)
        self._reload_table_list()

    def _nip09(self) -> None:
        """Request a NIP-09 deletion for the selected row (saved keys only)."""
        eid = self._current_event_id()
        pk = self._current_event_pubkey()
        if not eid or not pk:
            QMessageBox.information(self, "NIP-09", "Select an events row with id and pubkey.")
            return
        saved = {a.pubkey.lower() for a in self._accounts}
        if pk.lower() not in saved:
            QMessageBox.information(self, "NIP-09", "This pubkey is not one of your saved keys — button stays hidden in product UI; here we block.")
            return
        self.request_nip09.emit(eid, pk)
||||||
@ -0,0 +1,187 @@ |
|||||||
|
"""OP-as-document + thread panel + pager + votes + ranker 'why'.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import json |
||||||
|
from typing import Any |
||||||
|
|
||||||
|
from PySide6.QtCore import Qt |
||||||
|
from PySide6.QtWidgets import ( |
||||||
|
QHBoxLayout, |
||||||
|
QLabel, |
||||||
|
QListWidget, |
||||||
|
QListWidgetItem, |
||||||
|
QPushButton, |
||||||
|
QSplitter, |
||||||
|
QTextBrowser, |
||||||
|
QVBoxLayout, |
||||||
|
QWidget, |
||||||
|
) |
||||||
|
|
||||||
|
from imwald.core.database import Database |
||||||
|
from imwald.core.nostr_engine import NostrEngine |
||||||
|
from imwald.core.ranker import Ranker |
||||||
|
|
||||||
|
FEED_KINDS = (1, 20, 21, 30023, 9802, 11) |
||||||
|
|
||||||
|
|
||||||
|
class FeedPage(QWidget):
    """OP-as-document reader.

    Shows one ranked event as a document, its replies in a side panel, with
    prev/next paging, ±1 votes, and the ranker's "why" readout.

    Fix vs. original: event ``content`` is now HTML-escaped before being
    passed to ``QTextBrowser.setHtml`` — relay-supplied text could otherwise
    inject markup (and rich-text links) into the viewer.
    """

    def __init__(self, db: Database, engine: NostrEngine, parent=None) -> None:
        super().__init__(parent)
        self._db = db
        self._engine = engine
        self._ranker = Ranker(db)
        self._queue: list[dict[str, Any]] = []
        self._index = 0
        self._my_pubkey: str | None = None
        self._following: set[str] = set()
        self._list30000_pubkeys: set[str] = set()

        self._op = QTextBrowser()
        self._op.setOpenExternalLinks(True)
        self._why = QLabel("")
        self._thread = QListWidget()
        prev = QPushButton("◀ Previous")
        next_ = QPushButton("Next ▶")
        prev.clicked.connect(self._prev)
        next_.clicked.connect(self._next)
        up = QPushButton("▲ +1")
        down = QPushButton("▼ −1")
        up.clicked.connect(lambda: self._vote(1))
        down.clicked.connect(lambda: self._vote(-1))

        nav = QHBoxLayout()
        nav.addWidget(prev)
        nav.addWidget(next_)
        nav.addWidget(up)
        nav.addWidget(down)
        nav.addStretch()
        nav.addWidget(self._why)

        left = QVBoxLayout()
        left.addWidget(self._op, stretch=1)
        left.addLayout(nav)

        lw = QWidget()
        lw.setLayout(left)
        split = QSplitter(Qt.Orientation.Horizontal)
        split.addWidget(lw)
        split.addWidget(self._thread)
        split.setStretchFactor(0, 3)
        split.setStretchFactor(1, 1)

        outer = QVBoxLayout(self)
        outer.addWidget(split)

    def set_context(
        self,
        my_pubkey: str | None,
        following: set[str],
        list30000_pubkeys: set[str] | None = None,
    ) -> None:
        """Set the viewer identity and social graph used for ranking."""
        self._my_pubkey = my_pubkey
        self._following = following
        self._list30000_pubkeys = list30000_pubkeys or set()

    def _feed_viewer_key(self) -> str:
        """Per-device feed history; logged-out users share `_anon`."""
        return (self._my_pubkey or "_anon").lower()

    def reload_queue(self) -> None:
        """Rebuild the ranked queue from local candidates; show the first entry."""
        hide = self._db.get_setting("hide_nsfw", "1") == "1"
        raw = self._db.feed_candidates(
            FEED_KINDS,
            hide_nsfw=hide,
            limit=500,
            viewer_pubkey=self._feed_viewer_key(),
            exclude_viewed=True,
        )
        ranked = self._ranker.rank_feed(raw, self._my_pubkey, self._following, self._list30000_pubkeys)
        self._queue = ranked
        self._index = 0
        self._show_current()

    def refresh_tail(self) -> None:
        """After ingest: rebuild ranked queue (picks up trending + read relays)."""
        self.reload_queue()

    def show_event(self, event_id: str) -> None:
        """Jump the pager to a single event looked up by id in the local DB."""
        ev = self._db.get_event(event_id)
        if not ev:
            self._op.setPlainText(f"(not in local DB yet) {event_id}")
            return
        self._queue = [ev]
        self._index = 0
        self._show_current()
        if not ev.get("deleted"):
            self._db.mark_feed_viewed(self._feed_viewer_key(), ev["id"])

    def _show_current(self) -> None:
        """Render the current queue entry (or an empty/deleted placeholder)."""
        from html import escape  # stdlib; untrusted content must not reach setHtml raw

        if not self._queue:
            self._op.setPlainText("No events in local database yet — wait for relay sync.")
            self._thread.clear()
            self._why.setText("")
            return
        ev = self._queue[self._index % len(self._queue)]
        if ev.get("deleted"):
            self._op.setHtml(
                f"<p><i>Marked deleted locally</i></p><pre>{escape(ev.get('content',''))}</pre>"
                f"<p style='color:gray'>{ev['id']}</p>"
            )
            self._thread.clear()
            self._why.setText("")
            return
        score, why = self._ranker.score_event(
            ev,
            my_pubkey=self._my_pubkey,
            following=self._following,
            list30000_pubkeys=self._list30000_pubkeys,
        )
        self._why.setText(f"score={score:.2f} {json.dumps(why, ensure_ascii=False)[:120]}…")
        tr = ""
        sr = ev.get("source_relay") or ""
        if sr and "nostrarchives.com" in sr:
            tr = "<p><i>Trending slice (nostrarchives)</i></p>"
        body = (
            f"<h2>Kind {ev['kind']}</h2>"
            f"<p><b>{ev['pubkey'][:16]}…</b> · {ev['created_at']}</p>"
            f"{tr}"
            f"<pre>{escape(ev.get('content',''))}</pre>"
            f"<p style='color:gray'>{ev['id']}</p>"
        )
        self._op.setHtml(body)
        self._thread.clear()
        for r in self._db.list_replies_to(ev["id"]):
            line = f"k{r['kind']} {r['pubkey'][:8]}… {r['content'][:100]!r}"
            it = QListWidgetItem(line)
            it.setData(Qt.ItemDataRole.UserRole, r["id"])
            self._thread.addItem(it)

    def _prev(self) -> None:
        """Step backwards through the queue (wraps around)."""
        if self._queue:
            self._index = (self._index - 1) % len(self._queue)
            self._show_current()

    def _next(self) -> None:
        """Mark the current entry viewed and step forwards (wraps around)."""
        if not self._queue:
            return
        cur = self._queue[self._index % len(self._queue)]
        if not cur.get("deleted"):
            self._db.mark_feed_viewed(self._feed_viewer_key(), cur["id"])
        self._index = (self._index + 1) % len(self._queue)
        self._show_current()

    def current_event_id(self) -> str | None:
        """Id of the entry currently shown, or None when the queue is empty."""
        if not self._queue:
            return None
        ev = self._queue[self._index % len(self._queue)]
        return str(ev["id"])

    def _vote(self, v: int) -> None:
        """Record a ±1 vote for the current entry; requires a signed-in user."""
        if not self._my_pubkey or not self._queue:
            return
        ev = self._queue[self._index % len(self._queue)]
        if ev.get("deleted"):
            return
        self._db.set_vote(ev["id"], self._my_pubkey, v)
        self._show_current()
||||||
@ -0,0 +1,281 @@ |
|||||||
|
"""Main shell: menus, stacked pages, relay ingest, accounts, composer.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
from PySide6.QtCore import Qt |
||||||
|
from PySide6.QtGui import QAction |
||||||
|
from PySide6.QtWidgets import ( |
||||||
|
QComboBox, |
||||||
|
QDialog, |
||||||
|
QInputDialog, |
||||||
|
QLabel, |
||||||
|
QLineEdit, |
||||||
|
QListWidget, |
||||||
|
QListWidgetItem, |
||||||
|
QMainWindow, |
||||||
|
QMessageBox, |
||||||
|
QStackedWidget, |
||||||
|
QToolBar, |
||||||
|
QVBoxLayout, |
||||||
|
) |
||||||
|
|
||||||
|
from imwald.core.accounts_store import StoredAccount, load_accounts |
||||||
|
from imwald.core.database import Database |
||||||
|
from imwald.core.nostr_engine import NostrEngine |
||||||
|
from imwald.ui.composer_dialog import ComposerDialog |
||||||
|
from imwald.ui.db_admin_page import DbAdminPage |
||||||
|
from imwald.ui.feed_page import FeedPage |
||||||
|
from imwald.ui.notifications_page import NotificationsPage |
||||||
|
from imwald.ui.onboarding_wizard import run_onboarding_wizard |
||||||
|
from imwald.ui.search_page import SearchPage |
||||||
|
|
||||||
|
|
||||||
|
class MainWindow(QMainWindow):
    """Main application shell.

    Owns the menu bar, the stacked pages (feed / search / notifications /
    DB admin), the toolbar account selector, relay-ingest wiring, and the
    composer entry points.
    """

    def __init__(self, *, db: Database, engine: NostrEngine, parent=None) -> None:
        super().__init__(parent)
        self.setWindowTitle("imwald")
        self.resize(1200, 820)

        self._db = db
        self._engine = engine
        self._accounts: list[StoredAccount] = load_accounts()
        # ncryptsec passwords entered this session, keyed by pubkey.
        self._session_passwords: dict[str, str] = {}

        self._stack = QStackedWidget()
        self._feed = FeedPage(db, engine)
        self._search = SearchPage(db)
        self._notif = NotificationsPage(db, self._accounts)
        self._dbadm = DbAdminPage(db, self._accounts)

        self._stack.addWidget(self._feed)  # 0
        self._stack.addWidget(self._search)  # 1
        self._stack.addWidget(self._notif)  # 2
        self._stack.addWidget(self._dbadm)  # 3

        self.setCentralWidget(self._stack)

        self._acct_combo = QComboBox()
        self._acct_combo.setMinimumWidth(220)
        self._reload_account_combo()

        tb = QToolBar()
        tb.addWidget(QLabel("Account:"))
        tb.addWidget(self._acct_combo)
        self.addToolBar(tb)

        self._wire_menu()
        self._wire_engine()
        self._wire_pages()

        self._acct_combo.currentIndexChanged.connect(self._on_account_changed)
        self._on_account_changed()

        # First run: offer onboarding once, then remember completion.
        if not self._db.get_setting("onboarding_done") and not self._accounts:
            if run_onboarding_wizard(self, db=self._db, engine=self._engine, existing_accounts=self._accounts):
                self._db.set_setting("onboarding_done", "1")
                self._accounts = load_accounts()
                self._reload_account_combo()
                self._notif.set_accounts(self._accounts)
                self._dbadm.set_accounts(self._accounts)

        self._feed.reload_queue()

    def _reload_account_combo(self) -> None:
        """Rebuild the toolbar account selector; the lurk entry comes first."""
        self._acct_combo.blockSignals(True)
        self._acct_combo.clear()
        self._acct_combo.addItem("Lurk (no key)", "")
        for a in self._accounts:
            label = a.label or a.pubkey[:12] + "…"
            self._acct_combo.addItem(label, a.pubkey)
        self._acct_combo.blockSignals(False)

    def _current_pubkey(self) -> str | None:
        """Pubkey of the selected account, or None for the lurk entry."""
        d = self._acct_combo.currentData()
        return str(d) if d else None

    def list_owner_pubkey_for_relays(self) -> str | None:
        """Pubkey used to REQ kind 30000 list metadata from relays (same as active account)."""
        return self._current_pubkey()

    def _password_for(self, pubkey: str) -> str | None:
        """Password for an ncryptsec account, cached for the session.

        Returns None for unknown accounts, nsec accounts (no password
        needed), or when the user cancels the prompt.
        """
        acc = next((a for a in self._accounts if a.pubkey.lower() == pubkey.lower()), None)
        if not acc:
            return None
        if acc.signer_type == "nsec":
            return None
        if pubkey in self._session_passwords:
            return self._session_passwords[pubkey]
        pw, ok = QInputDialog.getText(self, "Password", f"Password for {pubkey[:12]}… (ncryptsec)", echo=QLineEdit.EchoMode.Password)
        if not ok:
            return None
        self._session_passwords[pubkey] = pw
        return pw

    def _on_account_changed(self) -> None:
        """Push the active account's social context into the feed and reload."""
        pk = self._current_pubkey()
        following: set[str] = set()
        list300: set[str] = set()
        if pk:
            following = self._db.list_following_pubkeys(pk)
            list300 = self._db.list_kind30000_list_pubkeys(pk)
        self._feed.set_context(pk, following, list300)
        self._feed.reload_queue()

    def _wire_menu(self) -> None:
        """Build the File / View / Tools / Help menus."""
        m_file = self.menuBar().addMenu("&File")
        a_new = QAction("&New event…", self)
        a_new.triggered.connect(self._new_event)
        m_file.addAction(a_new)
        a_mine = QAction("&Your latest events…", self)
        a_mine.triggered.connect(self._my_events)
        m_file.addAction(a_mine)
        a_edit = QAction("&Edit current (clone)…", self)
        a_edit.triggered.connect(self._edit_current)
        m_file.addAction(a_edit)
        a_onb = QAction("&Onboarding wizard…", self)
        a_onb.triggered.connect(self._onboarding_again)
        m_file.addAction(a_onb)

        m_view = self.menuBar().addMenu("&View")
        for title, idx in (
            ("&Feed", 0),
            ("&Search", 1),
            ("&Notifications", 2),
        ):
            act = QAction(title, self)
            act.setData(idx)
            # Default-arg binding avoids the late-binding closure pitfall.
            act.triggered.connect(lambda checked=False, x=idx: self._stack.setCurrentIndex(x))
            m_view.addAction(act)

        m_tools = self.menuBar().addMenu("&Tools")
        a_db = QAction("&Local database…", self)
        a_db.triggered.connect(lambda: self._stack.setCurrentIndex(3))
        m_tools.addAction(a_db)

        m_help = self.menuBar().addMenu("&Help")
        a_about = QAction("&About", self)
        a_about.triggered.connect(
            lambda: QMessageBox.about(
                self,
                "About imwald",
                "Linux-native Nostr client — Qt (PySide6), SQLite, relay worker.",
            )
        )
        m_help.addAction(a_about)

    def _wire_engine(self) -> None:
        """Connect relay engine signals to ingest handling and the status bar."""
        self._engine.event_ingested.connect(self._on_event_ingested)
        self._engine.relay_status.connect(lambda s: self.statusBar().showMessage(s, 8000))

    def _on_event_ingested(self, relay_url: str, ev: object) -> None:
        """Persist an ingested event and refresh the visible pages."""
        if not isinstance(ev, dict):
            return
        NostrEngine.apply_ingest_to_db(self._db, ev, relay_url)
        # Only refresh the feed when it is visible; notifications always.
        if self._stack.currentWidget() is self._feed:
            self._feed.refresh_tail()
        self._notif.refresh_all()

    def _wire_pages(self) -> None:
        """Connect cross-page signals (open-in-viewer, NIP-09 requests)."""
        self._search.open_event.connect(self._open_event)
        self._notif.open_event.connect(self._open_event)
        self._notif.signing_pubkey_changed.connect(self._on_notif_signing)
        self._dbadm.open_event.connect(self._open_event)
        self._dbadm.request_nip09.connect(self._nip09_from_db)

    def _on_notif_signing(self, pubkey: str) -> None:
        """Surface the notifications tab's signing context in the status bar."""
        self.statusBar().showMessage(f"Notifications tab signing context: {pubkey[:16]}…", 5000)

    def _open_event(self, event_id: str) -> None:
        """Switch to the feed page and show *event_id*."""
        self._stack.setCurrentIndex(0)
        self._feed.show_event(event_id)

    def _nip09_from_db(self, event_id: str, pubkey: str) -> None:
        """Confirm and publish a NIP-09 deletion for an event of a saved key."""
        acc = next((a for a in self._accounts if a.pubkey.lower() == pubkey.lower()), None)
        if not acc:
            return
        pw = self._password_for(pubkey)
        if acc.signer_type == "ncryptsec" and pw is None:
            return
        if QMessageBox.question(self, "NIP-09", f"Publish deletion for {event_id[:16]}…?") != QMessageBox.StandardButton.Yes:
            return
        self._engine.publish_nip09_deletion(acc, pw, event_id)
        QMessageBox.information(self, "NIP-09", "Deletion request published to default write relays.")

    def _edit_current(self) -> None:
        """Open the composer pre-filled with a clone of the feed's current event."""
        eid = self._feed.current_event_id()
        if not eid:
            return
        ev = self._db.get_event(eid)
        if not ev:
            return
        acc, pw = self._account_for_compose()
        if not acc:
            QMessageBox.information(self, "Edit", "Select an account.")
            return
        dlg = ComposerDialog(self, edit_from=ev, account=acc, password=pw)
        if dlg.exec() == QDialog.DialogCode.Accepted and dlg.last_published:
            self._db.upsert_event(dlg.last_published)

    def _new_event(self) -> None:
        """Open an empty composer for the active signing account."""
        acc, pw = self._account_for_compose()
        if not acc:
            QMessageBox.information(self, "Composer", "Select an account or add keys via onboarding.")
            return
        dlg = ComposerDialog(self, edit_from=None, account=acc, password=pw)
        if dlg.exec() == QDialog.DialogCode.Accepted and dlg.last_published:
            self._db.upsert_event(dlg.last_published)

    def _my_events(self) -> None:
        """Modal list of the active account's latest events; activate to open."""
        pk = self._current_pubkey()
        if not pk:
            QMessageBox.information(self, "Events", "Select an account first.")
            return
        rows = self._db.list_my_events(pk, limit=200)
        d = QDialog(self)
        d.setWindowTitle("Your latest events")
        lw = QListWidget()
        for ev in rows:
            it = QListWidgetItem(f"k{ev['kind']} {ev['id'][:16]}… {ev['content'][:60]!r}")
            it.setData(Qt.ItemDataRole.UserRole, ev["id"])
            lw.addItem(it)

        def _go(it: QListWidgetItem) -> None:
            # Close the dialog first so the feed page is visible when it loads.
            eid = it.data(Qt.ItemDataRole.UserRole)
            d.accept()
            if eid:
                self._open_event(str(eid))

        lw.itemActivated.connect(_go)
        v = QVBoxLayout(d)
        v.addWidget(lw)
        d.resize(600, 400)
        d.exec()

    def _onboarding_again(self) -> None:
        """Re-run the onboarding wizard and refresh account-dependent pages."""
        self._accounts = load_accounts()
        run_onboarding_wizard(self, db=self._db, engine=self._engine, existing_accounts=self._accounts)
        self._accounts = load_accounts()
        self._reload_account_combo()
        self._notif.set_accounts(self._accounts)
        self._dbadm.set_accounts(self._accounts)

    def _account_for_compose(self) -> tuple[StoredAccount | None, str | None]:
        """Account and password to sign with.

        The notifications page's active tab wins when that page is in
        front; otherwise the toolbar selection is used.
        """
        if self._stack.currentWidget() is self._notif:
            pk_tab = self._notif.active_signing_pubkey()
            if pk_tab:
                acc = next((a for a in self._accounts if a.pubkey.lower() == pk_tab.lower()), None)
                if acc:
                    return acc, self._password_for(acc.pubkey)
        pk = self._current_pubkey()
        if not pk:
            return None, None
        acc = next((a for a in self._accounts if a.pubkey.lower() == pk.lower()), None)
        if not acc:
            return None, None
        return acc, self._password_for(pk)

    def closeEvent(self, event) -> None:  # noqa: N802
        """Stop relay workers before the window closes."""
        self._engine.stop_relays()
        super().closeEvent(event)
||||||
@ -0,0 +1,92 @@ |
|||||||
|
"""Per-account notification tabs; signing identity = active tab's pubkey.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
from PySide6.QtCore import Qt, Signal |
||||||
|
from PySide6.QtWidgets import QLabel, QListWidget, QListWidgetItem, QTabWidget, QVBoxLayout, QWidget |
||||||
|
|
||||||
|
from imwald.core.accounts_store import StoredAccount |
||||||
|
from imwald.core.database import Database |
||||||
|
|
||||||
|
|
||||||
|
class NotificationsPage(QWidget):
    """Per-account notification tabs; the active tab is the signing identity.

    v1: lists DB notification rows per recipient; tabs stay empty until
    ingest populates the notifications table.
    """

    open_event = Signal(str)
    signing_pubkey_changed = Signal(str)

    def __init__(self, db: Database, accounts: list[StoredAccount], parent=None) -> None:
        super().__init__(parent)
        self._db = db
        self._accounts = accounts
        self._tabs = QTabWidget()
        self._tabs.currentChanged.connect(self._on_tab)
        self._lists: dict[str, QListWidget] = {}
        layout = QVBoxLayout(self)
        layout.addWidget(QLabel("Notifications per saved account (signing uses the active tab)."))
        layout.addWidget(self._tabs)
        self._rebuild_tabs()

    def set_accounts(self, accounts: list[StoredAccount]) -> None:
        """Swap in a new account list and rebuild every tab."""
        self._accounts = accounts
        self._rebuild_tabs()

    def _rebuild_tabs(self) -> None:
        """Recreate one list tab per saved account (placeholder when none)."""
        self._tabs.clear()
        self._lists.clear()
        if not self._accounts:
            placeholder = QWidget()
            box = QVBoxLayout(placeholder)
            box.addWidget(QLabel("No saved accounts."))
            self._tabs.addTab(placeholder, "—")
            return
        for account in self._accounts:
            widget = QListWidget()
            widget.itemActivated.connect(self._on_item)
            self._lists[account.pubkey] = widget
            self._tabs.addTab(widget, account.label or account.pubkey[:12] + "…")
            self._refresh_list(account.pubkey)
        self._on_tab(self._tabs.currentIndex())

    def _refresh_list(self, pubkey: str) -> None:
        """Reload up to the 200 newest notification rows for *pubkey*."""
        widget = self._lists.get(pubkey)
        if not widget:
            return
        widget.clear()
        cur = self._db.conn().execute(
            """
            SELECT source_event_id, kind, read, created_at FROM notifications
            WHERE recipient_pubkey=? ORDER BY created_at DESC LIMIT 200
            """,
            (pubkey,),
        )
        for row in cur:
            entry = QListWidgetItem(f"{row['kind']} {row['source_event_id'][:12]}… read={row['read']}")
            entry.setData(Qt.ItemDataRole.UserRole, row["source_event_id"])
            widget.addItem(entry)
        if widget.count() == 0:
            widget.addItem(QListWidgetItem("(no rows yet — wire mention/reply detection on ingest)"))

    def refresh_all(self) -> None:
        """Refresh every account tab's list."""
        for pk in self._lists:
            self._refresh_list(pk)

    def _on_tab(self, index: int) -> None:
        """Announce the signing pubkey of the newly active tab."""
        if index < 0 or not self._accounts:
            return
        if index >= len(self._accounts):
            return
        self.signing_pubkey_changed.emit(self._accounts[index].pubkey)

    def _on_item(self, it: QListWidgetItem) -> None:
        """Open the activated notification's source event in the viewer."""
        eid = it.data(Qt.ItemDataRole.UserRole)
        if eid:
            self.open_event.emit(str(eid))

    def active_signing_pubkey(self) -> str | None:
        """Pubkey of the active tab, or None when no account tab is active."""
        idx = self._tabs.currentIndex()
        return self._accounts[idx].pubkey if 0 <= idx < len(self._accounts) else None
||||||
@ -0,0 +1,227 @@ |
|||||||
|
"""Onboarding: lurk vs create, interests (hashtags), languages (≤3), NSFW default.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
from PySide6.QtCore import Qt |
||||||
|
from PySide6.QtWidgets import ( |
||||||
|
QCheckBox, |
||||||
|
QFormLayout, |
||||||
|
QLabel, |
||||||
|
QLineEdit, |
||||||
|
QListWidget, |
||||||
|
QListWidgetItem, |
||||||
|
QMessageBox, |
||||||
|
QPlainTextEdit, |
||||||
|
QVBoxLayout, |
||||||
|
QWizard, |
||||||
|
QWizardPage, |
||||||
|
) |
||||||
|
|
||||||
|
from imwald.core.accounts_store import StoredAccount, add_account_nsec_hex, generate_new_identity, save_accounts |
||||||
|
from imwald.core.database import Database |
||||||
|
from imwald.core.nostr_engine import NostrEngine |
||||||
|
|
||||||
|
POPULAR_TAGS = ( |
||||||
|
"nostr", |
||||||
|
"bitcoin", |
||||||
|
"linux", |
||||||
|
"bookstr", |
||||||
|
"dev", |
||||||
|
"foodstr", |
||||||
|
"photography", |
||||||
|
"music", |
||||||
|
"science", |
||||||
|
"art", |
||||||
|
) |
||||||
|
|
||||||
|
TRINITY_LANGS = ( |
||||||
|
("en", "English"), |
||||||
|
("de", "Deutsch"), |
||||||
|
("fr", "Français"), |
||||||
|
("es", "Español"), |
||||||
|
("ru", "Русский"), |
||||||
|
("zh", "简体中文"), |
||||||
|
("pl", "Polski"), |
||||||
|
("nl", "Nederlands"), |
||||||
|
("cs", "Čeština"), |
||||||
|
("tr", "Türkçe"), |
||||||
|
) |
||||||
|
|
||||||
|
PAGE_INTRO, PAGE_PROFILE, PAGE_INTERESTS, PAGE_LANG, PAGE_SAFETY, PAGE_PASSWORD = range(6) |
||||||
|
|
||||||
|
|
||||||
|
class PageIntro(QWizardPage):
    """First wizard page: lurk-only mode vs. creating a new identity."""

    def __init__(self) -> None:
        super().__init__()
        self.setTitle("Welcome to imwald")
        self.setSubTitle("Read-only lurk, or create a new Nostr identity.")
        layout = QVBoxLayout(self)
        self._lurk = QCheckBox("Lurk only (no signing key on this device yet)")
        self._lurk.stateChanged.connect(self.completeChanged.emit)
        layout.addWidget(self._lurk)
        layout.addWidget(QLabel("If lurk: next jumps to NSFW preference, then Finish."))

    def nextId(self) -> int:
        """Skip profile/interests/languages entirely when lurking."""
        return PAGE_SAFETY if self._lurk.isChecked() else PAGE_PROFILE

    def lurk(self) -> bool:
        """Whether the user chose lurk-only mode."""
        return self._lurk.isChecked()
||||||
|
|
||||||
|
|
||||||
|
class PageProfile(QWizardPage):
    """Collect display name and about text for the kind 0 metadata."""

    def __init__(self) -> None:
        super().__init__()
        self.setTitle("Profile")
        self.setSubTitle("Shown on your kind 0 metadata.")
        layout = QFormLayout(self)
        self._name = QLineEdit()
        self._about = QPlainTextEdit()
        layout.addRow("Name", self._name)
        layout.addRow("About", self._about)

    def nextId(self) -> int:
        """Always proceed to the interests page."""
        return PAGE_INTERESTS
||||||
|
|
||||||
|
|
||||||
|
class PageInterests(QWizardPage):
    """Checkable list of popular hashtags for the kind 10015 interests list."""

    def __init__(self) -> None:
        super().__init__()
        self.setTitle("Interests")
        self.setSubTitle("Pick hashtags for your kind 10015 list.")
        layout = QVBoxLayout(self)
        self._list = QListWidget()
        for tag in POPULAR_TAGS:
            entry = QListWidgetItem(f"#{tag}")
            entry.setCheckState(Qt.CheckState.Unchecked)
            self._list.addItem(entry)
        layout.addWidget(self._list)

    def nextId(self) -> int:
        """Always proceed to the languages page."""
        return PAGE_LANG
||||||
|
|
||||||
|
|
||||||
|
class PageLanguages(QWizardPage):
    """Pick up to three feed languages from TRINITY_LANGS.

    Each checkbox carries its language code in the Qt property "code";
    ``selected()`` reads the codes back for publishing.
    """

    def __init__(self) -> None:
        super().__init__()
        self.setTitle("Languages")
        self.setSubTitle("Pick up to three — we bias feed toward these (detection / translate later).")
        lay = QVBoxLayout(self)
        self._boxes: list[QCheckBox] = []
        for code, label in TRINITY_LANGS:
            cb = QCheckBox(f"{label} ({code})")
            cb.setProperty("code", code)
            cb.stateChanged.connect(self._cap)
            self._boxes.append(cb)
            lay.addWidget(cb)

    def _cap(self) -> None:
        """Enforce the three-language limit by reverting the offending toggle.

        Bug fix: the previous version unchecked the *last checked box in layout
        order* (``sel[-1]``), so checking an early-listed language could
        silently uncheck an unrelated later one.  Revert the checkbox that
        actually emitted the signal instead.
        """
        sel = [b for b in self._boxes if b.isChecked()]
        if len(sel) > 3:
            sender = self.sender()
            # Fall back to the old behavior if the signal sender is not one
            # of our checkboxes (e.g. a programmatic call).
            b = sender if isinstance(sender, QCheckBox) and sender in sel else sel[-1]
            b.blockSignals(True)
            b.setChecked(False)
            b.blockSignals(False)
        self.completeChanged.emit()

    def selected(self) -> list[str]:
        """Return the language codes of all checked boxes, in display order."""
        return [str(b.property("code")) for b in self._boxes if b.isChecked()]

    def nextId(self) -> int:
        """The language page is always followed by the safety page."""
        return PAGE_SAFETY
||||||
|
|
||||||
|
|
||||||
|
class PageSafety(QWizardPage):
    """NSFW visibility preference; this is the final page when lurking."""

    def __init__(self) -> None:
        super().__init__()
        self.setTitle("Safety")
        self.setSubTitle("NSFW-tagged notes are hidden by default.")
        layout = QVBoxLayout(self)
        self._hide = QCheckBox("Hide #nsfw and content-warning by default (recommended)")
        self._hide.setChecked(True)
        layout.addWidget(self._hide)

    def nextId(self) -> int:
        """Finish here in lurk mode; otherwise continue to the password page."""
        wizard = self.wizard()
        intro = wizard.page(PAGE_INTRO) if wizard else None
        lurking = isinstance(intro, PageIntro) and intro.lurk()
        return -1 if lurking else PAGE_PASSWORD
||||||
|
|
||||||
|
|
||||||
|
class PagePassword(QWizardPage):
    """Optional password used to encrypt the new key as NIP-49 ncryptsec.

    Leaving both fields empty means the key is stored without a password;
    run_onboarding_wizard() validates that the two fields match.
    """

    def __init__(self) -> None:
        super().__init__()
        self.setTitle("Encrypt key (NIP-49)")
        self.setSubTitle("Matches Jumble-style ncryptsec storage when a password is set.")
        form = QFormLayout(self)
        self._pw = QLineEdit()
        # Use the scoped enum (consistent with Qt.CheckState /
        # QWizard.DialogCode elsewhere in this file); the bare
        # QLineEdit.Password form is a Qt 5 compatibility shortcut.
        self._pw.setEchoMode(QLineEdit.EchoMode.Password)
        self._pw2 = QLineEdit()
        self._pw2.setEchoMode(QLineEdit.EchoMode.Password)
        form.addRow("Password", self._pw)
        form.addRow("Repeat", self._pw2)
||||||
|
|
||||||
|
|
||||||
|
def run_onboarding_wizard(
    parent,
    *,
    db: Database,
    engine: NostrEngine,
    existing_accounts: list[StoredAccount],
) -> bool:
    """Run the onboarding wizard and persist its results.

    Returns True if the wizard finished (Accepted), False on cancel or on a
    password mismatch.  Side effects on success: stores the "hide_nsfw"
    setting; unless lurk mode was chosen, additionally generates a new
    identity, appends it to *existing_accounts*, saves the account list, and
    publishes the kind 0 profile plus relay (10002) / interest (10015) lists
    via *engine*.
    """
    w = QWizard(parent)
    w.setWindowTitle("imwald — onboarding")
    p0 = PageIntro()
    p1 = PageProfile()
    p2 = PageInterests()
    p3 = PageLanguages()
    p4 = PageSafety()
    p5 = PagePassword()
    w.setPage(PAGE_INTRO, p0)
    w.setPage(PAGE_PROFILE, p1)
    w.setPage(PAGE_INTERESTS, p2)
    w.setPage(PAGE_LANG, p3)
    w.setPage(PAGE_SAFETY, p4)
    w.setPage(PAGE_PASSWORD, p5)

    if w.exec() != QWizard.DialogCode.Accepted:
        return False

    # The NSFW preference applies to lurk and full accounts alike.
    hide_nsfw = "1" if p4._hide.isChecked() else "0"  # noqa: SLF001
    db.set_setting("hide_nsfw", hide_nsfw)

    if p0.lurk():
        return True

    pw = p5._pw.text()  # noqa: SLF001
    if pw != p5._pw2.text():  # noqa: SLF001
        QMessageBox.warning(parent, "Password mismatch", "Passwords do not match.")
        return False
    # An empty password means "store the key without NIP-49 encryption".
    password = pw or None

    sec, _pub = generate_new_identity()
    acc = add_account_nsec_hex(sec.hex(), password)
    acc.label = p1._name.text().strip() or None  # noqa: SLF001
    existing_accounts.append(acc)
    save_accounts(existing_accounts)

    # Collect checked interest items, stripping the display-only "#" prefix.
    tag_list = p2._list  # noqa: SLF001
    interests = [
        tag_list.item(i).text().lstrip("#")
        for i in range(tag_list.count())
        if tag_list.item(i).checkState() == Qt.CheckState.Checked
    ]

    engine.publish_kind0_and_lists(
        acc,
        password,
        name=p1._name.text().strip() or "imwald user",  # noqa: SLF001
        about=p1._about.toPlainText().strip(),  # noqa: SLF001
        interest_tags=interests,
        languages=p3.selected(),
    )
    QMessageBox.information(parent, "Welcome", "Profile, relay list (10002), and interests (10015) published.")
    return True
||||||
@ -0,0 +1,46 @@ |
|||||||
|
"""Search local SQLite corpus.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
from PySide6.QtCore import Qt, Signal |
||||||
|
from PySide6.QtWidgets import QHBoxLayout, QLabel, QLineEdit, QListWidget, QListWidgetItem, QPushButton, QVBoxLayout, QWidget |
||||||
|
|
||||||
|
from imwald.core.database import Database |
||||||
|
|
||||||
|
|
||||||
|
class SearchPage(QWidget):
    """Search the locally cached event corpus and list matching events."""

    # Emitted with the event id (hex string) of an activated result row.
    open_event = Signal(str)

    def __init__(self, db: Database, parent=None) -> None:
        super().__init__(parent)
        self._db = db
        self._q = QLineEdit()
        self._q.setPlaceholderText("Search content, id, pubkey…")
        search_btn = QPushButton("Search")
        search_btn.clicked.connect(self._run)
        self._list = QListWidget()
        self._list.itemActivated.connect(self._activate)

        query_row = QHBoxLayout()
        query_row.addWidget(QLabel("Query:"))
        query_row.addWidget(self._q)
        query_row.addWidget(search_btn)
        outer = QVBoxLayout(self)
        outer.addLayout(query_row)
        outer.addWidget(self._list)

    def _run(self) -> None:
        """Repopulate the result list from the current query text."""
        self._list.clear()
        query = self._q.text().strip()
        if not query:
            return
        for ev in self._db.search_local(query, limit=200):
            entry = QListWidgetItem(f"{ev['kind']} {ev['id'][:12]}… — {ev['content'][:80]!r}")
            entry.setData(Qt.ItemDataRole.UserRole, ev["id"])
            self._list.addItem(entry)

    def _activate(self, it: QListWidgetItem) -> None:
        """Forward the activated row's event id through open_event."""
        event_id = it.data(Qt.ItemDataRole.UserRole)
        if event_id:
            self.open_event.emit(str(event_id))
||||||
@ -0,0 +1,42 @@ |
|||||||
|
import tempfile |
||||||
|
from pathlib import Path |
||||||
|
|
||||||
|
from imwald.core.database import Database |
||||||
|
from imwald.core.nostr_crypto import build_signed_event |
||||||
|
from imwald.core.ranker import Ranker, WEIGHT_FOLLOW_AUTHOR, WEIGHT_KIND30000_LIST_AUTHOR |
||||||
|
|
||||||
|
|
||||||
|
def _fake_sk() -> bytes: |
||||||
|
return bytes.fromhex("3501454135014541350145413501453fefb02227e449e57cf4d3a3ce05378683") |
||||||
|
|
||||||
|
|
||||||
|
def test_list30000_pubkeys_from_p_tags() -> None:
    """Every p-tagged pubkey of one's own kind 30000 list is reported."""
    with tempfile.TemporaryDirectory() as td:
        db = Database(Path(td) / "t.sqlite")
        db.connect()
        listed = "b" * 64
        other = "c" * 64
        sk = _fake_sk()
        tags = [["d", "testlist"], ["p", listed], ["p", other]]
        ev = build_signed_event(sk, created_at=100, kind=30000, tags=tags, content="")
        me = ev["pubkey"]
        db.upsert_event(ev)
        got = db.list_kind30000_list_pubkeys(me)
        # Separate asserts so a failure pinpoints which pubkey went missing.
        assert listed in got
        assert other in got
||||||
|
|
||||||
|
|
||||||
|
def test_ranker_follow_beats_kind30000() -> None:
    """A followed author's note must outrank a 30000-listed author's note."""
    with tempfile.TemporaryDirectory() as td:
        db = Database(Path(td) / "t2.sqlite")
        db.connect()
        ranker = Ranker(db)
        me = "f" * 64
        follow_pk = "a" * 64
        list_pk = "b" * 64
        ev_follow = {"id": "1" * 64, "pubkey": follow_pk, "created_at": 1, "kind": 1, "tags": [], "content": "x"}
        ev_listed = {"id": "2" * 64, "pubkey": list_pk, "created_at": 2, "kind": 1, "tags": [], "content": "y"}
        score_follow, _ = ranker.score_event(
            ev_follow, my_pubkey=me, following={follow_pk}, list30000_pubkeys={list_pk}
        )
        score_listed, _ = ranker.score_event(
            ev_listed, my_pubkey=me, following={follow_pk}, list30000_pubkeys={list_pk}
        )
        assert score_follow > score_listed
        assert score_follow >= WEIGHT_FOLLOW_AUTHOR
        assert WEIGHT_KIND30000_LIST_AUTHOR <= score_listed < WEIGHT_FOLLOW_AUTHOR
||||||
@ -0,0 +1,13 @@ |
|||||||
|
from imwald.core.nip49 import decrypt_ncryptsec, encrypt_to_ncryptsec |
||||||
|
|
||||||
|
|
||||||
|
def test_nip49_vector() -> None:
    """The published NIP-49 test vector decrypts to the known secret key."""
    nc = "ncryptsec1qgg9947rlpvqu76pj5ecreduf9jxhselq2nae2kghhvd5g7dgjtcxfqtd67p9m0w57lspw8gsq6yphnm8623nsl8xn9j4jdzz84zm3frztj3z7s35vpzmqf6ksu8r89qk5z2zxfmu5gv8th8wclt0h4p"
    secret = decrypt_ncryptsec(nc, "nostr")
    expected_hex = "3501454135014541350145413501453fefb02227e449e57cf4d3a3ce05378683"
    assert secret.hex() == expected_hex
||||||
|
|
||||||
|
|
||||||
|
def test_nip49_roundtrip() -> None:
    """Encrypt then decrypt with the same password recovers the key bytes."""
    secret = bytes.fromhex("3501454135014541350145413501453fefb02227e449e57cf4d3a3ce05378683")
    encrypted = encrypt_to_ncryptsec(secret, "hunter2", log_n=16)
    assert decrypt_ncryptsec(encrypted, "hunter2") == secret
||||||
Loading…
Reference in new issue