8 changed files with 805 additions and 12 deletions
@ -0,0 +1,226 @@ |
|||||||
|
"""Resolve kind-0 ``lud06`` / ``lud16`` to LNURL-pay URLs, fetch pay metadata, dedupe by callback (NIP-57 / LUD-06).""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import html |
||||||
|
import json |
||||||
|
import re |
||||||
|
from typing import Any, cast |
||||||
|
from urllib.error import URLError |
||||||
|
from urllib.request import Request, urlopen |
||||||
|
|
||||||
|
import bech32 |
||||||
|
|
||||||
|
# User-Agent sent on outbound LNURL-pay HTTP requests (see fetch_lnurlp_pay_json).
_USER_AGENT = "imwald/1 (LNURL-pay profile fetch)"
||||||
|
|
||||||
|
|
||||||
|
def decode_lnurl_bech32(lud06: str) -> str | None:
    """Decode an ``lnurl1…`` (LUD-06) string to its underlying HTTPS URL.

    Returns ``None`` for empty input, a failed bech32 decode, a non-``lnurl``
    HRP, undecodable payload bytes, or a payload that is not an https URL.

    NOTE(review): the reference ``bech32`` package rejects strings longer than
    90 characters, and real-world lud06 values usually exceed that — confirm
    the installed decoder accepts long inputs.
    """
    candidate = lud06.strip()
    if not candidate:
        return None
    hrp, payload = bech32.bech32_decode(candidate.lower())
    if payload is None or hrp != "lnurl":
        return None
    raw = bech32.convertbits(list(payload), 5, 8, False)
    if raw is None:
        return None
    try:
        decoded = bytes(raw).decode("utf-8").strip()
    except UnicodeDecodeError:
        return None
    if decoded.startswith("https://"):
        return decoded
    return None
||||||
|
|
||||||
|
|
||||||
|
def lnurlp_url_from_lud16(lud16: str) -> str | None: |
||||||
|
""" |
||||||
|
Map ``lud16`` to an LNURL-pay **first request** URL. |
||||||
|
|
||||||
|
- Lightning address ``local@domain`` → ``https://domain/.well-known/lnurlp/local`` (LUD-16). |
||||||
|
- Already ``https://…`` → returned as-is (trimmed). |
||||||
|
""" |
||||||
|
s = lud16.strip() |
||||||
|
if not s: |
||||||
|
return None |
||||||
|
if s.startswith("https://"): |
||||||
|
return s |
||||||
|
if s.startswith("http://"): |
||||||
|
return None |
||||||
|
if "@" in s and not s.startswith("lnurl"): |
||||||
|
local, domain = s.split("@", 1) |
||||||
|
local, domain = local.strip(), domain.strip().lower() |
||||||
|
if local and domain and re.fullmatch(r"[a-z0-9.-]+", domain): |
||||||
|
return f"https://{domain}/.well-known/lnurlp/{local}" |
||||||
|
return None |
||||||
|
|
||||||
|
|
||||||
|
def _normalize_lnurlp_first_url(u: str) -> str: |
||||||
|
"""Stable key for deduplicating identical entry points (ignores trivial suffix differences).""" |
||||||
|
return u.strip().rstrip("/").lower() |
||||||
|
|
||||||
|
|
||||||
|
def normalize_callback(cb: str) -> str:
    """Canonical callback key: drop the query string and trailing slash, lowercase.

    Used to detect that the same wallet is reachable via both lud06 and lud16.
    """
    base, _, _ = cb.strip().partition("?")
    return base.rstrip("/").lower()
||||||
|
|
||||||
|
|
||||||
|
def collect_unique_lnurlp_urls(lud06: str | None, lud16: str | None) -> list[str]:
    """Ordered unique LNURL-pay **first-hop** URLs from kind 0 fields (lud06 then lud16)."""
    resolved: list[str] = []
    keys_seen: set[str] = set()
    for candidate in (lud06, lud16):
        if not candidate:
            continue
        text = candidate.strip()
        if text.lower().startswith("lnurl"):
            url = decode_lnurl_bech32(text)
        elif "@" in text or text.startswith("https://"):
            url = lnurlp_url_from_lud16(text)
        else:
            url = None
        if not url:
            continue
        # Deduplicate on the normalized form; keep the first spelling seen.
        key = _normalize_lnurlp_first_url(url)
        if key not in keys_seen:
            keys_seen.add(key)
            resolved.append(url.strip())
    return resolved
||||||
|
|
||||||
|
|
||||||
|
# Hard cap on the first-hop payload so a hostile endpoint cannot make us
# buffer an arbitrarily large response body.
_MAX_LNURLP_RESPONSE_BYTES = 2 * 1024 * 1024


def fetch_lnurlp_pay_json(url: str, *, timeout: float = 14.0) -> dict[str, Any] | None:
    """GET the LNURL-pay first response; return the JSON object or ``None``.

    Returns ``None`` on any network / decode / parse failure, when the JSON
    document is not an object, and when the response exceeds the ~2 MiB cap
    (the original read the body unbounded).
    """
    try:
        req = Request(
            url,
            headers={"User-Agent": _USER_AGENT, "Accept": "application/json"},
            method="GET",
        )
        with urlopen(req, timeout=timeout) as resp:  # noqa: S310
            # Read one byte past the cap so an oversized body is detectable.
            blob = resp.read(_MAX_LNURLP_RESPONSE_BYTES + 1)
        if len(blob) > _MAX_LNURLP_RESPONSE_BYTES:
            return None
        data = json.loads(blob.decode("utf-8"))
    except (URLError, OSError, UnicodeDecodeError, json.JSONDecodeError, TypeError, ValueError):
        return None
    return cast(dict[str, Any], data) if isinstance(data, dict) else None
||||||
|
|
||||||
|
|
||||||
|
def _int_msat_field(v: object) -> int: |
||||||
|
if isinstance(v, bool) or v is None: |
||||||
|
return 0 |
||||||
|
if isinstance(v, int): |
||||||
|
return v |
||||||
|
if isinstance(v, str): |
||||||
|
try: |
||||||
|
return int(v.strip()) |
||||||
|
except ValueError: |
||||||
|
return 0 |
||||||
|
return 0 |
||||||
|
|
||||||
|
|
||||||
|
def _msat_range_sats(min_msat: object, max_msat: object) -> str:
    """Human-readable sat range from min/max millisat fields ("unknown" if both unset)."""
    low = _int_msat_field(min_msat)
    high = _int_msat_field(max_msat)
    if max(low, high) <= 0:
        return "unknown"
    # LNURL-pay amounts are millisatoshis (1000 msat = 1 sat).
    low_sats = low / 1000.0
    high_sats = high / 1000.0
    if low_sats == high_sats:
        return f"{low_sats:.0f} sat"
    return f"{low_sats:.0f}–{high_sats:.0f} sat"
||||||
|
|
||||||
|
|
||||||
|
def _metadata_lines(meta: object) -> str: |
||||||
|
if isinstance(meta, str): |
||||||
|
try: |
||||||
|
inner = json.loads(meta) |
||||||
|
except json.JSONDecodeError: |
||||||
|
return f"<p style='margin:4px 0'>{html.escape(meta[:500])}</p>" |
||||||
|
if isinstance(inner, dict): |
||||||
|
d = cast(dict[str, Any], inner) |
||||||
|
parts: list[str] = [] |
||||||
|
for key in ("long_description", "description", "image"): |
||||||
|
v_raw = d.get(key) |
||||||
|
if isinstance(v_raw, str) and v_raw.strip(): |
||||||
|
v = v_raw.strip() |
||||||
|
parts.append( |
||||||
|
f"<p style='margin:4px 0'><b>{html.escape(key)}</b>: {html.escape(v[:800])}</p>" |
||||||
|
) |
||||||
|
return "".join(parts) if parts else "" |
||||||
|
return "" |
||||||
|
return "" |
||||||
|
|
||||||
|
|
||||||
|
def format_lnurl_pay_html(source_url: str, doc: dict[str, Any]) -> str:
    """Single payRequest document → HTML fragment.

    Every interpolated field originates from an untrusted remote JSON
    document, so each one is HTML-escaped (the original interpolated the raw
    ``minSendable``/``maxSendable`` values unescaped — an injection vector).
    """
    if doc.get("tag") != "payRequest":
        return f"<p><i>Unexpected tag {html.escape(str(doc.get('tag')))}</i></p>"
    cb = str(doc.get("callback") or "")
    ms = _msat_range_sats(doc.get("minSendable"), doc.get("maxSendable"))
    allows = doc.get("allowsNostr")
    npk = doc.get("nostrPubkey")
    meta_html = _metadata_lines(doc.get("metadata"))
    min_raw = html.escape(str(doc.get("minSendable")))
    max_raw = html.escape(str(doc.get("maxSendable")))
    # Show the ellipsis only when the callback was actually truncated.
    cb_disp = html.escape(cb[:120]) + ("…" if len(cb) > 120 else "")
    lines = [
        f"<p style='margin:4px 0'><b>Resolved from</b> <code>{html.escape(source_url[:96])}</code></p>",
        f"<p style='margin:4px 0'><b>Amount range</b> ({min_raw}–{max_raw} msat): {html.escape(ms)}</p>",
        f"<p style='margin:4px 0'><b>allowsNostr</b>: {html.escape(str(allows))} "
        f"<b>nostrPubkey</b>: <code>{html.escape(str(npk)[:80])}</code></p>",
        f"<p style='margin:4px 0'><b>callback</b> <code>{cb_disp}</code></p>",
        meta_html,
    ]
    return "".join(lines)
||||||
|
|
||||||
|
|
||||||
|
def build_merged_lnurl_pay_section(urls: list[str]) -> str:
    """
    Fetch each unique URL, dedupe by ``callback`` host/path, aggregate min/max across merged group.

    Returns HTML for the profile page (empty string if nothing usable).
    Non-positive msat values (missing or unparseable fields) are excluded from
    the aggregation — previously one incomplete document dragged the merged
    ``minSendable`` down to 0.
    """
    if not urls:
        return ""
    by_cb: dict[str, list[tuple[str, dict[str, Any]]]] = {}
    errors: list[str] = []
    for u in urls:
        j = fetch_lnurlp_pay_json(u)
        if not j:
            errors.append(f"<li>Fetch failed or invalid JSON: <code>{html.escape(u[:80])}</code></li>")
            continue
        if j.get("tag") != "payRequest":
            errors.append(f"<li>Not payRequest: <code>{html.escape(u[:80])}</code></li>")
            continue
        cb = str(j.get("callback") or "")
        # Docs without a callback cannot be merged; key them by their own URL.
        key = normalize_callback(cb) if cb else f"nocab:{_normalize_lnurlp_first_url(u)}"
        by_cb.setdefault(key, []).append((u, j))

    blocks: list[str] = []
    for _key, group in by_cb.items():
        # Aggregate millisats across aliases pointing at the same callback.
        mins: list[int] = []
        maxs: list[int] = []
        first_doc: dict[str, Any] | None = None
        sources: list[str] = []
        for src, d in group:
            sources.append(src)
            if first_doc is None:
                first_doc = d
            for fld, bucket in (("minSendable", mins), ("maxSendable", maxs)):
                val = _int_msat_field(d.get(fld))
                # Skip missing/unparseable (<= 0) values so one incomplete doc
                # does not pull the aggregated minimum down to zero.
                if val > 0:
                    bucket.append(val)
        if first_doc is None:
            continue
        agg = dict(first_doc)
        if mins:
            agg["minSendable"] = min(mins)
        if maxs:
            agg["maxSendable"] = max(maxs)
        src_label = html.escape(" + ".join(s[:48] for s in sources)[:200])
        blocks.append(
            f"<div style='margin:12px 0;padding:10px;border:1px solid #2a3d34;border-radius:8px'>"
            f"<p style='margin:0 0 8px;color:#8fb0a3;font-size:14px'><b>Merged sources</b> ({len(group)}): {src_label}</p>"
            f"{format_lnurl_pay_html(sources[0], agg)}"
            f"</div>"
        )

    err_html = f"<ul style='color:#6a8578'>{''.join(errors)}</ul>" if errors else ""
    if not blocks and not errors:
        return ""
    head = "<h3 style='color:#dceee6;margin:16px 0 8px'>Lightning (LNURL-pay, live)</h3>"
    return head + "".join(blocks) + err_html
||||||
@ -0,0 +1,300 @@ |
|||||||
|
"""Full-screen profile view (kind 0, relays, follows, notes) opened in a browser tab.""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
import html |
||||||
|
import json |
||||||
|
from typing import cast |
||||||
|
|
||||||
|
from PySide6.QtCore import QObject, QRunnable, Qt, QThreadPool, Signal, QUrl |
||||||
|
from PySide6.QtGui import QDesktopServices |
||||||
|
from PySide6.QtWidgets import QFrame, QScrollArea, QTabWidget, QVBoxLayout, QWidget |
||||||
|
|
||||||
|
from imwald.core.author_html import avatar_img_or_placeholder |
||||||
|
from imwald.core.database import Database |
||||||
|
from imwald.core.kind0_profile import display_name_from_profile_or_hex, parse_kind0_profile |
||||||
|
from imwald.core.profile_lnurl import build_merged_lnurl_pay_section, collect_unique_lnurlp_urls |
||||||
|
from imwald.core.md_render import markdown_html_fragment, markdown_plain_summary |
||||||
|
from imwald.core.nip19 import encode_npub |
||||||
|
from imwald.core.nostr_engine import NostrEngine |
||||||
|
from imwald.core.relay_list import parse_kind10002_tags |
||||||
|
from imwald.ui.note_text_browser import NoteTextBrowser |
||||||
|
from imwald.ui.theme import BORDER, FEED_DOC_CSS, TEXT, TEXT_DIM, TEXT_MUTED |
||||||
|
|
||||||
|
# Notes to list under “Recent in local DB” (feed-shaped kinds).
# NOTE(review): presumably 1=text note, 6=repost, 30023=long-form, etc. per
# the Nostr kind registry — confirm against the NIPs this client supports.
_PROFILE_NOTE_KINDS: tuple[int, ...] = (1, 6, 20, 21, 30023, 9802, 11)
||||||
|
|
||||||
|
|
||||||
|
class _ProfileLnurlSignals(QObject):
    """Signal holder for the background LNURL-pay fetch.

    ``finished`` carries ``(html_fragment, generation)``; the generation
    number lets the receiving page discard results from superseded refreshes.
    """

    finished = Signal(str, int)
||||||
|
|
||||||
|
|
||||||
|
class _ProfileLnurlRunnable(QRunnable):
    """Background task: build the merged LNURL-pay section off the GUI thread.

    Emits ``out.finished(html_fragment, gen)`` when done; ``gen`` lets the
    receiver ignore results from superseded refreshes.
    """

    def __init__(self, urls: list[str], gen: int, out: _ProfileLnurlSignals) -> None:
        super().__init__()
        self._urls = urls
        self._gen = gen
        self._out = out

    def run(self) -> None:
        # Local renamed from ``html`` — the old name shadowed the stdlib
        # ``html`` module imported at module scope.
        section_html = build_merged_lnurl_pay_section(self._urls)
        self._out.finished.emit(section_html, self._gen)
||||||
|
|
||||||
|
|
||||||
|
class ProfilePage(QWidget):
    """One pubkey: metadata, NIP-65 relays, follows (kind 3), emoji inventory, raw JSON, recent notes."""

    # Emitted with a 64-char lowercase hex event id / pubkey when the user
    # clicks an imwald://note/… or imwald://pub/… link (see _on_anchor).
    open_note = Signal(str)
    open_profile = Signal(str)

    def __init__(
        self,
        db: Database,
        engine: NostrEngine,
        pubkey_hex: str,
        parent: QWidget | None = None,
    ) -> None:
        """Build the scrollable HTML body, wire signals, and run the first refresh()."""
        super().__init__(parent)
        self.setObjectName("ProfilePage")
        self._db = db
        self._engine = engine
        # Normalized once; every DB lookup below uses this lowercase hex key.
        self._pubkey = pubkey_hex.strip().lower()
        scroll = QScrollArea(self)
        scroll.setWidgetResizable(True)
        scroll.setFrameShape(QFrame.Shape.NoFrame)
        scroll.setHorizontalScrollBarPolicy(Qt.ScrollBarPolicy.ScrollBarAlwaysOff)
        self._body = NoteTextBrowser()
        self._body.setObjectName("ProfileBody")
        # Links are routed through _on_anchor rather than auto-opened.
        self._body.setOpenLinks(False)
        self._body.setOpenExternalLinks(False)
        self._body.setHorizontalScrollBarPolicy(Qt.ScrollBarPolicy.ScrollBarAlwaysOff)
        self._body.anchorClicked.connect(self._on_anchor)
        scroll.setWidget(self._body)
        lay = QVBoxLayout(self)
        lay.setContentsMargins(0, 0, 0, 0)
        lay.addWidget(scroll)
        # Generation counter guards against stale async LNURL results; the
        # one-thread pool serializes the background fetches.
        self._lnurl_gen = 0
        self._lnurl_sigs = _ProfileLnurlSignals(self)
        self._lnurl_sigs.finished.connect(self._on_lnurl_profile_ready)
        self._lnurl_pool = QThreadPool(self)
        self._lnurl_pool.setMaxThreadCount(1)
        self.refresh()

    def tab_title(self) -> str:
        """Display name from the latest kind 0 (or hex fallback), truncated to 28 chars."""
        row = self._db.get_latest_kind0_profile(self._pubkey)
        p = parse_kind0_profile(row["content"] if row else "")
        t = display_name_from_profile_or_hex(p, self._pubkey)
        return t[:28] + ("…" if len(t) > 28 else "")

    def _on_lnurl_profile_ready(self, html: str, gen: int) -> None:
        """Re-render with the fetched LNURL HTML unless a newer refresh superseded it.

        NOTE(review): the ``html`` parameter shadows the stdlib ``html`` module
        imported at module scope; harmless here (the module is not used in this
        method) but worth renaming.
        """
        if gen != self._lnurl_gen:
            return
        self.refresh(from_lnurl=True, lnurl_html=html)

    def refresh(self, *, from_lnurl: bool = False, lnurl_html: str | None = None) -> None:
        """Rebuild the whole profile document from the local DB and set it on the body.

        Called two ways: normally (``from_lnurl=False``), which also enqueues
        author metadata on the engine and schedules a background LNURL-pay
        fetch; and re-entrantly from _on_lnurl_profile_ready with the fetched
        HTML fragment in ``lnurl_html``.
        """
        if not from_lnurl:
            self._engine.enqueue_author_metadata(self._pubkey)
        pk = self._pubkey
        npub = encode_npub(pk)
        k0_ev = self._db.get_latest_kind0_event(pk)
        prof_row = self._db.get_latest_kind0_profile(pk)
        content = prof_row["content"] if prof_row else ""
        created0 = int(prof_row["created_at"]) if prof_row else 0
        parsed = parse_kind0_profile(content)
        # Kind 0 tags feed NIP-30 emoji rendering in the About section below.
        tags0: list[list[str]] | None = k0_ev["tags"] if k0_ev else None

        # --- Lightning (lud06/lud16) static rows + live LNURL fetch ---
        lud06_raw = parsed.get("lud06")
        lud16_raw = parsed.get("lud16")
        lud06_s = lud06_raw.strip() if isinstance(lud06_raw, str) else ""
        lud16_s = lud16_raw.strip() if isinstance(lud16_raw, str) else ""
        lnurls = collect_unique_lnurlp_urls(lud06_s or None, lud16_s or None)
        pay_rows: list[str] = []
        if lud06_s:
            pay_rows.append(
                f"<p style='margin:6px 0;color:{TEXT_DIM}'><b>lud06</b> (LNURL / NIP-57): "
                f"<code style='color:{TEXT_MUTED}'>{html.escape(lud06_s[:200])}</code></p>"
            )
        if lud16_s:
            pay_rows.append(
                f"<p style='margin:6px 0;color:{TEXT_DIM}'><b>lud16</b> (Lightning address or HTTPS LNURL): "
                f"<code style='color:{TEXT_MUTED}'>{html.escape(lud16_s[:200])}</code></p>"
            )
        pay_static = "".join(pay_rows)
        live_lnurl = ""
        if from_lnurl and lnurl_html is not None:
            live_lnurl = lnurl_html
        elif not from_lnurl and lnurls:
            # Bump the generation so any in-flight fetch result is discarded.
            self._lnurl_gen += 1
            gen = self._lnurl_gen
            self._lnurl_pool.start(_ProfileLnurlRunnable(lnurls, gen, self._lnurl_sigs))
            live_lnurl = f"<p style='color:{TEXT_DIM}'><i>Fetching LNURL-pay metadata…</i></p>"
        pay_block = ""
        if pay_static or live_lnurl:
            pay_block = (
                f"<h3 style='color:{TEXT};margin:16px 0 8px'>Lightning (NIP-57)</h3>"
                f"<div style='margin-bottom:8px'>{pay_static}{live_lnurl}</div>"
            )

        # --- Header: avatar, display name, npub, nip05, banner ---
        disp = html.escape(display_name_from_profile_or_hex(parsed, pk))
        av = avatar_img_or_placeholder(parsed, 72, border_hex=BORDER)
        nip05 = html.escape((parsed.get("nip05") or "").strip()) if parsed.get("nip05") else ""
        nip05_html = (
            f"<div style='color:{TEXT_MUTED};font-size:16px;margin-top:6px'>{nip05}</div>" if nip05 else ""
        )
        banner = parsed.get("banner")
        banner_html = ""
        # Only https banners are rendered.
        if banner and str(banner).strip().startswith("https://"):
            bu = html.escape(str(banner).strip(), quote=True)
            banner_html = (
                f"<div style='margin-bottom:12px;border-radius:10px;overflow:hidden'>"
                f'<img src="{bu}" alt="" style="width:100%;max-height:160px;object-fit:cover" /></div>'
            )

        # --- About (markdown-rendered) ---
        about_raw = (parsed.get("about") or "").strip()
        about_md = ""
        if about_raw:
            frag = markdown_html_fragment(
                about_raw,
                db=self._db,
                nip30_tags=tags0 or None,
                nip30_author_pubkey=pk,
            )
            about_md = f"<h3 style='color:{TEXT};margin:16px 0 8px'>About</h3><div class=\"md\">{frag}</div>"

        # --- Raw kind 0 JSON, pretty-printed when valid, truncated at 12 kB ---
        raw_json = ""
        try:
            obj = json.loads(content or "")
            if isinstance(obj, dict):
                raw_json = json.dumps(obj, indent=2, ensure_ascii=False)
        except json.JSONDecodeError:
            # Unparseable content is shown verbatim (escaped) instead.
            raw_json = content or ""
        raw_esc = html.escape(raw_json[:12000] + ("…" if len(raw_json) > 12000 else ""), quote=False)
        json_block = (
            f"<h3 style='color:{TEXT};margin:16px 0 8px'>Kind 0 JSON (full)</h3>"
            f"<pre style='color:{TEXT_DIM};font-size:14px;white-space:pre-wrap;word-break:break-all;"
            f"background:rgba(0,0,0,0.25);padding:12px;border-radius:8px;border:1px solid {BORDER}'>{raw_esc}</pre>"
        )

        # --- NIP-65 relay list (kind 10002), capped at 40 per direction ---
        k10002 = self._db.get_latest_kind10002_event(pk)
        relay_html = (
            f"<p style='color:{TEXT_DIM};font-size:15px'>"
            f"<i>No NIP-65 relay list (kind 10002) in local DB yet.</i></p>"
        )
        if k10002:
            reads, writes = parse_kind10002_tags(k10002.get("tags") or [])
            r_esc = "<br>".join(html.escape(u) for u in reads[:40])
            w_esc = "<br>".join(html.escape(u) for u in writes[:40])
            relay_html = (
                f"<h3 style='color:{TEXT};margin:16px 0 8px'>Relays (NIP-65, kind 10002)</h3>"
                f"<p style='color:{TEXT_MUTED};font-size:15px'><b>Read</b></p>"
                f"<div style='color:{TEXT_DIM};font-size:14px'>{r_esc or '—'}</div>"
                f"<p style='color:{TEXT_MUTED};font-size:15px;margin-top:10px'><b>Write</b></p>"
                f"<div style='color:{TEXT_DIM};font-size:14px'>{w_esc or '—'}</div>"
            )

        # --- Follows (kind 3): fetch up to 400, render the first 80 ---
        follows = self._db.get_latest_kind3_contact_pubkeys(pk, limit=400)
        follow_lines: list[str] = []
        for fp in follows[:80]:
            href = f"imwald://pub/{fp}"
            np = encode_npub(fp)
            follow_lines.append(
                f'<div style="margin:4px 0"><a href="{html.escape(href, quote=True)}" '
                f'style="color:{TEXT};text-decoration:none">{html.escape(np)}</a>'
                f'<span style="color:{TEXT_DIM};font-size:13px"> · {html.escape(fp[:16])}…</span></div>'
            )
        _no_follow = f"<i style='color:{TEXT_DIM}'>No kind 3 in local DB.</i>"
        follow_block = (
            f"<h3 style='color:{TEXT};margin:16px 0 8px'>Following (kind 3, local snapshot)</h3>"
            f"<div style='font-size:14px'>{''.join(follow_lines) or _no_follow}</div>"
        )

        # --- NIP-30 custom emoji inventory, first 48 sorted by shortcode ---
        nip30 = self._db.get_author_nip30_emoji_urls(pk)
        em_lines: list[str] = []
        for short, url in sorted(nip30.items(), key=lambda x: x[0])[:48]:
            em_lines.append(
                f"<div style='margin:3px 0'><code style='color:{TEXT_MUTED}'>:{html.escape(short)}:</code> "
                f'<a href="{html.escape(url, quote=True)}" style="color:{TEXT}">{html.escape(url[:48])}…</a></div>'
            )
        _no_emoji = f"<i style='color:{TEXT_DIM}'>No emoji packs indexed yet.</i>"
        emoji_block = (
            f"<h3 style='color:{TEXT};margin:16px 0 8px'>Custom emoji (NIP-30, local)</h3>"
            f"<div>{''.join(em_lines) or _no_emoji}</div>"
        )

        # --- Recent feed-shaped notes stored locally ---
        notes = self._db.list_events_by_pubkey(pk, kinds=_PROFILE_NOTE_KINDS, limit=40)
        note_lines: list[str] = []
        for ev in notes:
            eid = str(ev["id"])
            href = f"imwald://note/{eid}"
            nip = cast(list[list[str]], ev["tags"]) if isinstance(ev.get("tags"), list) else None
            snip = markdown_plain_summary(
                ev.get("content") or "",
                max_len=72,
                db=self._db,
                nip30_tags=nip,
                nip30_author_pubkey=str(ev.get("pubkey") or "") or None,
            )
            note_lines.append(
                f'<div style="margin:8px 0;padding:8px;border:1px solid {BORDER};border-radius:8px">'
                f"<span style='color:{TEXT_MUTED};font-size:13px'>k{int(ev['kind'])} · {int(ev['created_at'])}</span><br>"
                f'<a href="{html.escape(href, quote=True)}" style="color:{TEXT};font-weight:600;text-decoration:none">'
                f"Open in feed</a>"
                f"<div style='color:{TEXT_DIM};margin-top:6px;font-size:15px'>{html.escape(snip)}</div>"
                "</div>"
            )
        _no_notes = f"<i style='color:{TEXT_DIM}'>No matching notes stored yet.</i>"
        notes_block = (
            f"<h3 style='color:{TEXT};margin:16px 0 8px'>Recent notes (local DB)</h3>"
            f"{''.join(note_lines) or _no_notes}"
        )

        k0_meta = ""
        if k0_ev:
            k0_meta = (
                f"<p style='color:{TEXT_DIM};font-size:14px'>Kind 0 event id: <code>{html.escape(str(k0_ev['id']))}</code>"
                f" · updated {created0}</p>"
            )

        # --- Assemble the final document and push it into the browser widget ---
        doc = (
            "<!DOCTYPE html><html><head><meta charset=\"utf-8\">"
            f"{FEED_DOC_CSS}</head><body style='padding:12px 14px'>"
            f"{banner_html}"
            f"<div style='display:flex;align-items:flex-start;gap:14px;margin-bottom:8px'>"
            f"{av}"
            f"<div style='flex:1;min-width:0'>"
            f"<div style='font-size:26px;font-weight:700;color:{TEXT}'>{disp}</div>"
            f"<div style='color:{TEXT_MUTED};font-size:15px;margin-top:4px'>{html.escape(npub)}</div>"
            f"<div style='color:{TEXT_DIM};font-size:14px;margin-top:2px'>{html.escape(pk[:24])}…</div>"
            f"{nip05_html}"
            f"</div></div>"
            f"{k0_meta}"
            f"{about_md}"
            f"{pay_block}"
            f"{relay_html}"
            f"{follow_block}"
            f"{emoji_block}"
            f"{json_block}"
            f"{notes_block}"
            "</body></html>"
        )
        self._body.setHtml(doc)
        # Keep the hosting tab's title in sync with the (possibly new) name.
        tw = self.parentWidget()
        if isinstance(tw, QTabWidget):
            i = tw.indexOf(self)
            if i >= 0:
                tw.setTabText(i, self.tab_title())

    def _on_anchor(self, url: QUrl) -> None:
        """Route clicked links: imwald://pub|note stay in-app, http(s) opens the OS browser."""
        s = url.toString()
        if url.scheme() == "imwald" and url.host() == "pub":
            tail = (url.path() or "").strip("/").lower()
            # Only well-formed 64-char hex pubkeys are emitted.
            if len(tail) == 64 and all(c in "0123456789abcdef" for c in tail):
                self.open_profile.emit(tail)
            return
        if url.scheme() == "imwald" and url.host() == "note":
            eid = (url.path() or "").strip("/")
            if len(eid) == 64 and all(c in "0123456789abcdef" for c in eid.lower()):
                self.open_note.emit(eid.lower())
            return
        if s.startswith("https://") or s.startswith("http://"):
            QDesktopServices.openUrl(url)
||||||
@ -0,0 +1,73 @@ |
|||||||
|
"""Tests for kind-0 lud06/lud16 URL collection and LNURL-pay merge (mocked HTTP).""" |
||||||
|
|
||||||
|
from __future__ import annotations |
||||||
|
|
||||||
|
from typing import Any |
||||||
|
from unittest.mock import MagicMock, patch |
||||||
|
|
||||||
|
from imwald.core.profile_lnurl import ( |
||||||
|
build_merged_lnurl_pay_section, |
||||||
|
collect_unique_lnurlp_urls, |
||||||
|
lnurlp_url_from_lud16, |
||||||
|
normalize_callback, |
||||||
|
) |
||||||
|
|
||||||
|
|
||||||
|
def test_lnurlp_url_from_lightning_address() -> None:
    """Mixed-case address: domain is lowercased, local part keeps its case."""
    result = lnurlp_url_from_lud16("Alice@Example.COM")
    assert result == "https://example.com/.well-known/lnurlp/Alice"
||||||
|
|
||||||
|
|
||||||
|
def test_collect_unique_order_and_dedupe() -> None:
    """Identical first-hop URLs collapse to one; a lightning address maps to LUD-16 form."""
    same = "https://domain/.well-known/lnurlp/x"
    assert collect_unique_lnurlp_urls(same, same) == [same]
    assert collect_unique_lnurlp_urls(None, "a@b.co") == ["https://b.co/.well-known/lnurlp/a"]
||||||
|
|
||||||
|
|
||||||
|
def test_normalize_callback_strips_query() -> None:
    """Query string and case differences do not produce distinct dedupe keys."""
    expected = "https://host/path"
    assert normalize_callback("HTTPS://Host/path?x=1") == expected
||||||
|
|
||||||
|
|
||||||
|
@patch("imwald.core.profile_lnurl.fetch_lnurlp_pay_json")
def test_build_merge_dedupes_same_wallet_callback(mock_fetch: MagicMock) -> None:
    """Two aliases resolving to the same callback render a single merged block."""
    shared_doc: dict[str, Any] = {
        "tag": "payRequest",
        "callback": "https://wallet.example/lnurlpay/cb?ok=1",
        "minSendable": 1000,
        "maxSendable": 500_000,
        "allowsNostr": True,
        "nostrPubkey": "ab" * 32,
        "metadata": "[]",
    }
    # Every URL yields a fresh copy of the same payRequest document.
    mock_fetch.side_effect = lambda url: dict(shared_doc)
    html = build_merged_lnurl_pay_section(
        [
            "https://relay-a/.well-known/lnurlp/alice",
            "https://relay-b/.well-known/lnurlp/alice",
        ]
    )
    assert mock_fetch.call_count == 2
    assert html.count("wallet.example/lnurlpay/cb") >= 1
    assert "Merged sources" in html
    assert "(2)" in html
||||||
|
|
||||||
|
|
||||||
|
@patch("imwald.core.profile_lnurl.fetch_lnurlp_pay_json")
def test_build_merge_two_distinct_callbacks(mock_fetch: MagicMock) -> None:
    """Distinct callbacks keep their own blocks instead of merging."""
    docs: dict[str, dict[str, Any]] = {
        "https://a/1": {"tag": "payRequest", "callback": "https://w/a", "minSendable": 1000, "maxSendable": 100_000, "metadata": "[]"},
        "https://b/2": {"tag": "payRequest", "callback": "https://w/b", "minSendable": 2000, "maxSendable": 200_000, "metadata": "[]"},
    }
    mock_fetch.side_effect = docs.__getitem__
    html = build_merged_lnurl_pay_section(list(docs))
    assert mock_fetch.call_count == 2
    assert html.count("Merged sources") == 2
    assert "https://w/a" in html and "https://w/b" in html
||||||
Loading…
Reference in new issue