diff --git a/package-lock.json b/package-lock.json index 6ddefa1f..eb0b11d8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "jumble-imwald", - "version": "20.1.1", + "version": "21.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "jumble-imwald", - "version": "20.1.1", + "version": "21.0.0", "license": "MIT", "dependencies": { "@asciidoctor/core": "^3.0.4", diff --git a/package.json b/package.json index 3916fe09..07551acd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "jumble-imwald", - "version": "20.1.1", + "version": "21.0.0", "description": "A user-friendly Nostr client focused on relay feed browsing and relay discovery, forked from Jumble", "private": true, "type": "module", diff --git a/src/components/HttpRelaysSetting/SaveButton.tsx b/src/components/HttpRelaysSetting/SaveButton.tsx new file mode 100644 index 00000000..6ff8f923 --- /dev/null +++ b/src/components/HttpRelaysSetting/SaveButton.tsx @@ -0,0 +1,84 @@ +import { Button } from '@/components/ui/button' +import { Skeleton } from '@/components/ui/skeleton' +import { createHttpRelayListDraftEvent } from '@/lib/draft-event' +import { showPublishingFeedback, showSimplePublishSuccess, showPublishingError } from '@/lib/publishing-feedback' +import { useNostr } from '@/providers/NostrProvider' +import { TMailboxRelay } from '@/types' +import { CloudUpload } from 'lucide-react' +import { useState } from 'react' +import { useTranslation } from 'react-i18next' +import logger from '@/lib/logger' + +export default function SaveButton({ + mailboxRelays, + hasChange, + setHasChange +}: { + mailboxRelays: TMailboxRelay[] + hasChange: boolean + setHasChange: (hasChange: boolean) => void +}) { + const { t } = useTranslation() + const { pubkey, publish, updateHttpRelayListEvent } = useNostr() + const [pushing, setPushing] = useState(false) + + const save = async () => { + if (!pubkey) return + + setPushing(true) + try { + const event = 
createHttpRelayListDraftEvent(mailboxRelays) + const result = await publish(event) + + const relayStatuses = (result as any).relayStatuses + + await updateHttpRelayListEvent(result) + setHasChange(false) + + if (relayStatuses && relayStatuses.length > 0) { + showPublishingFeedback( + { + success: true, + relayStatuses: relayStatuses, + successCount: relayStatuses.filter((s: any) => s.success).length, + totalCount: relayStatuses.length + }, + { + message: t('HTTP relays saved'), + duration: 6000 + } + ) + } else { + showSimplePublishSuccess(t('HTTP relays saved')) + } + } catch (error) { + logger.error('Failed to save HTTP relay list', { error }) + if (error instanceof Error && (error as any).relayStatuses) { + const errorRelayStatuses = (error as any).relayStatuses + showPublishingFeedback( + { + success: false, + relayStatuses: errorRelayStatuses, + successCount: errorRelayStatuses.filter((s: any) => s.success).length, + totalCount: errorRelayStatuses.length + }, + { + message: error.message || t('Failed to save HTTP relay list'), + duration: 6000 + } + ) + } else { + showPublishingError(error instanceof Error ? 
error : new Error(t('Failed to save HTTP relay list'))) + } + } finally { + setPushing(false) + } + } + + return ( + + ) +} diff --git a/src/components/HttpRelaysSetting/index.tsx b/src/components/HttpRelaysSetting/index.tsx new file mode 100644 index 00000000..dc6d30cd --- /dev/null +++ b/src/components/HttpRelaysSetting/index.tsx @@ -0,0 +1,165 @@ +import { Button } from '@/components/ui/button' +import { isHttpRelayUrl, normalizeHttpRelayUrl } from '@/lib/url' +import { useNostr } from '@/providers/NostrProvider' +import { TMailboxRelay, TMailboxRelayScope } from '@/types' +import { useEffect, useState } from 'react' +import { useTranslation } from 'react-i18next' +import { + DndContext, + closestCenter, + KeyboardSensor, + PointerSensor, + TouchSensor, + useSensor, + useSensors, + DragEndEvent +} from '@dnd-kit/core' +import { + arrayMove, + SortableContext, + sortableKeyboardCoordinates, + verticalListSortingStrategy +} from '@dnd-kit/sortable' +import { restrictToVerticalAxis, restrictToParentElement } from '@dnd-kit/modifiers' +import MailboxRelay from '../MailboxSetting/MailboxRelay' +import NewMailboxRelayInput from '../MailboxSetting/NewMailboxRelayInput' +import RelayCountWarning from '../MailboxSetting/RelayCountWarning' +import SaveButton from './SaveButton' +import DiscoveredRelays from '../MailboxSetting/DiscoveredRelays' + +export default function HttpRelaysSetting() { + const { t } = useTranslation() + const { pubkey, httpRelayListEvent, checkLogin } = useNostr() + const [relays, setRelays] = useState([]) + const [hasChange, setHasChange] = useState(false) + + const sensors = useSensors( + useSensor(PointerSensor, { + activationConstraint: { distance: 8 } + }), + useSensor(TouchSensor, { + activationConstraint: { delay: 200, tolerance: 8 } + }), + useSensor(KeyboardSensor, { + coordinateGetter: sortableKeyboardCoordinates + }) + ) + + function handleDragEnd(event: DragEndEvent) { + const { active, over } = event + if (active.id !== over?.id) { + 
const oldIndex = relays.findIndex((relay) => relay.url === active.id) + const newIndex = relays.findIndex((relay) => relay.url === over?.id) + if (oldIndex !== -1 && newIndex !== -1) { + setRelays((relays) => arrayMove(relays, oldIndex, newIndex)) + setHasChange(true) + } + } + } + + useEffect(() => { + if (!httpRelayListEvent) { + setRelays([]) + setHasChange(false) + return + } + const fromTags: TMailboxRelay[] = [] + httpRelayListEvent.tags.forEach((tag) => { + if (tag[0] !== 'r' || !tag[1]) return + const url = tag[1].trim() + if (!isHttpRelayUrl(url)) return + const n = normalizeHttpRelayUrl(url) + if (!n) return + const type = tag[2] + const scope: TMailboxRelayScope = + type === 'read' ? 'read' : type === 'write' ? 'write' : 'both' + fromTags.push({ url: n, scope }) + }) + setRelays(fromTags) + setHasChange(false) + }, [httpRelayListEvent]) + + if (!pubkey) { + return ( +
+ +
+ ) + } + + if (httpRelayListEvent === undefined) { + return
{t('loading...')}
+ } + + const changeScope = (url: string, scope: TMailboxRelayScope) => { + setRelays((prev) => prev.map((r) => (r.url === url ? { ...r, scope } : r))) + setHasChange(true) + } + + const removeRelay = (url: string) => { + setRelays((prev) => prev.filter((r) => r.url !== url)) + setHasChange(true) + } + + const saveNewRelay = (url: string) => { + if (url === '') return null + const normalizedUrl = normalizeHttpRelayUrl(url) + if (!normalizedUrl) { + return t('Invalid relay URL') + } + if (!isHttpRelayUrl(normalizedUrl)) { + return t('HTTP relays must start with https:// or http://') + } + if (relays.some((r) => r.url === normalizedUrl)) { + return t('Relay already exists') + } + setRelays([...relays, { url: normalizedUrl, scope: 'both' }]) + setHasChange(true) + return null + } + + const handleAddDiscovered = (newRelays: TMailboxRelay[]) => { + const httpOnly = newRelays.filter((r) => isHttpRelayUrl(r.url)) + const toAdd = httpOnly.filter((nr) => !relays.some((r) => r.url === nr.url)) + if (toAdd.length > 0) { + setRelays([...relays, ...toAdd]) + setHasChange(true) + } + } + + return ( +
+
+
{t('httpRelaysDescription')}
+
{t('read relays description')}
+
{t('write relays description')}
+
{t('read & write relays notice')}
+
+ + + + + r.url)} strategy={verticalListSortingStrategy}> +
+ {relays.map((relay) => ( + + ))} +
+
+
+ +
+ ) +} diff --git a/src/components/PostEditor/PostRelaySelector.tsx b/src/components/PostEditor/PostRelaySelector.tsx index b15a4753..d6cebcb6 100644 --- a/src/components/PostEditor/PostRelaySelector.tsx +++ b/src/components/PostEditor/PostRelaySelector.tsx @@ -5,7 +5,7 @@ import { SOCIAL_KIND_BLOCKED_RELAY_URLS } from '@/constants' import { NOSTR_URI_FOR_REPLY_PUBKEYS_REGEX } from '@/lib/content-patterns' -import { simplifyUrl, isLocalNetworkUrl, normalizeUrl } from '@/lib/url' +import { simplifyUrl, isLocalNetworkUrl, normalizeAnyRelayUrl, normalizeUrl } from '@/lib/url' import { useCurrentRelays } from '@/providers/CurrentRelaysProvider' import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider' import { useScreenSize } from '@/providers/ScreenSizeProvider' @@ -151,6 +151,7 @@ export default function PostRelaySelector({ const result = await relaySelectionService.selectRelays({ userWriteRelays, + userHttpWriteRelays: relayList?.httpWrite ?? [], userReadRelays: relayList?.read || [], favoriteRelays: memoizedFavoriteRelays, blockedRelays: memoizedBlockedRelays, @@ -256,6 +257,7 @@ export default function PostRelaySelector({ const result = await relaySelectionService.selectRelays({ userWriteRelays, + userHttpWriteRelays: relayList?.httpWrite ?? [], userReadRelays: relayList?.read || [], favoriteRelays: memoizedFavoriteRelays, blockedRelays: memoizedBlockedRelays, @@ -325,8 +327,12 @@ export default function PostRelaySelector({ // Update parent component with selected relays useEffect(() => { // An event is "protected" if we have selected relays that aren't the default user write relays - const userWriteRelays = relayList?.write || [] - const isProtectedEvent = selectedRelayUrls.length > 0 && !selectedRelayUrls.every(url => userWriteRelays.includes(url)) + const defaultUserWriteRelays = [...(relayList?.httpWrite ?? 
[]), ...(relayList?.write || [])] + const normW = (u: string) => normalizeAnyRelayUrl(u) || u + const defaultNorm = new Set(defaultUserWriteRelays.map(normW)) + const isProtectedEvent = + selectedRelayUrls.length > 0 && + !selectedRelayUrls.every((url) => defaultNorm.has(normW(url))) setIsProtectedEvent(isProtectedEvent) setAdditionalRelayUrls(selectedRelayUrls) }, [selectedRelayUrls, relayList, setIsProtectedEvent, setAdditionalRelayUrls]) diff --git a/src/constants.ts b/src/constants.ts index 1e229064..30d11b76 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -309,6 +309,8 @@ export const ExtendedKind = { BLOCKED_RELAYS: 10006, BLOSSOM_SERVER_LIST: 10063, CACHE_RELAYS: 10432, + /** HTTPS index-relay list (same `r` tag semantics as kind 10002; URLs are http/https). */ + HTTP_RELAY_LIST: 10243, RELAY_REVIEW: 31987, GROUP_METADATA: 39000, GROUP_LIST: 10009, // NIP-51 Group List diff --git a/src/hooks/useFetchRelayList.tsx b/src/hooks/useFetchRelayList.tsx index 0e45437e..d935cacb 100644 --- a/src/hooks/useFetchRelayList.tsx +++ b/src/hooks/useFetchRelayList.tsx @@ -7,7 +7,10 @@ export function useFetchRelayList(pubkey?: string | null) { const [relayList, setRelayList] = useState({ write: [], read: [], - originalRelays: [] + originalRelays: [], + httpRead: [], + httpWrite: [], + httpOriginalRelays: [] }) const [isFetching, setIsFetching] = useState(true) diff --git a/src/i18n/locales/de.ts b/src/i18n/locales/de.ts index ca11a001..697ac65c 100644 --- a/src/i18n/locales/de.ts +++ b/src/i18n/locales/de.ts @@ -523,6 +523,13 @@ export default { 'Fügt {{n}} zufällige öffentliche Relays aus der NIP-66-Liveliness-Liste hinzu (bevorzugt solche, deren Monitor eine Write-RTT gemeldet hat). Bei AN standardmäßig ausgewählt; bei AUS in der Liste, aber nicht angehakt.', relayType_local: 'Lokal', relayType_relay_list: 'Relay-Liste', + relayType_http_relay_list: 'HTTP', + 'HTTP relays': 'HTTP-Relays', + httpRelaysDescription: + 'HTTPS-Index-Relays (z. B. 
REST /api/events/filter). Gleiche Lese-/Schreib-/beides-Rollen wie Mailbox-Relays; gespeichert als Kind 10243. Liste leeren und speichern, um eine leere Liste zu veröffentlichen.', + 'HTTP relays saved': 'HTTP-Relays gespeichert', + 'Failed to save HTTP relay list': 'HTTP-Relay-Liste konnte nicht gespeichert werden', + 'HTTP relays must start with https:// or http://': 'HTTP-Relays müssen mit https:// oder http:// beginnen', relayType_client_default: 'Client-Standard', relayType_open_from: 'Aktueller Feed', relayType_favorite: 'Favorit', diff --git a/src/i18n/locales/en.ts b/src/i18n/locales/en.ts index 5968b6bf..6a23af6f 100644 --- a/src/i18n/locales/en.ts +++ b/src/i18n/locales/en.ts @@ -546,6 +546,13 @@ export default { 'Adds {{n}} random public relays from the NIP-66 lively list (preferring monitors that reported a write RTT) to the publish relay list. When ON, they are selected by default; when OFF, they appear in the list but are unchecked so you can optionally include them.', relayType_local: 'Local', relayType_relay_list: 'Relay list', + relayType_http_relay_list: 'HTTP', + 'HTTP relays': 'HTTP relays', + httpRelaysDescription: + 'HTTPS index relays (e.g. REST /api/events/filter). Same read/write/both roles as mailbox relays; stored as kind 10243. 
Clear the list and save to publish an empty list.', + 'HTTP relays saved': 'HTTP relays saved', + 'Failed to save HTTP relay list': 'Failed to save HTTP relay list', + 'HTTP relays must start with https:// or http://': 'HTTP relays must start with https:// or http://', relayType_client_default: 'Client default', relayType_open_from: 'Current feed', relayType_favorite: 'Favorite', diff --git a/src/lib/account-list-relay-urls.ts b/src/lib/account-list-relay-urls.ts index e81eea75..76eeebdd 100644 --- a/src/lib/account-list-relay-urls.ts +++ b/src/lib/account-list-relay-urls.ts @@ -1,6 +1,6 @@ import { getFavoritesFeedRelayUrls } from '@/lib/favorites-feed-relays' import { buildPrioritizedReadRelayUrls, buildPrioritizedWriteRelayUrls } from '@/lib/relay-url-priority' -import { normalizeUrl } from '@/lib/url' +import { normalizeAnyRelayUrl } from '@/lib/url' import client from '@/services/client.service' /** @@ -16,20 +16,20 @@ export async function buildAccountListRelayUrlsForMerge(options: { const myRelayList = await client.fetchRelayList(accountPubkey) const favoritesTier = getFavoritesFeedRelayUrls(favoriteRelays ?? [], blockedRelays) const read = buildPrioritizedReadRelayUrls({ - userReadRelays: myRelayList.read ?? [], - userWriteRelays: myRelayList.write ?? [], + userReadRelays: [...(myRelayList.httpRead ?? []), ...(myRelayList.read ?? [])], + userWriteRelays: [...(myRelayList.httpWrite ?? []), ...(myRelayList.write ?? [])], favoriteRelays: favoritesTier, blockedRelays, maxRelays: 100, applySocialKindBlockedFilter: false }) const write = buildPrioritizedWriteRelayUrls({ - userWriteRelays: myRelayList.write ?? [], + userWriteRelays: [...(myRelayList.httpWrite ?? []), ...(myRelayList.write ?? 
[])], favoriteRelays: favoritesTier, blockedRelays, maxRelays: 100, applySocialKindBlockedFilter: false }) const merged = [...read, ...write] - return [...new Set(merged.map((u) => normalizeUrl(u) || u).filter(Boolean))] + return [...new Set(merged.map((u) => normalizeAnyRelayUrl(u) || u).filter(Boolean))] } diff --git a/src/lib/draft-event.ts b/src/lib/draft-event.ts index 547e30fe..2137b4a8 100644 --- a/src/lib/draft-event.ts +++ b/src/lib/draft-event.ts @@ -643,6 +643,16 @@ export function createRelayListDraftEvent(mailboxRelays: TMailboxRelay[]): TDraf } } +/** Kind 10243 — empty `tags` is a valid “cleared” list (publish to replace). */ +export function createHttpRelayListDraftEvent(mailboxRelays: TMailboxRelay[]): TDraftEvent { + return { + kind: ExtendedKind.HTTP_RELAY_LIST, + content: '', + tags: mailboxRelays.map(({ url, scope }) => buildRTag(url, scope)), + created_at: dayjs().unix() + } +} + /** NIP-A7 spell (kind 777) draft params from Create Spell form. */ export type TSpellDraftParams = { cmd: 'REQ' | 'COUNT' @@ -953,7 +963,11 @@ export async function createPollDraftEvent( relays.forEach((relay) => tags.push(buildRelayTag(relay))) } else { const relayList = await client.fetchRelayList(author) - relayList.read.slice(0, 4).forEach((relay) => { + const readHints = [ + ...(relayList.httpRead || []).slice(0, 4), + ...(relayList.read || []).slice(0, 4) + ].slice(0, 4) + readHints.forEach((relay) => { tags.push(buildRelayTag(relay)) }) } diff --git a/src/lib/event-metadata.ts b/src/lib/event-metadata.ts index d14b7052..17edd18f 100644 --- a/src/lib/event-metadata.ts +++ b/src/lib/event-metadata.ts @@ -1,22 +1,33 @@ import { FAST_READ_RELAY_URLS, POLL_TYPE } from '@/constants' -import { TEmoji, TPollType, TRelayList, TRelaySet, TPaymentInfo, TProfile } from '@/types' +import { TEmoji, TMailboxRelay, TPollType, TRelayList, TRelaySet, TPaymentInfo, TProfile } from '@/types' import { Event, kinds } from 'nostr-tools' import { buildATag } from './draft-event' 
import { getReplaceableEventIdentifier } from './event' import { getAmountFromInvoice, getLightningAddressFromProfile } from './lightning' import { formatPubkey, pubkeyToNpub } from './pubkey' import { generateBech32IdFromATag, generateBech32IdFromETag, tagNameEquals } from './tag' -import { isWebsocketUrl, normalizeHttpUrl, normalizeUrl } from './url' +import { isHttpRelayUrl, isWebsocketUrl, normalizeHttpRelayUrl, normalizeHttpUrl, normalizeUrl } from './url' import { isTorBrowser } from './utils' import logger from '@/lib/logger' +const emptyHttpRelayListFields = { + httpRead: [] as string[], + httpWrite: [] as string[], + httpOriginalRelays: [] as TMailboxRelay[] +} + export function getRelayListFromEvent(event?: Event | null, blockedRelays?: string[]) { if (!event) { - return { write: FAST_READ_RELAY_URLS, read: FAST_READ_RELAY_URLS, originalRelays: [] } + return { + write: FAST_READ_RELAY_URLS, + read: FAST_READ_RELAY_URLS, + originalRelays: [], + ...emptyHttpRelayListFields + } } const torBrowserDetected = isTorBrowser() - const relayList = { write: [], read: [], originalRelays: [] } as TRelayList + const relayList = { write: [], read: [], originalRelays: [] } as Pick // Normalize blocked relays for comparison const normalizedBlockedRelays = (blockedRelays || []).map(url => normalizeUrl(url) || url) @@ -53,7 +64,53 @@ export function getRelayListFromEvent(event?: Event | null, blockedRelays?: stri return { write: relayList.write.length && relayList.write.length <= 8 ? relayList.write : FAST_READ_RELAY_URLS, read: relayList.read.length && relayList.write.length <= 8 ? relayList.read : FAST_READ_RELAY_URLS, - originalRelays: relayList.originalRelays + originalRelays: relayList.originalRelays, + ...emptyHttpRelayListFields + } +} + +/** Kind 10243: `r` tags with http(s) URLs only; same read/write/both semantics as NIP-65. 
*/ +export function getHttpRelayListFromEvent(event?: Event | null, blockedRelays?: string[]) { + const out = { + httpRead: [] as string[], + httpWrite: [] as string[], + httpOriginalRelays: [] as TMailboxRelay[] + } + if (!event) return out + + const torBrowserDetected = isTorBrowser() + const normalizedBlockedRelays = (blockedRelays || []).map((url) => normalizeUrl(url) || url) + + event.tags.filter(tagNameEquals('r')).forEach(([, url, type]) => { + if (!url || typeof url !== 'string' || url.trim() === '') return + if (!isHttpRelayUrl(url)) return + + const normalizedUrl = normalizeHttpRelayUrl(url) + if (!normalizedUrl) return + + const asWs = normalizeUrl(url) + if (asWs && normalizedBlockedRelays.includes(asWs)) return + if (normalizedBlockedRelays.includes(normalizedUrl)) return + + const scope = type === 'read' ? 'read' : type === 'write' ? 'write' : 'both' + out.httpOriginalRelays.push({ url: normalizedUrl, scope }) + + if ((normalizedUrl.includes('.onion') || normalizedUrl.endsWith('.onion/')) && !torBrowserDetected) return + + if (type === 'write') { + out.httpWrite.push(normalizedUrl) + } else if (type === 'read') { + out.httpRead.push(normalizedUrl) + } else { + out.httpWrite.push(normalizedUrl) + out.httpRead.push(normalizedUrl) + } + }) + + return { + httpRead: Array.from(new Set(out.httpRead)), + httpWrite: Array.from(new Set(out.httpWrite)), + httpOriginalRelays: out.httpOriginalRelays } } diff --git a/src/lib/index-relay-http.ts b/src/lib/index-relay-http.ts new file mode 100644 index 00000000..8c1a52d4 --- /dev/null +++ b/src/lib/index-relay-http.ts @@ -0,0 +1,152 @@ +/** + * HTTP JSON API for index-style relays (e.g. gc_index_relay: POST /api/events/filter, POST /api/events). 
+ * @see gc_index_relay lib/gc_index_relay_web/router.ex + */ +import logger from '@/lib/logger' +import { normalizeHttpRelayUrl } from '@/lib/url' +import type { Filter, Event as NEvent } from 'nostr-tools' +import { verifyEvent } from 'nostr-tools' + +function trimSlash(base: string): string { + return base.replace(/\/+$/, '') +} + +export function indexRelayFilterUrl(baseUrl: string): string { + return `${trimSlash(normalizeHttpRelayUrl(baseUrl) || baseUrl)}/api/events/filter` +} + +export function indexRelayPublishUrl(baseUrl: string): string { + return `${trimSlash(normalizeHttpRelayUrl(baseUrl) || baseUrl)}/api/events` +} + +/** Map a Nostr filter to gc_index_relay POST body (requires `limit` 1–100; strips unsupported keys). */ +export function nostrFilterToIndexRelayBody(f: Filter): Record { + const body: Record = {} + const lim = f.limit + const capped = lim == null || lim < 1 ? 100 : Math.min(100, lim) + body.limit = capped + if (f.ids?.length) body.ids = f.ids + if (f.authors?.length) body.authors = f.authors + if (f.kinds?.length) body.kinds = f.kinds + if (f.since != null) body.since = f.since + if (f.until != null) body.until = f.until + for (const key of Object.keys(f)) { + if (key.startsWith('#') && key.length === 2) { + const v = (f as Record)[key] + if (Array.isArray(v) && v.length > 0) body[key] = v + } + } + return body +} + +function rawToVerifiedEvent(raw: Record): NEvent | null { + try { + const id = raw.id + const pubkey = raw.pubkey + const created_at = raw.created_at + const kind = raw.kind + const tags = raw.tags + const content = raw.content + const sig = raw.sig + if ( + typeof id !== 'string' || + typeof pubkey !== 'string' || + typeof created_at !== 'number' || + typeof kind !== 'number' || + !Array.isArray(tags) || + typeof content !== 'string' || + typeof sig !== 'string' + ) { + return null + } + const ev = { id, pubkey, created_at, kind, tags, content, sig } as NEvent + return verifyEvent(ev) ? 
ev : null + } catch { + return null + } +} + +/** + * Query one HTTP index relay. Runs one POST per filter when given an array. + */ +export async function queryIndexRelay( + baseUrl: string, + filter: Filter | Filter[], + options?: { signal?: AbortSignal } +): Promise { + const base = normalizeHttpRelayUrl(baseUrl) || baseUrl + const endpoint = indexRelayFilterUrl(base) + const filters = Array.isArray(filter) ? filter : [filter] + const out: NEvent[] = [] + const seen = new Set() + for (const f of filters) { + const body = nostrFilterToIndexRelayBody(filterForIndexRelay(f)) + try { + const res = await fetch(endpoint, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json' + }, + body: JSON.stringify(body), + signal: options?.signal + }) + if (!res.ok) { + logger.warn('[IndexRelayHttp] filter request failed', { endpoint, status: res.status }) + continue + } + const json = (await res.json()) as { data?: unknown } + const data = json.data + if (!Array.isArray(data)) continue + for (const item of data) { + if (!item || typeof item !== 'object') continue + const ev = rawToVerifiedEvent(item as Record) + if (ev && !seen.has(ev.id)) { + seen.add(ev.id) + out.push(ev) + } + } + } catch (e) { + if ((e as Error).name === 'AbortError') throw e + logger.warn('[IndexRelayHttp] filter request error', { endpoint, error: e }) + } + } + return out +} + +function filterForIndexRelay(f: Filter): Filter { + const { search: _s, ...rest } = f + return rest as Filter +} + +export async function publishEventToIndexRelay( + baseUrl: string, + event: NEvent, + options?: { signal?: AbortSignal } +): Promise { + const base = normalizeHttpRelayUrl(baseUrl) || baseUrl + const endpoint = indexRelayPublishUrl(base) + const res = await fetch(endpoint, { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + event: { + id: event.id, + pubkey: event.pubkey, + created_at: 
event.created_at, + kind: event.kind, + tags: event.tags, + content: event.content, + sig: event.sig + } + }), + signal: options?.signal + }) + if (!res.ok) { + const text = await res.text().catch(() => '') + throw new Error(`HTTP ${res.status}${text ? `: ${text.slice(0, 200)}` : ''}`) + } +} diff --git a/src/lib/relay-list-builder.ts b/src/lib/relay-list-builder.ts index c362c7f0..847bcd72 100644 --- a/src/lib/relay-list-builder.ts +++ b/src/lib/relay-list-builder.ts @@ -10,7 +10,7 @@ */ import { FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS, PROFILE_FETCH_RELAY_URLS, SEARCHABLE_RELAY_URLS } from '@/constants' -import { normalizeUrl } from '@/lib/url' +import { normalizeAnyRelayUrl, normalizeUrl } from '@/lib/url' import { getCacheRelayUrls } from './private-relays' import client from '@/services/client.service' import logger from '@/lib/logger' @@ -20,7 +20,7 @@ function dedupeNormalizedRelayUrls(urls: string[]): string[] { const seen = new Set() const out: string[] = [] for (const u of urls) { - const n = normalizeUrl(u) || u + const n = normalizeAnyRelayUrl(u) || u.trim() if (!n || seen.has(n)) continue seen.add(n) out.push(n) @@ -95,7 +95,7 @@ export async function buildComprehensiveRelayList(options: RelayListBuilderOptio const addRelay = (url: string | undefined) => { if (!url) return - const normalized = normalizeUrl(url) + const normalized = normalizeAnyRelayUrl(url) if (!normalized) return // Filter blocked (case-insensitive comparison) if (normalizedBlocked.has(normalized.toLowerCase())) return @@ -127,11 +127,16 @@ export async function buildComprehensiveRelayList(options: RelayListBuilderOptio const authorRelayList = await Promise.race([relayListPromise, timeoutPromise]) if (authorRelayList) { - const authorOutboxes = (authorRelayList.write || []).slice(0, 10) + const authorOutboxes = [ + ...(authorRelayList.httpWrite || []).slice(0, 10), + ...(authorRelayList.write || []).slice(0, 10) + ] authorOutboxes.forEach(addRelay) - - // Also include author's 
read relays (inboxes) for better discovery - const authorInboxes = (authorRelayList.read || []).slice(0, 10) + + const authorInboxes = [ + ...(authorRelayList.httpRead || []).slice(0, 10), + ...(authorRelayList.read || []).slice(0, 10) + ] authorInboxes.forEach(addRelay) logger.debug('[RelayListBuilder] Added author relays', { @@ -161,9 +166,14 @@ export async function buildComprehensiveRelayList(options: RelayListBuilderOptio const userRelayList = await Promise.race([relayListPromise, timeoutPromise]) if (userRelayList) { - // Include both read and write - const userRead = (userRelayList.read || []).slice(0, 10) - const userWrite = (userRelayList.write || []).slice(0, 10) + const userRead = [ + ...(userRelayList.httpRead || []).slice(0, 10), + ...(userRelayList.read || []).slice(0, 10) + ] + const userWrite = [ + ...(userRelayList.httpWrite || []).slice(0, 10), + ...(userRelayList.write || []).slice(0, 10) + ] userRead.forEach(addRelay) userWrite.forEach(addRelay) } @@ -214,7 +224,10 @@ export async function buildComprehensiveRelayList(options: RelayListBuilderOptio const userRelayList = await Promise.race([relayListPromise, timeoutPromise]) if (userRelayList) { - const userInboxes = (userRelayList.read || []).slice(0, 10) + const userInboxes = [ + ...(userRelayList.httpRead || []).slice(0, 10), + ...(userRelayList.read || []).slice(0, 10) + ] userInboxes.forEach(addRelay) } @@ -425,7 +438,7 @@ export async function buildReplyWriteRelayList( const addRelay = (url: string | undefined) => { if (!url) return - const normalized = normalizeUrl(url) + const normalized = normalizeAnyRelayUrl(url) if (!normalized) return // Filter blocked (case-insensitive comparison) if (normalizedBlocked.has(normalized.toLowerCase())) return @@ -443,11 +456,16 @@ export async function buildReplyWriteRelayList( const opRelayList = await Promise.race([relayListPromise, timeoutPromise]) if (opRelayList) { - const opOutboxes = (opRelayList.write || []).slice(0, 10) + const opOutboxes = [ + 
...(opRelayList.httpWrite || []).slice(0, 10), + ...(opRelayList.write || []).slice(0, 10) + ] opOutboxes.forEach(addRelay) - - // OP author's inboxes - const opInboxes = (opRelayList.read || []).slice(0, 10) + + const opInboxes = [ + ...(opRelayList.httpRead || []).slice(0, 10), + ...(opRelayList.read || []).slice(0, 10) + ] opInboxes.forEach(addRelay) } } catch (error) { @@ -466,7 +484,10 @@ export async function buildReplyWriteRelayList( const replyToRelayList = await Promise.race([relayListPromise, timeoutPromise]) if (replyToRelayList) { - const replyToInboxes = (replyToRelayList.read || []).slice(0, 10) + const replyToInboxes = [ + ...(replyToRelayList.httpRead || []).slice(0, 10), + ...(replyToRelayList.read || []).slice(0, 10) + ] replyToInboxes.forEach(addRelay) } } catch (error) { @@ -485,7 +506,10 @@ export async function buildReplyWriteRelayList( const userRelayList = await Promise.race([relayListPromise, timeoutPromise]) if (userRelayList) { - const userOutboxes = (userRelayList.write || []).slice(0, 10) + const userOutboxes = [ + ...(userRelayList.httpWrite || []).slice(0, 10), + ...(userRelayList.write || []).slice(0, 10) + ] userOutboxes.forEach(addRelay) } diff --git a/src/lib/relay-list-sanitize.ts b/src/lib/relay-list-sanitize.ts index 7484ab2d..6ee77e50 100644 --- a/src/lib/relay-list-sanitize.ts +++ b/src/lib/relay-list-sanitize.ts @@ -1,4 +1,4 @@ -import { isLocalNetworkUrl, normalizeUrl } from '@/lib/url' +import { isHttpRelayUrl, isLocalNetworkUrl, normalizeAnyRelayUrl, normalizeUrl } from '@/lib/url' import type { TRelayList } from '@/types' /** @@ -8,12 +8,15 @@ import type { TRelayList } from '@/types' */ export function stripLocalNetworkRelaysFromRelayList(list: TRelayList): TRelayList { const keepUrl = (u: string): boolean => { - const n = normalizeUrl(u) || u - return Boolean(n && !isLocalNetworkUrl(n)) + const n = isHttpRelayUrl(u) ? 
normalizeAnyRelayUrl(u) || u : normalizeUrl(u) || u + return Boolean(n && !isLocalNetworkUrl(isHttpRelayUrl(u) ? u : n)) } return { write: list.write.filter(keepUrl), read: list.read.filter(keepUrl), - originalRelays: list.originalRelays.filter((r) => keepUrl(r.url)) + originalRelays: list.originalRelays.filter((r) => keepUrl(r.url)), + httpWrite: (list.httpWrite ?? []).filter(keepUrl), + httpRead: (list.httpRead ?? []).filter(keepUrl), + httpOriginalRelays: (list.httpOriginalRelays ?? []).filter((r) => keepUrl(r.url)) } } diff --git a/src/lib/relay-url-priority.ts b/src/lib/relay-url-priority.ts index f7cd6eae..967c2cb4 100644 --- a/src/lib/relay-url-priority.ts +++ b/src/lib/relay-url-priority.ts @@ -5,7 +5,7 @@ import { MAX_PUBLISH_RELAYS, MAX_REQ_RELAY_URLS } from '@/constants' -import { isLocalNetworkUrl, normalizeUrl } from '@/lib/url' +import { isLocalNetworkUrl, normalizeAnyRelayUrl, normalizeUrl } from '@/lib/url' export { MAX_REQ_RELAY_URLS } @@ -13,7 +13,7 @@ export function dedupeNormalizeRelayUrlsOrdered(urls: string[]): string[] { const seen = new Set() const out: string[] = [] for (const u of urls) { - const n = normalizeUrl(u) || u + const n = normalizeAnyRelayUrl(u) || u.trim() if (!n || seen.has(n)) continue seen.add(n) out.push(n) @@ -26,9 +26,9 @@ export function relayUrlsLocalsFirst(urls: string[]): string[] { const local: string[] = [] const remote: string[] = [] for (const u of urls) { - const n = normalizeUrl(u) || u + const n = normalizeAnyRelayUrl(u) || u.trim() if (!n) continue - if (isLocalNetworkUrl(n)) local.push(n) + if (isLocalNetworkUrl(u) || isLocalNetworkUrl(n)) local.push(n) else remote.push(n) } return dedupeNormalizeRelayUrlsOrdered([...local, ...remote]) diff --git a/src/lib/tombstone-events.ts b/src/lib/tombstone-events.ts index a6b027e3..6684506c 100644 --- a/src/lib/tombstone-events.ts +++ b/src/lib/tombstone-events.ts @@ -1,5 +1,5 @@ import { PROFILE_FETCH_RELAY_URLS } from '@/constants' -import { normalizeUrl } from 
'@/lib/url' +import { normalizeAnyRelayUrl, normalizeHttpRelayUrl, normalizeUrl } from '@/lib/url' import type { TRelayList } from '@/types' /** Dispatched after tombstones in IndexedDB change (kind-5 sync or local apply). */ @@ -12,16 +12,22 @@ export function dispatchTombstonesUpdated(): void { /** Relay set for querying the current user's kind-5 events (aligned with login sync). */ export function buildDeletionRelayUrls(relayList: TRelayList | null | undefined): string[] { - if (!relayList?.read?.length && !relayList?.write?.length) { + const httpR = relayList?.httpRead ?? [] + const httpW = relayList?.httpWrite ?? [] + if (!relayList?.read?.length && !relayList?.write?.length && !httpR.length && !httpW.length) { return Array.from( new Set(PROFILE_FETCH_RELAY_URLS.map((url) => normalizeUrl(url) || url).filter(Boolean)) ).slice(0, 20) } + const ws = relayList?.write ?? [] + const rs = relayList?.read ?? [] return Array.from( new Set([ - ...relayList.write.map((url: string) => normalizeUrl(url) || url), - ...relayList.read.slice(0, 8).map((url: string) => normalizeUrl(url) || url), - ...PROFILE_FETCH_RELAY_URLS.map((url: string) => normalizeUrl(url) || url) + ...ws.map((url: string) => normalizeUrl(url) || url), + ...rs.slice(0, 8).map((url: string) => normalizeUrl(url) || url), + ...httpW.map((url: string) => normalizeHttpRelayUrl(url) || url), + ...httpR.slice(0, 8).map((url: string) => normalizeHttpRelayUrl(url) || url), + ...PROFILE_FETCH_RELAY_URLS.map((url: string) => normalizeAnyRelayUrl(url) || url) ]) ).slice(0, 20) } diff --git a/src/lib/url.ts b/src/lib/url.ts index 5a4ab930..ab5452ae 100644 --- a/src/lib/url.ts +++ b/src/lib/url.ts @@ -16,6 +16,28 @@ export function isWebsocketUrl(url: string): boolean { return /^wss?:\/\/.+$/.test(url) } +/** Nostr relay over HTTPS (index relay JSON API), not WebSocket. 
*/ +export function isHttpRelayUrl(url: string): boolean { + const u = url.trim() + return /^https?:\/\/.+/i.test(u) +} + +/** + * Normalize https/http relay base URL without converting to WebSocket. + * Use for kind 10243 and index-relay HTTP API calls (not for NIP-01 WS pool). + */ +export function normalizeHttpRelayUrl(url: string): string { + return normalizeHttpUrl(url) +} + +/** + * Normalize relay URL for deduplication: WebSocket URLs via {@link normalizeUrl}, HTTPS index relays via {@link normalizeHttpRelayUrl}. + */ +export function normalizeAnyRelayUrl(url: string): string { + if (isHttpRelayUrl(url)) return normalizeHttpRelayUrl(url) || '' + return normalizeUrl(url) || '' +} + // copy from nostr-tools/utils export function normalizeUrl(url: string): string { try { diff --git a/src/pages/secondary/RelaySettingsPage/index.tsx b/src/pages/secondary/RelaySettingsPage/index.tsx index 28c9e3e0..45a936e7 100644 --- a/src/pages/secondary/RelaySettingsPage/index.tsx +++ b/src/pages/secondary/RelaySettingsPage/index.tsx @@ -1,3 +1,4 @@ +import HttpRelaysSetting from '@/components/HttpRelaysSetting' import MailboxSetting from '@/components/MailboxSetting' import FavoriteRelaysSetting from '@/components/FavoriteRelaysSetting' import SessionRelaysTab from '@/components/SessionRelaysTab' @@ -17,6 +18,9 @@ const RelaySettingsPage = forwardRef(({ index, hideTitlebar = false }: { index?: useEffect(() => { switch (window.location.hash) { + case '#http-relays': + setTabValue('http-relays') + break case '#mailbox': setTabValue('mailbox') break @@ -49,6 +53,7 @@ const RelaySettingsPage = forwardRef(({ index, hideTitlebar = false }: { index?: {t('Favorite Relays')} {t('Read & Write Relays')} + {t('HTTP relays')} {t('Session relays')} @@ -57,6 +62,9 @@ const RelaySettingsPage = forwardRef(({ index, hideTitlebar = false }: { index?: + + + diff --git a/src/providers/GroupListProvider.tsx b/src/providers/GroupListProvider.tsx index 407d9085..c58f7bca 100644 --- 
a/src/providers/GroupListProvider.tsx +++ b/src/providers/GroupListProvider.tsx @@ -33,11 +33,20 @@ export function GroupListProvider({ children }: { children: React.ReactNode }) { // Build comprehensive relay list for fetching group list const buildComprehensiveRelayList = useCallback(async () => { - const myRelayList = accountPubkey ? await client.fetchRelayList(accountPubkey) : { write: [], read: [] } + const myRelayList = accountPubkey + ? await client.fetchRelayList(accountPubkey) + : { + write: [], + read: [], + originalRelays: [], + httpRead: [], + httpWrite: [], + httpOriginalRelays: [] + } const favoritesTier = getFavoritesFeedRelayUrls(favoriteRelays ?? [], blockedRelays) return buildPrioritizedReadRelayUrls({ - userReadRelays: myRelayList.read ?? [], - userWriteRelays: myRelayList.write ?? [], + userReadRelays: [...(myRelayList.httpRead ?? []), ...(myRelayList.read ?? [])], + userWriteRelays: [...(myRelayList.httpWrite ?? []), ...(myRelayList.write ?? [])], favoriteRelays: favoritesTier, blockedRelays, applySocialKindBlockedFilter: false diff --git a/src/providers/NostrProvider/index.tsx b/src/providers/NostrProvider/index.tsx index 4c30039b..51201006 100644 --- a/src/providers/NostrProvider/index.tsx +++ b/src/providers/NostrProvider/index.tsx @@ -19,9 +19,9 @@ import { createRelayListDraftEvent } from '@/lib/draft-event' import { getLatestEvent, minePow } from '@/lib/event' -import { getProfileFromEvent, getRelayListFromEvent } from '@/lib/event-metadata' +import { getHttpRelayListFromEvent, getProfileFromEvent, getRelayListFromEvent } from '@/lib/event-metadata' import logger from '@/lib/logger' -import { normalizeUrl } from '@/lib/url' +import { normalizeHttpRelayUrl, normalizeUrl } from '@/lib/url' import { formatPubkey, pubkeyToNpub } from '@/lib/pubkey' import { showPublishingFeedback, showSimplePublishSuccess } from '@/lib/publishing-feedback' import client from '@/services/client.service' @@ -139,6 +139,7 @@ export function NostrProvider({ 
children }: { children: React.ReactNode }) { const [profileEvent, setProfileEvent] = useState(null) const [relayList, setRelayList] = useState(null) const [cacheRelayListEvent, setCacheRelayListEvent] = useState(null) + const [httpRelayListEvent, setHttpRelayListEvent] = useState(undefined) const [followListEvent, setFollowListEvent] = useState(null) const [muteListEvent, setMuteListEvent] = useState(null) const [bookmarkListEvent, setBookmarkListEvent] = useState(null) @@ -214,6 +215,8 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { setMuteListEvent(null) setBookmarkListEvent(null) setRssFeedListEvent(null) + setCacheRelayListEvent(null) + setHttpRelayListEvent(undefined) return undefined } @@ -232,6 +235,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { setMuteListEvent(null) setBookmarkListEvent(null) setRssFeedListEvent(null) + setHttpRelayListEvent(undefined) } hydrationGenForThisRun = accountHydrationGenerationRef.current += 1 @@ -268,7 +272,8 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { storedUserEmojiListEvent, storedRssFeedListEvent, storedInterestListEvent, - storedBlossomServerListEvent + storedBlossomServerListEvent, + storedHttpRelayListEvent ] = await Promise.all([ indexedDb.getReplaceableEvent(account.pubkey, kinds.RelayList), indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.CACHE_RELAYS), @@ -281,7 +286,8 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { indexedDb.getReplaceableEvent(account.pubkey, kinds.UserEmojiList), indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.RSS_FEED_LIST), indexedDb.getReplaceableEvent(account.pubkey, INTEREST_LIST_KIND), - indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.BLOSSOM_SERVER_LIST) + indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.BLOSSOM_SERVER_LIST), + indexedDb.getReplaceableEvent(account.pubkey, ExtendedKind.HTTP_RELAY_LIST) ]) // Extract 
blocked relays from event @@ -302,35 +308,49 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { // Set initial relay list from stored events (will be updated with merged list later) // Merge cache relays even at initial load so cache relays are available immediately - if (!userForcedAccountNetworkHydrate && (storedRelayListEvent || storedCacheRelayListEvent)) { - const baseRelayList = storedRelayListEvent + if ( + !userForcedAccountNetworkHydrate && + (storedRelayListEvent || storedCacheRelayListEvent || storedHttpRelayListEvent) + ) { + const emptyHttp = { + httpRead: [] as string[], + httpWrite: [] as string[], + httpOriginalRelays: [] as TMailboxRelay[] + } + let baseRelayList: TRelayList = storedRelayListEvent ? getRelayListFromEvent(storedRelayListEvent, blockedRelays) - : { write: [], read: [], originalRelays: [] } - - // Merge cache relays if available + : { write: [], read: [], originalRelays: [], ...emptyHttp } + const httpSlice = getHttpRelayListFromEvent(storedHttpRelayListEvent, blockedRelays) + baseRelayList = { + ...baseRelayList, + httpRead: httpSlice.httpRead, + httpWrite: httpSlice.httpWrite, + httpOriginalRelays: httpSlice.httpOriginalRelays + } + if (storedCacheRelayListEvent) { const cacheRelayList = getRelayListFromEvent(storedCacheRelayListEvent) - - // Merge read relays - cache relays first, then others (for offline priority) + const mergedRead = [...cacheRelayList.read, ...baseRelayList.read] const mergedWrite = [...cacheRelayList.write, ...baseRelayList.write] const mergedOriginalRelays = new Map() - - // Add cache relay original relays first (prioritized) - cacheRelayList.originalRelays.forEach(relay => { + + cacheRelayList.originalRelays.forEach((relay) => { mergedOriginalRelays.set(relay.url, relay) }) - // Then add regular relay original relays - baseRelayList.originalRelays.forEach(relay => { + baseRelayList.originalRelays.forEach((relay) => { if (!mergedOriginalRelays.has(relay.url)) { 
mergedOriginalRelays.set(relay.url, relay) } }) - + setRelayList({ write: Array.from(new Set(mergedWrite)), read: Array.from(new Set(mergedRead)), - originalRelays: Array.from(mergedOriginalRelays.values()) + originalRelays: Array.from(mergedOriginalRelays.values()), + httpRead: baseRelayList.httpRead, + httpWrite: baseRelayList.httpWrite, + httpOriginalRelays: baseRelayList.httpOriginalRelays }) } else { setRelayList(baseRelayList) @@ -369,6 +389,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { if (storedBlossomServerListEvent) { void client.updateBlossomServerListEventCache(storedBlossomServerListEvent) } + setHttpRelayListEvent(storedHttpRelayListEvent ?? null) } const lastNetworkHydrateAt = storage.getAccountNetworkHydrateAt(account.pubkey) @@ -434,7 +455,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { logger.debug('[NostrProvider] RSS feed list cache is fresh, using cached value') } - const [relayListEvents, cacheRelayListEvents] = await Promise.all([ + const [relayListEvents, cacheRelayListEvents, httpRelayListEvents] = await Promise.all([ queryService.fetchEvents(FAST_READ_RELAY_URLS, { kinds: [kinds.RelayList], authors: [account.pubkey] @@ -442,11 +463,16 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { queryService.fetchEvents(FAST_READ_RELAY_URLS, { kinds: [ExtendedKind.CACHE_RELAYS], authors: [account.pubkey] + }), + queryService.fetchEvents(FAST_READ_RELAY_URLS, { + kinds: [ExtendedKind.HTTP_RELAY_LIST], + authors: [account.pubkey], + limit: 1 }) ]) const relayListEvent = getLatestEvent(relayListEvents) ?? storedRelayListEvent const cacheRelayListEvent = getLatestEvent(cacheRelayListEvents) ?? storedCacheRelayListEvent - const relayList = getRelayListFromEvent(relayListEvent, blockedRelays) + const httpRelayListEventFetched = getLatestEvent(httpRelayListEvents) ?? storedHttpRelayListEvent ?? 
null if (relayListEvent) { client.updateRelayListCache(relayListEvent) await indexedDb.putReplaceableEvent(relayListEvent) @@ -457,12 +483,21 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { } else { setCacheRelayListEvent(null) } - // Fetch updated relay list (which merges both 10002 and 10432) + if (httpRelayListEventFetched) { + await indexedDb.putReplaceableEvent(httpRelayListEventFetched) + setHttpRelayListEvent(httpRelayListEventFetched) + } else { + setHttpRelayListEvent(null) + } + // Fetch updated relay list (merges 10002, 10432, 10243) const mergedRelayList = await client.fetchRelayList(account.pubkey) // Keep using client for relay list merging setRelayList(mergedRelayList) const normalizedRelays = [ - ...relayList.write.map((url: string) => normalizeUrl(url) || url), + ...mergedRelayList.write.map((url: string) => normalizeUrl(url) || url), + ...mergedRelayList.read.map((url: string) => normalizeUrl(url) || url), + ...mergedRelayList.httpRead.map((url: string) => normalizeHttpRelayUrl(url) || url), + ...mergedRelayList.httpWrite.map((url: string) => normalizeHttpRelayUrl(url) || url), ...FAST_WRITE_RELAY_URLS.map((url: string) => normalizeUrl(url) || url), ...PROFILE_FETCH_RELAY_URLS.map((url: string) => normalizeUrl(url) || url) ] @@ -1289,6 +1324,16 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { // This ensures kind 10002 and 10432 remain separate and are only merged when publishing/using } + const updateHttpRelayListEvent = async (httpRelayEvent: Event) => { + await indexedDb.putReplaceableEvent(httpRelayEvent) + if (account?.pubkey) { + client.clearRelayListCache(account.pubkey) + } + setHttpRelayListEvent(httpRelayEvent) + const mergedRelayList = await client.fetchRelayList(account?.pubkey || '') + setRelayList(mergedRelayList) + } + const updateProfileEvent = async (profileEvent: Event) => { const newProfileEvent = await indexedDb.putReplaceableEvent(profileEvent) 
setProfileEvent(newProfileEvent) @@ -1364,6 +1409,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { profileEvent, relayList, cacheRelayListEvent, + httpRelayListEvent, followListEvent, muteListEvent, bookmarkListEvent, @@ -1394,6 +1440,7 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { signEvent, updateRelayListEvent, updateCacheRelayListEvent, + updateHttpRelayListEvent, updateProfileEvent, updateFollowListEvent, updateMuteListEvent, diff --git a/src/providers/nostr-context.tsx b/src/providers/nostr-context.tsx index bd938608..a3fa7b16 100644 --- a/src/providers/nostr-context.tsx +++ b/src/providers/nostr-context.tsx @@ -21,6 +21,8 @@ export type TNostrContext = { profileEvent: Event | null relayList: TRelayList | null cacheRelayListEvent: Event | null + /** Kind 10243 (HTTPS index relays); null if none, undefined while not loaded. */ + httpRelayListEvent: Event | null | undefined followListEvent: Event | null muteListEvent: Event | null bookmarkListEvent: Event | null @@ -51,6 +53,7 @@ export type TNostrContext = { checkLogin: (cb?: () => T) => Promise updateRelayListEvent: (relayListEvent: Event) => Promise updateCacheRelayListEvent: (cacheRelayListEvent: Event) => Promise + updateHttpRelayListEvent: (httpRelayListEvent: Event) => Promise updateProfileEvent: (profileEvent: Event) => Promise updateFollowListEvent: (followListEvent: Event) => Promise updateMuteListEvent: (muteListEvent: Event, privateTags: string[][]) => Promise diff --git a/src/services/client-query.service.ts b/src/services/client-query.service.ts index 3a7c8dec..3d2b332b 100644 --- a/src/services/client-query.service.ts +++ b/src/services/client-query.service.ts @@ -8,8 +8,9 @@ import { SEARCHABLE_RELAY_URLS } from '@/constants' import { shouldDropEventOnIngest } from '@/lib/event-ingest-filter' +import { queryIndexRelay } from '@/lib/index-relay-http' import logger from '@/lib/logger' -import { normalizeUrl } from '@/lib/url' 
+import { isHttpRelayUrl, normalizeHttpRelayUrl, normalizeUrl } from '@/lib/url' import { RelaySubscribeOpBatch } from '@/services/relay-operation-log.service' import { patchRelayNoticeForFetchFailures } from '@/services/relay-notice-strike' import type { Filter, Event as NEvent } from 'nostr-tools' @@ -216,8 +217,19 @@ export class QueryService { ? FIRST_RELAY_RESULT_GRACE_MS : null + const httpRelayBases = Array.from( + new Set( + urls + .filter((u) => isHttpRelayUrl(u)) + .map((u) => normalizeHttpRelayUrl(u) || u) + .filter(Boolean) + ) + ) + const wsQueryUrls = urls.filter((u) => !isHttpRelayUrl(u)) + return await new Promise((resolve) => { const events: NEvent[] = [] + const abortHttp = new AbortController() let resolveTimeout: ReturnType | null = null let firstResultGraceTimeoutId: ReturnType | null = null let feedFirstResultGraceTimeoutId: ReturnType | null = null @@ -227,6 +239,33 @@ export class QueryService { let resolved = false let firstResultTime: number | null = null let globalTimeoutId: ReturnType | null = null + let queryFinalizing = false + + const httpInflight = + httpRelayBases.length === 0 + ? 
Promise.resolve() + : Promise.allSettled( + httpRelayBases.map(async (base) => { + try { + const evts = await queryIndexRelay(base, filter, { signal: abortHttp.signal }) + for (const evt of evts) { + if (resolved) return + eventCount++ + onevent?.(evt) + events.push(evt) + if (!shouldDropEventOnIngest(evt)) { + this.onQueryResultIngest?.([evt]) + } + if (firstResultTime === null) { + firstResultTime = Date.now() + } + } + } catch (e) { + if ((e as Error).name === 'AbortError') return + logger.warn('[QueryService] HTTP index relay query failed', { base, error: e }) + } + }) + ).then(() => {}) const resolveReplaceableRaceEvents = (): NEvent[] => { if (events.length === 0) return events @@ -258,24 +297,27 @@ export class QueryService { } const resolveWithEvents = () => { - if (resolved) return - resolved = true - if (resolveTimeout) clearTimeout(resolveTimeout) - if (firstResultGraceTimeoutId) clearTimeout(firstResultGraceTimeoutId) - if (feedFirstResultGraceTimeoutId) clearTimeout(feedFirstResultGraceTimeoutId) - if (replaceableRaceTimeoutId) clearTimeout(replaceableRaceTimeoutId) - if (globalTimeoutId) clearTimeout(globalTimeoutId) - - sub.close() - - const resolvedList = - replaceableRace && events.length > 0 ? resolveReplaceableRaceEvents() : events - // Session cache already updated per-event in onevent; avoid duplicate ingest + waiter churn. - resolve(resolvedList) + if (resolved || queryFinalizing) return + queryFinalizing = true + void httpInflight.finally(() => { + if (resolved) return + resolved = true + if (resolveTimeout) clearTimeout(resolveTimeout) + if (firstResultGraceTimeoutId) clearTimeout(firstResultGraceTimeoutId) + if (feedFirstResultGraceTimeoutId) clearTimeout(feedFirstResultGraceTimeoutId) + if (replaceableRaceTimeoutId) clearTimeout(replaceableRaceTimeoutId) + if (globalTimeoutId) clearTimeout(globalTimeoutId) + + sub.close() + + const resolvedList = + replaceableRace && events.length > 0 ? 
resolveReplaceableRaceEvents() : events + resolve(resolvedList) + }) } - const sub = this.subscribe( - urls, + const wsSub = this.subscribe( + wsQueryUrls, filter, { onevent: (evt) => { @@ -369,7 +411,14 @@ export class QueryService { }, { source: options?.relayOpSource ?? 'QueryService.query', logLevel: 'debug' } ) - + + const sub = { + close: () => { + abortHttp.abort() + wsSub.close() + } + } + globalTimeoutId = setTimeout(() => resolveWithEvents(), globalTimeout) }) } @@ -400,6 +449,8 @@ export class QueryService { }) } + relays = relays.filter((url) => !isHttpRelayUrl(url)) + if (relays.length === 0) { queueMicrotask(() => callbacks.oneose?.(true)) return { close: () => {} } diff --git a/src/services/client.service.ts b/src/services/client.service.ts index 2ece3dc6..7857abc1 100644 --- a/src/services/client.service.ts +++ b/src/services/client.service.ts @@ -27,7 +27,7 @@ function canonicalSeenOnEventId(eventId: string): string { return /^[0-9a-f]{64}$/i.test(t) ? t.toLowerCase() : t } import { shouldDropEventOnIngest } from '@/lib/event-ingest-filter' -import { getProfileFromEvent, getRelayListFromEvent } from '@/lib/event-metadata' +import { getHttpRelayListFromEvent, getProfileFromEvent, getRelayListFromEvent } from '@/lib/event-metadata' import logger from '@/lib/logger' import { buildDeletionRelayUrls, dispatchTombstonesUpdated } from '@/lib/tombstone-events' import { hexPubkeysEqual, isValidPubkey, pubkeyToNpub, userIdToPubkey } from '@/lib/pubkey' @@ -38,8 +38,9 @@ import { mergeRelayPriorityLayers, relayUrlsLocalsFirst } from '@/lib/relay-url-priority' +import { publishEventToIndexRelay } from '@/lib/index-relay-http' import { stripLocalNetworkRelaysFromRelayList } from '@/lib/relay-list-sanitize' -import { isLocalNetworkUrl, normalizeUrl, simplifyUrl } from '@/lib/url' +import { isHttpRelayUrl, isLocalNetworkUrl, normalizeAnyRelayUrl, normalizeHttpRelayUrl, normalizeUrl, simplifyUrl } from '@/lib/url' import { isSafari } from '@/lib/utils' import { 
ISigner, @@ -332,11 +333,11 @@ class ClientService extends EventTarget { * Normalize, dedupe, then cap at {@link MAX_PUBLISH_RELAYS}. */ private filterPublishingRelays(relays: string[], event: NEvent): string[] { - const readOnlySet = new Set(READ_ONLY_RELAY_URLS.map((u) => normalizeUrl(u) || u)) + const readOnlySet = new Set(READ_ONLY_RELAY_URLS.map((u) => normalizeAnyRelayUrl(u) || u)) const socialKindBlockedSet = new Set(SOCIAL_KIND_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u)) return dedupeNormalizeRelayUrlsOrdered( relays.filter((url) => { - const n = normalizeUrl(url) || url + const n = normalizeAnyRelayUrl(url) || url if (readOnlySet.has(n)) return false if (isSocialKindBlockedKind(event.kind) && socialKindBlockedSet.has(n)) return false return true @@ -348,9 +349,13 @@ class ClientService extends EventTarget { private async getUserOutboxRelayUrlsForPublish(event: NEvent): Promise { try { const relayList = await this.fetchRelayList(event.pubkey) - const raw = dedupeNormalizeRelayUrlsOrdered( - (relayList?.write ?? []).map((u) => normalizeUrl(u) || u).filter((u): u is string => !!u) - ) + const wsOut = (relayList?.write ?? []) + .map((u) => normalizeUrl(u) || u) + .filter((u): u is string => !!u) + const httpOut = (relayList?.httpWrite ?? []) + .map((u) => normalizeHttpRelayUrl(u) || u) + .filter((u): u is string => !!u) + const raw = dedupeNormalizeRelayUrlsOrdered([...httpOut, ...wsOut]) return this.filterPublishingRelays(raw, event) } catch { return [] @@ -362,7 +367,7 @@ class ClientService extends EventTarget { userOutboxUrls: string[], relayStatuses: { url: string; success: boolean; error?: string }[] ): Promise { - const norm = (u: string) => normalizeUrl(u) || u + const norm = (u: string) => normalizeAnyRelayUrl(u) || u const hadSuccess = new Set() for (const r of relayStatuses) { if (r.success) hadSuccess.add(norm(r.url)) @@ -626,7 +631,14 @@ class ClientService extends EventTarget { pubkey: event.pubkey, error: err instanceof Error ? 
err.message : String(err) }) - spellRelayList = { write: [], read: [], originalRelays: [] } + spellRelayList = { + write: [], + read: [], + originalRelays: [], + httpRead: [], + httpWrite: [], + httpOriginalRelays: [] + } } const normalizedWrite = dedupeNormalizeRelayUrlsOrdered( (spellRelayList?.write ?? []) @@ -717,7 +729,14 @@ class ClientService extends EventTarget { pubkey: event.pubkey, error: err instanceof Error ? err.message : String(err) }) - relayList = { write: [], read: [], originalRelays: [] } + relayList = { + write: [], + read: [], + originalRelays: [], + httpRead: [], + httpWrite: [], + httpOriginalRelays: [] + } } if ( event.kind === kinds.RelayList || @@ -983,10 +1002,10 @@ class ClientService extends EventTarget { : relayUrls } - const readOnlySet = new Set(READ_ONLY_RELAY_URLS.map((u) => normalizeUrl(u) || u)) + const readOnlySet = new Set(READ_ONLY_RELAY_URLS.map((u) => normalizeAnyRelayUrl(u) || u)) const socialKindBlockedSet = new Set(SOCIAL_KIND_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u)) let filtered = mergedRelayUrls.filter((url) => { - const n = normalizeUrl(url) || url + const n = normalizeAnyRelayUrl(url) || url if (readOnlySet.has(n)) return false if (isSocialKindBlockedKind(event.kind) && socialKindBlockedSet.has(n)) return false return true @@ -1036,6 +1055,7 @@ class ClientService extends EventTarget { } if ( event.kind === kinds.RelayList || + event.kind === ExtendedKind.HTTP_RELAY_LIST || event.kind === ExtendedKind.FAVORITE_RELAYS || event.kind === kinds.Relaysets ) { @@ -1147,10 +1167,25 @@ class ClientService extends EventTarget { }, connectionTimeout + publishTimeout + 2_000) // Add 2s buffer try { + if (isHttpRelayUrl(url)) { + const base = normalizeHttpRelayUrl(url) || url + logger.debug(`[PublishEvent] Publishing to HTTP index relay`, { url: base }) + await Promise.race([ + publishEventToIndexRelay(base, event), + new Promise((_, reject) => + setTimeout(() => reject(new Error(`HTTP publish timeout after 
${publishTimeout}ms`)), publishTimeout) + ) + ]) + that.recordPublishSuccess(url, Date.now() - startMs) + successCount++ + relayStatuses.push({ url, success: true }) + return + } + // For local relays, add a connection timeout let relay: Relay logger.debug(`[PublishEvent] Ensuring relay connection`, { url, isLocal, connectionTimeout }) - + const connectionPromise = isLocal ? Promise.race([ this.pool.ensureRelay(url), @@ -1164,7 +1199,7 @@ class ClientService extends EventTarget { setTimeout(() => reject(new Error('Remote relay connection timeout')), connectionTimeout) ) ]) - + relay = await connectionPromise logger.debug(`[PublishEvent] Relay connected`, { url }) const relayKeyPub = normalizeUrl(url) || url @@ -2772,10 +2807,17 @@ class ClientService extends EventTarget { const storedCacheRelayEvents = await Promise.all( pubkeys.map(pubkey => indexedDb.getReplaceableEvent(pubkey, ExtendedKind.CACHE_RELAYS)) ) - + const storedHttpRelayEvents = await Promise.all( + pubkeys.map(pubkey => indexedDb.getReplaceableEvent(pubkey, ExtendedKind.HTTP_RELAY_LIST)) + ) + // Then fetch from relays (will update cache if newer) const relayEvents = await this.replaceableEventService.fetchReplaceableEventsFromProfileFetchRelays(pubkeys, kinds.RelayList) - + const httpRelayEvents = await this.replaceableEventService.fetchReplaceableEventsFromProfileFetchRelays( + pubkeys, + ExtendedKind.HTTP_RELAY_LIST + ) + // Fetch cache relays from multiple sources: FAST_READ_RELAY_URLS, PROFILE_RELAY_URLS, and user's inboxes/outboxes const cacheRelayEvents = await this.fetchCacheRelayEventsFromMultipleSources(pubkeys, relayEvents, storedRelayEvents) @@ -2786,26 +2828,44 @@ class ClientService extends EventTarget { // Use stored cache relay event if available (for offline), otherwise use fetched one const storedCacheEvent = storedCacheRelayEvents[index] const cacheEvent = cacheRelayEvents[index] || storedCacheEvent - + + const httpRelayEvent = httpRelayEvents[index] || storedHttpRelayEvents[index] 
+ // Use stored relay event if no network event (for offline), otherwise use fetched one const storedRelayEvent = storedRelayEvents[index] const relayEvent = relayEvents[index] || storedRelayEvent - + + const emptyHttp = { httpRead: [] as string[], httpWrite: [] as string[], httpOriginalRelays: [] as TMailboxRelay[] } + + const mergeKind10243 = (list: TRelayList): TRelayList => { + if (!httpRelayEvent) { + return { + ...list, + httpRead: list.httpRead ?? [], + httpWrite: list.httpWrite ?? [], + httpOriginalRelays: list.httpOriginalRelays ?? [] + } + } + const h = getHttpRelayListFromEvent(httpRelayEvent) + return { ...list, httpRead: h.httpRead, httpWrite: h.httpWrite, httpOriginalRelays: h.httpOriginalRelays } + } + const relayList = relayEvent ? getRelayListFromEvent(relayEvent) : { write: [], read: [], - originalRelays: [] + originalRelays: [], + ...emptyHttp } - + // Merge kind 10432 (cache relays) only for the logged-in user — never use someone else's local relays. if (isOwnRelayList && cacheEvent) { const cacheRelayList = getRelayListFromEvent(cacheEvent) - + // Merge read relays - cache relays first, then others (for offline priority) const mergedRead = [...cacheRelayList.read, ...relayList.read] const mergedWrite = [...cacheRelayList.write, ...relayList.write] const mergedOriginalRelays = new Map() - + // Add cache relay original relays first (prioritized) cacheRelayList.originalRelays.forEach(relay => { mergedOriginalRelays.set(relay.url, relay) @@ -2816,37 +2876,40 @@ class ClientService extends EventTarget { mergedOriginalRelays.set(relay.url, relay) } }) - + // Deduplicate while preserving order (cache relays first) - return { + return mergeKind10243({ write: Array.from(new Set(mergedWrite)), read: Array.from(new Set(mergedRead)), - originalRelays: Array.from(mergedOriginalRelays.values()) - } + originalRelays: Array.from(mergedOriginalRelays.values()), + ...emptyHttp + }) } - + // If no merged cache path, return original relay list or default (with own 
cache as fallback only) if (!relayEvent) { if (isOwnRelayList && storedCacheEvent) { const cacheRelayList = getRelayListFromEvent(storedCacheEvent) - return { + return mergeKind10243({ write: cacheRelayList.write.length > 0 ? cacheRelayList.write : PROFILE_FETCH_RELAY_URLS, read: cacheRelayList.read.length > 0 ? cacheRelayList.read : PROFILE_FETCH_RELAY_URLS, - originalRelays: cacheRelayList.originalRelays - } + originalRelays: cacheRelayList.originalRelays, + ...emptyHttp + }) } - return { + return mergeKind10243({ write: PROFILE_FETCH_RELAY_URLS, read: PROFILE_FETCH_RELAY_URLS, - originalRelays: [] - } + originalRelays: [], + ...emptyHttp + }) } - + if (!isOwnRelayList) { - return stripLocalNetworkRelaysFromRelayList(relayList) + return mergeKind10243(stripLocalNetworkRelaysFromRelayList(relayList)) } - return relayList + return mergeKind10243(relayList) }) } diff --git a/src/services/indexed-db.service.ts b/src/services/indexed-db.service.ts index f39c1b8a..2dbd9300 100644 --- a/src/services/indexed-db.service.ts +++ b/src/services/indexed-db.service.ts @@ -30,6 +30,8 @@ export const StoreNames = { FAVORITE_RELAYS: 'favoriteRelays', BLOCKED_RELAYS_EVENTS: 'blockedRelaysEvents', CACHE_RELAYS_EVENTS: 'cacheRelaysEvents', + /** Kind 10243 HTTPS index relay list (replaceable by pubkey). */ + HTTP_RELAY_LIST_EVENTS: 'httpRelayListEvents', RSS_FEED_LIST_EVENTS: 'rssFeedListEvents', RSS_FEED_ITEMS: 'rssFeedItems', RELAY_SETS: 'relaySets', @@ -56,7 +58,7 @@ export const StoreNames = { } /** Schema version we expect. When adding stores or migrations, bump this. */ -const DB_VERSION = 30 +const DB_VERSION = 31 /** Max age for profile and payment info cache before we refetch (5 min). 
*/ const PROFILE_AND_PAYMENT_CACHE_MAX_AGE_MS = 5 * 60 * 1000 @@ -855,6 +857,8 @@ class IndexedDbService { return StoreNames.BLOCKED_RELAYS_EVENTS case ExtendedKind.CACHE_RELAYS: return StoreNames.CACHE_RELAYS_EVENTS + case ExtendedKind.HTTP_RELAY_LIST: + return StoreNames.HTTP_RELAY_LIST_EVENTS case ExtendedKind.RSS_FEED_LIST: return StoreNames.RSS_FEED_LIST_EVENTS case kinds.UserEmojiList: @@ -1484,6 +1488,7 @@ class IndexedDbService { if (storeName === StoreNames.FAVORITE_RELAYS) return ExtendedKind.FAVORITE_RELAYS if (storeName === StoreNames.BLOCKED_RELAYS_EVENTS) return ExtendedKind.BLOCKED_RELAYS if (storeName === StoreNames.CACHE_RELAYS_EVENTS) return ExtendedKind.CACHE_RELAYS + if (storeName === StoreNames.HTTP_RELAY_LIST_EVENTS) return ExtendedKind.HTTP_RELAY_LIST if (storeName === StoreNames.RSS_FEED_LIST_EVENTS) return ExtendedKind.RSS_FEED_LIST if (storeName === StoreNames.USER_EMOJI_LIST_EVENTS) return kinds.UserEmojiList if (storeName === StoreNames.EMOJI_SET_EVENTS) return kinds.Emojisets @@ -1506,6 +1511,7 @@ class IndexedDbService { kind === ExtendedKind.FAVORITE_RELAYS || kind === ExtendedKind.BLOCKED_RELAYS || kind === ExtendedKind.CACHE_RELAYS || + kind === ExtendedKind.HTTP_RELAY_LIST || kind === ExtendedKind.BLOSSOM_SERVER_LIST || kind === ExtendedKind.RSS_FEED_LIST ) diff --git a/src/services/relay-selection.service.ts b/src/services/relay-selection.service.ts index 9c9f1725..e7eca92a 100644 --- a/src/services/relay-selection.service.ts +++ b/src/services/relay-selection.service.ts @@ -3,17 +3,19 @@ import { ExtendedKind, FAST_WRITE_RELAY_URLS, RANDOM_PUBLISH_RELAY_COUNT } from import { NOSTR_URI_FOR_REPLY_PUBKEYS_REGEX } from '@/lib/content-patterns' import client from '@/services/client.service' import { eventService } from '@/services/client.service' -import { normalizeUrl, isLocalNetworkUrl } from '@/lib/url' +import { normalizeAnyRelayUrl, normalizeUrl, isLocalNetworkUrl } from '@/lib/url' import { TRelaySet, TRelayList } from '@/types' 
import logger from '@/lib/logger' import indexedDb from '@/services/indexed-db.service' -import { getRelayListFromEvent } from '@/lib/event-metadata' +import { getHttpRelayListFromEvent, getRelayListFromEvent } from '@/lib/event-metadata' import nip66Service from '@/services/nip66.service' import storage from '@/services/local-storage.service' export interface RelaySelectionContext { // User's own relays userWriteRelays: string[] + /** Kind 10243 write/both targets (HTTPS index relays); labeled "HTTP" in the picker. */ + userHttpWriteRelays?: string[] userReadRelays: string[] favoriteRelays: string[] blockedRelays: string[] @@ -34,6 +36,7 @@ export interface RelaySelectionContext { export type RelaySourceType = | 'local' | 'relay_list' + | 'http_relay_list' | 'client_default' | 'open_from' | 'favorite' @@ -108,7 +111,7 @@ class RelaySelectionService { const addRelay = (url: string, type: RelaySourceType) => { if (!url) return - const normalized = normalizeUrl(url) + const normalized = normalizeAnyRelayUrl(url) if (normalized && !seen.has(normalized)) { seen.add(normalized) order.push({ url: normalized, type }) @@ -117,6 +120,9 @@ class RelaySelectionService { } } + const userHttpWrites = context.userHttpWriteRelays ?? [] + userHttpWrites.forEach((url) => addRelay(url, 'http_relay_list')) + // User's write relays (or fallback = client default) const userRelays = userWriteRelays.length > 0 ? userWriteRelays : FAST_WRITE_RELAY_URLS const userType: RelaySourceType = userWriteRelays.length > 0 ? 
'relay_list' : 'client_default' @@ -191,28 +197,37 @@ */ private async getCachedRelayList(pubkey: string): Promise<TRelayList> { try { - // Get both kind 10002 (relay list) and kind 10432 (cache relays) from IndexedDB - const [relayListEvent, cacheRelayListEvent] = await Promise.all([ + // Get kind 10002, 10432, and 10243 from IndexedDB + const [relayListEvent, cacheRelayListEvent, httpRelayListEvent] = await Promise.all([ indexedDb.getReplaceableEvent(pubkey, kinds.RelayList), - indexedDb.getReplaceableEvent(pubkey, ExtendedKind.CACHE_RELAYS) + indexedDb.getReplaceableEvent(pubkey, ExtendedKind.CACHE_RELAYS), + indexedDb.getReplaceableEvent(pubkey, ExtendedKind.HTTP_RELAY_LIST) ]) + const mergeKind10243 = (list: TRelayList): TRelayList => { + const h = getHttpRelayListFromEvent(httpRelayListEvent ?? undefined) + return { ...list, httpRead: h.httpRead, httpWrite: h.httpWrite, httpOriginalRelays: h.httpOriginalRelays } + } + let relayList: TRelayList - + // If no cached relay list event, fetch from relays (which will also cache it) if (!relayListEvent) { try { relayList = await client.fetchRelayList(pubkey) // Keep using client for relay list merging } catch (error) { logger.warn('Failed to fetch relay list from relays', { error, pubkey }) - relayList = { + relayList = mergeKind10243({ write: [], read: [], - originalRelays: [] - } + originalRelays: [], + httpRead: [], + httpWrite: [], + httpOriginalRelays: [] + }) } } else { - relayList = getRelayListFromEvent(relayListEvent) + relayList = mergeKind10243(getRelayListFromEvent(relayListEvent)) } // Merge cache relays (kind 10432) into the relay list @@ -245,7 +260,10 @@ class RelaySelectionService { return { write: Array.from(new Set(mergedWrite)), read: Array.from(new Set(mergedRead)), - originalRelays: Array.from(mergedOriginalRelays.values()) + originalRelays: Array.from(mergedOriginalRelays.values()), + httpRead: relayList.httpRead, + httpWrite: relayList.httpWrite, + httpOriginalRelays: 
relayList.httpOriginalRelays } } @@ -810,10 +828,8 @@ class RelaySelectionService { return relays } - // Helper function to safely normalize URLs const safeNormalize = (url: string): string => { - const normalized = normalizeUrl(url) - return normalized || url + return normalizeAnyRelayUrl(url) || url } const normalizedBlocked = blockedRelays.map(safeNormalize) diff --git a/src/types/index.d.ts b/src/types/index.d.ts index 7561e301..6572a31e 100644 --- a/src/types/index.d.ts +++ b/src/types/index.d.ts @@ -55,6 +55,10 @@ export type TRelayList = { write: string[] read: string[] originalRelays: TMailboxRelay[] + /** Kind 10243 — index relays (https://…); read/write/both same as NIP-65 `r` tags. */ + httpRead: string[] + httpWrite: string[] + httpOriginalRelays: TMailboxRelay[] } export type TRelayInfo = {