Browse Source

fix relay service

imwald
Silberengel 1 month ago
parent
commit
a37736757d
  1. 3
      src/components/Explore/ExploreRelayReviews.tsx
  2. 12
      src/constants.ts
  3. 7
      src/hooks/useFetchProfile.tsx
  4. 23
      src/hooks/useProfileTimeline.tsx
  5. 131
      src/lib/favorites-feed-relays.ts
  6. 178
      src/lib/relay-url-priority.ts
  7. 3
      src/pages/primary/NoteListPage/FollowingFeed.tsx
  8. 8
      src/pages/primary/SpellsPage/CreateSpellDialog.tsx
  9. 21
      src/pages/primary/SpellsPage/index.tsx
  10. 32
      src/pages/secondary/NoteListPage/index.tsx
  11. 38
      src/providers/BookmarksProvider.tsx
  12. 28
      src/providers/GroupListProvider.tsx
  13. 38
      src/providers/InterestListProvider.tsx
  14. 53
      src/providers/NostrProvider/index.tsx
  15. 210
      src/services/client-query.service.ts
  16. 32
      src/services/client-replaceable-events.service.ts
  17. 561
      src/services/client.service.ts
  18. 8
      src/services/spell.service.ts
  19. 4
      src/types/index.d.ts

3
src/components/Explore/ExploreRelayReviews.tsx

@@ -19,7 +19,8 @@ export default function ExploreRelayReviews() {
getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
relayList?.read ?? [],
{ userWriteRelays: relayList?.write ?? [] }
),
[favoriteRelays, blockedRelays, relayList]
)

12
src/constants.ts

@@ -14,6 +14,18 @@ export const DEFAULT_FAVORITE_RELAYS = [
'wss://nostr.land'
]
/**
* Max concurrent relay connection + REQ setups (ensureRelay + subscribe) app-wide.
* Limits parallel WebSocket handshakes when many relays or timeline shards open at once.
*/
export const MAX_CONCURRENT_RELAY_CONNECTIONS = 10
/** Max relays to publish each event to (outboxes first, then targets' inboxes, then extras). */
export const MAX_PUBLISH_RELAYS = MAX_CONCURRENT_RELAY_CONNECTIONS
/** Max merged URLs per REQ / timeline relay list (see `relay-url-priority`). */
export const MAX_REQ_RELAY_URLS = MAX_CONCURRENT_RELAY_CONNECTIONS
/** Multi-relay queries and timeline initial REQ: after the first event, wait this long then close (query) or finalize EOSE (live feed) while keeping the subscription open for new events. */
export const FIRST_RELAY_RESULT_GRACE_MS = 2000

7
src/hooks/useFetchProfile.tsx

@ -175,9 +175,8 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -175,9 +175,8 @@ export function useFetchProfile(id?: string, skipCache = false) {
])
const fetchTime = Date.now() - startTime
// Only log at info level if profile was found or if fetch took a long time
if (profileEvent || fetchTime > 1000) {
logger.info('[useFetchProfile] fetchProfileEvent completed', {
logger.debug('[useFetchProfile] fetchProfileEvent completed', {
pubkey: pubkey.substring(0, 8),
hasEvent: !!profileEvent,
eventId: profileEvent?.id?.substring(0, 8),
@ -441,7 +440,7 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -441,7 +440,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
}
if (profile && profile.pubkey === extractedPubkey) {
logger.info('[useFetchProfile] Already have profile for this pubkey (safety check)', {
logger.debug('[useFetchProfile] Already have profile for this pubkey (safety check)', {
extractedPubkey
})
setIsFetching(false)
@ -481,7 +480,7 @@ export function useFetchProfile(id?: string, skipCache = false) { @@ -481,7 +480,7 @@ export function useFetchProfile(id?: string, skipCache = false) {
}
if (cancelled.current) {
logger.info('[useFetchProfile] Cancelled after checkProfile, cleaning up')
logger.debug('[useFetchProfile] Cancelled after checkProfile, cleaning up')
setIsFetching(false)
return
}

23
src/hooks/useProfileTimeline.tsx

@ -105,8 +105,21 @@ export function useProfileTimeline({ @@ -105,8 +105,21 @@ export function useProfileTimeline({
const [events, setEvents] = useState<Event[]>(cachedEntry?.events ?? [])
const [isLoading, setIsLoading] = useState(!cachedEntry)
const [refreshToken, setRefreshToken] = useState(0)
const [authorOutboxWrite, setAuthorOutboxWrite] = useState<string[]>([])
const subscriptionRef = useRef<() => void>(() => {})
useEffect(() => {
let cancelled = false
setAuthorOutboxWrite([])
void client.fetchRelayList(pubkey).then((rl) => {
if (cancelled || !rl?.write?.length) return
setAuthorOutboxWrite(rl.write)
})
return () => {
cancelled = true
}
}, [pubkey])
useEffect(() => {
setEvents((prev) => {
const next = prev.filter((e) => !isEventDeletedRef.current(e))
@ -168,7 +181,12 @@ export function useProfileTimeline({ @@ -168,7 +181,12 @@ export function useProfileTimeline({
const feedRelayUrls = getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
relayList?.read ?? [],
{
userWriteRelays: relayList?.write ?? [],
authorWriteRelays: authorOutboxWrite,
applyKind1BlockedFilter: kinds.includes(1)
}
)
const startWave = async (subRequests: ReturnType<typeof buildSubRequests>) => {
@ -222,7 +240,8 @@ export function useProfileTimeline({ @@ -222,7 +240,8 @@ export function useProfileTimeline({
refreshToken,
favoriteRelays,
blockedRelays,
relayList
relayList,
authorOutboxWrite
])
const refresh = useCallback(() => {

131
src/lib/favorites-feed-relays.ts

@ -1,13 +1,32 @@ @@ -1,13 +1,32 @@
import { DEFAULT_FAVORITE_RELAYS, FAST_READ_RELAY_URLS } from '@/constants'
import { DEFAULT_FAVORITE_RELAYS } from '@/constants'
import type { TFeedSubRequest } from '@/types'
import { normalizeUrl } from '@/lib/url'
import type { Filter } from 'nostr-tools'
import {
buildPrioritizedReadRelayUrls,
buildReadRelayPriorityLayers,
dedupeNormalizeRelayUrlsOrdered,
MAX_REQ_RELAY_URLS,
mergeRelayPriorityLayers,
relayUrlsLocalsFirst
} from '@/lib/relay-url-priority'
/** True when the filter is unrestricted by kind or explicitly includes kind 1 (short notes). */
export function relayFilterLikelyIncludesKind1(filter: Filter): boolean {
  const kinds = filter.kinds
  // No kinds constraint at all means kind-1 events may come back.
  if (kinds === undefined) return true
  // Defensive: tolerate a bare number even though Filter.kinds is typed as an array.
  return (Array.isArray(kinds) ? kinds : [kinds]).includes(1)
}
const blockedSet = (blockedRelays: string[]) =>
new Set(blockedRelays.map((b) => normalizeUrl(b) || b))
/**
* Relay URLs for the all favorites home feed only (`FeedProvider` `all-favorites` / that `RelaysFeed` mode).
* Non-blocked user favorites, or {@link DEFAULT_FAVORITE_RELAYS} when none remain.
* Logged-in user's favorite relays (kind 10012 `relay` tags via {@link useFavoriteRelays}, plus bootstrap defaults
* when the event is missing): drop blocked, dedupe, normalize. If no non-blocked entries remain, use
* {@link DEFAULT_FAVORITE_RELAYS}. Same list drives the favorites tier in REQ/publish prioritization and the
* all-favorites home feed.
*/
export function getFavoritesFeedRelayUrls(
favoriteRelays: string[],
@ -48,34 +67,108 @@ export function mergeRelayUrlLayers(layers: string[][], blockedRelays: string[]) @@ -48,34 +67,108 @@ export function mergeRelayUrlLayers(layers: string[][], blockedRelays: string[])
return out
}
/** Options shared by the read-side relay prioritization helpers in this module. */
export type ReadRelayPriorityOptions = {
  /** User NIP-65 write list — local URLs are promoted with inboxes for REQ. */
  userWriteRelays?: string[]
  /** Profile/timeline author outboxes (write relays) when known. */
  authorWriteRelays?: string[]
  /** Cap on the merged relay URL list; callers default this to `MAX_REQ_RELAY_URLS` when unset. */
  maxRelays?: number
  /**
   * When set, applies to all subrequests. When unset, each subrequest uses {@link relayFilterLikelyIncludesKind1}
   * on its filter to decide whether to strip kind-1-blocklisted relays before capping.
   */
  applyKind1BlockedFilter?: boolean
  /**
   * When false, ignore each subrequest's `urls` and use only the shared prioritized stack (rare).
   * Default true.
   */
  mergeSubrequestRelayUrls?: boolean
  /**
   * When true, fold `r.urls` into the author-outbox tier only (no extra first layer). Use for GIF / explicit spell relays
   * that should rank with author outboxes, not ahead of user inboxes. Default false: prepend `r.urls` before user tiers.
   */
  mergeSubrequestRelaysIntoAuthorTier?: boolean
}
/**
* Favorites (same set as the favorites feed) plus {@link FAST_READ_RELAY_URLS} and the users NIP-65 **read** / inbox relays.
* Fast-read URLs are merged first so REQ setup hits responsive indexers early (same deduped set).
* REQ order: user inboxes + locals → author outboxes → favorites → {@link FAST_READ_RELAY_URLS}.
*/
export function getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays: string[],
blockedRelays: string[],
userInboxReadRelays: string[]
userInboxReadRelays: string[],
options?: ReadRelayPriorityOptions
): string[] {
const favorites = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
const fast = FAST_READ_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean) as string[]
return mergeRelayUrlLayers([fast, favorites, userInboxReadRelays], blockedRelays)
return buildPrioritizedReadRelayUrls({
userReadRelays: userInboxReadRelays,
userWriteRelays: options?.userWriteRelays ?? [],
authorWriteRelays: options?.authorWriteRelays ?? [],
favoriteRelays: favorites,
blockedRelays,
maxRelays: options?.maxRelays,
applyKind1BlockedFilter: options?.applyKind1BlockedFilter
})
}
/** Prefix each subrequest’s `urls` with the extended read set (favorites + fast read + inboxes). */
/**
* Per subrequest: shared inbox author/favorites fast read stack, normalized, user-blocked and (when applicable)
* kind-1-blocked stripped, deduped, capped. Subrequest `urls` are prepended first by default (following shards);
* set {@link ReadRelayPriorityOptions.mergeSubrequestRelaysIntoAuthorTier} to fold them into the author tier only
* (e.g. curated GIF / spell relay lists).
*/
export function augmentSubRequestsWithFavoritesFastReadAndInbox(
requests: TFeedSubRequest[],
favoriteRelays: string[],
blockedRelays: string[],
userInboxReadRelays: string[]
userInboxReadRelays: string[],
options?: ReadRelayPriorityOptions
): TFeedSubRequest[] {
const base = getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
userInboxReadRelays
)
return requests.map((r) => ({
...r,
urls: mergeRelayUrlLayers([base, r.urls], blockedRelays)
}))
const max = options?.maxRelays ?? MAX_REQ_RELAY_URLS
return requests.map((r) => {
const useSubUrls = options?.mergeSubrequestRelayUrls !== false
const foldIntoAuthor = options?.mergeSubrequestRelaysIntoAuthorTier === true
const applyK1 =
options?.applyKind1BlockedFilter !== undefined
? options.applyKind1BlockedFilter
: relayFilterLikelyIncludesKind1(r.filter)
const favorites = getFavoritesFeedRelayUrls(favoriteRelays, blockedRelays)
if (!useSubUrls) {
return {
...r,
urls: buildPrioritizedReadRelayUrls({
userReadRelays: userInboxReadRelays,
userWriteRelays: options?.userWriteRelays ?? [],
authorWriteRelays: options?.authorWriteRelays ?? [],
favoriteRelays: favorites,
blockedRelays,
maxRelays: max,
applyKind1BlockedFilter: applyK1
})
}
}
const authorOnly = dedupeNormalizeRelayUrlsOrdered(options?.authorWriteRelays ?? [])
const authorTier = foldIntoAuthor
? dedupeNormalizeRelayUrlsOrdered([...authorOnly, ...r.urls])
: authorOnly
const coreLayers = buildReadRelayPriorityLayers({
userReadRelays: userInboxReadRelays,
userWriteRelays: options?.userWriteRelays ?? [],
authorWriteRelays: authorTier,
favoriteRelays: favorites
})
const layers = foldIntoAuthor ? coreLayers : [relayUrlsLocalsFirst(r.urls), ...coreLayers]
return {
...r,
urls: mergeRelayPriorityLayers(layers, blockedRelays, max, {
applyKind1BlockedFilter: applyK1
})
}
})
}

178
src/lib/relay-url-priority.ts

@@ -0,0 +1,178 @@
import {
FAST_READ_RELAY_URLS,
FAST_WRITE_RELAY_URLS,
KIND_1_BLOCKED_RELAY_URLS,
MAX_PUBLISH_RELAYS,
MAX_REQ_RELAY_URLS
} from '@/constants'
import { isLocalNetworkUrl, normalizeUrl } from '@/lib/url'
export { MAX_REQ_RELAY_URLS }
export function dedupeNormalizeRelayUrlsOrdered(urls: string[]): string[] {
const seen = new Set<string>()
const out: string[] = []
for (const u of urls) {
const n = normalizeUrl(u) || u
if (!n || seen.has(n)) continue
seen.add(n)
out.push(n)
}
return out
}
/** LAN / local host relays first, then the rest; deduped. */
export function relayUrlsLocalsFirst(urls: string[]): string[] {
const local: string[] = []
const remote: string[] = []
for (const u of urls) {
const n = normalizeUrl(u) || u
if (!n) continue
if (isLocalNetworkUrl(n)) local.push(n)
else remote.push(n)
}
return dedupeNormalizeRelayUrlsOrdered([...local, ...remote])
}
function blockedNormSet(blockedRelays: string[] | undefined): Set<string> {
return new Set((blockedRelays ?? []).map((b) => normalizeUrl(b) || b).filter(Boolean))
}
// Memoized normalized form of KIND_1_BLOCKED_RELAY_URLS (the constant never changes at runtime).
let kind1BlockedNormCache: Set<string> | undefined
/** Normalized set of relays excluded from kind-1 (short note) REQs; built once and cached. */
function kind1BlockedNormSet(): Set<string> {
  kind1BlockedNormCache ??= new Set(
    KIND_1_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean)
  )
  return kind1BlockedNormCache
}
export type MergeRelayPriorityLayersOptions = {
/** When true, drop {@link KIND_1_BLOCKED_RELAY_URLS} before applying the max cap. */
applyKind1BlockedFilter?: boolean
}
/**
* Merge priority layers in order; first occurrence wins; skip blocked (and optional kind-1 block list); stop at `max`.
*/
export function mergeRelayPriorityLayers(
layers: string[][],
blockedRelays: string[] | undefined,
max: number,
mergeOpts?: MergeRelayPriorityLayersOptions
): string[] {
const blocked = blockedNormSet(blockedRelays)
const k1 = mergeOpts?.applyKind1BlockedFilter ? kind1BlockedNormSet() : new Set<string>()
const seen = new Set<string>()
const out: string[] = []
for (const layer of layers) {
for (const u of layer) {
const n = normalizeUrl(u) || u
if (!n || blocked.has(n) || k1.has(n) || seen.has(n)) continue
seen.add(n)
out.push(n)
if (out.length >= max) return out
}
}
return out
}
/**
 * FAST_READ_RELAY_URLS normalized and deduped. `dedupeNormalizeRelayUrlsOrdered` already normalizes
 * each URL and drops empties, so the previous extra map/filter pass (and its `as string[]` assertion)
 * was redundant; the spread keeps compatibility with a readonly constant array.
 */
const normFastRead = (): string[] => dedupeNormalizeRelayUrlsOrdered([...FAST_READ_RELAY_URLS])
/** FAST_WRITE_RELAY_URLS normalized and deduped (same reasoning as {@link normFastRead}). */
const normFastWrite = (): string[] => dedupeNormalizeRelayUrlsOrdered([...FAST_WRITE_RELAY_URLS])
/**
 * Ordered layers for REQ / read (before merge, dedupe, blocked strip, kind-1 strip, cap).
 */
export function buildReadRelayPriorityLayers(opts: {
  userReadRelays: string[]
  userWriteRelays?: string[]
  authorWriteRelays?: string[]
  favoriteRelays: string[]
}): string[][] {
  // Local-network outboxes are promoted into the inbox tier so a LAN relay the
  // user writes to is queried ahead of everything remote.
  const localOutboxes = (opts.userWriteRelays ?? []).filter((url) =>
    isLocalNetworkUrl(normalizeUrl(url) || url)
  )
  const inboxTier = dedupeNormalizeRelayUrlsOrdered([
    ...localOutboxes,
    ...relayUrlsLocalsFirst(opts.userReadRelays)
  ])
  return [
    inboxTier,
    dedupeNormalizeRelayUrlsOrdered(opts.authorWriteRelays ?? []),
    dedupeNormalizeRelayUrlsOrdered(opts.favoriteRelays ?? []),
    normFastRead()
  ]
}
/**
 * REQ / read: user inboxes (locals first) + user local outboxes → author outboxes → favorites → FAST_READ.
 * Blocked and (optionally) kind-1-blocked relays are removed before slicing to `maxRelays`.
 */
export function buildPrioritizedReadRelayUrls(opts: {
  userReadRelays: string[]
  userWriteRelays?: string[]
  authorWriteRelays?: string[]
  favoriteRelays: string[]
  blockedRelays?: string[]
  maxRelays?: number
  /** Default true: strip {@link KIND_1_BLOCKED_RELAY_URLS} (kind-1-heavy timelines). Set false for non–kind-1 queries. */
  applyKind1BlockedFilter?: boolean
}): string[] {
  const layers = buildReadRelayPriorityLayers({
    userReadRelays: opts.userReadRelays,
    userWriteRelays: opts.userWriteRelays,
    authorWriteRelays: opts.authorWriteRelays,
    favoriteRelays: opts.favoriteRelays
  })
  // The kind-1 block list applies unless the caller explicitly opts out.
  return mergeRelayPriorityLayers(layers, opts.blockedRelays, opts.maxRelays ?? MAX_REQ_RELAY_URLS, {
    applyKind1BlockedFilter: opts.applyKind1BlockedFilter !== false
  })
}
/**
 * Ordered layers for publish / write (before merge, blocked strip, kind-1 strip, cap).
 */
export function buildWriteRelayPriorityLayers(opts: {
  userWriteRelays: string[]
  authorReadRelays?: string[]
  favoriteRelays?: string[]
  extraRelays?: string[]
}): string[][] {
  return [
    relayUrlsLocalsFirst(opts.userWriteRelays),
    dedupeNormalizeRelayUrlsOrdered(opts.authorReadRelays ?? []),
    dedupeNormalizeRelayUrlsOrdered(opts.favoriteRelays ?? []),
    dedupeNormalizeRelayUrlsOrdered(opts.extraRelays ?? []),
    normFastWrite(),
    normFastRead()
  ]
}
/**
 * Publish / write: user outboxes (locals first) → target author inboxes → favorites → extras → FAST_WRITE → FAST_READ.
 */
export function buildPrioritizedWriteRelayUrls(opts: {
  userWriteRelays: string[]
  authorReadRelays?: string[]
  favoriteRelays?: string[]
  extraRelays?: string[]
  blockedRelays?: string[]
  maxRelays?: number
  /** When true, strip {@link KIND_1_BLOCKED_RELAY_URLS} before capping (kind 1 notes). */
  applyKind1BlockedFilter?: boolean
}): string[] {
  const tiers = buildWriteRelayPriorityLayers({
    userWriteRelays: opts.userWriteRelays,
    authorReadRelays: opts.authorReadRelays,
    favoriteRelays: opts.favoriteRelays,
    extraRelays: opts.extraRelays
  })
  const cap = opts.maxRelays ?? MAX_PUBLISH_RELAYS
  // Unlike the read path, the kind-1 block list is OFF by default for writes (explicit opt-in only).
  return mergeRelayPriorityLayers(tiers, opts.blockedRelays, cap, {
    applyKind1BlockedFilter: opts.applyKind1BlockedFilter === true
  })
}

3
src/pages/primary/NoteListPage/FollowingFeed.tsx

@ -34,7 +34,8 @@ const FollowingFeed = forwardRef< @@ -34,7 +34,8 @@ const FollowingFeed = forwardRef<
raw,
favoriteRelays,
blockedRelays,
relayList?.read ?? []
relayList?.read ?? [],
{ userWriteRelays: relayList?.write ?? [] }
)
)
}

8
src/pages/primary/SpellsPage/CreateSpellDialog.tsx

@ -321,11 +321,9 @@ export default function CreateSpellDialog({ @@ -321,11 +321,9 @@ export default function CreateSpellDialog({
const { draft, notices, pendingATags } = applyListEventToSpellDraft(base, ev)
setForm(draft)
setListImportNotices(notices)
const urls = getRelaysForSpellCatalogSync(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
const urls = getRelaysForSpellCatalogSync(favoriteRelays, blockedRelays, relayList?.read ?? [], {
userWriteRelays: relayList?.write ?? []
})
if (pendingATags.length === 0) return
void resolveSpellListATags(pendingATags, urls).then(({ ids, notices: extra }) => {
if (ids.length) {

21
src/pages/primary/SpellsPage/index.tsx

@ -411,11 +411,9 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -411,11 +411,9 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
if (!cancelled) void loadSpells()
}, 120)
}
const urls = getRelaysForSpellCatalogSync(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
)
const urls = getRelaysForSpellCatalogSync(favoriteRelays, blockedRelays, relayList?.read ?? [], {
userWriteRelays: relayList?.write ?? []
})
const catalogAuthors = buildSpellCatalogAuthors(pubkey, contacts)
const authorAllowlist = new Set(catalogAuthors)
const filter = {
@ -560,7 +558,8 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -560,7 +558,8 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
req,
favoriteRelays,
blockedRelays,
relayList?.read ?? []
relayList?.read ?? [],
{ userWriteRelays: relayList?.write ?? [] }
)
const withReadOnly = merged.map((r) => ({
...r,
@ -609,10 +608,18 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage( @@ -609,10 +608,18 @@ const SpellsPage = forwardRef<TPageRef>(function SpellsPage(
const syncFauxSubRequests = useMemo<TFeedSubRequest[]>(() => {
if (!selectedFauxSpell || selectedFauxSpell === 'following') return []
const fauxSpellSkipKind1Blocked =
selectedFauxSpell === 'calendar' ||
selectedFauxSpell === 'discussions' ||
selectedFauxSpell === 'followPacks'
const feedUrls = getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
relayList?.read ?? [],
{
userWriteRelays: relayList?.write ?? [],
applyKind1BlockedFilter: fauxSpellSkipKind1Blocked ? false : undefined
}
)
if (selectedFauxSpell === 'notifications') {

32
src/pages/secondary/NoteListPage/index.tsx

@ -6,10 +6,8 @@ import { Button } from '@/components/ui/button' @@ -6,10 +6,8 @@ import { Button } from '@/components/ui/button'
import { SEARCHABLE_RELAY_URLS } from '@/constants'
import {
augmentSubRequestsWithFavoritesFastReadAndInbox,
getRelayUrlsWithFavoritesFastReadAndInbox,
mergeRelayUrlLayers
getRelayUrlsWithFavoritesFastReadAndInbox
} from '@/lib/favorites-feed-relays'
import { normalizeUrl } from '@/lib/url'
import SecondaryPageLayout from '@/layouts/SecondaryPageLayout'
import { toProfileList } from '@/lib/link'
import { fetchPubkeysFromDomain, getWellKnownNip05Url } from '@/lib/nip05'
@ -84,6 +82,10 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -84,6 +82,10 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
.getAll('k')
.map((k) => parseInt(k))
.filter((k) => !isNaN(k))
const readUrlOpts = {
userWriteRelays: relayList?.write ?? [],
applyKind1BlockedFilter: kinds.length === 0 || kinds.includes(1)
}
const hashtag = searchParams.get('t')
if (hashtag) {
setData({ type: 'hashtag' })
@ -94,7 +96,8 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -94,7 +96,8 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
urls: getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
relayList?.read ?? [],
readUrlOpts
)
}
])
@ -133,16 +136,11 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -133,16 +136,11 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
setSubRequests([
{
filter: { '#I': [externalContentId], ...(kinds.length > 0 ? { kinds } : {}) },
urls: mergeRelayUrlLayers(
[
getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
),
(relayList?.write || []).map((url) => normalizeUrl(url) || url).filter(Boolean) as string[]
],
blockedRelays
urls: getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? [],
{ userWriteRelays: relayList?.write ?? [] }
)
}
])
@ -173,7 +171,8 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -173,7 +171,8 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
raw,
favoriteRelays,
blockedRelays,
relayList?.read ?? []
relayList?.read ?? [],
{ userWriteRelays: relayList?.write ?? [] }
)
)
setControls(
@ -210,7 +209,8 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid @@ -210,7 +209,8 @@ const NoteListPage = forwardRef<HTMLDivElement, NoteListPageProps>(({ index, hid
urls: getRelayUrlsWithFavoritesFastReadAndInbox(
favoriteRelays,
blockedRelays,
relayList?.read ?? []
relayList?.read ?? [],
readUrlOpts
)
}
])

38
src/providers/BookmarksProvider.tsx

@ -1,7 +1,7 @@ @@ -1,7 +1,7 @@
import { buildATag, buildETag, createBookmarkDraftEvent } from '@/lib/draft-event'
import { getReplaceableCoordinateFromEvent, isReplaceableEvent } from '@/lib/event'
import { normalizeUrl } from '@/lib/url'
import { FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants'
import { getFavoritesFeedRelayUrls } from '@/lib/favorites-feed-relays'
import { buildPrioritizedReadRelayUrls, buildPrioritizedWriteRelayUrls } from '@/lib/relay-url-priority'
import logger from '@/lib/logger'
import client from '@/services/client.service'
import { replaceableEventService } from '@/services/client.service'
@ -28,25 +28,29 @@ export const useBookmarks = () => { @@ -28,25 +28,29 @@ export const useBookmarks = () => {
export function BookmarksProvider({ children }: { children: React.ReactNode }) {
const { pubkey: accountPubkey, publish, updateBookmarkListEvent } = useNostr()
const { favoriteRelays } = useFavoriteRelays()
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
// Build comprehensive relay list for publishing (same as ProfileFeed)
const buildComprehensiveRelayList = useCallback(async () => {
const myRelayList = accountPubkey ? await client.fetchRelayList(accountPubkey) : { write: [], read: [] }
const allRelays = [
...(myRelayList.read || []), // User's inboxes (kind 10002)
...(myRelayList.write || []), // User's outboxes (kind 10002)
...(favoriteRelays || []), // User's favorite relays (kind 10012)
...FAST_READ_RELAY_URLS, // Fast read relays
...FAST_WRITE_RELAY_URLS // Fast write relays
]
const normalizedRelays = allRelays
.map(url => normalizeUrl(url))
.filter((url): url is string => !!url)
return Array.from(new Set(normalizedRelays))
}, [accountPubkey, favoriteRelays])
const favoritesTier = getFavoritesFeedRelayUrls(favoriteRelays ?? [], blockedRelays)
const read = buildPrioritizedReadRelayUrls({
userReadRelays: myRelayList.read ?? [],
userWriteRelays: myRelayList.write ?? [],
favoriteRelays: favoritesTier,
blockedRelays,
maxRelays: 100,
applyKind1BlockedFilter: false
})
const write = buildPrioritizedWriteRelayUrls({
userWriteRelays: myRelayList.write ?? [],
favoriteRelays: favoritesTier,
blockedRelays,
maxRelays: 100,
applyKind1BlockedFilter: false
})
return [...new Set([...read, ...write])]
}, [accountPubkey, favoriteRelays, blockedRelays])
const addBookmark = async (event: Event) => {
if (!accountPubkey) return

28
src/providers/GroupListProvider.tsx

@ -2,8 +2,8 @@ import { createContext, useContext, useEffect, useState, useCallback, useMemo } @@ -2,8 +2,8 @@ import { createContext, useContext, useEffect, useState, useCallback, useMemo }
import { useNostr } from '@/providers/NostrProvider'
import { useFavoriteRelays } from '@/providers/FavoriteRelaysProvider'
import { ExtendedKind } from '@/constants'
import { normalizeUrl } from '@/lib/url'
import { FAST_READ_RELAY_URLS } from '@/constants'
import { getFavoritesFeedRelayUrls } from '@/lib/favorites-feed-relays'
import { buildPrioritizedReadRelayUrls } from '@/lib/relay-url-priority'
import client from '@/services/client.service'
import { queryService } from '@/services/client.service'
import logger from '@/lib/logger'
@ -27,26 +27,22 @@ export const useGroupList = () => { @@ -27,26 +27,22 @@ export const useGroupList = () => {
export function GroupListProvider({ children }: { children: React.ReactNode }) {
const { pubkey: accountPubkey } = useNostr()
const { favoriteRelays } = useFavoriteRelays()
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const [userGroups, setUserGroups] = useState<string[]>([])
const [isLoading, setIsLoading] = useState(false)
// Build comprehensive relay list for fetching group list
const buildComprehensiveRelayList = useCallback(async () => {
const myRelayList = accountPubkey ? await client.fetchRelayList(accountPubkey) : { write: [], read: [] }
const allRelays = [
...(myRelayList.read || []), // User's inboxes (kind 10002)
...(myRelayList.write || []), // User's outboxes (kind 10002)
...(favoriteRelays || []), // User's favorite relays (kind 10012)
...FAST_READ_RELAY_URLS // Fast read relays
]
const normalizedRelays = allRelays
.map(url => normalizeUrl(url))
.filter((url): url is string => !!url)
return Array.from(new Set(normalizedRelays))
}, [accountPubkey, favoriteRelays])
const favoritesTier = getFavoritesFeedRelayUrls(favoriteRelays ?? [], blockedRelays)
return buildPrioritizedReadRelayUrls({
userReadRelays: myRelayList.read ?? [],
userWriteRelays: myRelayList.write ?? [],
favoriteRelays: favoritesTier,
blockedRelays,
applyKind1BlockedFilter: false
})
}, [accountPubkey, favoriteRelays, blockedRelays])
// Fetch user's group list (kind 10009)
const fetchGroupList = useCallback(async () => {

38
src/providers/InterestListProvider.tsx

@ -1,7 +1,7 @@ @@ -1,7 +1,7 @@
import { createInterestListDraftEvent } from '@/lib/draft-event'
import { normalizeTopic } from '@/lib/discussion-topics'
import { normalizeUrl } from '@/lib/url'
import { FAST_READ_RELAY_URLS, FAST_WRITE_RELAY_URLS } from '@/constants'
import { getFavoritesFeedRelayUrls } from '@/lib/favorites-feed-relays'
import { buildPrioritizedReadRelayUrls, buildPrioritizedWriteRelayUrls } from '@/lib/relay-url-priority'
import logger from '@/lib/logger'
import client from '@/services/client.service'
import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react'
@ -32,7 +32,7 @@ export const useInterestList = () => { @@ -32,7 +32,7 @@ export const useInterestList = () => {
export function InterestListProvider({ children }: { children: React.ReactNode }) {
const { t } = useTranslation()
const { pubkey: accountPubkey, interestListEvent, publish, updateInterestListEvent } = useNostr()
const { favoriteRelays } = useFavoriteRelays()
const { favoriteRelays, blockedRelays } = useFavoriteRelays()
const [topics, setTopics] = useState<string[]>([])
const subscribedTopics = useMemo(() => new Set(topics), [topics])
const [changing, setChanging] = useState(false)
@ -40,20 +40,24 @@ export function InterestListProvider({ children }: { children: React.ReactNode } @@ -40,20 +40,24 @@ export function InterestListProvider({ children }: { children: React.ReactNode }
// Build comprehensive relay list for publishing (same as ProfileFeed)
const buildComprehensiveRelayList = useCallback(async () => {
const myRelayList = accountPubkey ? await client.fetchRelayList(accountPubkey) : { write: [], read: [] }
const allRelays = [
...(myRelayList.read || []), // User's inboxes (kind 10002)
...(myRelayList.write || []), // User's outboxes (kind 10002)
...(favoriteRelays || []), // User's favorite relays (kind 10012)
...FAST_READ_RELAY_URLS, // Fast read relays
...FAST_WRITE_RELAY_URLS // Fast write relays
]
const normalizedRelays = allRelays
.map(url => normalizeUrl(url))
.filter((url): url is string => !!url)
return Array.from(new Set(normalizedRelays))
}, [accountPubkey, favoriteRelays])
const favoritesTier = getFavoritesFeedRelayUrls(favoriteRelays ?? [], blockedRelays)
const read = buildPrioritizedReadRelayUrls({
userReadRelays: myRelayList.read ?? [],
userWriteRelays: myRelayList.write ?? [],
favoriteRelays: favoritesTier,
blockedRelays,
maxRelays: 100,
applyKind1BlockedFilter: false
})
const write = buildPrioritizedWriteRelayUrls({
userWriteRelays: myRelayList.write ?? [],
favoriteRelays: favoritesTier,
blockedRelays,
maxRelays: 100,
applyKind1BlockedFilter: false
})
return [...new Set([...read, ...write])]
}, [accountPubkey, favoriteRelays, blockedRelays])
useEffect(() => {
const updateTopics = () => {

53
src/providers/NostrProvider/index.tsx

@ -1,5 +1,12 @@ @@ -1,5 +1,12 @@
import LoginDialog from '@/components/LoginDialog'
import { FAST_READ_RELAY_URLS, ExtendedKind, FAST_WRITE_RELAY_URLS, PROFILE_FETCH_RELAY_URLS, PROFILE_RELAY_URLS } from '@/constants'
import {
DEFAULT_FAVORITE_RELAYS,
FAST_READ_RELAY_URLS,
ExtendedKind,
FAST_WRITE_RELAY_URLS,
PROFILE_FETCH_RELAY_URLS,
PROFILE_RELAY_URLS
} from '@/constants'
import {
buildAltTag,
buildClientTag,
@ -48,6 +55,33 @@ import { NsecSigner } from './nsec.signer' @@ -48,6 +55,33 @@ import { NsecSigner } from './nsec.signer'
export { useNostr } from '@/providers/nostr-context'
export type { TNostrContext } from '@/providers/nostr-context'
/** Kind 10012 `relay` tags for publish / target-relay prioritization. */
function favoriteRelayUrlsForPublish(favoriteRelaysEvent: Event | null, pubkey: string | null): string[] {
if (!favoriteRelaysEvent) {
return pubkey ? [...DEFAULT_FAVORITE_RELAYS] : []
}
const urls: string[] = []
favoriteRelaysEvent.tags.forEach(([name, v]) => {
if (name === 'relay' && v) {
const n = normalizeUrl(v) || v
if (n && !urls.includes(n)) urls.push(n)
}
})
return urls.length > 0 ? urls : pubkey ? [...DEFAULT_FAVORITE_RELAYS] : []
}
function blockedRelayUrlsFromEvent(blockedRelaysEvent: Event | null): string[] {
const out: string[] = []
if (!blockedRelaysEvent) return out
blockedRelaysEvent.tags.forEach(([tagName, tagValue]) => {
if (tagName === 'relay' && tagValue) {
const n = normalizeUrl(tagValue)
if (n && !out.includes(n)) out.push(n)
}
})
return out
}
export function NostrProvider({ children }: { children: React.ReactNode }) {
const { t } = useTranslation()
const [accounts, setAccounts] = useState<TAccountPointer[]>(
@ -829,12 +863,17 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -829,12 +863,17 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
}
logger.debug('[Publish] Determining target relays...', { kind: event.kind, pubkey: event.pubkey?.substring(0, 8) })
const relays = await client.determineTargetRelays(event, options)
const favoriteRelayUrls = favoriteRelayUrlsForPublish(favoriteRelaysEvent, account.pubkey)
const relays = await client.determineTargetRelays(event, {
...options,
favoriteRelayUrls,
blockedRelayUrls: options.blockedRelayUrls ?? blockedRelayUrlsFromEvent(blockedRelaysEvent)
})
logger.debug('[Publish] Target relays determined', { relayCount: relays.length, relays: relays.slice(0, 5) })
try {
logger.debug('[Publish] Calling client.publishEvent()...', { relayCount: relays.length, eventId: event.id?.substring(0, 8) })
const publishResult = await client.publishEvent(relays, event)
const publishResult = await client.publishEvent(relays, event, { favoriteRelayUrls })
logger.debug('[Publish] publishEvent completed', {
success: publishResult.success,
successCount: publishResult.successCount,
@ -922,9 +961,13 @@ export function NostrProvider({ children }: { children: React.ReactNode }) { @@ -922,9 +961,13 @@ export function NostrProvider({ children }: { children: React.ReactNode }) {
const deletionRequest = await signEvent(createDeletionRequestDraftEvent(targetEvent))
// Privacy: Only use user's own relays, never connect to "seen on" relays
const relays = await client.determineTargetRelays(targetEvent)
const favUrls = favoriteRelayUrlsForPublish(favoriteRelaysEvent, account?.pubkey ?? null)
const relays = await client.determineTargetRelays(targetEvent, {
favoriteRelayUrls: favUrls,
blockedRelayUrls: blockedRelayUrlsFromEvent(blockedRelaysEvent)
})
const result = await client.publishEvent(relays, deletionRequest)
const result = await client.publishEvent(relays, deletionRequest, { favoriteRelayUrls: favUrls })
await client.applyDeletionRequestToLocalCache(deletionRequest)

210
src/services/client-query.service.ts

@ -2,6 +2,7 @@ import { @@ -2,6 +2,7 @@ import {
FEED_FIRST_RELAY_RESULT_GRACE_MIN_LIMIT,
FIRST_RELAY_RESULT_GRACE_MS,
KIND_1_BLOCKED_RELAY_URLS,
MAX_CONCURRENT_RELAY_CONNECTIONS,
SEARCHABLE_RELAY_URLS
} from '@/constants'
import logger from '@/lib/logger'
@ -50,12 +51,35 @@ export class QueryService { @@ -50,12 +51,35 @@ export class QueryService {
private signer?: ISigner
private signerType?: TSignerType
/** Max concurrent REQ subscriptions per relay */
private static readonly MAX_CONCURRENT_SUBS_PER_RELAY = 8
/** Max concurrent REQ subscriptions per relay URL */
private static readonly MAX_CONCURRENT_SUBS_PER_RELAY = MAX_CONCURRENT_RELAY_CONNECTIONS
private activeSubCountByRelay = new Map<string, number>()
private subSlotWaitQueueByRelay = new Map<string, Array<() => void>>()
private eventSeenOnRelays = new Map<string, Set<string>>()
/** App-wide cap on parallel ensureRelay + initial subscribe setup (any relay). */
private globalRelayConnectionSlotsInUse = 0
private globalRelayConnectionWaitQueue: Array<() => void> = []
async acquireGlobalRelayConnectionSlot(): Promise<void> {
if (this.globalRelayConnectionSlotsInUse < MAX_CONCURRENT_RELAY_CONNECTIONS) {
this.globalRelayConnectionSlotsInUse++
return
}
await new Promise<void>((resolve) => {
this.globalRelayConnectionWaitQueue.push(() => {
this.globalRelayConnectionSlotsInUse++
resolve()
})
})
}
releaseGlobalRelayConnectionSlot(): void {
this.globalRelayConnectionSlotsInUse = Math.max(0, this.globalRelayConnectionSlotsInUse - 1)
const next = this.globalRelayConnectionWaitQueue.shift()
if (next) next()
}
constructor(pool: SimplePool) {
this.pool = pool
}
@ -372,99 +396,109 @@ export class QueryService { @@ -372,99 +396,109 @@ export class QueryService {
const subs: { relayKey: string; close: () => void }[] = []
const allOpened = Promise.all(
groupedRequests.map(async ({ url, filters: relayFilters }, i) => {
const relayKey = normalizeUrl(url) || url
await this.acquireSubSlot(relayKey)
let relay: AbstractRelay
await this.acquireGlobalRelayConnectionSlot()
try {
relay = await this.pool.ensureRelay(url, { connectionTimeout: 5000 })
} catch (err) {
this.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
}
let slotReleased = false
const releaseOnce = () => {
if (!slotReleased) {
slotReleased = true
const relayKey = normalizeUrl(url) || url
await this.acquireSubSlot(relayKey)
let relay: AbstractRelay
try {
relay = await this.pool.ensureRelay(url, { connectionTimeout: 5000 })
} catch (err) {
this.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
}
}
const sub = relay.subscribe(relayFilters, {
receivedEvent: (_relay, id) => this.trackEventSeenOn(id, _relay),
onevent: (evt: NEvent) => callbacks.onevent?.(evt),
oneose: () => handleEose(i),
onclose: (reason: string) => {
releaseOnce()
if (reason.startsWith('auth-required: ') && this.canSignerAuthenticateRelay()) {
relay
.auth(async (authEvt: EventTemplate) => {
const evt = await this.signer!.signEvent(authEvt)
if (!evt) throw new Error('sign event failed')
return evt as VerifiedEvent
})
.then(async () => {
await this.acquireSubSlot(relayKey)
let liveRelay: AbstractRelay
try {
liveRelay = await this.pool.ensureRelay(url, { connectionTimeout: 5000 })
} catch (err) {
this.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
}
let slotReleased2 = false
const releaseSlot2 = () => {
if (!slotReleased2) {
slotReleased2 = true
this.releaseSubSlot(relayKey)
}
}
try {
const sub2 = liveRelay.subscribe(relayFilters, {
receivedEvent: (_relay, id) => this.trackEventSeenOn(id, _relay),
onevent: (evt: NEvent) => callbacks.onevent?.(evt),
oneose: () => handleEose(i),
onclose: (reason2: string) => {
releaseSlot2()
handleClose(i, reason2)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: 10_000
})
subs.push({
relayKey,
close: () => {
let slotReleased = false
const releaseOnce = () => {
if (!slotReleased) {
slotReleased = true
this.releaseSubSlot(relayKey)
}
}
const sub = relay.subscribe(relayFilters, {
receivedEvent: (_relay, id) => this.trackEventSeenOn(id, _relay),
onevent: (evt: NEvent) => callbacks.onevent?.(evt),
oneose: () => handleEose(i),
onclose: (reason: string) => {
releaseOnce()
if (reason.startsWith('auth-required: ') && this.canSignerAuthenticateRelay()) {
relay
.auth(async (authEvt: EventTemplate) => {
const evt = await this.signer!.signEvent(authEvt)
if (!evt) throw new Error('sign event failed')
return evt as VerifiedEvent
})
.then(async () => {
await this.acquireGlobalRelayConnectionSlot()
try {
await this.acquireSubSlot(relayKey)
let liveRelay: AbstractRelay
try {
liveRelay = await this.pool.ensureRelay(url, { connectionTimeout: 5000 })
} catch (err) {
this.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
}
let slotReleased2 = false
const releaseSlot2 = () => {
if (!slotReleased2) {
slotReleased2 = true
this.releaseSubSlot(relayKey)
}
}
try {
const sub2 = liveRelay.subscribe(relayFilters, {
receivedEvent: (_relay, id) => this.trackEventSeenOn(id, _relay),
onevent: (evt: NEvent) => callbacks.onevent?.(evt),
oneose: () => handleEose(i),
onclose: (reason2: string) => {
releaseSlot2()
handleClose(i, reason2)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: 10_000
})
subs.push({
relayKey,
close: () => {
releaseSlot2()
sub2.close()
}
})
} catch (err) {
releaseSlot2()
sub2.close()
handleClose(i, (err as Error)?.message ?? String(err))
}
})
} catch (err) {
releaseSlot2()
handleClose(i, (err as Error)?.message ?? String(err))
}
})
.catch((err) => {
handleClose(i, `auth failed: ${(err as Error)?.message ?? err}`)
})
return
}
if (reason.startsWith('auth-required: ')) {
callbacks.startLogin?.()
} finally {
this.releaseGlobalRelayConnectionSlot()
}
})
.catch((err) => {
handleClose(i, `auth failed: ${(err as Error)?.message ?? err}`)
})
return
}
if (reason.startsWith('auth-required: ')) {
callbacks.startLogin?.()
}
handleClose(i, reason)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: 10_000
})
subs.push({
relayKey,
close: () => {
releaseOnce()
sub.close()
}
handleClose(i, reason)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: 10_000
})
subs.push({
relayKey,
close: () => {
releaseOnce()
sub.close()
}
})
})
} finally {
this.releaseGlobalRelayConnectionSlot()
}
})
)

32
src/services/client-replaceable-events.service.ts

@ -1,6 +1,7 @@ @@ -1,6 +1,7 @@
import {
ExtendedKind,
FAST_READ_RELAY_URLS,
MAX_CONCURRENT_RELAY_CONNECTIONS,
METADATA_BATCH_QUERY_EOSE_TIMEOUT_MS,
METADATA_BATCH_QUERY_GLOBAL_TIMEOUT_MS,
PROFILE_FETCH_RELAY_URLS
@ -21,6 +22,32 @@ import client from './client.service' @@ -21,6 +22,32 @@ import client from './client.service'
import { buildComprehensiveRelayList, buildExploreProfileAndUserRelayList } from '@/lib/relay-list-builder'
export class ReplaceableEventService {
/** Limits parallel Step 2/3 profile network work (relay list + wide metadata REQ). */
private static profileFallbackSlotsInUse = 0
private static profileFallbackWaitQueue: Array<() => void> = []

/**
 * Wait for a profile-fallback network slot (shared across all instances).
 * FIFO-queues the caller while MAX_CONCURRENT_RELAY_CONNECTIONS lookups
 * are already running; releaseProfileFallbackNetworkSlot() drains the queue.
 */
private static async acquireProfileFallbackNetworkSlot(): Promise<void> {
  if (ReplaceableEventService.profileFallbackSlotsInUse >= MAX_CONCURRENT_RELAY_CONNECTIONS) {
    // Saturated: park until a release hands this waiter the freed slot.
    await new Promise<void>((resolve) => {
      ReplaceableEventService.profileFallbackWaitQueue.push(() => {
        ReplaceableEventService.profileFallbackSlotsInUse++
        resolve()
      })
    })
    return
  }
  ReplaceableEventService.profileFallbackSlotsInUse++
}
private static releaseProfileFallbackNetworkSlot(): void {
ReplaceableEventService.profileFallbackSlotsInUse = Math.max(
0,
ReplaceableEventService.profileFallbackSlotsInUse - 1
)
const next = ReplaceableEventService.profileFallbackWaitQueue.shift()
if (next) next()
}
private queryService: QueryService
private onProfileIndexed?: (profileEvent: NEvent) => void | Promise<void>
private followingFavoriteRelaysCache = new LRUCache<string, Promise<[string, string[]][]>>({
@ -745,6 +772,8 @@ export class ReplaceableEventService { @@ -745,6 +772,8 @@ export class ReplaceableEventService {
return profileEvent
}
await ReplaceableEventService.acquireProfileFallbackNetworkSlot()
try {
// Step 2: Only after cache + default relays miss — NIP-65 relay list (timeout-capped), then hints + outbox/inbox + defaults.
logger.debug('[ReplaceableEventService] Step 2: Fetching author relay list as fallback', {
pubkey,
@ -863,6 +892,9 @@ export class ReplaceableEventService { @@ -863,6 +892,9 @@ export class ReplaceableEventService {
error: error instanceof Error ? error.message : String(error)
})
}
} finally {
ReplaceableEventService.releaseProfileFallbackNetworkSlot()
}
logger.warn('[ReplaceableEventService] Profile not found after cache, relay-list fallback, and comprehensive search', {
pubkey,

561
src/services/client.service.ts

@ -4,6 +4,7 @@ import { @@ -4,6 +4,7 @@ import {
FAST_WRITE_RELAY_URLS,
FIRST_RELAY_RESULT_GRACE_MS,
KIND_1_BLOCKED_RELAY_URLS,
MAX_PUBLISH_RELAYS,
NIP66_DISCOVERY_RELAY_URLS,
PROFILE_FETCH_RELAY_URLS,
READ_ONLY_RELAY_URLS,
@ -21,6 +22,12 @@ import logger from '@/lib/logger' @@ -21,6 +22,12 @@ import logger from '@/lib/logger'
import { dispatchTombstonesUpdated } from '@/lib/tombstone-events'
import { isValidPubkey, pubkeyToNpub } from '@/lib/pubkey'
import { getPubkeysFromPTags, tagNameEquals } from '@/lib/tag'
import {
buildPrioritizedWriteRelayUrls,
dedupeNormalizeRelayUrlsOrdered,
mergeRelayPriorityLayers,
relayUrlsLocalsFirst
} from '@/lib/relay-url-priority'
import { isLocalNetworkUrl, normalizeUrl, simplifyUrl } from '@/lib/url'
import { isSafari } from '@/lib/utils'
import {
@ -159,16 +166,17 @@ class ClientService extends EventTarget { @@ -159,16 +166,17 @@ class ClientService extends EventTarget {
const discoveryRelays = Array.from(new Set([...FAST_READ_RELAY_URLS, ...NIP66_DISCOVERY_RELAY_URLS]))
const events = await this.queryService.query(
discoveryRelays,
{ kinds: [ExtendedKind.RELAY_DISCOVERY] },
{ kinds: [ExtendedKind.RELAY_DISCOVERY], limit: 2000 },
undefined,
{ eoseTimeout: 4000, globalTimeout: 8000 }
)
if (events.length > 0) {
nip66Service.loadFromEvents(events)
logger.info('NIP-66: loaded relay discovery events', { count: events.length })
const capped = events.length > 2000 ? events.slice(0, 2000) : events
nip66Service.loadFromEvents(capped)
logger.debug('NIP-66: loaded relay discovery events', { count: capped.length })
}
} catch (err) {
logger.info('NIP-66: failed to fetch relay discovery', { err })
logger.debug('NIP-66: failed to fetch relay discovery', { err })
}
}
@ -203,6 +211,121 @@ class ClientService extends EventTarget { @@ -203,6 +211,121 @@ class ClientService extends EventTarget {
return true
}
/**
 * Pubkeys whose **read** (inbox) relays should receive this event:
 * `p`/`P` mentions plus the referenced-event author carried on NIP-10
 * `e` tags (`["e", <id>, <relay-url>, <marker>, <pubkey>]`).
 * Excludes the event author; de-duplicates while preserving tag order.
 */
private collectReplyAndMentionPubkeys(event: NEvent): string[] {
  const out: string[] = []
  const seen = new Set<string>()
  const add = (pk: string | undefined) => {
    if (!pk || !isValidPubkey(pk) || pk === event.pubkey || seen.has(pk)) return
    seen.add(pk)
    out.push(pk)
  }
  for (const t of event.tags) {
    const name = t[0]
    const v = t[1]
    if ((name === 'p' || name === 'P') && v) add(v)
    if (name === 'e') {
      // NIP-10 puts the referenced author's pubkey at index 4; index 3 is the
      // root/reply marker (previously read here by mistake, so the pubkey was
      // never collected — marker strings always fail isValidPubkey). Also probe
      // index 3 for marker-less tags; non-pubkey values are filtered by add().
      add(t[4])
      add(t[3])
    }
  }
  return out
}
/**
 * Drop relays an event must never be published to: read-only relays always,
 * and KIND_1_BLOCKED_RELAY_URLS when the event is a kind-1 short text note.
 * The survivors are normalized and de-duplicated, preserving caller order.
 */
private filterPublishingRelays(relays: string[], event: NEvent): string[] {
  const normalize = (u: string) => normalizeUrl(u) || u
  const readOnly = new Set(READ_ONLY_RELAY_URLS.map(normalize))
  const kind1Blocked = new Set(KIND_1_BLOCKED_RELAY_URLS.map(normalize))
  const isShortTextNote = event.kind === kinds.ShortTextNote
  const allowed = relays.filter((url) => {
    const n = normalize(url)
    if (readOnly.has(n)) return false
    return !(isShortTextNote && kind1Blocked.has(n))
  })
  return dedupeNormalizeRelayUrlsOrdered(allowed)
}
/**
 * Tier-sort candidate publish relay URLs and apply publish-time filters:
 *   t0 — the author's own NIP-65 write (outbox) relays
 *   t1 — inbox (read) relays of replied-to / mentioned pubkeys
 *   t2 — the user's favorite relays
 *   t3 — FAST_WRITE_RELAY_URLS, t4 — FAST_READ_RELAY_URLS, t5 — everything else
 * Read-only relays are removed (always), KIND_1_BLOCKED_RELAY_URLS only for
 * kind-1 short text notes, and the result is capped at MAX_PUBLISH_RELAYS.
 */
private async prioritizePublishUrlList(
  relayUrls: string[],
  event: NEvent,
  favoriteRelayUrls: string[] = []
): Promise<string[]> {
  // Tier-0 membership: the author's own write relays, normalized.
  let userWriteSet = new Set<string>()
  try {
    const rl = await this.fetchRelayList(event.pubkey)
    userWriteSet = new Set(
      (rl?.write ?? [])
        .map((u) => normalizeUrl(u) || u)
        .filter((u): u is string => !!u)
    )
  } catch {
    // ignore — a failed relay-list fetch just leaves tier 0 empty
  }
  // Tier-1 membership: inbox relays of every mentioned / replied-to pubkey.
  const ctx = this.collectReplyAndMentionPubkeys(event)
  let authorReadSet = new Set<string>()
  if (ctx.length > 0) {
    const lists = await this.fetchRelayLists(ctx)
    for (const list of lists) {
      for (const u of list?.read ?? []) {
        const n = normalizeUrl(u) || u
        if (n) authorReadSet.add(n)
      }
    }
  }
  // Tier 2-4 membership sets (favorites, fast-write, fast-read), normalized.
  const favSet = new Set(
    favoriteRelayUrls.map((f) => normalizeUrl(f) || f).filter((u): u is string => !!u)
  )
  const fastWSet = new Set(
    FAST_WRITE_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter((u): u is string => !!u)
  )
  const fastRSet = new Set(
    FAST_READ_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter((u): u is string => !!u)
  )
  // Exclusion sets, applied after tier ordering.
  const readOnlySet = new Set(READ_ONLY_RELAY_URLS.map((u) => normalizeUrl(u) || u))
  const kind1BlockedSet = new Set(KIND_1_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u))
  // Bucket each candidate into its highest-priority tier (first match wins;
  // relative order within a tier follows the input order).
  const t0: string[] = []
  const t1: string[] = []
  const t2: string[] = []
  const t3: string[] = []
  const t4: string[] = []
  const t5: string[] = []
  for (const u of relayUrls) {
    const n = normalizeUrl(u) || u
    if (!n) continue
    if (userWriteSet.has(n)) t0.push(n)
    else if (authorReadSet.has(n)) t1.push(n)
    else if (favSet.has(n)) t2.push(n)
    else if (fastWSet.has(n)) t3.push(n)
    else if (fastRSet.has(n)) t4.push(n)
    else t5.push(n)
  }
  // Concatenate tiers in priority order, drop excluded relays, then cap.
  return dedupeNormalizeRelayUrlsOrdered([...t0, ...t1, ...t2, ...t3, ...t4, ...t5])
    .filter((url) => {
      const n = normalizeUrl(url) || url
      if (readOnlySet.has(n)) return false
      if (event.kind === kinds.ShortTextNote && kind1BlockedSet.has(n)) return false
      return true
    })
    .slice(0, MAX_PUBLISH_RELAYS)
}
/** Publish-time wrapper: tier-sort the candidate URLs and cap at MAX_PUBLISH_RELAYS. */
private async capPublishRelayUrlsForPublish(
  relayUrls: string[],
  event: NEvent,
  favoriteRelayUrls: string[] = []
): Promise<string[]> {
  const prioritized = await this.prioritizePublishUrlList(relayUrls, event, favoriteRelayUrls)
  return prioritized
}
/**
* Determine which relays to publish an event to.
* Fallbacks (used when user relay list is empty or fetch fails):
@ -213,8 +336,12 @@ class ClientService extends EventTarget { @@ -213,8 +336,12 @@ class ClientService extends EventTarget {
*/
async determineTargetRelays(
event: NEvent,
{ specifiedRelayUrls, additionalRelayUrls }: TPublishOptions = {}
{ specifiedRelayUrls, additionalRelayUrls, favoriteRelayUrls, blockedRelayUrls }: TPublishOptions = {}
) {
const writeRelayPubOpts = {
blockedRelays: blockedRelayUrls,
applyKind1BlockedFilter: event.kind === kinds.ShortTextNote
}
if (event.kind === kinds.RelayList) {
logger.info('[DetermineTargetRelays] Determining target relays for relay list event', {
pubkey: event.pubkey,
@ -228,7 +355,9 @@ class ClientService extends EventTarget { @@ -228,7 +355,9 @@ class ClientService extends EventTarget {
if (event.kind === kinds.Report) {
// Start with user's write relays (outboxes) - these are the primary targets for reports
const relayList = await this.fetchRelayList(event.pubkey)
const userWriteRelays = relayList?.write.slice(0, 10) ?? []
const userWriteRelays = dedupeNormalizeRelayUrlsOrdered(
(relayList?.write ?? []).map((url) => normalizeUrl(url) || url).filter((u): u is string => !!u)
)
// Get seen relays where the reported event was found
const targetEventId = event.tags.find(tagNameEquals('e'))?.[1]
@ -245,21 +374,32 @@ class ClientService extends EventTarget { @@ -245,21 +374,32 @@ class ClientService extends EventTarget {
}))
}
// Combine: user's write relays first (primary), then seen write relays (additional context)
const reportRelays = Array.from(new Set([
...userWriteRelays,
...seenRelays
]))
// If we still don't have any relays, fall back to fast write relays
if (reportRelays.length === 0) {
reportRelays.push(...FAST_WRITE_RELAY_URLS)
if (userWriteRelays.length === 0 && seenRelays.length === 0) {
return this.filterPublishingRelays(
buildPrioritizedWriteRelayUrls({
userWriteRelays: [...FAST_WRITE_RELAY_URLS],
favoriteRelays: favoriteRelayUrls ?? [],
maxRelays: MAX_PUBLISH_RELAYS,
...writeRelayPubOpts
}),
event
)
}
return reportRelays
return this.filterPublishingRelays(
buildPrioritizedWriteRelayUrls({
userWriteRelays: userWriteRelays,
authorReadRelays: [],
favoriteRelays: favoriteRelayUrls ?? [],
extraRelays: seenRelays,
maxRelays: MAX_PUBLISH_RELAYS,
...writeRelayPubOpts
}),
event
)
}
// Public messages (kind 24) and calendar RSVPs (kind 31925): only author's outboxes + each recipient's inboxes
// Public messages (kind 24) and calendar RSVPs (kind 31925): only author's outboxes + each recipient's
// inboxes — no user favorites, FAST_WRITE, or FAST_READ padding (see relay-selection getPublicMessageRelays).
if (
event.kind === ExtendedKind.PUBLIC_MESSAGE ||
event.kind === ExtendedKind.CALENDAR_EVENT_RSVP
@ -282,14 +422,29 @@ class ClientService extends EventTarget { @@ -282,14 +422,29 @@ class ClientService extends EventTarget {
.map((url) => normalizeUrl(url))
.filter((url): url is string => !!url && !isLocalNetworkUrl(url))
}
const relays = Array.from(new Set([...authorWrite, ...recipientRead]))
let pubRelays = mergeRelayPriorityLayers(
[relayUrlsLocalsFirst(authorWrite), dedupeNormalizeRelayUrlsOrdered(recipientRead)],
blockedRelayUrls,
MAX_PUBLISH_RELAYS,
{ applyKind1BlockedFilter: false }
)
pubRelays = this.filterPublishingRelays(pubRelays, event)
logger.debug('[DetermineTargetRelays] Public message / calendar RSVP: author outbox + recipient inboxes only', {
kind: event.kind,
relayCount: relays.length,
relayCount: pubRelays.length,
authorWriteCount: authorWrite.length,
recipientReadCount: recipientRead.length
})
return relays.length > 0 ? relays : [...FAST_WRITE_RELAY_URLS]
if (pubRelays.length > 0) return pubRelays
return this.filterPublishingRelays(
mergeRelayPriorityLayers(
[relayUrlsLocalsFirst([...FAST_WRITE_RELAY_URLS])],
blockedRelayUrls,
MAX_PUBLISH_RELAYS,
{ applyKind1BlockedFilter: false }
),
event
)
}
let relays: string[]
@ -308,47 +463,42 @@ class ClientService extends EventTarget { @@ -308,47 +463,42 @@ class ClientService extends EventTarget {
})
spellRelayList = { write: [], read: [], originalRelays: [] }
}
const normalizedWrite = (spellRelayList?.write ?? [])
.map((url) => normalizeUrl(url))
.filter((url): url is string => !!url)
const cappedWrite = normalizedWrite.slice(0, 10)
const merged = [...cappedWrite, ...FAST_WRITE_RELAY_URLS]
const seen = new Set<string>()
let spellRelays: string[] = []
for (const u of merged) {
const n = normalizeUrl(u) || u
if (!n || seen.has(n)) continue
seen.add(n)
spellRelays.push(n)
}
if (!spellRelays.length) {
spellRelays = [...FAST_WRITE_RELAY_URLS]
}
const normalizedWrite = dedupeNormalizeRelayUrlsOrdered(
(spellRelayList?.write ?? [])
.map((url) => normalizeUrl(url))
.filter((url): url is string => !!url)
)
const readOnlySet = new Set(READ_ONLY_RELAY_URLS.map((u) => normalizeUrl(u) || u))
spellRelays = spellRelays.filter((url) => {
const spellWriteFiltered = normalizedWrite.filter((url) => {
const n = normalizeUrl(url) || url
return !readOnlySet.has(n)
})
return spellRelays.length > 0 ? spellRelays : [...FAST_WRITE_RELAY_URLS]
return this.filterPublishingRelays(
buildPrioritizedWriteRelayUrls({
userWriteRelays:
spellWriteFiltered.length > 0
? spellWriteFiltered
: dedupeNormalizeRelayUrlsOrdered(FAST_WRITE_RELAY_URLS),
favoriteRelays: favoriteRelayUrls ?? [],
extraRelays: [],
maxRelays: MAX_PUBLISH_RELAYS,
...writeRelayPubOpts
}),
event
)
}
const _additionalRelayUrls: string[] = additionalRelayUrls ?? []
const bootstrapExtras: string[] = [...(additionalRelayUrls ?? [])]
let authorInboxFromContext: string[] = []
if (!specifiedRelayUrls?.length && ![kinds.Contacts, kinds.Mutelist].includes(event.kind)) {
const mentions: string[] = []
event.tags.forEach(([tagName, tagValue]) => {
if (
['p', 'P'].includes(tagName) &&
!!tagValue &&
isValidPubkey(tagValue) &&
!mentions.includes(tagValue)
) {
mentions.push(tagValue)
}
})
if (mentions.length > 0) {
const relayLists = await this.fetchRelayLists(mentions)
const ctxPubkeys = this.collectReplyAndMentionPubkeys(event)
if (ctxPubkeys.length > 0) {
const relayLists = await this.fetchRelayLists(ctxPubkeys)
relayLists.forEach((relayList) => {
_additionalRelayUrls.push(...relayList.read.slice(0, 4))
for (const u of relayList.read ?? []) {
const n = normalizeUrl(u) || u
if (n) authorInboxFromContext.push(n)
}
})
}
}
@ -361,22 +511,22 @@ class ClientService extends EventTarget { @@ -361,22 +511,22 @@ class ClientService extends EventTarget {
ExtendedKind.RELAY_REVIEW
].includes(event.kind)
) {
_additionalRelayUrls.push(...PROFILE_FETCH_RELAY_URLS)
bootstrapExtras.push(...PROFILE_FETCH_RELAY_URLS)
logger.debug('[DetermineTargetRelays] Relay list event detected, adding PROFILE_FETCH_RELAY_URLS', {
kind: event.kind,
profileFetchRelays: PROFILE_FETCH_RELAY_URLS,
additionalRelayCount: _additionalRelayUrls.length
additionalRelayCount: bootstrapExtras.length
})
} else if (event.kind === ExtendedKind.FAVORITE_RELAYS) {
// Use fast write relays for favorite relays to avoid timeouts and payment requirements
_additionalRelayUrls.push(...FAST_WRITE_RELAY_URLS)
bootstrapExtras.push(...FAST_WRITE_RELAY_URLS)
logger.debug('[DetermineTargetRelays] Favorite relays event detected, adding FAST_WRITE_RELAY_URLS', {
kind: event.kind,
fastWriteRelays: FAST_WRITE_RELAY_URLS,
additionalRelayCount: _additionalRelayUrls.length
additionalRelayCount: bootstrapExtras.length
})
} else if (event.kind === ExtendedKind.RSS_FEED_LIST) {
_additionalRelayUrls.push(...FAST_WRITE_RELAY_URLS, ...PROFILE_FETCH_RELAY_URLS)
bootstrapExtras.push(...FAST_WRITE_RELAY_URLS, ...PROFILE_FETCH_RELAY_URLS)
}
if (event.kind === kinds.RelayList || event.kind === ExtendedKind.FAVORITE_RELAYS) {
@ -403,15 +553,26 @@ class ClientService extends EventTarget { @@ -403,15 +553,26 @@ class ClientService extends EventTarget {
writeRelays: relayList?.write?.slice(0, 10) ?? []
})
}
relays = (relayList?.write.slice(0, 10) ?? []).concat(
Array.from(new Set(_additionalRelayUrls)) ?? []
const userWritesOrdered = dedupeNormalizeRelayUrlsOrdered(
(relayList?.write ?? []).map((u) => normalizeUrl(u) || u).filter((u): u is string => !!u)
)
relays = this.filterPublishingRelays(
buildPrioritizedWriteRelayUrls({
userWriteRelays: userWritesOrdered,
authorReadRelays: authorInboxFromContext,
favoriteRelays: favoriteRelayUrls ?? [],
extraRelays: bootstrapExtras,
maxRelays: MAX_PUBLISH_RELAYS,
...writeRelayPubOpts
}),
event
)
if (event.kind === kinds.RelayList || event.kind === ExtendedKind.FAVORITE_RELAYS) {
logger.info('[DetermineTargetRelays] Final relay list for event publication', {
kind: event.kind,
totalRelayCount: relays.length,
userWriteRelays: relayList?.write?.slice(0, 10) ?? [],
additionalRelays: Array.from(new Set(_additionalRelayUrls)),
userWriteRelays: userWritesOrdered.slice(0, MAX_PUBLISH_RELAYS),
additionalRelays: dedupeNormalizeRelayUrlsOrdered(bootstrapExtras),
allRelays: relays
})
}
@ -426,15 +587,13 @@ class ClientService extends EventTarget { @@ -426,15 +587,13 @@ class ClientService extends EventTarget {
})
}
const readOnlySet = new Set(READ_ONLY_RELAY_URLS.map((u) => normalizeUrl(u) || u))
const kind1BlockedSet = new Set(KIND_1_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u))
relays = relays.filter((url) => {
const n = normalizeUrl(url) || url
if (readOnlySet.has(n)) return false
if (event.kind === kinds.ShortTextNote && kind1BlockedSet.has(n)) return false
return true
})
relays = this.filterPublishingRelays(relays, event)
if (specifiedRelayUrls?.length) {
relays = await this.prioritizePublishUrlList(relays, event, favoriteRelayUrls ?? [])
} else {
relays = dedupeNormalizeRelayUrlsOrdered(relays).slice(0, MAX_PUBLISH_RELAYS)
}
return relays
}
@ -555,7 +714,11 @@ class ClientService extends EventTarget { @@ -555,7 +714,11 @@ class ClientService extends EventTarget {
return result.slice(0, count)
}
async publishEvent(relayUrls: string[], event: NEvent) {
async publishEvent(
relayUrls: string[],
event: NEvent,
publishExtras?: { favoriteRelayUrls?: string[] }
) {
const readOnlySet = new Set(READ_ONLY_RELAY_URLS.map((u) => normalizeUrl(u) || u))
const kind1BlockedSet = new Set(KIND_1_BLOCKED_RELAY_URLS.map((u) => normalizeUrl(u) || u))
let filtered = relayUrls.filter((url) => {
@ -567,6 +730,11 @@ class ClientService extends EventTarget { @@ -567,6 +730,11 @@ class ClientService extends EventTarget {
return true
})
filtered = Array.from(new Set(filtered))
filtered = await this.capPublishRelayUrlsForPublish(
filtered,
event,
publishExtras?.favoriteRelayUrls ?? []
)
logger.debug('[PublishEvent] Starting publishEvent', {
eventId: event.id?.substring(0, 8),
@ -886,7 +1054,7 @@ class ClientService extends EventTarget { @@ -886,7 +1054,7 @@ class ClientService extends EventTarget {
) {
const timelineBatchId = `tl-${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 9)}`
const timelineT0 = performance.now()
logger.info('[relay-req] timeline_batch_start', {
logger.debug('[relay-req] timeline_batch_start', {
timelineBatchId,
subRequestCount: subRequests.length,
relayCounts: subRequests.map((r) => r.urls.length)
@ -909,22 +1077,13 @@ class ClientService extends EventTarget { @@ -909,22 +1077,13 @@ class ClientService extends EventTarget {
const deliverTimelineToConsumer = (snapshot: NEvent[], allEosed: boolean) => {
if (!firstPaintLogged && snapshot.length > 0) {
firstPaintLogged = true
logger.info('[relay-req] first_paint', {
logger.debug('[relay-req] first_paint', {
timelineBatchId,
phase: 'data_to_feed',
rowCount: snapshot.length,
allEosed,
ms: Math.round(performance.now() - timelineT0)
})
if (typeof requestAnimationFrame === 'function') {
requestAnimationFrame(() => {
logger.info('[relay-req] first_paint', {
timelineBatchId,
phase: 'raf',
ms: Math.round(performance.now() - timelineT0)
})
})
}
}
onEvents(snapshot, allEosed)
}
@ -1106,7 +1265,7 @@ class ClientService extends EventTarget { @@ -1106,7 +1265,7 @@ class ClientService extends EventTarget {
// Kind-1 queries drop KIND_1_BLOCKED_RELAY_URLS; if every URL was removed, no subs run and
// oneose would never fire — timelines stay loading forever (e.g. favorites feed).
if (groupedRequests.length === 0) {
logger.info('[relay-req] batch_skip', {
logger.debug('[relay-req] batch_skip', {
reason: 'no_relays_after_filters',
filterSummary: summarizeFiltersForRelayLog(filters)
})
@ -1127,7 +1286,7 @@ class ClientService extends EventTarget { @@ -1127,7 +1286,7 @@ class ClientService extends EventTarget {
if (firstRelayResponseLogged) return
firstRelayResponseLogged = true
if (kind === 'eose') awaitingFirstEventAfterEoseFirstResponse = true
logger.info('[relay-req] first_response', {
logger.debug('[relay-req] first_response', {
reqGroupId,
kind,
relayUrl,
@ -1137,7 +1296,7 @@ class ClientService extends EventTarget { @@ -1137,7 +1296,7 @@ class ClientService extends EventTarget {
const logFirstEventIfFirstResponseWasEmpty = (evt: NEvent, relayKey: string) => {
if (!awaitingFirstEventAfterEoseFirstResponse) return
awaitingFirstEventAfterEoseFirstResponse = false
logger.info('[relay-req] first_event', {
logger.debug('[relay-req] first_event', {
reqGroupId,
relayUrl: relayKey,
eventId: evt.id,
@ -1147,7 +1306,7 @@ class ClientService extends EventTarget { @@ -1147,7 +1306,7 @@ class ClientService extends EventTarget {
})
}
logger.info('[relay-req] batch_start', {
logger.debug('[relay-req] batch_start', {
reqGroupId,
relayCount: groupedRequests.length,
relays: groupedRequests.map((r) => r.url),
@ -1185,122 +1344,132 @@ class ClientService extends EventTarget { @@ -1185,122 +1344,132 @@ class ClientService extends EventTarget {
const subs: { relayKey: string; close: () => void }[] = []
const allOpened = Promise.all(
groupedRequests.map(async ({ url, filters: relayFilters }, i) => {
const relayKey = normalizeUrl(url) || url
await that.queryService.acquireSubSlot(relayKey)
let relay: AbstractRelay
await that.queryService.acquireGlobalRelayConnectionSlot()
try {
relay = await that.pool.ensureRelay(url, { connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS })
} catch (err) {
that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
}
let slotReleased = false
const releaseOnce = () => {
if (!slotReleased) {
slotReleased = true
const relayKey = normalizeUrl(url) || url
await that.queryService.acquireSubSlot(relayKey)
let relay: AbstractRelay
try {
relay = await that.pool.ensureRelay(url, { connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS })
} catch (err) {
that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
}
}
const sub = relay.subscribe(relayFilters, {
receivedEvent: (_relay, id) => that.trackEventSeenOn(id, _relay),
onevent: (evt: NEvent) => {
logFirstEventIfFirstResponseWasEmpty(evt, relayKey)
logFirstRelayResponse('event', relayKey)
onevent?.(evt)
},
oneose: () => handleEose(i),
onclose: (reason: string) => {
releaseOnce()
if (reason.startsWith('auth-required: ') && that.canSignerAuthenticateRelay()) {
relay
.auth(async (authEvt: EventTemplate) => {
const evt = await that.signer!.signEvent(authEvt)
if (!evt) throw new Error('sign event failed')
return evt as VerifiedEvent
})
.then(async () => {
await that.queryService.acquireSubSlot(relayKey)
// After AUTH the socket may be closed or the relay dropped from the pool;
// resubscribe on a fresh connection from ensureRelay (fixes SendingOnClosedConnection).
let liveRelay: AbstractRelay
try {
liveRelay = await that.pool.ensureRelay(url, {
connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS
})
} catch (err) {
that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
}
let slotReleased2 = false
const releaseSlot2 = () => {
if (!slotReleased2) {
slotReleased2 = true
that.queryService.releaseSubSlot(relayKey)
}
}
try {
const sub2 = liveRelay.subscribe(relayFilters, {
receivedEvent: (_relay, id) => that.trackEventSeenOn(id, _relay),
onevent: (evt: NEvent) => {
logFirstEventIfFirstResponseWasEmpty(evt, relayKey)
logFirstRelayResponse('event', relayKey)
onevent?.(evt)
},
oneose: () => handleEose(i),
onclose: (reason2: string) => {
releaseSlot2()
handleClose(i, reason2)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS
})
logger.info('[relay-req] req_sent', {
reqGroupId,
url: relayKey,
ms: Math.round(performance.now() - reqT0),
note: 'after_auth'
})
subs.push({
relayKey,
close: () => {
let slotReleased = false
const releaseOnce = () => {
if (!slotReleased) {
slotReleased = true
that.queryService.releaseSubSlot(relayKey)
}
}
const sub = relay.subscribe(relayFilters, {
receivedEvent: (_relay, id) => that.trackEventSeenOn(id, _relay),
onevent: (evt: NEvent) => {
logFirstEventIfFirstResponseWasEmpty(evt, relayKey)
logFirstRelayResponse('event', relayKey)
onevent?.(evt)
},
oneose: () => handleEose(i),
onclose: (reason: string) => {
releaseOnce()
if (reason.startsWith('auth-required: ') && that.canSignerAuthenticateRelay()) {
relay
.auth(async (authEvt: EventTemplate) => {
const evt = await that.signer!.signEvent(authEvt)
if (!evt) throw new Error('sign event failed')
return evt as VerifiedEvent
})
.then(async () => {
await that.queryService.acquireGlobalRelayConnectionSlot()
try {
await that.queryService.acquireSubSlot(relayKey)
// After AUTH the socket may be closed or the relay dropped from the pool;
// resubscribe on a fresh connection from ensureRelay (fixes SendingOnClosedConnection).
let liveRelay: AbstractRelay
try {
liveRelay = await that.pool.ensureRelay(url, {
connectionTimeout: SUBSCRIBE_RELAY_CONNECTION_TIMEOUT_MS
})
} catch (err) {
that.queryService.releaseSubSlot(relayKey)
handleClose(i, (err as Error)?.message ?? String(err))
return
}
let slotReleased2 = false
const releaseSlot2 = () => {
if (!slotReleased2) {
slotReleased2 = true
that.queryService.releaseSubSlot(relayKey)
}
}
try {
const sub2 = liveRelay.subscribe(relayFilters, {
receivedEvent: (_relay, id) => that.trackEventSeenOn(id, _relay),
onevent: (evt: NEvent) => {
logFirstEventIfFirstResponseWasEmpty(evt, relayKey)
logFirstRelayResponse('event', relayKey)
onevent?.(evt)
},
oneose: () => handleEose(i),
onclose: (reason2: string) => {
releaseSlot2()
handleClose(i, reason2)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS
})
logger.debug('[relay-req] req_sent', {
reqGroupId,
url: relayKey,
ms: Math.round(performance.now() - reqT0),
note: 'after_auth'
})
subs.push({
relayKey,
close: () => {
releaseSlot2()
sub2.close()
}
})
} catch (err) {
releaseSlot2()
sub2.close()
handleClose(i, (err as Error)?.message ?? String(err))
}
})
} catch (err) {
releaseSlot2()
handleClose(i, (err as Error)?.message ?? String(err))
}
})
.catch((err) => {
handleClose(i, `auth failed: ${(err as Error)?.message ?? err}`)
})
return
}
if (reason.startsWith('auth-required: ')) {
startLogin?.()
} finally {
that.queryService.releaseGlobalRelayConnectionSlot()
}
})
.catch((err) => {
handleClose(i, `auth failed: ${(err as Error)?.message ?? err}`)
})
return
}
if (reason.startsWith('auth-required: ')) {
startLogin?.()
}
handleClose(i, reason)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS
})
logger.debug('[relay-req] req_sent', {
reqGroupId,
url: relayKey,
ms: Math.round(performance.now() - reqT0)
})
subs.push({
relayKey,
close: () => {
releaseOnce()
sub.close()
}
handleClose(i, reason)
},
alreadyHaveEvent: localAlreadyHaveEvent,
eoseTimeout: SUBSCRIBE_RELAY_EOSE_TIMEOUT_MS
})
logger.info('[relay-req] req_sent', {
reqGroupId,
url: relayKey,
ms: Math.round(performance.now() - reqT0)
})
subs.push({
relayKey,
close: () => {
releaseOnce()
sub.close()
}
})
})
} finally {
that.queryService.releaseGlobalRelayConnectionSlot()
}
})
)

8
src/services/spell.service.ts

@ -87,9 +87,13 @@ export function buildSpellCatalogAuthors(pubkey: string, contacts: string[]): st @@ -87,9 +87,13 @@ export function buildSpellCatalogAuthors(pubkey: string, contacts: string[]): st
export function getRelaysForSpellCatalogSync(
favoriteRelays: string[],
blockedRelays: string[],
userInboxReadRelays: string[]
userInboxReadRelays: string[],
options?: { userWriteRelays?: string[] }
): string[] {
return getRelayUrlsWithFavoritesFastReadAndInbox(favoriteRelays, blockedRelays, userInboxReadRelays)
return getRelayUrlsWithFavoritesFastReadAndInbox(favoriteRelays, blockedRelays, userInboxReadRelays, {
userWriteRelays: options?.userWriteRelays ?? [],
applyKind1BlockedFilter: false
})
}
function dedupeRelayUrls(urls: string[]): string[] {

4
src/types/index.d.ts vendored

@ -171,6 +171,10 @@ export type TImetaInfo = { @@ -171,6 +171,10 @@ export type TImetaInfo = {
export type TPublishOptions = {
specifiedRelayUrls?: string[]
additionalRelayUrls?: string[]
/** Kind 10012 `relay` URLs for publish priority (outboxes → author inboxes → favorites → fast relays). */
favoriteRelayUrls?: string[]
/** User-blocked relay URLs (normalized); excluded from prioritized publish lists before capping. */
blockedRelayUrls?: string[]
minPow?: number
disableFallbacks?: boolean // If true, don't use fallback relays when publishing fails
/** Override global "Add client tag" preference for this publish (default: read from localStorage) */

Loading…
Cancel
Save