diff --git a/src/components/Username/index.tsx b/src/components/Username/index.tsx
index d852b03d..7534677e 100644
--- a/src/components/Username/index.tsx
+++ b/src/components/Username/index.tsx
@@ -1,8 +1,10 @@
import { Skeleton } from '@/components/ui/skeleton'
import { useFetchProfile } from '@/hooks'
import { toProfile } from '@/lib/link'
+import { formatPubkey, userIdToPubkey, pubkeyToNpub, formatNpub } from '@/lib/pubkey'
import { cn } from '@/lib/utils'
import { useSmartProfileNavigation } from '@/PageManager'
+import { useMemo } from 'react'
export default function Username({
userId,
@@ -19,10 +21,17 @@ export default function Username({
withoutSkeleton?: boolean
style?: React.CSSProperties
}) {
- const { profile } = useFetchProfile(userId)
+ const { profile, isFetching } = useFetchProfile(userId)
const { navigateToProfile } = useSmartProfileNavigation()
- if (!profile && !withoutSkeleton) {
+ // Get pubkey from userId (works even if profile isn't loaded)
+ const pubkey = useMemo(() => {
+ if (profile?.pubkey) return profile.pubkey
+ return userIdToPubkey(userId) || ''
+ }, [userId, profile?.pubkey])
+
+ // Show skeleton while fetching (unless withoutSkeleton is true)
+ if (isFetching && !withoutSkeleton) {
return (
@@ -30,26 +39,57 @@ export default function Username({
)
}
- if (!profile) {
- return null
+ // If we have a profile, show the username
+ if (profile) {
+ const { username, pubkey: profilePubkey } = profile
+ return (
+
{
+ e.stopPropagation()
+ navigateToProfile(toProfile(profilePubkey))
+ }}
+ >
+ {showAt && '@'}
+ {username}
+
+ )
+ }
+
+ // Fallback: show formatted npub (bech32) if we have a pubkey (even if profile fetch failed)
+ if (pubkey) {
+ // Convert to npub (bech32) format for display
+ const npub = pubkeyToNpub(pubkey)
+ const displayName = npub ? formatNpub(npub) : formatPubkey(pubkey)
+
+ return (
+
{
+ e.stopPropagation()
+ navigateToProfile(toProfile(pubkey))
+ }}
+ >
+ {showAt && '@'}
+ {displayName}
+
+ )
}
- const { username, pubkey } = profile
+ // No pubkey available - return null or skeleton based on withoutSkeleton
+ if (!withoutSkeleton) {
+ return (
+
+
+
+ )
+ }
- return (
-
{
- e.stopPropagation()
- navigateToProfile(toProfile(pubkey))
- }}
- >
- {showAt && '@'}
- {username}
-
- )
+ return null
}
export function SimpleUsername({
@@ -67,9 +107,16 @@ export function SimpleUsername({
withoutSkeleton?: boolean
style?: React.CSSProperties
}) {
- const { profile } = useFetchProfile(userId)
+ const { profile, isFetching } = useFetchProfile(userId)
- if (!profile && !withoutSkeleton) {
+ // Get pubkey from userId (works even if profile isn't loaded)
+ const pubkey = useMemo(() => {
+ if (profile?.pubkey) return profile.pubkey
+ return userIdToPubkey(userId) || ''
+ }, [userId, profile?.pubkey])
+
+ // Show skeleton while fetching (unless withoutSkeleton is true)
+ if (isFetching && !withoutSkeleton) {
return (
@@ -77,19 +124,45 @@ export function SimpleUsername({
)
}
- if (!profile) {
- return null
+ // If we have a profile, show the username
+ if (profile) {
+ const { username } = profile
+ return (
+
+ {showAt && '@'}
+ {username}
+
+ )
+ }
+
+ // Fallback: show formatted npub (bech32) if we have a pubkey (even if profile fetch failed)
+ if (pubkey) {
+ // Convert to npub (bech32) format for display
+ const npub = pubkeyToNpub(pubkey)
+ const displayName = npub ? formatNpub(npub) : formatPubkey(pubkey)
+
+ return (
+
+ {showAt && '@'}
+ {displayName}
+
+ )
}
- const { username } = profile
+ // No pubkey available - return null or skeleton based on withoutSkeleton
+ if (!withoutSkeleton) {
+ return (
+
+
+
+ )
+ }
- return (
-
- {showAt && '@'}
- {username}
-
- )
+ return null
}
\ No newline at end of file
diff --git a/src/hooks/useFetchProfile.tsx b/src/hooks/useFetchProfile.tsx
index bc7115ca..a130d541 100644
--- a/src/hooks/useFetchProfile.tsx
+++ b/src/hooks/useFetchProfile.tsx
@@ -4,7 +4,7 @@ import { useNostr } from '@/providers/NostrProvider'
import { replaceableEventService } from '@/services/client.service'
import { kinds } from 'nostr-tools'
import { TProfile } from '@/types'
-import { useEffect, useState } from 'react'
+import { useEffect, useState, useRef } from 'react'
export function useFetchProfile(id?: string, skipCache = false) {
const { profile: currentAccountProfile } = useNostr()
@@ -12,6 +12,39 @@ export function useFetchProfile(id?: string, skipCache = false) {
const [error, setError] = useState<Error | null>(null)
const [profile, setProfile] = useState<TProfile | null>(null)
const [pubkey, setPubkey] = useState<string | null>(null)
+ const checkIntervalRef = useRef<ReturnType<typeof setInterval> | null>(null)
+
+ // Function to check for profile updates
+ const checkProfile = async (pubkey: string, cancelled: { current: boolean }) => {
+ if (cancelled.current) return
+
+ try {
+ // Re-check cache (might have been updated by background fetch)
+ const profileEvent = await replaceableEventService.fetchReplaceableEvent(pubkey, kinds.Metadata)
+
+ if (cancelled.current) return
+
+ if (profileEvent) {
+ const newProfile = getProfileFromEvent(profileEvent)
+ if (newProfile) {
+ setProfile(newProfile)
+ setIsFetching(false)
+ // Clear interval once we have a profile
+ if (checkIntervalRef.current) {
+ clearInterval(checkIntervalRef.current)
+ checkIntervalRef.current = null
+ }
+ return true
+ }
+ }
+ return false
+ } catch (err) {
+ if (!cancelled.current) {
+ setError(err as Error)
+ }
+ return false
+ }
+ }
useEffect(() => {
if (!id) {
@@ -22,47 +55,71 @@ export function useFetchProfile(id?: string, skipCache = false) {
return
}
- let cancelled = false
+ const cancelled = { current: false }
const pubkey = userIdToPubkey(id)
setPubkey(pubkey)
const run = async () => {
setIsFetching(true)
- try {
- // fetchReplaceableEvent now checks in-memory cache first (instant), then IndexedDB, then network
- // This is optimized for speed - memory cache is synchronous
- const profileEvent = await replaceableEventService.fetchReplaceableEvent(pubkey, kinds.Metadata)
-
- if (cancelled) return
-
- if (profileEvent) {
- const profile = getProfileFromEvent(profileEvent)
- if (profile) {
- setProfile(profile)
- setIsFetching(false)
- return // Return immediately with cached/fetched profile
+ setError(null)
+
+ // Initial fetch
+ const found = await checkProfile(pubkey, cancelled)
+
+ if (cancelled.current) return
+
+ if (found) {
+ // Profile found, we're done
+ return
+ }
+
+ // No profile found yet - set fetching to false but keep checking in background
+ setIsFetching(false)
+
+ // If no profile was found, periodically re-check (profiles might load asynchronously)
+ // Check every 2 seconds for up to 30 seconds (15 checks)
+ let checkCount = 0
+ const maxChecks = 15
+
+ checkIntervalRef.current = setInterval(async () => {
+ if (cancelled.current || checkCount >= maxChecks) {
+ if (checkIntervalRef.current) {
+ clearInterval(checkIntervalRef.current)
+ checkIntervalRef.current = null
}
+ return
}
- // If we get here, no profile was found
- setIsFetching(false)
- } catch (err) {
- if (!cancelled) {
- setError(err as Error)
- setIsFetching(false)
+ checkCount++
+ const found = await checkProfile(pubkey, cancelled)
+ if (found) {
+ // Profile found, stop checking
+ if (checkIntervalRef.current) {
+ clearInterval(checkIntervalRef.current)
+ checkIntervalRef.current = null
+ }
}
- }
+ }, 2000) // Check every 2 seconds
}
run()
return () => {
- cancelled = true
+ cancelled.current = true
+ if (checkIntervalRef.current) {
+ clearInterval(checkIntervalRef.current)
+ checkIntervalRef.current = null
+ }
}
}, [id, skipCache])
useEffect(() => {
if (currentAccountProfile && pubkey === currentAccountProfile.pubkey) {
setProfile(currentAccountProfile)
+ // Clear interval if we got the profile from current account
+ if (checkIntervalRef.current) {
+ clearInterval(checkIntervalRef.current)
+ checkIntervalRef.current = null
+ }
}
}, [currentAccountProfile, pubkey])
diff --git a/src/services/client-events.service.ts b/src/services/client-events.service.ts
index 21a87412..bc0eced5 100644
--- a/src/services/client-events.service.ts
+++ b/src/services/client-events.service.ts
@@ -1,11 +1,80 @@
import { FAST_READ_RELAY_URLS } from '@/constants'
import logger from '@/lib/logger'
+import { normalizeUrl } from '@/lib/url'
import type { Event as NEvent, Filter } from 'nostr-tools'
import { nip19 } from 'nostr-tools'
import DataLoader from 'dataloader'
import { LRUCache } from 'lru-cache'
import indexedDb from './indexed-db.service'
import type { QueryService } from './client-query.service'
+import client from './client.service'
+
+/**
+ * Build comprehensive relay list: author's outboxes + user's inboxes + relay hints + defaults
+ */
+async function buildComprehensiveRelayList(
+ authorPubkey: string | undefined,
+ relayHints: string[] = [],
+ seenRelays: string[] = []
+): Promise<string[]> {
+ const relayUrls = new Set<string>()
+
+ // 1. Add relay hints (highest priority - these are explicit hints)
+ relayHints.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+
+ // 2. Add relays where event was seen
+ seenRelays.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+
+ // 3. Add author's outboxes (write relays) - where they publish
+ if (authorPubkey) {
+ try {
+ const authorRelayList = await client.fetchRelayList(authorPubkey)
+ const authorOutboxes = (authorRelayList.write || []).slice(0, 10) // Limit to 10 to avoid too many
+ authorOutboxes.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+ logger.debug('[EventService] Added author outboxes', {
+ author: authorPubkey.substring(0, 8),
+ count: authorOutboxes.length
+ })
+ } catch (error) {
+ logger.debug('[EventService] Failed to fetch author relay list', { error })
+ }
+ }
+
+ // 4. Add logged-in user's inboxes (read relays) - where they receive events
+ const userPubkey = client.pubkey
+ if (userPubkey) {
+ try {
+ const userRelayList = await client.fetchRelayList(userPubkey)
+ const userInboxes = (userRelayList.read || []).slice(0, 10) // Limit to 10
+ userInboxes.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+ logger.debug('[EventService] Added user inboxes', {
+ count: userInboxes.length
+ })
+ } catch (error) {
+ logger.debug('[EventService] Failed to fetch user relay list', { error })
+ }
+ }
+
+ // 5. Add default fast read relays as fallback
+ FAST_READ_RELAY_URLS.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+
+ return Array.from(relayUrls)
+}
export class EventService {
private queryService: QueryService
@@ -129,6 +198,39 @@ export class EventService {
return results
}
+ /**
+ * Extract relay hints from event tags
+ * Relay hints are in the 3rd position (index 2) of e, a, q, etc. tags
+ * Also checks for a dedicated "relays" tag
+ */
+ private extractRelayHintsFromEvent(event: NEvent | undefined): string[] {
+ if (!event) return []
+ const hints = new Set<string>()
+
+ // Extract from e, a, q tags (relay hint is in position 2, index 2)
+ const tagTypesWithRelayHints = ['e', 'a', 'q']
+ for (const tag of event.tags) {
+ if (tagTypesWithRelayHints.includes(tag[0]) && tag.length > 2 && typeof tag[2] === 'string') {
+ const hint = tag[2]
+ if (hint.startsWith('wss://') || hint.startsWith('ws://')) {
+ hints.add(hint)
+ }
+ }
+ }
+
+ // Also check for dedicated "relays" tag
+ const relaysTag = event.tags.find(tag => tag[0] === 'relays')
+ if (relaysTag && relaysTag.length > 1) {
+ relaysTag.slice(1).forEach(url => {
+ if (typeof url === 'string' && (url.startsWith('wss://') || url.startsWith('ws://'))) {
+ hints.add(url)
+ }
+ })
+ }
+
+ return Array.from(hints)
+ }
+
/**
* Clear all in-memory event caches
*/
@@ -180,32 +282,34 @@ export class EventService {
const cached = await indexedDb.getEventFromPublicationStore(filter.ids[0])
if (cached) {
this.addEventToCache(cached)
+ // Extract relay hints from cached event's tags (e, a, q tags)
+ const eventRelayHints = this.extractRelayHintsFromEvent(cached)
+ if (eventRelayHints.length > 0) {
+ relays = [...new Set([...relays, ...eventRelayHints])]
+ }
return cached
}
}
- // Try big relays first
+ // Try big relays first (uses user's inboxes + defaults)
if (filter.ids?.length) {
const event = await this.fetchEventFromBigRelaysDataloader.load(filter.ids[0])
if (event) {
this.addEventToCache(event)
+ // Extract relay hints from found event's tags (e, a, q tags)
+ const eventRelayHints = this.extractRelayHintsFromEvent(event)
+ if (eventRelayHints.length > 0) {
+ relays = [...new Set([...relays, ...eventRelayHints])]
+ }
return event
}
}
- // Try harder with specified relays or author relays
- if (filter.ids?.length && relays.length) {
- const event = await this.tryHarderToFetchEvent(relays, filter, true)
- if (event) {
- this.addEventToCache(event)
- return event
- }
- } else if (filter.authors?.length) {
- const event = await this.tryHarderToFetchEvent(relays, filter, false)
- if (event) {
- this.addEventToCache(event)
- return event
- }
+ // Always try comprehensive relay list (author's outboxes + user's inboxes + hints + seen + defaults)
+ const event = await this.tryHarderToFetchEvent(relays, filter, true)
+ if (event) {
+ this.addEventToCache(event)
+ return event
}
return undefined
@@ -213,19 +317,36 @@ export class EventService {
/**
* Private: Try harder to fetch event from relays
+ * ALWAYS uses: author's outboxes + user's inboxes + relay hints + seen relays + defaults
*/
private async tryHarderToFetchEvent(
- relayUrls: string[],
+ relayHints: string[],
filter: Filter,
alreadyFetchedFromBigRelays = false
): Promise {
- if (!relayUrls.length && filter.authors?.length) {
- // Would need relay list service - for now use big relays
- relayUrls = FAST_READ_RELAY_URLS
- } else if (!relayUrls.length && !alreadyFetchedFromBigRelays) {
- relayUrls = FAST_READ_RELAY_URLS
+ // Get seen relays if we have an event ID
+ const seenRelays = filter.ids?.length ? client.getSeenEventRelayUrls(filter.ids[0]) : []
+
+ // Get author pubkey
+ const authorPubkey = filter.authors?.length === 1 ? filter.authors[0] : undefined
+
+ // Build comprehensive relay list
+ const relayUrls = await buildComprehensiveRelayList(authorPubkey, relayHints, seenRelays)
+
+ if (!relayUrls.length) {
+ // Fallback to default relays if comprehensive list is empty
+ if (!alreadyFetchedFromBigRelays) {
+ return undefined
+ }
+ return undefined
}
- if (!relayUrls.length) return undefined
+
+ logger.debug('[EventService] Using comprehensive relay list', {
+ author: authorPubkey?.substring(0, 8),
+ relayCount: relayUrls.length,
+ hasHints: relayHints.length > 0,
+ hasSeen: seenRelays.length > 0
+ })
const isSingleEventById = filter.ids && filter.ids.length === 1 && filter.limit === 1
const events = await this.queryService.query(relayUrls, filter, undefined, {
@@ -238,10 +359,12 @@ export class EventService {
/**
* Private: Fetch events from big relays (batch)
+ * Uses comprehensive relay list: user's inboxes + defaults
*/
private async fetchEventsFromBigRelays(ids: readonly string[]): Promise<(NEvent | undefined)[]> {
- const initialRelays = FAST_READ_RELAY_URLS
- const relayUrls = initialRelays.length > 0 ? initialRelays : FAST_READ_RELAY_URLS
+ // Build comprehensive relay list (user's inboxes + defaults)
+ // Note: For batch fetches, we don't have author info, so we use user's inboxes + defaults
+ const relayUrls = await buildComprehensiveRelayList(undefined, [], [])
const isSingleEventFetch = ids.length === 1
const events = await this.queryService.query(relayUrls, {
@@ -256,6 +379,8 @@ export class EventService {
const eventsMap = new Map<string, NEvent>()
for (const event of events) {
eventsMap.set(event.id, event)
+ // Note: We can't track which relay returned which event in batch queries,
+ // but events are still cached and will be found in future queries
}
return ids.map((id) => eventsMap.get(id))
diff --git a/src/services/client-replaceable-events.service.ts b/src/services/client-replaceable-events.service.ts
index 9d678640..9df7efb7 100644
--- a/src/services/client-replaceable-events.service.ts
+++ b/src/services/client-replaceable-events.service.ts
@@ -11,6 +11,8 @@ import { LRUCache } from 'lru-cache'
import indexedDb from './indexed-db.service'
import type { QueryService } from './client-query.service'
import { isReplaceableEvent, getReplaceableCoordinateFromEvent } from '@/lib/event'
+import logger from '@/lib/logger'
+import client from './client.service'
export class ReplaceableEventService {
private queryService: QueryService
@@ -69,9 +71,108 @@ export class ReplaceableEventService {
)
}
+ /**
+ * Extract relay hints from event tags (e, a, q tags - 3rd position)
+ */
+ private extractRelayHintsFromEvent(event: NEvent | undefined): string[] {
+ if (!event) return []
+ const hints = new Set<string>()
+
+ // Extract from e, a, q tags (relay hint is in position 2, index 2)
+ const tagTypesWithRelayHints = ['e', 'a', 'q']
+ for (const tag of event.tags) {
+ if (tagTypesWithRelayHints.includes(tag[0]) && tag.length > 2 && typeof tag[2] === 'string') {
+ const hint = tag[2]
+ if (hint.startsWith('wss://') || hint.startsWith('ws://')) {
+ hints.add(hint)
+ }
+ }
+ }
+
+ // Also check for dedicated "relays" tag
+ const relaysTag = event.tags.find(tag => tag[0] === 'relays')
+ if (relaysTag && relaysTag.length > 1) {
+ relaysTag.slice(1).forEach(url => {
+ if (typeof url === 'string' && (url.startsWith('wss://') || url.startsWith('ws://'))) {
+ hints.add(url)
+ }
+ })
+ }
+
+ return Array.from(hints)
+ }
+
+ /**
+ * Build comprehensive relay list: author's outboxes + user's inboxes + relay hints + defaults
+ */
+ private async buildComprehensiveRelayListForAuthor(
+ authorPubkey: string,
+ kind: number,
+ relayHints: string[] = []
+ ): Promise {
+ const relayUrls = new Set<string>()
+
+ // 1. Add relay hints (highest priority - these are explicit hints)
+ relayHints.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+
+ // 2. Add author's outboxes (write relays) - where they publish
+ try {
+ const authorRelayList = await client.fetchRelayList(authorPubkey)
+ const authorOutboxes = (authorRelayList.write || []).slice(0, 10)
+ authorOutboxes.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+ logger.debug('[ReplaceableEventService] Added author outboxes', {
+ author: authorPubkey.substring(0, 8),
+ count: authorOutboxes.length
+ })
+ } catch (error) {
+ logger.debug('[ReplaceableEventService] Failed to fetch author relay list', { error })
+ }
+
+ // 3. Add logged-in user's inboxes (read relays) - where they receive events
+ const userPubkey = client.pubkey
+ if (userPubkey) {
+ try {
+ const userRelayList = await client.fetchRelayList(userPubkey)
+ const userInboxes = (userRelayList.read || []).slice(0, 10)
+ userInboxes.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+ logger.debug('[ReplaceableEventService] Added user inboxes', {
+ count: userInboxes.length
+ })
+ } catch (error) {
+ logger.debug('[ReplaceableEventService] Failed to fetch user relay list', { error })
+ }
+ }
+
+ // 4. Add default fast read relays as fallback
+ FAST_READ_RELAY_URLS.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+
+ // 5. Add profile fetch relays for profiles
+ if (kind === kinds.Metadata) {
+ PROFILE_FETCH_RELAY_URLS.forEach(url => {
+ const normalized = normalizeUrl(url)
+ if (normalized) relayUrls.add(normalized)
+ })
+ }
+
+ return Array.from(relayUrls)
+ }
+
/**
* Fetch replaceable event (profile, relay list, etc.)
* Always checks in-memory cache FIRST (instant), then IndexedDB, then fetches from relays
+ * ALWAYS uses: author's outboxes + user's inboxes + relay hints + defaults
*/
async fetchReplaceableEvent(pubkey: string, kind: number, d?: string): Promise {
const cacheKey = d ? `${kind}:${pubkey}:${d}` : `${kind}:${pubkey}`
@@ -114,14 +215,48 @@ export class ReplaceableEventService {
}
// 3. Not in cache, fetch from network
- const event = d
- ? await this.replaceableEventDataLoader.load({ pubkey, kind, d })
- : await this.replaceableEventFromBigRelaysDataloader.load({ pubkey, kind })
-
- if (event) {
- // Add to memory cache for instant access next time
- this.replaceableEventMemoryCache.set(cacheKey, event)
- return event
+ // Note: DataLoader will use comprehensive relay list from batch load function
+ try {
+ const event = d
+ ? await this.replaceableEventDataLoader.load({ pubkey, kind, d })
+ : await this.replaceableEventFromBigRelaysDataloader.load({ pubkey, kind })
+
+ if (event) {
+ // Extract relay hints from the found event (for future related fetches)
+ const eventRelayHints = this.extractRelayHintsFromEvent(event)
+
+ // Add to memory cache for instant access next time
+ this.replaceableEventMemoryCache.set(cacheKey, event)
+ if (kind === kinds.Metadata) {
+ this.profileMemoryCache.set(pubkey, event)
+ }
+
+ // If we found relay hints, log them (they're already used in the batch load function)
+ if (eventRelayHints.length > 0) {
+ logger.debug('[ReplaceableEventService] Found relay hints in event', {
+ pubkey: formatPubkey(pubkey),
+ hintCount: eventRelayHints.length
+ })
+ }
+
+ return event
+ }
+
+ // Log when no event is found (helps debug relay failures)
+ if (kind === kinds.Metadata) {
+ logger.debug('[ReplaceableEventService] No profile found for pubkey', {
+ pubkey: formatPubkey(pubkey),
+ cacheKey
+ })
+ }
+ } catch (error) {
+ // Log errors but don't throw - return undefined so UI can show fallback
+ if (kind === kinds.Metadata) {
+ logger.warn('[ReplaceableEventService] Error fetching profile', {
+ pubkey: formatPubkey(pubkey),
+ error: error instanceof Error ? error.message : String(error)
+ })
+ }
}
return undefined
@@ -284,18 +419,27 @@ export class ReplaceableEventService {
const eventsMap = new Map<string, NEvent>()
await Promise.allSettled(
Array.from(groups.entries()).map(async ([kind, pubkeys]) => {
- // Use more relays in parallel for better performance
- // Browsers can handle many concurrent subscriptions, so we use all available relays
- let relayUrls: string[]
- if (kind === kinds.Metadata || kind === kinds.RelayList) {
- // Combine all available relays for profiles and relay lists
- const base = Array.from(new Set([...FAST_READ_RELAY_URLS, ...PROFILE_FETCH_RELAY_URLS]))
- // TODO: Inject relay list service to get user's relays
- relayUrls = base
- } else {
- // Use all big relays for other replaceable events
- relayUrls = FAST_READ_RELAY_URLS
- }
+ // ALWAYS use comprehensive relay list: author's outboxes + user's inboxes + defaults
+ // For each pubkey, build comprehensive relay list
+ const relayUrlSets = await Promise.all(
+ pubkeys.map(async (pubkey) => {
+ // Build comprehensive relay list for this author
+ return await this.buildComprehensiveRelayListForAuthor(pubkey, kind, [])
+ })
+ )
+
+ // Merge all relay sets
+ const mergedRelays = new Set<string>()
+ relayUrlSets.forEach(relayList => {
+ relayList.forEach(url => mergedRelays.add(url))
+ })
+
+ const relayUrls = Array.from(mergedRelays)
+ logger.debug('[ReplaceableEventService] Using comprehensive relay list', {
+ pubkeyCount: pubkeys.length,
+ totalRelayCount: relayUrls.length,
+ kind
+ })
// Use all relays in parallel - browsers can handle many concurrent subscriptions
// The QueryService manages per-relay concurrency limits to avoid overloading individual relays
@@ -308,6 +452,15 @@ export class ReplaceableEventService {
eoseTimeout: 200,
globalTimeout: 3000
})
+
+ // Log when no events are found (helps debug relay failures)
+ if (kind === kinds.Metadata && events.length === 0 && pubkeys.length > 0) {
+ logger.debug('[ReplaceableEventService] No profile events found from relays', {
+ pubkeyCount: pubkeys.length,
+ relayCount: relayUrls.length,
+ relays: relayUrls.slice(0, 3) // Show first 3 for brevity
+ })
+ }
for (const event of events) {
// Check tombstone in background (non-blocking)