Browse Source

bug-fixes

imwald
Silberengel 2 weeks ago
parent
commit
b3cfcd5b60
  1. 34
      electron/main.cjs
  2. 4
      package-lock.json
  3. 2
      package.json
  4. 29
      src/components/AudioPlayer/index.tsx
  5. 90
      src/components/PostEditor/PostContent.tsx
  6. 15
      src/components/PostEditor/PostTextarea/ClipboardAndDropHandler.ts
  7. 7
      src/components/PostEditor/PostTextarea/Mention/MentionAndEventToolbarButtons.tsx
  8. 10
      src/components/PostEditor/PostTextarea/Mention/suggestion.ts
  9. 46
      src/components/PostEditor/PostTextarea/index.tsx
  10. 46
      src/components/ProfileListBySearch/index.tsx
  11. 15
      src/components/SearchResult/index.tsx
  12. 9
      src/constants.ts
  13. 18
      src/hooks/useSearchProfiles.tsx
  14. 16
      src/services/client-events.service.ts
  15. 1
      src/services/client-replaceable-events.service.ts
  16. 170
      src/services/client.service.ts
  17. 70
      src/services/indexed-db.service.ts
  18. 9
      src/services/mention-event-search.service.ts

34
electron/main.cjs

@@ -1,6 +1,6 @@
'use strict'
const { app, BrowserWindow, ipcMain, shell, Menu } = require('electron')
const { app, BrowserWindow, ipcMain, shell, Menu, session } = require('electron')
const fs = require('fs')
const http = require('http')
const path = require('path')
@@ -188,6 +188,36 @@ function loadRenderer(win) {
})
}
/**
* Packaged (and dev) renderer runs on http://127.0.0.1; hls.js and other fetches hit third-party
* streams without CORS. Chromium still enforces CORS, so inject a permissive ACAO on subresources only.
*/
function relaxCorsForRendererSubresources() {
  // Rewrite response headers for subresource requests only, so cross-origin
  // media/fetches (e.g. hls.js segments) succeed from the local-origin renderer.
  session.defaultSession.webRequest.onHeadersReceived((details, callback) => {
    const isDocumentLoad =
      details.resourceType === 'mainFrame' || details.resourceType === 'subFrame'
    if (isDocumentLoad) {
      // Top-level and iframe navigations keep their headers untouched.
      callback({ cancel: false, responseHeaders: details.responseHeaders })
      return
    }
    const incoming = details.responseHeaders
    if (!incoming) {
      callback({ cancel: false })
      return
    }
    // Copy every header except any existing CORS origin/credentials headers
    // (matched case-insensitively), then force a wildcard allow-origin.
    const rewritten = {}
    for (const [name, value] of Object.entries(incoming)) {
      const lowered = name.toLowerCase()
      if (
        lowered !== 'access-control-allow-origin' &&
        lowered !== 'access-control-allow-credentials'
      ) {
        rewritten[name] = value
      }
    }
    rewritten['Access-Control-Allow-Origin'] = ['*']
    callback({ cancel: false, responseHeaders: rewritten })
  })
}
function createWindow() {
const win = new BrowserWindow({
width: 1280,
@@ -249,6 +279,8 @@ function createWindow() {
}
app.whenReady().then(() => {
relaxCorsForRendererSubresources()
ipcMain.handle('imwald:reload-app', async (event) => {
const win = BrowserWindow.fromWebContents(event.sender)
if (!win || win.isDestroyed()) return false

4
package-lock.json generated

@ -1,12 +1,12 @@ @@ -1,12 +1,12 @@
{
"name": "imwald",
"version": "22.5.6",
"version": "22.5.7",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "imwald",
"version": "22.5.6",
"version": "22.5.7",
"license": "MIT",
"dependencies": {
"@asciidoctor/core": "^3.0.4",

2
package.json

@ -1,6 +1,6 @@ @@ -1,6 +1,6 @@
{
"name": "imwald",
"version": "22.5.6",
"version": "22.5.7",
"description": "Imwald — a user-friendly Nostr client focused on relay feed browsing, publications, and relay discovery",
"private": true,
"type": "module",

29
src/components/AudioPlayer/index.tsx

@ -51,10 +51,16 @@ export default function AudioPlayer({ src, className, poster, onReady }: AudioPl @@ -51,10 +51,16 @@ export default function AudioPlayer({ src, className, poster, onReady }: AudioPl
const updateTime = () => {
if (!isSeeking.current) {
setCurrentTime(audio.currentTime)
const t = audio.currentTime
if (Number.isFinite(t)) {
setCurrentTime(t)
}
}
}
const updateDuration = () => setDuration(audio.duration)
const updateDuration = () => {
const d = audio.duration
setDuration(Number.isFinite(d) && d > 0 ? d : 0)
}
const handleEnded = () => setIsPlaying(false)
const handlePause = () => setIsPlaying(false)
const handlePlay = () => setIsPlaying(true)
@ -92,15 +98,26 @@ export default function AudioPlayer({ src, className, poster, onReady }: AudioPl @@ -92,15 +98,26 @@ export default function AudioPlayer({ src, className, poster, onReady }: AudioPl
const audio = audioRef.current
if (!audio) return
let t = value[0]
if (!Number.isFinite(t) || t < 0) {
return
}
const d = audio.duration
if (Number.isFinite(d) && d > 0) {
t = Math.min(t, d)
}
isSeeking.current = true
setCurrentTime(value[0])
setCurrentTime(t)
if (seekTimeoutRef.current) {
clearTimeout(seekTimeoutRef.current)
}
seekTimeoutRef.current = setTimeout(() => {
audio.currentTime = value[0]
if (Number.isFinite(t) && t >= 0) {
audio.currentTime = t
}
isSeeking.current = false
}, 300)
}
@ -148,8 +165,8 @@ export default function AudioPlayer({ src, className, poster, onReady }: AudioPl @@ -148,8 +165,8 @@ export default function AudioPlayer({ src, className, poster, onReady }: AudioPl
<div className="relative min-w-0 flex-1">
<Slider
value={[currentTime]}
max={duration || 100}
value={[Number.isFinite(currentTime) ? currentTime : 0]}
max={Number.isFinite(duration) && duration > 0 ? duration : 100}
step={1}
onValueChange={handleSeek}
hideThumb

90
src/components/PostEditor/PostContent.tsx

@ -309,7 +309,12 @@ export default function PostContent({ @@ -309,7 +309,12 @@ export default function PostContent({
const [hasPrivateRelaysAvailable, setHasPrivateRelaysAvailable] = useState(false)
const [showMediaKindDialog, setShowMediaKindDialog] = useState(false)
const [pendingMediaUpload, setPendingMediaUpload] = useState<{ url: string; tags: string[][]; file: File } | null>(null)
const [pendingMediaUpload, setPendingMediaUpload] = useState<{
url: string
tags: string[][]
file: File
urlAlreadyInEditor?: boolean
} | null>(null)
const uploadedMediaFileMap = useRef<Map<string, File>>(new Map())
/** Accumulates imeta tags across uploads (short note or multi-attachment) so files are not dropped. */
const composerImetaTagsRef = useRef<string[][]>([])
@ -1695,15 +1700,23 @@ export default function PostContent({ @@ -1695,15 +1700,23 @@ export default function PostContent({
const handleMediaKindSelection = (selectedKind: number) => {
if (!pendingMediaUpload) return
const { url, tags, file } = pendingMediaUpload
const { url, tags, file, urlAlreadyInEditor } = pendingMediaUpload
setShowMediaKindDialog(false)
setPendingMediaUpload(null)
// Process the upload with the selected kind
processMediaUpload(url, tags, file, selectedKind)
processMediaUpload(url, tags, file, selectedKind, {
skipComposerUrlAppend: urlAlreadyInEditor === true
})
}
const processMediaUpload = async (url: string, tags: string[][], uploadingFile: File, selectedKind?: number) => {
const processMediaUpload = async (
url: string,
tags: string[][],
uploadingFile: File,
selectedKind?: number,
opts?: { skipComposerUrlAppend?: boolean }
) => {
try {
let resolvedKind: number
if (selectedKind !== undefined) {
@ -1757,14 +1770,16 @@ export default function PostContent({ @@ -1757,14 +1770,16 @@ export default function PostContent({
appendComposerImetaTag(newImetaTag)
setTimeout(() => {
if (textareaRef.current) {
const currentText = textareaRef.current.getText()
if (!currentText.includes(url)) {
textareaRef.current.appendText(url, true)
if (!opts?.skipComposerUrlAppend) {
setTimeout(() => {
if (textareaRef.current) {
const currentText = textareaRef.current.getText()
if (!currentText.includes(url)) {
textareaRef.current.appendText(url, true)
}
}
}
}, 100)
}, 100)
}
} catch (error) {
logger.error('Error processing media upload', { error, file: uploadingFile.name })
const imetaTag = mediaUpload.getImetaTagByUrl(url)
@ -1785,11 +1800,13 @@ export default function PostContent({ @@ -1785,11 +1800,13 @@ export default function PostContent({
const handleMediaUploadSuccess = async ({
url,
tags,
file: fileFromCallback
file: fileFromCallback,
urlAlreadyInEditor
}: {
url: string
tags: string[][]
file?: File
urlAlreadyInEditor?: boolean
}) => {
try {
let uploadingFile: File | undefined = fileFromCallback
@ -1809,12 +1826,14 @@ export default function PostContent({ @@ -1809,12 +1826,14 @@ export default function PostContent({
}
if (isDiscussionThread && !parentEvent) {
setTimeout(() => {
const ed = textareaRef.current
if (ed && !ed.getText().includes(url)) {
ed.appendText(url, true)
}
}, 100)
if (!urlAlreadyInEditor) {
setTimeout(() => {
const ed = textareaRef.current
if (ed && !ed.getText().includes(url)) {
ed.appendText(url, true)
}
}, 100)
}
uploadedMediaFileMap.current.delete(`${uploadingFile.name}-${uploadingFile.size}-${uploadingFile.lastModified}`)
handleUploadEnd(uploadingFile)
return
@ -1893,15 +1912,15 @@ export default function PostContent({ @@ -1893,15 +1912,15 @@ export default function PostContent({
}
// Insert the URL into the editor content so it shows in the edit pane
// Use setTimeout to ensure the state has updated and editor is ready
setTimeout(() => {
if (textareaRef.current) {
// Check if URL is already in the text
const currentText = text || ''
if (!currentText.includes(url)) {
textareaRef.current.appendText(url, true)
if (!urlAlreadyInEditor) {
setTimeout(() => {
const ed = textareaRef.current
if (!ed) return
if (!ed.getText().includes(url)) {
ed.appendText(url, true)
}
}
}, 100)
}, 100)
}
} else {
// Non-audio media in replies/PMs - don't set mediaNoteKind, will be handled as regular comment/PM
// Clear any existing media note kind
@ -1909,21 +1928,32 @@ export default function PostContent({ @@ -1909,21 +1928,32 @@ export default function PostContent({
setMediaUrl('')
setMediaImetaTags([])
composerImetaTagsRef.current = []
// Just add the media URL to the text content
textareaRef.current?.appendText(url, true)
if (!urlAlreadyInEditor) {
const ed = textareaRef.current
if (ed && !ed.getText().includes(url)) {
ed.appendText(url, true)
}
}
return // Don't set media note kind for non-audio in replies/PMs
}
} else {
// For new posts, check if file is ambiguous (could be audio or video)
if (isAmbiguousMediaFile(uploadingFile)) {
// Show dialog to let user choose
setPendingMediaUpload({ url, tags, file: uploadingFile })
setPendingMediaUpload({
url,
tags,
file: uploadingFile,
urlAlreadyInEditor: urlAlreadyInEditor === true
})
setShowMediaKindDialog(true)
return
}
// Not ambiguous, auto-detect and process
await processMediaUpload(url, tags, uploadingFile)
await processMediaUpload(url, tags, uploadingFile, undefined, {
skipComposerUrlAppend: urlAlreadyInEditor === true
})
}
} catch (error) {
logger.error('Error in handleMediaUploadSuccess', { error })

15
src/components/PostEditor/PostTextarea/ClipboardAndDropHandler.ts

@ -16,7 +16,13 @@ const DRAGOVER_CLASS_LIST = [ @@ -16,7 +16,13 @@ const DRAGOVER_CLASS_LIST = [
export interface ClipboardAndDropHandlerOptions {
onUploadStart?: (file: File, cancel: () => void) => void
/** Same contract as `Uploader` — drop/paste uploads append URLs + imeta while staying on kind 1 unless the user picks a native media kind. */
onUploadSuccess?: (result: { url: string; tags: string[][]; file: File }) => void
onUploadSuccess?: (result: {
url: string
tags: string[][]
file: File
/** True when the URL was already written into the ProseMirror doc (replace placeholder). */
urlAlreadyInEditor?: boolean
}) => void
onUploadEnd?: (file: File) => void
onUploadProgress?: (file: File, progress: number) => void
/** Same as `Uploader.onUploadCompressPhase` — keeps the post editor progress row in sync during local compression. */
@ -181,7 +187,12 @@ async function uploadFiles( @@ -181,7 +187,12 @@ async function uploadFiles(
view.dispatch(insertTr)
}
options.onUploadSuccess?.({ url: result.url, tags: result.tags, file })
options.onUploadSuccess?.({
url: result.url,
tags: result.tags,
file,
urlAlreadyInEditor: true
})
})
.catch((error) => {
logger.error('Clipboard/drop upload failed', { error, file: file.name })

7
src/components/PostEditor/PostTextarea/Mention/MentionAndEventToolbarButtons.tsx

@ -7,7 +7,10 @@ import { Button } from '@/components/ui/button' @@ -7,7 +7,10 @@ import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
import { SimpleUsername } from '@/components/Username'
import { searchNpubsForMention } from '@/services/mention-event-search.service'
import {
MENTION_NPUB_DROPDOWN_LIMIT,
searchNpubsForMention
} from '@/services/mention-event-search.service'
import { AtSign, FileSearch } from 'lucide-react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
@ -43,7 +46,7 @@ export function MentionAndEventToolbarButtons({ @@ -43,7 +46,7 @@ export function MentionAndEventToolbarButtons({
}
mentionDebounceRef.current = setTimeout(() => {
setMentionLoading(true)
searchNpubsForMention(q, 20)
searchNpubsForMention(q, MENTION_NPUB_DROPDOWN_LIMIT)
.then((list) => {
setMentionResults(list ?? [])
})

10
src/components/PostEditor/PostTextarea/Mention/suggestion.ts

@ -1,4 +1,5 @@ @@ -1,4 +1,5 @@
import {
MENTION_NPUB_DROPDOWN_LIMIT,
searchNpubsForMention,
type PickerSearchMode
} from '@/services/mention-event-search.service'
@ -99,9 +100,12 @@ const suggestion = { @@ -99,9 +100,12 @@ const suggestion = {
// Start search with callback - returns cached results immediately, then updates with relay results
backgroundSearchController = new AbortController()
const results = await searchNpubsForMention(query, 20, updateComponent)
return results ?? []
try {
const results = await searchNpubsForMention(query, MENTION_NPUB_DROPDOWN_LIMIT, updateComponent)
return results ?? []
} catch {
return []
}
},
render: () => {

46
src/components/PostEditor/PostTextarea/index.tsx

@ -33,6 +33,9 @@ import Preview from './Preview' @@ -33,6 +33,9 @@ import Preview from './Preview'
import { HighlightData } from '../HighlightEditor'
import { getKindDescription } from '@/lib/kind-description'
/** Draft JSON uses relay fetches (e.g. thread root); cap wait so the Json tab cannot spin forever. */
const DRAFT_JSON_PREVIEW_TIMEOUT_MS = 25_000
export type TPostTextareaHandle = {
appendText: (text: string, addNewline?: boolean) => void
insertText: (text: string) => void
@ -53,7 +56,12 @@ const PostTextarea = forwardRef< @@ -53,7 +56,12 @@ const PostTextarea = forwardRef<
onUploadStart?: (file: File, cancel: () => void) => void
onUploadProgress?: (file: File, progress: number) => void
onUploadEnd?: (file: File) => void
onUploadSuccess?: (result: { url: string; tags: string[][]; file: File }) => void
onUploadSuccess?: (result: {
url: string
tags: string[][]
file: File
urlAlreadyInEditor?: boolean
}) => void
onUploadCompressPhase?: (file: File, phase: 'compressing' | 'uploading') => void
onUploadCompressProgress?: (file: File, percent: number) => void
kind?: number
@ -111,11 +119,14 @@ const PostTextarea = forwardRef< @@ -111,11 +119,14 @@ const PostTextarea = forwardRef<
const [activeTab, setActiveTab] = useState('preview')
const [draftEventJson, setDraftEventJson] = useState<string>('')
const [isLoadingJson, setIsLoadingJson] = useState(false)
/** Bumps when preview tab is shown or a new JSON fetch starts; completions only apply if seq still matches. */
const jsonPanelFetchSeq = useRef(0)
const kindDescription = useMemo(() => getKindDescription(kind), [kind])
useEffect(() => {
if (activeTab === 'preview') {
jsonPanelFetchSeq.current += 1
setDraftEventJson('')
setIsLoadingJson(false)
return
@ -125,27 +136,44 @@ const PostTextarea = forwardRef< @@ -125,27 +136,44 @@ const PostTextarea = forwardRef<
return
}
let cancelled = false
const seq = ++jsonPanelFetchSeq.current
setIsLoadingJson(true)
let timeoutId: number | undefined = window.setTimeout(() => {
timeoutId = undefined
if (seq !== jsonPanelFetchSeq.current) return
setDraftEventJson(
`Error generating JSON: Timed out after ${Math.round(DRAFT_JSON_PREVIEW_TIMEOUT_MS / 1000)}s (relays or network slow)`
)
setIsLoadingJson(false)
}, DRAFT_JSON_PREVIEW_TIMEOUT_MS)
const clearJsonTimeout = () => {
if (timeoutId !== undefined) {
clearTimeout(timeoutId)
timeoutId = undefined
}
}
void Promise.resolve(getDraftEventJson())
.then((json) => {
if (cancelled) return
clearJsonTimeout()
if (seq !== jsonPanelFetchSeq.current) return
setDraftEventJson(json)
setIsLoadingJson(false)
})
.catch((error: unknown) => {
if (cancelled) return
clearJsonTimeout()
if (seq !== jsonPanelFetchSeq.current) return
const msg = error instanceof Error ? error.message : String(error)
setDraftEventJson(`Error generating JSON: ${msg}`)
setIsLoadingJson(false)
})
return () => {
cancelled = true
}
// `text` is included so JSON refreshes when the parent memoizes `getDraftEventJson` too narrowly;
// `kind` catches compose-mode switches even if callback identity were ever stable across them.
// Use `jsonPanelFetchSeq` instead of an effect cleanup `cancelled` flag so a superseded fetch
// does not skip `setIsLoadingJson(false)` and leave the Json tab stuck on "Loading...".
}, [activeTab, getDraftEventJson, kind, text])
const editor = useEditor({
// TipTap + Radix Dialog/Tabs: defer init so React 18 does not warn about flushSync in a lifecycle.

46
src/components/ProfileListBySearch/index.tsx

@ -1,5 +1,6 @@ @@ -1,5 +1,6 @@
import { useSecondaryPage } from '@/PageManager'
import { SEARCHABLE_RELAY_URLS } from '@/constants'
import { PROFILE_FETCH_RELAY_URLS } from '@/constants'
import { normalizeUrl } from '@/lib/url'
import { toProfile } from '@/lib/link'
import client from '@/services/client.service'
import { cn } from '@/lib/utils'
@ -9,6 +10,10 @@ import UserItem, { UserItemSkeleton } from '../UserItem' @@ -9,6 +10,10 @@ import UserItem, { UserItemSkeleton } from '../UserItem'
const LIMIT = 50
const PROFILE_SEARCH_RELAY_URLS = Array.from(
new Set(PROFILE_FETCH_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean))
)
export function ProfileListBySearch({ search }: { search: string }) {
const { push } = useSecondaryPage()
const [until, setUntil] = useState<number>(() => dayjs().unix())
@ -51,21 +56,40 @@ export function ProfileListBySearch({ search }: { search: string }) { @@ -51,21 +56,40 @@ export function ProfileListBySearch({ search }: { search: string }) {
}, [hasMore, search, until])
const loadMore = async () => {
const profiles = await client.searchProfiles(SEARCHABLE_RELAY_URLS, {
const nextSeen = new Set(pubkeySet)
const batchPubkeys: string[] = []
if (pubkeySet.size === 0) {
const cached = await client.searchProfilesFromIndexedDBCache(search, LIMIT)
for (const p of cached) {
if (!nextSeen.has(p.pubkey)) {
nextSeen.add(p.pubkey)
batchPubkeys.push(p.pubkey)
}
}
}
const relayProfiles = await client.searchProfiles(PROFILE_SEARCH_RELAY_URLS, {
search,
until,
limit: LIMIT
})
const newPubkeySet = new Set<string>()
profiles.forEach((profile) => {
if (!pubkeySet.has(profile.pubkey)) {
newPubkeySet.add(profile.pubkey)
for (const profile of relayProfiles) {
if (!nextSeen.has(profile.pubkey)) {
nextSeen.add(profile.pubkey)
batchPubkeys.push(profile.pubkey)
}
})
setPubkeySet((prev) => new Set([...prev, ...newPubkeySet]))
setHasMore(profiles.length >= LIMIT)
const lastProfileCreatedAt = profiles[profiles.length - 1].created_at
setUntil(lastProfileCreatedAt ? lastProfileCreatedAt - 1 : 0)
}
if (batchPubkeys.length === 0) {
setHasMore(false)
return
}
setPubkeySet((prev) => new Set([...prev, ...batchPubkeys]))
setHasMore(relayProfiles.length >= LIMIT)
const last = relayProfiles[relayProfiles.length - 1]
setUntil(last?.created_at ? last.created_at - 1 : 0)
}
return (

15
src/components/SearchResult/index.tsx

@ -1,7 +1,7 @@ @@ -1,7 +1,7 @@
import {
FAST_READ_RELAY_URLS,
FAST_WRITE_RELAY_URLS,
NIP_SEARCH_DOCUMENT_KINDS,
NIP_SEARCH_PAGE_KINDS,
SEARCHABLE_RELAY_URLS
} from '@/constants'
import { compareEventsForDTagQuery } from '@/lib/dtag-search'
@ -58,10 +58,19 @@ export default function SearchResult({ searchParams }: { searchParams: TSearchPa @@ -58,10 +58,19 @@ export default function SearchResult({ searchParams }: { searchParams: TSearchPa
return (
<NormalFeed
subRequests={[
{ urls: searchRelays, filter: { search: searchParams.search, kinds: [...NIP_SEARCH_DOCUMENT_KINDS] } }
{
urls: searchRelays,
filter: {
search: searchParams.search,
kinds: [...NIP_SEARCH_PAGE_KINDS],
limit: 100
}
}
]}
useFilterAsIs
clientSideKindFilter
progressiveWarmupQuery={searchParams.search}
progressiveDocumentKinds={NIP_SEARCH_DOCUMENT_KINDS}
progressiveDocumentKinds={NIP_SEARCH_PAGE_KINDS}
oneShotAfterMergeComparator={(a, b) => compareEventsForDTagQuery(searchParams.search, a, b)}
/>
)

9
src/constants.ts

@ -624,6 +624,15 @@ export const NIP_SEARCH_DOCUMENT_KINDS: readonly number[] = [ @@ -624,6 +624,15 @@ export const NIP_SEARCH_DOCUMENT_KINDS: readonly number[] = [
ExtendedKind.PUBLICATION_CONTENT
]
/**
* Primary Search page NIP-50 `kinds`: profiles, short notes, and document kinds.
* Search used only {@link NIP_SEARCH_DOCUMENT_KINDS} before, so handles and npub-related
* metadata (kind 0) and normal notes (kind 1) never matched.
*/
export const NIP_SEARCH_PAGE_KINDS: readonly number[] = Array.from(
new Set<number>([kinds.Metadata, kinds.ShortTextNote, ...NIP_SEARCH_DOCUMENT_KINDS])
).sort((a, b) => a - b)
export function relayFilterIncludesDocumentRelayKind(filter: Filter): boolean {
const k = filter.kinds
if (k === undefined) return false

18
src/hooks/useSearchProfiles.tsx

@ -1,8 +1,13 @@ @@ -1,8 +1,13 @@
import { SEARCHABLE_RELAY_URLS } from '@/constants'
import { PROFILE_FETCH_RELAY_URLS } from '@/constants'
import { normalizeUrl } from '@/lib/url'
import client from '@/services/client.service'
import { TProfile } from '@/types'
import { useEffect, useState } from 'react'
const PROFILE_SEARCH_RELAY_URLS = Array.from(
new Set(PROFILE_FETCH_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean))
)
export function useSearchProfiles(search: string, limit: number) {
const [isFetching, setIsFetching] = useState(false)
const [error, setError] = useState<Error | null>(null)
@ -25,13 +30,10 @@ export function useSearchProfiles(search: string, limit: number) { @@ -25,13 +30,10 @@ export function useSearchProfiles(search: string, limit: number) {
return
}
const existingPubkeys = new Set(profiles.map((profile) => profile.pubkey))
const fetchedProfiles = await client.searchProfiles(
SEARCHABLE_RELAY_URLS,
{
search,
limit
}
)
const fetchedProfiles = await client.searchProfiles(PROFILE_SEARCH_RELAY_URLS, {
search,
limit
})
if (fetchedProfiles.length) {
fetchedProfiles.forEach((profile) => {
if (existingPubkeys.has(profile.pubkey)) {

16
src/services/client-events.service.ts

@ -461,15 +461,19 @@ export class EventService { @@ -461,15 +461,19 @@ export class EventService {
if (out.length >= limit) break
try {
const o = JSON.parse(ev.content) as Record<string, unknown>
const blob = [
o.display_name,
o.name,
typeof o.nip05 === 'string' ? o.nip05 : ''
]
const nip05 =
typeof o.nip05 === 'string'
? o.nip05
.split('@')
.map((s: string) => s.trim())
.join(' ')
: ''
const blob = [o.display_name, o.name, nip05]
.map((x) => (typeof x === 'string' ? x : ''))
.join(' ')
.toLowerCase()
if (blob.includes(q)) {
const qNeedle = q.startsWith('@') ? q.slice(1) : q
if (blob.includes(q) || (qNeedle.length > 0 && blob.includes(qNeedle))) {
out.push(ev.pubkey.toLowerCase())
}
} catch {

1
src/services/client-replaceable-events.service.ts

@ -835,6 +835,7 @@ export class ReplaceableEventService { @@ -835,6 +835,7 @@ export class ReplaceableEventService {
Promise.resolve(sessionEv)
)
await this.indexProfile(sessionEv)
void indexedDb.putReplaceableEvent(sessionEv).catch(() => {})
return sessionEv
}
}

170
src/services/client.service.ts

@ -330,6 +330,12 @@ class ClientService extends EventTarget { @@ -330,6 +330,12 @@ class ClientService extends EventTarget {
this.queryService.setQueryResultIngest((events) => {
for (const e of events) {
this.eventService.addEventToCache(e)
// Kind 0 from timelines/REQs was only kept in the session LRU, not in PROFILE_EVENTS or FlexSearch,
// so @-mention / profile search missed people you already saw on feeds (e.g. notifications).
if (e.kind === kinds.Metadata && !shouldDropEventOnIngest(e)) {
void this.addUsernameToIndex(e)
void indexedDb.putReplaceableEvent(e).catch(() => {})
}
}
})
this.bookstrService = createBookstrService(this.queryService)
@ -3024,8 +3030,9 @@ class ClientService extends EventTarget { @@ -3024,8 +3030,9 @@ class ClientService extends EventTarget {
kinds: [kinds.Metadata]
}, undefined, {
replaceableRace: true,
eoseTimeout: 200,
globalTimeout: 3000
// Search spans many relays; sub-second EOSE was cutting off almost all index relays.
eoseTimeout: 4500,
globalTimeout: 9000
})
const profileEvents = events.sort((a, b) => b.created_at - a.created_at)
@ -3059,7 +3066,7 @@ class ClientService extends EventTarget { @@ -3059,7 +3066,7 @@ class ClientService extends EventTarget {
/**
* Npubs for @-mention dropdown: (1) follow-list profiles matching the query,
* (2) local index, (3) relay search on SEARCHABLE_RELAY_URLS (same as search page).
* (2) local index, (3) kind-0 relay search on PROFILE_FETCH_RELAY_URLS (deduped).
* Returns cached results immediately, then streams relay results via callback.
*/
/**
@ -3161,7 +3168,7 @@ class ClientService extends EventTarget { @@ -3161,7 +3168,7 @@ class ClientService extends EventTarget {
async searchNpubsForMention(
query: string,
limit: number = 100,
limit: number = 50,
onUpdate?: (npubs: string[]) => void
): Promise<string[]> {
const q = query.trim()
@ -3185,10 +3192,29 @@ class ClientService extends EventTarget { @@ -3185,10 +3192,29 @@ class ClientService extends EventTarget {
const matchProfileText = (p: TProfile) =>
((p.username ?? '') + ' ' + (p.original_username ?? '') + ' ' + (p.nip05 ?? '')).toLowerCase()
// Relay query starts immediately so it can run in parallel with local + follow work (slow relays).
const profileSearchRelayUrls = dedupeNormalizeRelayUrlsOrdered(
PROFILE_FETCH_RELAY_URLS.map((u) => normalizeUrl(u) || u).filter(Boolean)
)
const relayTask =
q.length >= 1
? this.searchProfiles(profileSearchRelayUrls, {
search: q,
limit
}).catch(() => [] as TProfile[])
: Promise.resolve([] as TProfile[])
// 1. Local index first (FlexSearch + session) — fills the @-mention list immediately.
// Previously follow-list ran first and awaited up to 80 fetchProfile() calls, so the dropdown
// stayed empty until those finished; @nevent / @naddr stayed instant (sync branch in suggestion.ts).
const local = await this.searchNpubsFromLocal(q, limit)
// Cap how many local hits we take so we never fill `limit` here alone; otherwise we returned
// early and skipped relay search entirely (bad for handle search beyond the local index).
const localCap = Math.min(limit, 24)
let local: string[] = []
try {
local = await this.searchNpubsFromLocal(q, localCap)
} catch {
// FlexSearch / session search should not throw; if it does, still return relay + follow hits.
local = []
}
for (const npub of local) {
if (addNpub(npub)) {
updateIfNeeded()
@ -3203,32 +3229,58 @@ class ClientService extends EventTarget { @@ -3203,32 +3229,58 @@ class ClientService extends EventTarget {
return out
}
// 2. Follow list — IndexedDB-cached profiles only (no network per follow; relay search still covers gaps)
// 2. Follow list — must never block TipTap `items()`: no await here.
// Previously we awaited merge when the follow list was in IDB; that ran up to 80 parallel
// getReplaceableEvent(metadata) calls and could stall Firefox for seconds with no dropdown.
if (this.pubkey && qLower.length >= 1) {
try {
const followListEvent = await this.replaceableEventService.fetchFollowListEvent(this.pubkey)
const followPubkeys = followListEvent ? getPubkeysFromPTags(followListEvent.tags) : []
const toCheck = followPubkeys.slice(0, 80)
const cachedRows = await Promise.all(
toCheck.map(async (pubkey) => {
const npub = pubkeyToNpub(pubkey)
if (!npub) return null
const p = await this.replaceableEventService.getProfileFromIndexedDB(npub)
return p ? { npub, p } : null
})
)
const pk = this.pubkey.trim().toLowerCase()
const viewerPubkey = this.pubkey
for (const row of cachedRows) {
if (!row || out.length >= limit) break
if (!matchProfileText(row.p).includes(qLower)) continue
if (addNpub(row.npub)) {
updateIfNeeded()
const mergeFollowMatches = async (followListEvent: NEvent | undefined | null) => {
if (!followListEvent || out.length >= limit) return
try {
const followPubkeys = getPubkeysFromPTags(followListEvent.tags)
.map((hex) => hex.trim().toLowerCase())
.filter((hex) => /^[0-9a-f]{64}$/.test(hex))
.slice(0, 80)
if (followPubkeys.length === 0) return
const events = await indexedDb.getManyReplaceableEvents(followPubkeys, kinds.Metadata)
for (let i = 0; i < followPubkeys.length; i++) {
if (out.length >= limit) break
const ev = events[i]
if (!ev) continue
const p = getProfileFromEvent(ev)
const npub = pubkeyToNpub(followPubkeys[i]!)
if (!npub) continue
if (!matchProfileText(p).includes(qLower)) continue
if (addNpub(npub)) {
updateIfNeeded()
}
}
} catch {
// ignore
}
} catch {
// ignore follow-list errors; relay search still runs
}
void (async () => {
try {
const cachedFollow = await indexedDb.getReplaceableEvent(pk, kinds.Contacts)
if (cachedFollow) {
await mergeFollowMatches(cachedFollow)
} else {
const ev = await this.replaceableEventService.fetchFollowListEvent(viewerPubkey)
await mergeFollowMatches(ev)
}
} catch {
try {
const ev = await this.replaceableEventService.fetchFollowListEvent(viewerPubkey)
await mergeFollowMatches(ev)
} catch {
// ignore
}
}
})()
}
if (out.length >= limit) {
@ -3238,13 +3290,10 @@ class ClientService extends EventTarget { @@ -3238,13 +3290,10 @@ class ClientService extends EventTarget {
return out
}
// 3. Relay search (slow, but runs in background and updates incrementally)
// 3. Relay search — merge after local + follow so ordering stays local → follows → wider index.
// relayTask was started at the beginning; do not await before return (first paint stays fast).
if (q.length >= 1) {
// Start relay search in background - don't await, let it update via callback
this.searchProfiles(SEARCHABLE_RELAY_URLS, {
search: q,
limit: limit - out.length
})
relayTask
.then((relayProfiles) => {
for (const p of relayProfiles) {
const npub = pubkeyToNpub(p.pubkey)
@ -3254,8 +3303,7 @@ class ClientService extends EventTarget { @@ -3254,8 +3303,7 @@ class ClientService extends EventTarget {
}
if (out.length >= limit) break
}
// Prime profile cache for relay results
relayProfiles.forEach((p) => {
const npub = pubkeyToNpub(p.pubkey)
if (npub) {
@ -3276,10 +3324,52 @@ class ClientService extends EventTarget { @@ -3276,10 +3324,52 @@ class ClientService extends EventTarget {
return out
}
async searchProfilesFromLocal(query: string, limit: number = 100) {
const npubs = await this.searchNpubsFromLocal(query, limit)
const profiles = await Promise.all(npubs.map((npub) => this.replaceableEventService.fetchProfile(npub)))
return profiles.filter((profile) => !!profile) as TProfile[]
/** Kind-0 profiles already cached in IndexedDB whose metadata matches the query (substring on name / nip05 / pubkey hex). */
async searchProfilesFromIndexedDBCache(query: string, limit: number = 100): Promise<TProfile[]> {
  const trimmed = query.trim()
  if (trimmed.length === 0 || limit <= 0) return []
  const cachedEvents = await indexedDb.searchProfileEventsInCache(trimmed, limit)
  return cachedEvents.map((ev) => getProfileFromEvent(ev))
}
/**
 * Local-only profile search: drains the IndexedDB kind-0 cache first, then tops up
 * from FlexSearch/session npubs resolved one-by-one via fetchProfile.
 */
async searchProfilesFromLocal(query: string, limit: number = 100): Promise<TProfile[]> {
  const trimmed = query.trim()
  if (!trimmed) return []
  const seenPubkeys = new Set<string>()
  const results: TProfile[] = []
  // Cheapest source first: profiles whose kind-0 events are already in IndexedDB.
  const cachedProfiles = await this.searchProfilesFromIndexedDBCache(trimmed, limit)
  for (const profile of cachedProfiles) {
    const hex = profile.pubkey.toLowerCase()
    if (seenPubkeys.has(hex)) continue
    seenPubkeys.add(hex)
    results.push(profile)
    if (results.length >= limit) return results
  }
  const slotsLeft = limit - results.length
  if (slotsLeft <= 0) return results
  // Top up from the npub index; resolve sequentially so we stop fetching
  // as soon as the limit is reached.
  const npubs = await this.searchNpubsFromLocal(trimmed, slotsLeft)
  for (const npub of npubs) {
    let hex: string
    try {
      hex = userIdToPubkey(npub).toLowerCase()
    } catch {
      continue // unparseable npub — skip rather than abort the whole search
    }
    if (seenPubkeys.has(hex)) continue
    const profile = await this.replaceableEventService.fetchProfile(npub)
    if (!profile) continue
    seenPubkeys.add(hex)
    results.push(profile)
    if (results.length >= limit) break
  }
  return results
}
private async addUsernameToIndex(profileEvent: NEvent) {

70
src/services/indexed-db.service.ts

@ -61,6 +61,33 @@ function isLikelyCachedNostrEvent(v: unknown): v is Event { @@ -61,6 +61,33 @@ function isLikelyCachedNostrEvent(v: unknown): v is Event {
)
}
/**
 * True when a cached kind-0 event matches the (already lowercased) query.
 * Matches case-insensitively against the pubkey hex, display_name, name and
 * NIP-05 identifier. The NIP-05 value is tested both in its raw form (so a
 * full "alice@example.com" query matches) and with '@' treated as a separator
 * (so "alice" or "example.com" match on their own).
 */
function profileMetadataMatchesQuery(ev: Event, qLower: string): boolean {
  if (!qLower || ev.kind !== kinds.Metadata) return false
  if (ev.pubkey.toLowerCase().includes(qLower)) return true
  try {
    const profileObj = JSON.parse(ev.content) as Record<string, unknown>
    const nip05Raw = typeof profileObj.nip05 === 'string' ? profileObj.nip05 : ''
    // '@'-split form, as before, so either side of the identifier matches.
    const nip05Spaced = nip05Raw
      .split('@')
      .map((s: string) => s.trim())
      .join(' ')
    const text = [
      typeof profileObj.display_name === 'string' ? profileObj.display_name.trim() : '',
      typeof profileObj.name === 'string' ? profileObj.name.trim() : '',
      nip05Spaced
    ]
      .join(' ')
      .toLowerCase()
    // Also check the raw identifier: previously "alice@example.com" queries
    // could never match because the '@' was replaced by a space.
    return text.includes(qLower) || nip05Raw.toLowerCase().includes(qLower)
  } catch {
    // Malformed kind-0 JSON: treat as non-matching rather than failing the scan.
    return false
  }
}
function cachedEventMatchesFullTextQuery(ev: Event, qLower: string): boolean {
if (!qLower) return false
if (ev.id.toLowerCase().includes(qLower)) return true
@ -772,6 +799,49 @@ class IndexedDbService { @@ -772,6 +799,49 @@ class IndexedDbService {
})
}
/**
 * Scan cached kind-0 rows for a handle / display name / NIP-05 substring
 * (case-insensitive). Only the newest replaceable event per pubkey is kept;
 * results are returned newest-first and capped at `limit`.
 */
async searchProfileEventsInCache(query: string, limit: number): Promise<Event[]> {
  const qLower = query.trim().toLowerCase()
  if (!qLower || limit <= 0) return []
  await this.initPromise
  if (!this.db) return []
  return new Promise((resolve, reject) => {
    const byPubkey = new Map<string, Event>()
    const transaction = this.db!.transaction(StoreNames.PROFILE_EVENTS, 'readonly')
    const store = transaction.objectStore(StoreNames.PROFILE_EVENTS)
    const request = store.openCursor()
    request.onsuccess = (event) => {
      const cursor = (event.target as IDBRequest).result as IDBCursorWithValue | null
      if (!cursor) {
        // Full scan done. A readonly transaction auto-commits once no further
        // requests are issued, so no explicit commit() is needed here.
        const list = [...byPubkey.values()].sort((a, b) => b.created_at - a.created_at).slice(0, limit)
        resolve(list)
        return
      }
      const row = cursor.value as TValue<Event>
      const value = row?.value
      if (value && profileMetadataMatchesQuery(value, qLower)) {
        const pk = value.pubkey.toLowerCase()
        const prev = byPubkey.get(pk)
        // Replaceable events: keep only the most recent per pubkey.
        if (!prev || value.created_at > prev.created_at) {
          byPubkey.set(pk, value)
        }
      }
      cursor.continue()
    }
    request.onerror = () => {
      // A failed request aborts the transaction on its own; calling commit()
      // here could throw InvalidStateError and mask the original error.
      reject(request.error ?? new Error('searchProfileEventsInCache failed'))
    }
  })
}
async iterateProfileEvents(callback: (event: Event) => Promise<void>): Promise<void> {
await this.initPromise
if (!this.db) {

9
src/services/mention-event-search.service.ts

@ -10,7 +10,9 @@ import client from './client.service' @@ -10,7 +10,9 @@ import client from './client.service'
import indexedDb from './indexed-db.service'
// NOTE(review): default result cap for note searches — presumably consumed by
// searchNotesForPicker below; confirm usage.
const DEFAULT_NOTES_LIMIT = 20
// NOTE(review): general npub-search default; no longer the mention-dropdown
// default — verify it still has other callers.
const DEFAULT_NPUBS_LIMIT = 100
/** Max npubs in the @-mention dropdown (local + follows + relay merge). */
export const MENTION_NPUB_DROPDOWN_LIMIT = 50
/** Kinds for nevent search: notes, threads, long-form, etc. */
export const NEVENT_KINDS = [
@ -100,8 +102,9 @@ export async function searchNotesForPicker( @@ -100,8 +102,9 @@ export async function searchNotesForPicker(
*/
/**
 * Search npubs for the @-mention dropdown.
 *
 * @param query - user-typed mention text
 * @param limit - requested result count; clamped to [1, MENTION_NPUB_DROPDOWN_LIMIT]
 * @param onUpdate - optional callback invoked as results stream in
 * @returns npub strings, at most MENTION_NPUB_DROPDOWN_LIMIT of them
 */
export async function searchNpubsForMention(
  query: string,
  limit: number = MENTION_NPUB_DROPDOWN_LIMIT,
  onUpdate?: (npubs: string[]) => void
): Promise<string[]> {
  // Guard against NaN/Infinity (Math.floor(NaN) would poison the clamp below),
  // then clamp so callers cannot blow the dropdown past its cap.
  const requested = Number.isFinite(limit) ? Math.floor(limit) : MENTION_NPUB_DROPDOWN_LIMIT
  const capped = Math.min(Math.max(1, requested), MENTION_NPUB_DROPDOWN_LIMIT)
  return client.searchNpubsForMention(query, capped, onUpdate)
}

Loading…
Cancel
Save