Browse Source

implemented an IndexedDB-backed persistent event cache

process deletion events and remove deleted events from caches in the background
corrected and expanded search, and added a cancel button
show maintainers on the search result cards
remove code search
removed hard-coded theme classes

Nostr-Signature: 8080f3cad9abacfc9a5fe08bc26744ff8444d0228ea8a6e8a449c8c2704885d6 573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc 70120c99f5e8a1e9df6d74af756a51641c4998265b9233d5a7d187d9e21302dc6377ae274b07be4d6515af1dabfada43fa9af1a087a34e2879b028ac34e551ca
main
Silberengel 3 weeks ago
parent
commit
82b183f41e
  1. 1
      nostr/commit-signatures.jsonl
  2. 36
      src/app.css
  3. 83
      src/lib/components/UserBadge.svelte
  4. 83
      src/lib/services/nostr/event-cache.ts
  5. 17
      src/lib/services/nostr/maintainer-service.ts
  6. 240
      src/lib/services/nostr/nostr-client.ts
  7. 978
      src/lib/services/nostr/persistent-event-cache.ts
  8. 99
      src/lib/utils/pubkey-resolver.ts
  9. 588
      src/routes/api/search/+server.ts
  10. 159
      src/routes/repos/[npub]/[repo]/+page.svelte
  11. 237
      src/routes/search/+page.svelte

1
nostr/commit-signatures.jsonl

@ -16,3 +16,4 @@ @@ -16,3 +16,4 @@
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771533104,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","add more api help"]],"content":"Signed commit: add more api help","id":"165d9bb66132123e1ac956f442e13f2ffb784e204ecdd1d3643152a5274cdd5a","sig":"deb8866643413806ec43e30faa8a47a78f0ede64616d6304e3b0a87ee3e267122e2308ed67131b73290a3ec10124c19198b05d2b5f142a3ff3e44858d1dff4fe"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771581869,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","gix build and publish CLI to npm"]],"content":"Signed commit: gix build and publish CLI to npm","id":"7515d5ecd835df785a5e896062818b469bcad83a22efa84499d1736e73ae4844","sig":"b4bb7849515c545a609df14939a0a2ddfcd08ee2160cdc01c932a4b0b55668a54fa3fe1d15ad55fe74cfdb23e6c357cf581ab0aaef44da8c64dc098202a7383f"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771584107,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","pubkey lookup for maintainer\ninclude all tags in the r.a. preset\nupdate client tags on publish\nadd verification/correction step"]],"content":"Signed commit: pubkey lookup for maintainer\ninclude all tags in the r.a. preset\nupdate client tags on publish\nadd verification/correction step","id":"cc27d54e23cecca7e126e7a1b9e0881ee9c9addf39a97841992ac35422221e5d","sig":"7c5e7173e4bfc17a71cec49c8ac2fad15ecab3a84ef53ac90ba7ab6f1c051e2e6d108cecfa075917b6be8a9d1d54d3995595a0b95c004995ec89fe8a621315cd"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771584611,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","fix login persistence"]],"content":"Signed commit: fix login persistence","id":"e02d4dbaf56fb0498ca6871ae25bd5da1061eeca1d28c88d54ff5f6549982f11","sig":"647fa0385224b33546c55c786b3c2cf3b2cfab5de9f9748ce814e40e8c6819131ebb9e86d7682bffa327e3b690297f17bcfb2f6b2d5fb6b65e1d9474d66659b1"}

36
src/app.css

@ -44,6 +44,9 @@ @@ -44,6 +44,9 @@
--success-text: #034a2e; /* Darker for better contrast */
--warning-bg: #fef3c7;
--warning-text: #6a3000; /* Darker for better contrast */
--shadow-color: rgba(0, 0, 0, 0.15); /* Shadow for light theme */
--shadow-color-light: rgba(0, 0, 0, 0.1); /* Lighter shadow for light theme */
--focus-ring: rgba(123, 30, 109, 0.15); /* Focus ring for light theme */
}
/* GitRepublic Dark Theme (Purple) - Default */
@ -86,6 +89,9 @@ @@ -86,6 +89,9 @@
--success-text: #6aff9a; /* Brighter for better contrast */
--warning-bg: #4a3a1f;
--warning-text: #ffcc44; /* Brighter for better contrast */
--shadow-color: rgba(0, 0, 0, 0.3); /* Shadow for dark theme */
--shadow-color-light: rgba(0, 0, 0, 0.2); /* Lighter shadow for dark theme */
--focus-ring: rgba(123, 30, 109, 0.15); /* Focus ring for dark theme */
}
/* GitRepublic Black Theme - GitHub-style all black */
@ -128,6 +134,9 @@ @@ -128,6 +134,9 @@
--success-text: #3fb950; /* GitHub's success color */
--warning-bg: #3d2f1f;
--warning-text: #d29922; /* GitHub's warning color */
--shadow-color: rgba(0, 0, 0, 0.4); /* Shadow for black theme */
--shadow-color-light: rgba(0, 0, 0, 0.3); /* Lighter shadow for black theme */
--focus-ring: rgba(196, 30, 58, 0.15); /* Focus ring for black theme (crimson) */
}
/* Base styles */
@ -532,7 +541,7 @@ textarea:disabled::placeholder { @@ -532,7 +541,7 @@ textarea:disabled::placeholder {
background: var(--card-bg);
border: 1px solid var(--border-color);
border-radius: 8px;
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.15);
box-shadow: 0 4px 12px var(--shadow-color);
width: 600px;
max-width: 90vw;
max-height: 500px;
@ -1066,7 +1075,7 @@ button.theme-option.active img.theme-icon-option, @@ -1066,7 +1075,7 @@ button.theme-option.active img.theme-icon-option,
background: var(--bg-secondary);
border-color: var(--accent);
transform: translateY(-1px);
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
box-shadow: 0 2px 4px var(--shadow-color-light);
}
.repo-badge-image {
@ -1142,7 +1151,7 @@ button.theme-option.active img.theme-icon-option, @@ -1142,7 +1151,7 @@ button.theme-option.active img.theme-icon-option,
.card:hover, .repo-card:hover {
border-color: var(--accent);
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1);
box-shadow: 0 2px 8px var(--shadow-color-light);
}
.repo-card-banner {
@ -1180,7 +1189,7 @@ button.theme-option.active img.theme-icon-option, @@ -1180,7 +1189,7 @@ button.theme-option.active img.theme-icon-option,
}
.repo-item:hover, .code-item:hover {
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1);
box-shadow: 0 2px 8px var(--shadow-color-light);
border-color: var(--accent);
}
@ -1496,7 +1505,7 @@ pre code { @@ -1496,7 +1505,7 @@ pre code {
background: var(--card-bg);
border-radius: 8px;
padding: 2rem;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
box-shadow: 0 2px 4px var(--shadow-color-light);
border: 1px solid var(--border-color);
}
@ -1592,8 +1601,15 @@ pre code { @@ -1592,8 +1601,15 @@ pre code {
.search-bar-container .search-input:focus {
outline: none;
border-color: var(--accent);
box-shadow: 0 0 0 3px rgba(126, 40, 94, 0.1);
border-color: var(--input-focus);
/* Use theme-aware focus ring - will be crimson in black theme, purple in others */
}
/* Theme-specific focus rings */
[data-theme="black"] .search-bar-container .search-input:focus,
[data-theme="dark"] .search-bar-container .search-input:focus,
[data-theme="light"] .search-bar-container .search-input:focus {
box-shadow: 0 0 0 3px var(--focus-ring);
}
.search-bar-container .search-input:disabled {
@ -1678,7 +1694,7 @@ label.filter-checkbox > span, @@ -1678,7 +1694,7 @@ label.filter-checkbox > span,
.repo-card-local {
border-left: 4px solid var(--warning, #f59e0b);
background: var(--bg-secondary, rgba(245, 158, 11, 0.05));
background: var(--bg-secondary);
}
/* Repo actions */
@ -1701,7 +1717,7 @@ label.filter-checkbox > span, @@ -1701,7 +1717,7 @@ label.filter-checkbox > span,
}
.delete-button {
background: var(--error-bg, rgba(239, 68, 68, 0.1));
background: var(--error-bg);
color: var(--error, #ef4444);
border-color: var(--error, #ef4444);
}
@ -1733,7 +1749,7 @@ label.filter-checkbox > span, @@ -1733,7 +1749,7 @@ label.filter-checkbox > span,
.register-button:hover {
opacity: 0.9;
transform: translateY(-1px);
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
box-shadow: 0 2px 4px var(--shadow-color-light);
}
.repo-header {

83
src/lib/components/UserBadge.svelte

@ -23,7 +23,7 @@ @@ -23,7 +23,7 @@
async function loadUserProfile() {
try {
// Check cache first for faster lookups
// Check in-memory cache first for fastest lookups
const cachedProfile = eventCache.getProfile(pubkey);
if (cachedProfile) {
try {
@ -33,13 +33,92 @@ @@ -33,13 +33,92 @@
picture: profile.picture
};
loading = false;
// Fetch fresh data in background to update cache (non-blocking)
nostrClient.fetchEvents([
{
kinds: [0],
authors: [pubkey],
limit: 1
}
]).then(profileEvents => {
if (profileEvents.length > 0) {
try {
const freshProfile = JSON.parse(profileEvents[0].content);
// Update if profile changed
if (freshProfile.name !== profile.name || freshProfile.picture !== profile.picture) {
userProfile = {
name: freshProfile.name,
picture: freshProfile.picture
};
}
} catch {
// Invalid JSON, ignore
}
}
}).catch(() => {
// Ignore background fetch errors
});
return;
} catch {
// Invalid JSON in cache, continue to fetch fresh
}
}
// Fetch user profile (kind 0 - metadata) if not in cache
// Check persistent cache (IndexedDB) if available (browser only)
if (typeof window !== 'undefined') {
try {
const { persistentEventCache } = await import('../services/nostr/persistent-event-cache.js');
const persistentProfile = await persistentEventCache.getProfile(pubkey);
if (persistentProfile) {
try {
const profile = JSON.parse(persistentProfile.content);
userProfile = {
name: profile.name,
picture: profile.picture
};
loading = false;
// Also update in-memory cache for faster future lookups
eventCache.setProfile(pubkey, persistentProfile);
// Fetch fresh data in background (non-blocking)
nostrClient.fetchEvents([
{
kinds: [0],
authors: [pubkey],
limit: 1
}
]).then(profileEvents => {
if (profileEvents.length > 0) {
try {
const freshProfile = JSON.parse(profileEvents[0].content);
if (freshProfile.name !== profile.name || freshProfile.picture !== profile.picture) {
userProfile = {
name: freshProfile.name,
picture: freshProfile.picture
};
}
} catch {
// Invalid JSON, ignore
}
}
}).catch(() => {
// Ignore background fetch errors
});
return;
} catch {
// Invalid JSON in persistent cache, continue to fetch fresh
}
}
} catch {
// Persistent cache not available, continue to fetch
}
}
// No cache available, fetch from relays
const profileEvents = await nostrClient.fetchEvents([
{
kinds: [0],

83
src/lib/services/nostr/event-cache.ts

@ -4,6 +4,7 @@ @@ -4,6 +4,7 @@
*/
import type { NostrEvent, NostrFilter } from '../../types/nostr.js';
import { KIND } from '../../types/nostr.js';
import logger from '../logger.js';
interface CacheEntry {
@ -283,6 +284,88 @@ export class EventCache { @@ -283,6 +284,88 @@ export class EventCache {
entries: Array.from(this.cache.values()).reduce((sum, entry) => sum + entry.events.length, 0)
};
}
/**
* Process deletion events (NIP-09) and remove deleted events from cache
* @param deletionEvents - Array of kind 5 deletion events
*/
processDeletionEvents(deletionEvents: NostrEvent[]): void {
if (deletionEvents.length === 0) {
return;
}
const deletedEventIds = new Set<string>();
const deletedAddresses = new Set<string>(); // Format: kind:pubkey:d-tag
// Extract deleted event IDs and addresses from deletion events
for (const deletionEvent of deletionEvents) {
if (deletionEvent.kind !== KIND.DELETION_REQUEST) {
continue;
}
// Extract 'e' tags (deleted event IDs)
for (const tag of deletionEvent.tags) {
if (tag[0] === 'e' && tag[1]) {
deletedEventIds.add(tag[1]);
}
// Extract 'a' tags (deleted parameterized replaceable events)
if (tag[0] === 'a' && tag[1]) {
deletedAddresses.add(tag[1]);
}
}
}
if (deletedEventIds.size === 0 && deletedAddresses.size === 0) {
return; // No deletions to process
}
let removedCount = 0;
// Remove events from all cache entries
for (const [key, entry] of this.cache.entries()) {
const originalLength = entry.events.length;
// Filter out deleted events
entry.events = entry.events.filter(event => {
// Check if event ID is deleted
if (deletedEventIds.has(event.id)) {
removedCount++;
return false;
}
// Check if event matches a deleted address (parameterized replaceable)
for (const deletedAddr of deletedAddresses) {
const parts = deletedAddr.split(':');
if (parts.length === 3) {
const [kindStr, pubkey, dTag] = parts;
const kind = parseInt(kindStr, 10);
if (event.kind === kind && event.pubkey === pubkey) {
const eventDTag = event.tags.find(t => t[0] === 'd')?.[1];
if (eventDTag === dTag) {
removedCount++;
return false;
}
}
}
}
return true;
});
// If all events were removed, remove the cache entry
if (entry.events.length === 0) {
this.cache.delete(key);
} else if (entry.events.length !== originalLength) {
// Update timestamp since we modified the entry
entry.timestamp = Date.now();
}
}
if (removedCount > 0) {
logger.debug({ removedCount, deletedEventIds: deletedEventIds.size, deletedAddresses: deletedAddresses.size }, 'Processed deletion events and removed from in-memory cache');
}
}
}
// Singleton instance

17
src/lib/services/nostr/maintainer-service.ts

@ -88,10 +88,18 @@ export class MaintainerService { @@ -88,10 +88,18 @@ export class MaintainerService {
const maintainers: string[] = [currentOwner]; // Current owner is always a maintainer
// Extract maintainers from tags
// Maintainers tag format: ['maintainers', 'pubkey1', 'pubkey2', 'pubkey3', ...]
for (const tag of announcement.tags) {
if (tag[0] === 'maintainers' && tag[1]) {
if (tag[0] === 'maintainers') {
// Iterate through all maintainers in the tag (skip index 0 which is 'maintainers')
for (let i = 1; i < tag.length; i++) {
const maintainerValue = tag[i];
if (!maintainerValue || typeof maintainerValue !== 'string') {
continue;
}
// Maintainers can be npub or hex pubkey
let pubkey = tag[1];
let pubkey = maintainerValue;
try {
// Try to decode if it's an npub
const decoded = nip19.decode(pubkey);
@ -101,11 +109,14 @@ export class MaintainerService { @@ -101,11 +109,14 @@ export class MaintainerService {
} catch {
// Assume it's already a hex pubkey
}
if (pubkey && !maintainers.includes(pubkey)) {
// Add maintainer if it's valid and not already in the list (case-insensitive check)
if (pubkey && !maintainers.some(m => m.toLowerCase() === pubkey.toLowerCase())) {
maintainers.push(pubkey);
}
}
}
}
const result = { owner: currentOwner, maintainers, isPrivate };
this.cache.set(cacheKey, { ...result, timestamp: Date.now() });

240
src/lib/services/nostr/nostr-client.ts

@ -7,6 +7,28 @@ import logger from '../logger.js'; @@ -7,6 +7,28 @@ import logger from '../logger.js';
import { isNIP07Available, getPublicKeyWithNIP07, signEventWithNIP07 } from './nip07-signer.js';
import { shouldUseTor, getTorProxy } from '../../utils/tor.js';
import { eventCache } from './event-cache.js';
import { KIND } from '../../types/nostr.js';
// Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
// Lazy load persistent cache (only in browser)
let persistentEventCache: typeof import('./persistent-event-cache.js').persistentEventCache | null = null;
async function getPersistentCache() {
if (typeof window === 'undefined') {
return null; // Server-side, no IndexedDB
}
if (!persistentEventCache) {
try {
const module = await import('./persistent-event-cache.js');
persistentEventCache = module.persistentEventCache;
} catch (error) {
logger.debug({ error }, 'Persistent cache not available');
return null;
}
}
return persistentEventCache;
}
// Polyfill WebSocket for Node.js environments (lazy initialization)
// Note: The 'module' import warning in browser builds is expected and harmless.
@ -127,6 +149,7 @@ async function createWebSocketWithTor(url: string): Promise<WebSocket> { @@ -127,6 +149,7 @@ async function createWebSocketWithTor(url: string): Promise<WebSocket> {
export class NostrClient {
private relays: string[] = [];
private authenticatedRelays: Set<string> = new Set();
private processingDeletions: boolean = false; // Guard to prevent recursive deletion processing
constructor(relays: string[]) {
this.relays = relays;
@ -193,13 +216,63 @@ export class NostrClient { @@ -193,13 +216,63 @@ export class NostrClient {
}
async fetchEvents(filters: NostrFilter[]): Promise<NostrEvent[]> {
// Check cache first
const cached = eventCache.get(filters);
if (cached !== null) {
logger.debug({ filters, cachedCount: cached.length }, 'Returning cached events');
return cached;
// Strategy: Check persistent cache first, return immediately if available
// Then fetch from relays in background and merge results
// Skip cache for search queries - search results should always be fresh
const hasSearchQuery = filters.some(f => f.search && f.search.trim().length > 0);
if (!hasSearchQuery) {
// 1. Check persistent cache first (IndexedDB) - only in browser
const persistentCache = await getPersistentCache();
if (persistentCache) {
try {
const cachedEvents = await persistentCache.get(filters);
if (cachedEvents && cachedEvents.length > 0) {
logger.debug({ filters, cachedCount: cachedEvents.length }, 'Returning cached events from IndexedDB');
// Return cached events immediately, but also fetch from relays in background to update cache
this.fetchAndMergeFromRelays(filters, cachedEvents).catch(err => {
logger.debug({ error: err, filters }, 'Background fetch failed, using cached events');
});
return cachedEvents;
}
} catch (error) {
logger.debug({ error, filters }, 'Error reading from persistent cache, falling back');
}
}
// 2. Check in-memory cache as fallback
const memoryCached = eventCache.get(filters);
if (memoryCached !== null && memoryCached.length > 0) {
logger.debug({ filters, cachedCount: memoryCached.length }, 'Returning cached events from memory');
// Also store in persistent cache and fetch from relays in background
if (persistentCache) {
persistentCache.set(filters, memoryCached).catch(err => {
logger.debug({ error: err }, 'Failed to persist cache');
});
}
this.fetchAndMergeFromRelays(filters, memoryCached).catch(err => {
logger.debug({ error: err, filters }, 'Background fetch failed');
});
return memoryCached;
}
} else {
logger.debug({ filters }, 'Skipping cache for search query');
}
// 3. No cache available (or search query), fetch from relays
return this.fetchAndMergeFromRelays(filters, []);
}
/**
* Fetch events from relays and merge with existing events
* Never deletes valid events, only appends/integrates new ones
*/
private async fetchAndMergeFromRelays(filters: NostrFilter[], existingEvents: NostrEvent[]): Promise<NostrEvent[]> {
const events: NostrEvent[] = [];
// Fetch from all relays in parallel
@ -212,34 +285,164 @@ export class NostrClient { @@ -212,34 +285,164 @@ export class NostrClient {
}
}
// Deduplicate by event ID
const uniqueEvents = new Map<string, NostrEvent>();
// Merge with existing events - never delete valid events
const eventMap = new Map<string, NostrEvent>();
// Add existing events first
for (const event of existingEvents) {
eventMap.set(event.id, event);
}
// Add/update with new events from relays
// For replaceable events (kind 0, 3, 10002), use latest per pubkey
const replaceableEvents = new Map<string, NostrEvent>(); // pubkey -> latest event
for (const event of events) {
if (!uniqueEvents.has(event.id) || event.created_at > uniqueEvents.get(event.id)!.created_at) {
uniqueEvents.set(event.id, event);
if (REPLACEABLE_KINDS.includes(event.kind)) {
// Replaceable event - only keep latest per pubkey
const existing = replaceableEvents.get(event.pubkey);
if (!existing || event.created_at > existing.created_at) {
replaceableEvents.set(event.pubkey, event);
}
} else {
// Regular event - add if newer or doesn't exist
const existing = eventMap.get(event.id);
if (!existing || event.created_at > existing.created_at) {
eventMap.set(event.id, event);
}
}
}
// Add replaceable events to the map (replacing older versions)
for (const [pubkey, event] of replaceableEvents.entries()) {
// Remove any existing replaceable events for this pubkey
for (const [id, existingEvent] of eventMap.entries()) {
if (existingEvent.pubkey === pubkey && REPLACEABLE_KINDS.includes(existingEvent.kind)) {
eventMap.delete(id);
}
}
eventMap.set(event.id, event);
}
const finalEvents = Array.from(uniqueEvents.values());
const finalEvents = Array.from(eventMap.values());
// Sort by created_at descending
finalEvents.sort((a, b) => b.created_at - a.created_at);
// Get persistent cache once (if available)
const persistentCache = await getPersistentCache();
// For kind 0 (profile) events, also cache individually by pubkey for faster lookups
// Cache in both persistent and in-memory caches
// For kind 0 (profile) events, also cache individually by pubkey
const profileEvents = finalEvents.filter(e => e.kind === 0);
for (const profileEvent of profileEvents) {
eventCache.setProfile(profileEvent.pubkey, profileEvent);
// Also cache in persistent cache if available
if (persistentCache) {
persistentCache.setProfile(profileEvent.pubkey, profileEvent).catch(err => {
logger.debug({ error: err, pubkey: profileEvent.pubkey }, 'Failed to cache profile');
});
}
}
// Cache the results (use longer TTL for successful fetches)
// Cache the merged results (skip cache for search queries)
const hasSearchQuery = filters.some(f => f.search && f.search.trim().length > 0);
if (!hasSearchQuery) {
if (finalEvents.length > 0 || results.some(r => r.status === 'fulfilled')) {
// Cache successful fetches for 5 minutes, empty results for 1 minute
// Profile events get longer TTL (handled in eventCache.set)
const ttl = finalEvents.length > 0 ? 5 * 60 * 1000 : 60 * 1000;
// Update in-memory cache
eventCache.set(filters, finalEvents, ttl);
logger.debug({ filters, eventCount: finalEvents.length, ttl, profileEvents: profileEvents.length }, 'Cached events');
// Update persistent cache (async, don't wait) - only in browser
if (persistentCache) {
persistentCache.set(filters, finalEvents, ttl).catch(err => {
logger.debug({ error: err, filters }, 'Failed to update persistent cache');
});
}
logger.debug({
filters,
eventCount: finalEvents.length,
existingCount: existingEvents.length,
newCount: events.length,
mergedCount: finalEvents.length,
ttl,
profileEvents: profileEvents.length
}, 'Merged and cached events');
}
} else {
logger.debug({ filters }, 'Skipping cache for search query results');
}
// Process deletion events in the background (non-blocking)
// Fetch recent deletion events and remove deleted events from cache
this.processDeletionsInBackground().catch(err => {
logger.debug({ error: err }, 'Error processing deletions in background');
});
return finalEvents;
}
/**
* Process deletion events in the background
* Fetches recent deletion events and removes deleted events from both caches
*/
private async processDeletionsInBackground(): Promise<void> {
if (typeof window === 'undefined' || this.processingDeletions) {
return; // Only run in browser, and prevent recursive calls
}
this.processingDeletions = true;
try {
// Fetch recent deletion events (last 24 hours)
// Use fetchFromRelay directly to avoid triggering another deletion processing cycle
const since = Math.floor((Date.now() - 24 * 60 * 60 * 1000) / 1000);
const events: NostrEvent[] = [];
// Fetch from all relays in parallel, bypassing cache to avoid recursion
const promises = this.relays.map(relay => this.fetchFromRelay(relay, [{
kinds: [KIND.DELETION_REQUEST],
since,
limit: 100
}]));
const results = await Promise.allSettled(promises);
for (const result of results) {
if (result.status === 'fulfilled') {
events.push(...result.value);
}
}
// Deduplicate deletion events by ID
const uniqueDeletionEvents = new Map<string, NostrEvent>();
for (const event of events) {
if (!uniqueDeletionEvents.has(event.id) || event.created_at > uniqueDeletionEvents.get(event.id)!.created_at) {
uniqueDeletionEvents.set(event.id, event);
}
}
const deletionEvents = Array.from(uniqueDeletionEvents.values());
if (deletionEvents.length > 0) {
// Process deletions in in-memory cache
eventCache.processDeletionEvents(deletionEvents);
// Process deletions in persistent cache
const persistentCache = await getPersistentCache();
if (persistentCache && typeof persistentCache.processDeletionEvents === 'function') {
await persistentCache.processDeletionEvents(deletionEvents);
}
}
} catch (error) {
logger.debug({ error }, 'Error processing deletions in background');
} finally {
this.processingDeletions = false;
}
}
private async fetchFromRelay(relay: string, filters: NostrFilter[]): Promise<NostrEvent[]> {
// Ensure WebSocket polyfill is initialized
await initializeWebSocketPolyfill();
@ -397,6 +600,15 @@ export class NostrClient { @@ -397,6 +600,15 @@ export class NostrClient {
// This ensures fresh data on next fetch
if (success.length > 0) {
eventCache.invalidatePubkey(event.pubkey);
// Also invalidate persistent cache
const persistentCache = await getPersistentCache();
if (persistentCache) {
persistentCache.invalidatePubkey(event.pubkey).catch(err => {
logger.debug({ error: err, pubkey: event.pubkey }, 'Failed to invalidate persistent cache');
});
}
logger.debug({ eventId: event.id, pubkey: event.pubkey }, 'Invalidated cache after event publish');
}

978
src/lib/services/nostr/persistent-event-cache.ts

@ -0,0 +1,978 @@ @@ -0,0 +1,978 @@
/**
* Persistent event cache using IndexedDB for client-side storage
* Provides offline access and reduces relay load
*
* Strategy:
* - Client-side only (IndexedDB) - events are immutable and user-specific
* - Check cache first, return immediately if available
* - Fetch from relays in background and merge results
* - Never delete valid events, only append/integrate new ones
* - Replaceable events (kind 0, 3, 10002) use latest version per pubkey
*/
import type { NostrEvent, NostrFilter } from '../../types/nostr.js';
import { KIND } from '../../types/nostr.js';
import logger from '../logger.js';
import type { NostrClient } from './nostr-client.js';
const DB_NAME = 'gitrepublic_events';
const DB_VERSION = 1;
const STORE_EVENTS = 'events';
const STORE_FILTERS = 'filters';
const STORE_PROFILES = 'profiles'; // Optimized storage for kind 0 events
// Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
interface CachedEvent {
event: NostrEvent;
cachedAt: number;
filterKey?: string; // Which filter(s) this event matches
}
interface FilterCacheEntry {
filterKey: string;
eventIds: string[];
cachedAt: number;
ttl: number;
}
/**
* Generate a deterministic cache key from a filter
*/
function generateFilterKey(filter: NostrFilter): string {
const sortedFilter = Object.keys(filter)
.sort()
.reduce((acc, key) => {
const value = filter[key as keyof NostrFilter];
if (value !== undefined) {
if (Array.isArray(value)) {
acc[key] = [...value].sort();
} else {
acc[key] = value;
}
}
return acc;
}, {} as Record<string, unknown>);
return JSON.stringify(sortedFilter);
}
/**
* Generate cache key for multiple filters
*/
function generateMultiFilterKey(filters: NostrFilter[]): string {
const keys = filters.map(f => generateFilterKey(f)).sort();
return keys.join('|');
}
/**
* Check if an event matches a filter
*/
function eventMatchesFilter(event: NostrEvent, filter: NostrFilter): boolean {
// Check kind
if (filter.kinds && !filter.kinds.includes(event.kind)) {
return false;
}
// Check authors
if (filter.authors && filter.authors.length > 0) {
if (!filter.authors.includes(event.pubkey)) {
return false;
}
}
// Check IDs
if (filter.ids && filter.ids.length > 0) {
if (!filter.ids.includes(event.id)) {
return false;
}
}
// Check #d tag (for parameterized replaceable events)
if (filter['#d'] && filter['#d'].length > 0) {
const dTag = event.tags.find(t => t[0] === 'd')?.[1];
if (!dTag || !filter['#d'].includes(dTag)) {
return false;
}
}
// Check #a tag (for parameterized replaceable events)
if (filter['#a'] && filter['#a'].length > 0) {
const aTag = event.tags.find(t => t[0] === 'a')?.[1];
if (!aTag || !filter['#a'].includes(aTag)) {
return false;
}
}
// Check #e tag
if (filter['#e'] && filter['#e'].length > 0) {
const eTags = event.tags.filter(t => t[0] === 'e').map(t => t[1]);
if (!eTags.some(e => filter['#e']!.includes(e))) {
return false;
}
}
// Check #p tag
if (filter['#p'] && filter['#p'].length > 0) {
const pTags = event.tags.filter(t => t[0] === 'p').map(t => t[1]);
if (!pTags.some(p => filter['#p']!.includes(p))) {
return false;
}
}
// Check created_at range
if (filter.since && event.created_at < filter.since) {
return false;
}
if (filter.until && event.created_at > filter.until) {
return false;
}
return true;
}
/**
* Check if an event matches any of the filters
*/
function eventMatchesAnyFilter(event: NostrEvent, filters: NostrFilter[]): boolean {
return filters.some(filter => eventMatchesFilter(event, filter));
}
export class PersistentEventCache {
private db: IDBDatabase | null = null;
private initPromise: Promise<void> | null = null;
private defaultTTL: number = 5 * 60 * 1000; // 5 minutes
private profileTTL: number = 30 * 60 * 1000; // 30 minutes for profiles
private maxCacheAge: number = 7 * 24 * 60 * 60 * 1000; // 7 days max age
constructor() {
this.init();
}
/**
* Initialize IndexedDB
*/
private async init(): Promise<void> {
if (this.initPromise) {
return this.initPromise;
}
if (typeof window === 'undefined' || !window.indexedDB) {
logger.warn('IndexedDB not available, using in-memory cache only');
return;
}
this.initPromise = new Promise((resolve, reject) => {
const request = indexedDB.open(DB_NAME, DB_VERSION);
request.onerror = () => {
logger.error('Failed to open IndexedDB');
reject(new Error('Failed to open IndexedDB'));
};
request.onsuccess = () => {
this.db = request.result;
resolve();
};
request.onupgradeneeded = (event) => {
const db = (event.target as IDBOpenDBRequest).result;
// Events store - stores all events by ID
if (!db.objectStoreNames.contains(STORE_EVENTS)) {
const eventStore = db.createObjectStore(STORE_EVENTS, { keyPath: 'id' });
eventStore.createIndex('pubkey', 'event.pubkey', { unique: false });
eventStore.createIndex('kind', 'event.kind', { unique: false });
eventStore.createIndex('created_at', 'event.created_at', { unique: false });
eventStore.createIndex('cachedAt', 'cachedAt', { unique: false });
}
// Filter cache store - maps filter keys to event IDs
if (!db.objectStoreNames.contains(STORE_FILTERS)) {
const filterStore = db.createObjectStore(STORE_FILTERS, { keyPath: 'filterKey' });
filterStore.createIndex('cachedAt', 'cachedAt', { unique: false });
}
// Profiles store - optimized for kind 0 events (latest per pubkey)
if (!db.objectStoreNames.contains(STORE_PROFILES)) {
db.createObjectStore(STORE_PROFILES, { keyPath: 'pubkey' });
}
};
});
return this.initPromise;
}
/**
 * Get events from cache that match the filters.
 *
 * Read path: look up the per-filter-set entry, load each referenced event,
 * drop events past maxCacheAge, re-verify filter membership, collapse
 * replaceable kinds to the newest event per pubkey, and return the rest
 * sorted newest-first.
 *
 * @param filters - Filters the caller is querying with (hashed as a set).
 * @returns Matching events (created_at descending), or null when the DB is
 *          unavailable, the filter set was never cached, nothing usable
 *          remains, or a read error occurs.
 */
async get(filters: NostrFilter[]): Promise<NostrEvent[] | null> {
  await this.init();
  if (!this.db) {
    return null;
  }
  try {
    const filterKey = generateMultiFilterKey(filters);
    // Check filter cache first
    const filterEntry = await this.getFilterEntry(filterKey);
    if (!filterEntry) {
      return null;
    }
    // Check if filter cache is expired
    const now = Date.now();
    if (now - filterEntry.cachedAt > filterEntry.ttl) {
      // Expired, but we can still return events if they exist
      // Don't delete, just mark as stale
    }
    // Get events from events store
    const events: NostrEvent[] = [];
    const eventStore = this.db.transaction([STORE_EVENTS], 'readonly').objectStore(STORE_EVENTS);
    // NOTE(review): awaiting each get() sequentially relies on the IndexedDB
    // transaction staying alive across microtasks — confirm this holds in all
    // target browsers; issuing all gets up front would be safer.
    for (const eventId of filterEntry.eventIds) {
      const request = eventStore.get(eventId);
      const cached = await new Promise<CachedEvent | undefined>((resolve) => {
        request.onsuccess = () => resolve(request.result);
        request.onerror = () => resolve(undefined);
      });
      if (cached) {
        // Check if event is too old (beyond max cache age)
        if (now - cached.cachedAt < this.maxCacheAge) {
          // Verify event still matches filters (in case filters changed)
          if (eventMatchesAnyFilter(cached.event, filters)) {
            events.push(cached.event);
          }
        }
      }
    }
    // For replaceable events, ensure we only return the latest per pubkey
    const replaceableEvents = new Map<string, NostrEvent>();
    const regularEvents: NostrEvent[] = [];
    for (const event of events) {
      if (REPLACEABLE_KINDS.includes(event.kind)) {
        const existing = replaceableEvents.get(event.pubkey);
        if (!existing || event.created_at > existing.created_at) {
          replaceableEvents.set(event.pubkey, event);
        }
      } else {
        regularEvents.push(event);
      }
    }
    const result = [...Array.from(replaceableEvents.values()), ...regularEvents];
    // Sort by created_at descending
    result.sort((a, b) => b.created_at - a.created_at);
    return result.length > 0 ? result : null;
  } catch (error) {
    logger.error({ error, filters }, 'Error reading from event cache');
    return null;
  }
}
/**
* Get filter cache entry
*/
private async getFilterEntry(filterKey: string): Promise<FilterCacheEntry | null> {
if (!this.db) return null;
try {
const store = this.db.transaction([STORE_FILTERS], 'readonly').objectStore(STORE_FILTERS);
const request = store.get(filterKey);
return new Promise((resolve) => {
request.onsuccess = () => resolve(request.result || null);
request.onerror = () => resolve(null);
});
} catch {
return null;
}
}
/**
 * Store events in cache, merging with existing events.
 *
 * Replaceable kinds are skipped when a newer version for the same pubkey
 * is already cached; kind-0 events are mirrored into the profiles store.
 * The filter entry's event-ID list is merged (never truncated) so
 * previously cached events stay reachable.
 *
 * @param filters - Filter set used as the cache key.
 * @param events - Events to store for that filter set.
 * @param ttl - Optional TTL in ms; profile queries use profileTTL instead.
 */
async set(filters: NostrFilter[], events: NostrEvent[], ttl?: number): Promise<void> {
  await this.init();
  if (!this.db) {
    return;
  }
  try {
    const filterKey = generateMultiFilterKey(filters);
    const now = Date.now();
    const cacheTTL = ttl || this.defaultTTL;
    // Determine if this is a profile query
    const isProfileQuery = filters.some(f =>
      f.kinds?.includes(0) && f.authors && f.authors.length > 0
    );
    // Use longer TTL for profile events
    const effectiveTTL = isProfileQuery ? this.profileTTL : cacheTTL;
    // Get existing filter entry
    const existingEntry = await this.getFilterEntry(filterKey);
    const existingEventIds = new Set(existingEntry?.eventIds || []);
    // Store/update events
    // NOTE(review): this transaction is reused across awaits below (including
    // awaits on a second, profiles-store transaction); IndexedDB transactions
    // auto-commit once no requests are pending — confirm the later puts cannot
    // hit a TransactionInactiveError in target browsers.
    const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS);
    const newEventIds: string[] = [];
    for (const event of events) {
      // For replaceable events, check if we have a newer version for this pubkey
      if (REPLACEABLE_KINDS.includes(event.kind)) {
        // Check if we already have a newer replaceable event for this pubkey
        const existingProfile = await this.getProfile(event.pubkey);
        if (existingProfile && existingProfile.kind === event.kind && existingProfile.created_at >= event.created_at) {
          // Existing event is newer or same, skip; keep its ID reachable
          // from this filter entry as well.
          if (existingEventIds.has(existingProfile.id)) {
            newEventIds.push(existingProfile.id);
          }
          continue;
        }
      } else {
        // For non-replaceable events, check if we already have this event
        if (existingEventIds.has(event.id)) {
          newEventIds.push(event.id);
          continue;
        }
      }
      // Store the event
      const cached: CachedEvent = {
        event,
        cachedAt: now,
        filterKey
      };
      await new Promise<void>((resolve, reject) => {
        const request = eventStore.put(cached);
        request.onsuccess = () => resolve();
        request.onerror = () => reject(request.error);
      });
      newEventIds.push(event.id);
      // Also store in profiles store if it's a profile event
      if (event.kind === 0) {
        const profileStore = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES);
        const existingProfile = await new Promise<CachedEvent | undefined>((resolve) => {
          const req = profileStore.get(event.pubkey);
          req.onsuccess = () => resolve(req.result);
          req.onerror = () => resolve(undefined);
        });
        // Keep only the newest kind-0 event per pubkey
        if (!existingProfile || event.created_at > existingProfile.event.created_at) {
          await new Promise<void>((resolve, reject) => {
            const req = profileStore.put({ pubkey: event.pubkey, ...cached });
            req.onsuccess = () => resolve();
            req.onerror = () => reject(req.error);
          });
        }
      }
    }
    // Merge with existing event IDs (don't delete valid events)
    const mergedEventIds = Array.from(new Set([...existingEntry?.eventIds || [], ...newEventIds]));
    // Update filter cache entry
    const filterStore = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS);
    const filterEntry: FilterCacheEntry = {
      filterKey,
      eventIds: mergedEventIds,
      cachedAt: now,
      ttl: effectiveTTL
    };
    await new Promise<void>((resolve, reject) => {
      const request = filterStore.put(filterEntry);
      request.onsuccess = () => resolve();
      request.onerror = () => reject(request.error);
    });
    logger.debug({
      filterKey,
      eventCount: events.length,
      mergedCount: mergedEventIds.length,
      ttl: effectiveTTL
    }, 'Cached events in IndexedDB');
  } catch (error) {
    logger.error({ error, filters }, 'Error writing to event cache');
  }
}
/**
* Get a single event by ID
*/
private async getEventById(eventId: string): Promise<NostrEvent | null> {
if (!this.db) return null;
try {
const store = this.db.transaction([STORE_EVENTS], 'readonly').objectStore(STORE_EVENTS);
const request = store.get(eventId);
return new Promise((resolve) => {
request.onsuccess = () => {
const cached = request.result as CachedEvent | undefined;
resolve(cached?.event || null);
};
request.onerror = () => resolve(null);
});
} catch {
return null;
}
}
/**
 * Fetch the cached kind-0 (profile) event for a pubkey.
 *
 * Entries older than maxCacheAge are treated as cache misses.
 */
async getProfile(pubkey: string): Promise<NostrEvent | null> {
  await this.init();
  if (!this.db) {
    return null;
  }
  try {
    const lookup = this.db
      .transaction([STORE_PROFILES], 'readonly')
      .objectStore(STORE_PROFILES)
      .get(pubkey);
    return await new Promise<NostrEvent | null>((resolve) => {
      lookup.onerror = () => resolve(null);
      lookup.onsuccess = () => {
        const hit = lookup.result as CachedEvent | undefined;
        if (!hit) {
          resolve(null);
          return;
        }
        // Stale profiles count as misses.
        const fresh = Date.now() - hit.cachedAt < this.maxCacheAge;
        resolve(fresh ? hit.event : null);
      };
    });
  } catch (error) {
    logger.error({ error, pubkey }, 'Error reading profile from cache');
    return null;
  }
}
/**
 * Set profile event (kind 0).
 *
 * No-ops unless the event is kind 0 and strictly newer than the cached
 * profile for that pubkey. Writes to the profiles store and mirrors the
 * record into the events store.
 *
 * @param pubkey - Hex pubkey the profile belongs to.
 * @param event - The kind-0 metadata event to cache.
 */
async setProfile(pubkey: string, event: NostrEvent): Promise<void> {
  await this.init();
  if (!this.db || event.kind !== 0) {
    return;
  }
  try {
    // Check if we have a newer profile
    const existing = await this.getProfile(pubkey);
    if (existing && existing.created_at >= event.created_at) {
      return; // Existing is newer or same
    }
    const cached: CachedEvent = {
      event,
      cachedAt: Date.now()
    };
    const store = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES);
    await new Promise<void>((resolve, reject) => {
      const request = store.put({ pubkey, ...cached });
      request.onsuccess = () => resolve();
      request.onerror = () => reject(request.error);
    });
    // Also store in events store
    // NOTE(review): this record carries no filterKey, so it is only reachable
    // by event ID until some filter entry references it — confirm intended.
    const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS);
    await new Promise<void>((resolve, reject) => {
      const request = eventStore.put(cached);
      request.onsuccess = () => resolve();
      request.onerror = () => reject(request.error);
    });
  } catch (error) {
    logger.error({ error, pubkey }, 'Error writing profile to cache');
  }
}
/**
 * Invalidate all cached data for a specific pubkey.
 *
 * Removes the pubkey's profile entry, deletes its events via the 'pubkey'
 * index, and then drops every filter entry wholesale — we cannot cheaply
 * tell which entries reference the removed events, and they are rebuilt on
 * the next fetch anyway.
 *
 * Fixes: removed the unused `entry` local in the filter sweep, and corrected
 * the cursor typings — openKeyCursor() yields key-only IDBCursor objects
 * (no `value`), and cursor results are null at end of iteration.
 *
 * @param pubkey - Hex pubkey whose cached data should be discarded.
 */
async invalidatePubkey(pubkey: string): Promise<void> {
  await this.init();
  if (!this.db) {
    return;
  }
  try {
    // Remove from profiles store
    const profileStore = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES);
    await new Promise<void>((resolve, reject) => {
      const request = profileStore.delete(pubkey);
      request.onsuccess = () => resolve();
      request.onerror = () => reject(request.error);
    });
    // Remove events from events store via the pubkey index.
    const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS);
    const index = eventStore.index('pubkey');
    const request = index.openKeyCursor(IDBKeyRange.only(pubkey));
    await new Promise<void>((resolve) => {
      request.onsuccess = (event) => {
        const cursor = (event.target as IDBRequest<IDBCursor | null>).result;
        if (cursor) {
          eventStore.delete(cursor.primaryKey);
          cursor.continue();
        } else {
          resolve();
        }
      };
      request.onerror = () => resolve();
    });
    // Drop every filter entry; any of them might reference deleted events.
    const filterStore = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS);
    const filterRequest = filterStore.openCursor();
    await new Promise<void>((resolve) => {
      filterRequest.onsuccess = (event) => {
        const cursor = (event.target as IDBRequest<IDBCursorWithValue | null>).result;
        if (cursor) {
          filterStore.delete(cursor.primaryKey);
          cursor.continue();
        } else {
          resolve();
        }
      };
      filterRequest.onerror = () => resolve();
    });
  } catch (error) {
    logger.error({ error, pubkey }, 'Error invalidating pubkey cache');
  }
}
/**
 * Drop the cached filter entry for one exact filter set.
 *
 * Only the filter-key mapping is removed; the underlying events stay in
 * the events store.
 */
async invalidate(filters: NostrFilter[]): Promise<void> {
  await this.init();
  if (!this.db) {
    return;
  }
  try {
    const key = generateMultiFilterKey(filters);
    const filterStore = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS);
    const deletion = filterStore.delete(key);
    await new Promise<void>((resolve, reject) => {
      deletion.onerror = () => reject(deletion.error);
      deletion.onsuccess = () => resolve();
    });
  } catch (error) {
    logger.error({ error, filters }, 'Error invalidating filter cache');
  }
}
/**
 * Wipe the entire cache: events, filter entries, and profiles.
 */
async clear(): Promise<void> {
  await this.init();
  if (!this.db) {
    return;
  }
  try {
    for (const storeName of [STORE_EVENTS, STORE_FILTERS, STORE_PROFILES]) {
      const target = this.db.transaction([storeName], 'readwrite').objectStore(storeName);
      await new Promise<void>((resolve, reject) => {
        const wipe = target.clear();
        wipe.onerror = () => reject(wipe.error);
        wipe.onsuccess = () => resolve();
      });
    }
    logger.debug('Cleared all event cache');
  } catch (error) {
    logger.error({ error }, 'Error clearing event cache');
  }
}
/**
 * Cleanup old entries.
 *
 * Sweeps all three stores: filter entries past their own TTL, and events /
 * profiles older than maxCacheAge. Intended to run periodically (see the
 * interval registered at module load).
 */
async cleanup(): Promise<void> {
  await this.init();
  if (!this.db) {
    return;
  }
  try {
    const now = Date.now();
    let cleaned = 0;
    // Clean up expired filter entries
    const filterStore = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS);
    const filterRequest = filterStore.openCursor();
    await new Promise<void>((resolve) => {
      filterRequest.onsuccess = (event) => {
        const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result;
        if (cursor) {
          const entry = cursor.value as FilterCacheEntry;
          if (now - entry.cachedAt > entry.ttl) {
            filterStore.delete(cursor.primaryKey);
            cleaned++;
          }
          cursor.continue();
        } else {
          resolve();
        }
      };
      // Errors end this sweep early rather than failing the whole cleanup
      filterRequest.onerror = () => resolve();
    });
    // Clean up old events (beyond max cache age)
    const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS);
    const eventRequest = eventStore.openCursor();
    await new Promise<void>((resolve) => {
      eventRequest.onsuccess = (event) => {
        const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result;
        if (cursor) {
          const cached = cursor.value as CachedEvent;
          if (now - cached.cachedAt > this.maxCacheAge) {
            eventStore.delete(cursor.primaryKey);
            cleaned++;
          }
          cursor.continue();
        } else {
          resolve();
        }
      };
      eventRequest.onerror = () => resolve();
    });
    // Clean up old profiles
    const profileStore = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES);
    const profileRequest = profileStore.openCursor();
    await new Promise<void>((resolve) => {
      profileRequest.onsuccess = (event) => {
        const cursor = (event.target as IDBRequest<IDBCursorWithValue>).result;
        if (cursor) {
          const cached = cursor.value as CachedEvent;
          if (now - cached.cachedAt > this.maxCacheAge) {
            profileStore.delete(cursor.primaryKey);
            cleaned++;
          }
          cursor.continue();
        } else {
          resolve();
        }
      };
      profileRequest.onerror = () => resolve();
    });
    if (cleaned > 0) {
      logger.debug({ cleaned }, 'Cleaned up old cache entries');
    }
  } catch (error) {
    logger.error({ error }, 'Error during cache cleanup');
  }
}
/**
 * Get cache statistics.
 *
 * Counts the entries in each object store; any failure (including a
 * missing DB) degrades that count to zero.
 */
async getStats(): Promise<{ events: number; filters: number; profiles: number }> {
  await this.init();
  if (!this.db) {
    return { events: 0, filters: 0, profiles: 0 };
  }
  try {
    const db = this.db;
    // One readonly transaction per store; count requests run in parallel.
    const countStore = (name: string): Promise<number> => {
      const store = db.transaction([name], 'readonly').objectStore(name);
      return new Promise<number>((resolve) => {
        const counting = store.count();
        counting.onsuccess = () => resolve(counting.result);
        counting.onerror = () => resolve(0);
      });
    };
    const [events, filters, profiles] = await Promise.all([
      countStore(STORE_EVENTS),
      countStore(STORE_FILTERS),
      countStore(STORE_PROFILES)
    ]);
    return { events, filters, profiles };
  } catch (error) {
    logger.error({ error }, 'Error getting cache stats');
    return { events: 0, filters: 0, profiles: 0 };
  }
}
/**
 * Process deletion events (NIP-09) and remove deleted events from cache.
 *
 * Honors both 'e' tags (plain event IDs) and 'a' tags (addressable events,
 * "kind:pubkey:d-tag"). Matching events are removed from the events and
 * profiles stores, and dangling IDs are pruned from filter entries.
 *
 * Fix: the address-sweep cursor handler declared `const event = cached.event`
 * while its own argument was also named `event`, shadowing it — the inner
 * variable is renamed `cachedEvent` and cursor-result casts now include
 * `| null` (cursors are null at end of iteration).
 *
 * @param deletionEvents - Array of kind 5 deletion events
 */
async processDeletionEvents(deletionEvents: NostrEvent[]): Promise<void> {
  await this.init();
  if (!this.db || deletionEvents.length === 0) {
    return;
  }
  try {
    const deletedEventIds = new Set<string>();
    const deletedAddresses = new Set<string>(); // Format: kind:pubkey:d-tag
    // Extract deleted event IDs and addresses from deletion events
    for (const deletionEvent of deletionEvents) {
      if (deletionEvent.kind !== KIND.DELETION_REQUEST) {
        continue;
      }
      for (const tag of deletionEvent.tags) {
        // 'e' tags carry deleted event IDs
        if (tag[0] === 'e' && tag[1]) {
          deletedEventIds.add(tag[1]);
        }
        // 'a' tags carry deleted parameterized replaceable events
        if (tag[0] === 'a' && tag[1]) {
          deletedAddresses.add(tag[1]);
        }
      }
    }
    if (deletedEventIds.size === 0 && deletedAddresses.size === 0) {
      return; // No deletions to process
    }
    let removedCount = 0;
    // Remove events by ID
    if (deletedEventIds.size > 0) {
      const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS);
      for (const eventId of deletedEventIds) {
        try {
          await new Promise<void>((resolve, reject) => {
            const request = eventStore.delete(eventId);
            request.onsuccess = () => resolve();
            request.onerror = () => reject(request.error);
          });
          removedCount++;
        } catch {
          // Event might not exist in cache, ignore
        }
      }
    }
    // Remove events by address (parameterized replaceable events)
    if (deletedAddresses.size > 0) {
      const eventStore = this.db.transaction([STORE_EVENTS], 'readwrite').objectStore(STORE_EVENTS);
      const cursorRequest = eventStore.openCursor();
      await new Promise<void>((resolve) => {
        cursorRequest.onsuccess = (cursorEvent) => {
          const cursor = (cursorEvent.target as IDBRequest<IDBCursorWithValue | null>).result;
          if (cursor) {
            const cachedEvent = (cursor.value as CachedEvent).event;
            // Check if this event matches any deleted address
            // Format: kind:pubkey:d-tag
            for (const deletedAddr of deletedAddresses) {
              const parts = deletedAddr.split(':');
              if (parts.length === 3) {
                const [kindStr, pubkey, dTag] = parts;
                const kind = parseInt(kindStr, 10);
                if (cachedEvent.kind === kind && cachedEvent.pubkey === pubkey) {
                  const eventDTag = cachedEvent.tags.find(t => t[0] === 'd')?.[1];
                  if (eventDTag === dTag) {
                    // This event matches the deleted address
                    cursor.delete();
                    removedCount++;
                    break;
                  }
                }
              }
            }
            cursor.continue();
          } else {
            resolve();
          }
        };
        cursorRequest.onerror = () => resolve();
      });
    }
    // Remove deleted event IDs from filter cache entries
    if (deletedEventIds.size > 0 || deletedAddresses.size > 0) {
      const filterStore = this.db.transaction([STORE_FILTERS], 'readwrite').objectStore(STORE_FILTERS);
      const filterRequest = filterStore.openCursor();
      await new Promise<void>((resolve) => {
        filterRequest.onsuccess = (cursorEvent) => {
          const cursor = (cursorEvent.target as IDBRequest<IDBCursorWithValue | null>).result;
          if (cursor) {
            const entry = cursor.value as FilterCacheEntry;
            const originalLength = entry.eventIds.length;
            // Remove deleted event IDs
            entry.eventIds = entry.eventIds.filter(id => !deletedEventIds.has(id));
            // If we removed any IDs, update the entry
            if (entry.eventIds.length !== originalLength) {
              cursor.update(entry);
            }
            cursor.continue();
          } else {
            resolve();
          }
        };
        filterRequest.onerror = () => resolve();
      });
    }
    // Also remove from profiles store if applicable
    if (deletedEventIds.size > 0) {
      const profileStore = this.db.transaction([STORE_PROFILES], 'readwrite').objectStore(STORE_PROFILES);
      const profileRequest = profileStore.openCursor();
      await new Promise<void>((resolve) => {
        profileRequest.onsuccess = (cursorEvent) => {
          const cursor = (cursorEvent.target as IDBRequest<IDBCursorWithValue | null>).result;
          if (cursor) {
            const cached = cursor.value as CachedEvent;
            if (deletedEventIds.has(cached.event.id)) {
              cursor.delete();
              removedCount++;
            }
            cursor.continue();
          } else {
            resolve();
          }
        };
        profileRequest.onerror = () => resolve();
      });
    }
    if (removedCount > 0) {
      logger.debug({ removedCount, deletedEventIds: deletedEventIds.size, deletedAddresses: deletedAddresses.size }, 'Processed deletion events and removed from cache');
    }
  } catch (error) {
    logger.error({ error, deletionEvents: deletionEvents.length }, 'Error processing deletion events');
  }
}
/**
 * Fetch deletion events from relays and process them.
 *
 * Without a target pubkey list the fetch is skipped: pulling every
 * deletion on the network would be unbounded.
 *
 * @param nostrClient - NostrClient instance to fetch events
 * @param userPubkeys - Array of user pubkeys to fetch deletions for (optional)
 */
async fetchAndProcessDeletions(nostrClient: NostrClient, userPubkeys: string[] = []): Promise<void> {
  try {
    if (userPubkeys.length === 0) {
      // No specific users means no sensible query; cached deletions (if any)
      // were already handled elsewhere.
      logger.debug('No user pubkeys provided, skipping deletion fetch');
      return;
    }
    // Fetch deletion events (kind 5) authored by the specified users
    const filters: NostrFilter[] = [{
      kinds: [KIND.DELETION_REQUEST],
      authors: userPubkeys,
      limit: 100
    }];
    const deletionEvents = await nostrClient.fetchEvents(filters);
    if (deletionEvents.length > 0) {
      await this.processDeletionEvents(deletionEvents);
    }
  } catch (error) {
    logger.error({ error, userPubkeys: userPubkeys.length }, 'Error fetching and processing deletion events');
    throw error;
  }
}
}
// Singleton instance
export const persistentEventCache = new PersistentEventCache();
// Run cleanup every hour and process deletions every 15 minutes
if (typeof window !== 'undefined') {
setInterval(() => {
persistentEventCache.cleanup().catch(err => {
logger.error({ error: err }, 'Error during scheduled cache cleanup');
});
}, 60 * 60 * 1000); // 1 hour
// Process deletion events in the background every 15 minutes
// This will be triggered when a NostrClient is available
let deletionProcessingInterval: ReturnType<typeof setInterval> | null = null;
// Start deletion processing when a client becomes available
// We'll set this up in the NostrClient or a service that has access to it
(window as any).__startDeletionProcessing = async (nostrClient: any, userPubkeys: string[] = []) => {
if (deletionProcessingInterval) {
clearInterval(deletionProcessingInterval);
}
// Process immediately, then every 15 minutes
persistentEventCache.fetchAndProcessDeletions(nostrClient, userPubkeys).catch((err: unknown) => {
logger.debug({ error: err }, 'Error during initial deletion processing');
});
deletionProcessingInterval = setInterval(() => {
persistentEventCache.fetchAndProcessDeletions(nostrClient, userPubkeys).catch((err: unknown) => {
logger.debug({ error: err }, 'Error during scheduled deletion processing');
});
}, 15 * 60 * 1000); // 15 minutes
};
}

99
src/lib/utils/pubkey-resolver.ts

@@ -0,0 +1,99 @@
/**
* Utility to resolve various pubkey formats to hex pubkey
* Supports: hex pubkey, npub, nprofile, NIP-05
*/
import { nip19 } from 'nostr-tools';
import logger from '../services/logger.js';
/**
 * Resolve a pubkey from various formats to hex.
 * Supports:
 * - Hex pubkey (64 hex characters)
 * - npub (NIP-19 encoded pubkey)
 * - nprofile (NIP-19 encoded profile with relays)
 * - NIP-05 (e.g., user@domain.com)
 *
 * @returns Lowercase hex pubkey, or null when the input is empty or
 *          cannot be resolved.
 */
export async function resolvePubkey(input: string): Promise<string | null> {
  const candidate = input?.trim();
  if (!candidate) {
    return null;
  }
  // Already a bare hex pubkey?
  if (/^[0-9a-f]{64}$/i.test(candidate)) {
    return candidate.toLowerCase();
  }
  // NIP-19 encoded value (npub or nprofile)?
  if (candidate.startsWith('npub') || candidate.startsWith('nprofile')) {
    try {
      const decoded = nip19.decode(candidate);
      if (decoded.type === 'npub') {
        return decoded.data as string;
      }
      if (decoded.type === 'nprofile') {
        // nprofile wraps the pubkey together with relay hints
        return (decoded.data as { pubkey: string }).pubkey;
      }
    } catch (error) {
      logger.debug({ error, input: candidate }, 'Failed to decode NIP-19 value');
      return null;
    }
  }
  // NIP-05 identifier (e.g., user@domain.com)?
  if (candidate.includes('@')) {
    try {
      return await resolveNIP05(candidate);
    } catch (error) {
      logger.debug({ error, input: candidate }, 'Failed to resolve NIP-05');
      return null;
    }
  }
  return null;
}
/**
 * Resolve NIP-05 identifier to hex pubkey.
 * Fetches https://<domain>/.well-known/nostr.json?name=<local-part> and
 * reads the "names" map.
 *
 * Improvements: the fetch now carries a 10s timeout so an unresponsive
 * domain cannot hang the caller, and the untrusted remote JSON is narrowed
 * defensively instead of being assumed to match the NIP-05 shape.
 *
 * @returns Lowercase hex pubkey, or null on malformed input, HTTP errors,
 *          timeouts, or a missing/invalid entry.
 */
async function resolveNIP05(nip05: string): Promise<string | null> {
  const [localPart, domain] = nip05.split('@');
  if (!localPart || !domain) {
    return null;
  }
  try {
    // Fetch from well-known endpoint
    const url = `https://${domain}/.well-known/nostr.json?name=${encodeURIComponent(localPart)}`;
    const response = await fetch(url, {
      headers: {
        'Accept': 'application/json'
      },
      // Don't hang the caller on an unresponsive domain.
      signal: AbortSignal.timeout(10_000)
    });
    if (!response.ok) {
      return null;
    }
    const data: unknown = await response.json();
    // NIP-05 format: { "names": { "<local-part>": "<hex-pubkey>" } }
    const names = (data as { names?: Record<string, unknown> } | null)?.names;
    const pubkey = names?.[localPart];
    // Validate it's a hex pubkey before trusting the remote document
    if (typeof pubkey === 'string' && /^[0-9a-f]{64}$/i.test(pubkey)) {
      return pubkey.toLowerCase();
    }
    return null;
  } catch (error) {
    logger.debug({ error, nip05 }, 'Error fetching NIP-05');
    return null;
  }
}

588
src/routes/api/search/+server.ts

@@ -6,23 +6,21 @@ import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { MaintainerService } from '$lib/services/nostr/maintainer-service.js';
import { DEFAULT_NOSTR_RELAYS, DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js';
import { DEFAULT_NOSTR_RELAYS, DEFAULT_NOSTR_SEARCH_RELAYS, combineRelays } from '$lib/config.js';
import { KIND } from '$lib/types/nostr.js';
import { FileManager } from '$lib/services/git/file-manager.js';
import type { NostrEvent, NostrFilter } from '$lib/types/nostr.js';
import { nip19 } from 'nostr-tools';
import { existsSync } from 'fs';
import { join } from 'path';
import { handleApiError, handleValidationError } from '$lib/utils/error-handler.js';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { resolvePubkey } from '$lib/utils/pubkey-resolver.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import logger from '$lib/services/logger.js';
const repoRoot = process.env.GIT_REPO_ROOT || '/repos';
const fileManager = new FileManager(repoRoot);
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
// MaintainerService will be created with all available relays per request
export const GET: RequestHandler = async (event) => {
const query = event.url.searchParams.get('q');
const type = event.url.searchParams.get('type') || 'repos'; // repos, code, or all
const limit = parseInt(event.url.searchParams.get('limit') || '20', 10);
// Extract user pubkey for privacy filtering
@@ -38,31 +36,321 @@ export const GET: RequestHandler = async (event) => {
}
try {
// Use search relays which are more likely to support NIP-50
const nostrClient = new NostrClient(DEFAULT_NOSTR_SEARCH_RELAYS);
// Collect all available relays
const allRelays = new Set<string>();
// Add default search relays
DEFAULT_NOSTR_SEARCH_RELAYS.forEach(relay => allRelays.add(relay));
DEFAULT_NOSTR_RELAYS.forEach(relay => allRelays.add(relay));
// Add user's relays if logged in
if (userPubkey) {
try {
// Create a temporary client to fetch user relays
const tempClient = new NostrClient(Array.from(allRelays));
const userRelays = await getUserRelays(userPubkey, tempClient);
userRelays.inbox.forEach(relay => allRelays.add(relay));
userRelays.outbox.forEach(relay => allRelays.add(relay));
} catch (err) {
logger.debug({ error: err, userPubkey }, 'Failed to fetch user relays for search');
}
}
const relays = Array.from(allRelays);
logger.debug({ relayCount: relays.length }, 'Using relays for search');
// Create client with all available relays
const nostrClient = new NostrClient(relays);
// Create maintainer service with all available relays
const maintainerService = new MaintainerService(relays);
const results: {
repos: Array<{ id: string; name: string; description: string; owner: string; npub: string }>;
code: Array<{ repo: string; npub: string; file: string; matches: number }>;
repos: Array<{
id: string;
name: string;
description: string;
owner: string;
npub: string;
maintainers?: Array<{ pubkey: string; isOwner: boolean }>;
}>;
} = {
repos: [],
code: []
repos: []
};
// Search repositories using NIP-50
if (type === 'repos' || type === 'all') {
let events: Array<{ id: string; pubkey: string; tags: string[][]; content: string; created_at: number }> = [];
// Check if query is a URL (clone URL search)
const isUrl = (str: string): boolean => {
const trimmed = str.trim();
return trimmed.startsWith('http://') ||
trimmed.startsWith('https://') ||
trimmed.startsWith('git://') ||
trimmed.startsWith('ssh://') ||
trimmed.includes('.git') ||
(trimmed.includes('://') && trimmed.includes('/'));
};
try {
// Try NIP-50 search first (relays that support it will return results sorted by relevance)
events = await nostrClient.fetchEvents([
const queryIsUrl = isUrl(query.trim());
// Check if query is a pubkey (hex, npub, nprofile, or NIP-05)
const resolvedPubkey = await resolvePubkey(query.trim());
// Helper function to fetch events with cache-first strategy
async function fetchEventsWithCache(filters: NostrFilter[]): Promise<NostrEvent[]> {
// Check cache first
const cachedEvents = eventCache.get(filters);
if (cachedEvents && cachedEvents.length > 0) {
logger.debug({ filters, cachedCount: cachedEvents.length }, 'Returning cached events for search');
// Return cached events immediately, fetch from relays in background
nostrClient.fetchEvents(filters).then(freshEvents => {
// Merge fresh events with cached ones (deduplicate by event ID)
const eventMap = new Map<string, NostrEvent>();
cachedEvents.forEach(e => eventMap.set(e.id, e));
freshEvents.forEach(e => {
const existing = eventMap.get(e.id);
if (!existing || e.created_at > existing.created_at) {
eventMap.set(e.id, e);
}
});
const mergedEvents = Array.from(eventMap.values());
// Update cache with merged results
eventCache.set(filters, mergedEvents);
logger.debug({ filters, mergedCount: mergedEvents.length }, 'Updated cache with fresh events');
}).catch(err => {
logger.debug({ error: err, filters }, 'Background fetch failed, using cached events');
});
return cachedEvents;
}
// No cache, fetch from relays
const freshEvents = await nostrClient.fetchEvents(filters);
// Cache the results
if (freshEvents.length > 0) {
eventCache.set(filters, freshEvents);
}
return freshEvents;
}
let events: NostrEvent[] = [];
if (queryIsUrl) {
// Search for repos by clone URL
logger.debug({ query: query.trim() }, 'Searching for repos by clone URL');
// Normalize the URL for matching (remove trailing .git, trailing slash, etc.)
const normalizeUrl = (url: string): string => {
let normalized = url.trim().toLowerCase();
// Remove trailing .git
if (normalized.endsWith('.git')) {
normalized = normalized.slice(0, -4);
}
// Remove trailing slash
normalized = normalized.replace(/\/$/, '');
// Remove protocol for more flexible matching
normalized = normalized.replace(/^(https?|git|ssh):\/\//, '');
return normalized;
};
const normalizedQuery = normalizeUrl(query.trim());
// Fetch all repos with cache-first strategy
const allRepos = await fetchEventsWithCache([
{
kinds: [KIND.REPO_ANNOUNCEMENT],
search: query, // NIP-50: Search field
limit: limit * 2 // Get more results to account for different relay implementations
limit: 1000 // Get more to find URL matches
}
]);
// Filter for repos that have a matching clone URL
events = allRepos.filter(event => {
for (const tag of event.tags) {
if (tag[0] === 'clone') {
for (let i = 1; i < tag.length; i++) {
const cloneUrl = tag[i];
if (!cloneUrl || typeof cloneUrl !== 'string') continue;
const normalizedCloneUrl = normalizeUrl(cloneUrl);
// Check if the normalized query matches the normalized clone URL
// Support partial matches (e.g., "example.com/repo" matches "https://example.com/user/repo.git")
if (normalizedCloneUrl.includes(normalizedQuery) || normalizedQuery.includes(normalizedCloneUrl)) {
return true;
}
}
}
}
return false;
});
} else if (resolvedPubkey) {
// Search for repos by owner or maintainer pubkey
logger.debug({ query: query.trim(), resolvedPubkey }, 'Searching for repos by pubkey');
// Fetch repos where this pubkey is the owner (cache-first)
const ownerEvents = await fetchEventsWithCache([
{
kinds: [KIND.REPO_ANNOUNCEMENT],
authors: [resolvedPubkey],
limit: limit * 2
}
]);
// Fetch repos where this pubkey is a maintainer (cache-first)
// We need to fetch all repos and filter by maintainer tags
const allRepos = await fetchEventsWithCache([
{
kinds: [KIND.REPO_ANNOUNCEMENT],
limit: 1000 // Get more to find maintainer matches
}
]);
// Filter for repos where resolvedPubkey is in maintainers tag
const maintainerEvents = allRepos.filter(event => {
for (const tag of event.tags) {
if (tag[0] === 'maintainers') {
for (let i = 1; i < tag.length; i++) {
const maintainer = tag[i];
if (!maintainer || typeof maintainer !== 'string') continue;
// Maintainer can be npub or hex pubkey
let maintainerPubkey = maintainer;
try {
const decoded = nip19.decode(maintainer);
if (decoded.type === 'npub') {
maintainerPubkey = decoded.data as string;
}
} catch {
// Assume it's already a hex pubkey
}
if (maintainerPubkey.toLowerCase() === resolvedPubkey.toLowerCase()) {
return true;
}
}
}
}
return false;
});
// Combine owner and maintainer events, deduplicate by event ID
const eventMap = new Map<string, typeof events[0]>();
ownerEvents.forEach(e => eventMap.set(e.id, e));
maintainerEvents.forEach(e => eventMap.set(e.id, e));
events = Array.from(eventMap.values());
} else {
// Regular text search using NIP-50
const searchQuery = query.trim().toLowerCase();
// For text search, we'll use cache-first for all repos, then filter
// This allows us to leverage cache while still supporting NIP-50
let allReposForTextSearch: NostrEvent[] = [];
// Check cache first for all repo announcements
const cachedAllRepos = eventCache.get([{ kinds: [KIND.REPO_ANNOUNCEMENT], limit: 1000 }]);
if (cachedAllRepos && cachedAllRepos.length > 0) {
logger.debug({ cachedCount: cachedAllRepos.length }, 'Using cached repos for text search');
allReposForTextSearch = cachedAllRepos;
// Fetch fresh data in background
nostrClient.fetchEvents([{ kinds: [KIND.REPO_ANNOUNCEMENT], limit: 1000 }]).then(freshRepos => {
// Merge and update cache
const eventMap = new Map<string, NostrEvent>();
cachedAllRepos.forEach(e => eventMap.set(e.id, e));
freshRepos.forEach(e => {
const existing = eventMap.get(e.id);
if (!existing || e.created_at > existing.created_at) {
eventMap.set(e.id, e);
}
});
const merged = Array.from(eventMap.values());
eventCache.set([{ kinds: [KIND.REPO_ANNOUNCEMENT], limit: 1000 }], merged);
}).catch(err => {
logger.debug({ error: err }, 'Background fetch failed for text search');
});
} else {
// No cache, fetch all repos
allReposForTextSearch = await nostrClient.fetchEvents([
{ kinds: [KIND.REPO_ANNOUNCEMENT], limit: 1000 }
]);
// Cache the results
if (allReposForTextSearch.length > 0) {
eventCache.set([{ kinds: [KIND.REPO_ANNOUNCEMENT], limit: 1000 }], allReposForTextSearch);
}
}
try {
// Try NIP-50 search for fresh results (bypass cache for NIP-50)
const searchFilter = {
kinds: [KIND.REPO_ANNOUNCEMENT],
search: query.trim(), // NIP-50: Search field - use trimmed query
limit: limit * 2 // Get more results to account for different relay implementations
};
// Fetch NIP-50 results in background (don't wait)
const nip50Promise = nostrClient.fetchEvents([searchFilter]).then(nip50Events => {
// Merge NIP-50 results with cached repos
const eventMap = new Map<string, NostrEvent>();
allReposForTextSearch.forEach(e => eventMap.set(e.id, e));
nip50Events.forEach(e => {
const existing = eventMap.get(e.id);
if (!existing || e.created_at > existing.created_at) {
eventMap.set(e.id, e);
}
});
return Array.from(eventMap.values());
});
// Filter cached repos immediately for fast results
const searchLower = searchQuery;
events = allReposForTextSearch.filter(event => {
const name = event.tags.find(t => t[0] === 'name')?.[1] || '';
const description = event.tags.find(t => t[0] === 'description')?.[1] || '';
const repoId = event.tags.find(t => t[0] === 'd')?.[1] || '';
const content = event.content || '';
return name.toLowerCase().includes(searchLower) ||
description.toLowerCase().includes(searchLower) ||
repoId.toLowerCase().includes(searchLower) ||
content.toLowerCase().includes(searchLower);
});
// Merge NIP-50 results when available (in background)
nip50Promise.then(mergedEvents => {
// Update events with NIP-50 results if they're better
const eventMap = new Map<string, NostrEvent>();
events.forEach(e => eventMap.set(e.id, e));
mergedEvents.forEach(e => {
const existing = eventMap.get(e.id);
if (!existing || e.created_at > existing.created_at) {
eventMap.set(e.id, e);
}
});
// Note: We can't update the events array here since it's already being processed
// The next search will benefit from the updated cache
}).catch(err => {
logger.debug({ error: err }, 'NIP-50 search failed, using cached results');
});
// If NIP-50 returned results, verify they actually match the query
// Some relays might not properly implement NIP-50 search
if (events.length > 0) {
const searchLower = searchQuery;
events = events.filter(event => {
const name = event.tags.find(t => t[0] === 'name')?.[1] || '';
const description = event.tags.find(t => t[0] === 'description')?.[1] || '';
const repoId = event.tags.find(t => t[0] === 'd')?.[1] || '';
const content = event.content || '';
return name.toLowerCase().includes(searchLower) ||
description.toLowerCase().includes(searchLower) ||
repoId.toLowerCase().includes(searchLower) ||
content.toLowerCase().includes(searchLower);
});
}
// NIP-50 search succeeded
} catch (nip50Error) {
// Fallback to manual filtering if NIP-50 fails or isn't supported
@ -74,7 +362,7 @@ export const GET: RequestHandler = async (event) => { @@ -74,7 +362,7 @@ export const GET: RequestHandler = async (event) => {
}
]);
const searchLower = query.toLowerCase();
const searchLower = searchQuery;
events = allEvents.filter(event => {
const name = event.tags.find(t => t[0] === 'name')?.[1] || '';
const description = event.tags.find(t => t[0] === 'description')?.[1] || '';
@ -87,9 +375,38 @@ export const GET: RequestHandler = async (event) => { @@ -87,9 +375,38 @@ export const GET: RequestHandler = async (event) => {
content.toLowerCase().includes(searchLower);
});
}
}
// Process events into results with privacy filtering
const searchLower = query.toLowerCase();
const searchLower = query.trim().toLowerCase();
// Check if this is a pubkey search and if the resolved pubkey matches the logged-in user
const isSearchingOwnPubkey = resolvedPubkey && userPubkey &&
resolvedPubkey.toLowerCase() === userPubkey.toLowerCase();
// Map to track repo relationships for sorting
const repoRelationships = new Map<string, {
isOwned: boolean;
isMaintained: boolean;
isBookmarked: boolean;
maintainers?: Array<{ pubkey: string; isOwner: boolean }>;
}>();
// Pre-fetch maintainers and bookmarks for all repos (batch processing)
const bookmarkChecks = new Map<string, Promise<boolean>>();
if (userPubkey) {
const { BookmarksService } = await import('$lib/services/nostr/bookmarks-service.js');
const bookmarksService = new BookmarksService(relays);
for (const event of events) {
const repoId = event.tags.find(t => t[0] === 'd')?.[1];
if (!repoId) continue;
const repoAddress = `${KIND.REPO_ANNOUNCEMENT}:${event.pubkey}:${repoId}`;
bookmarkChecks.set(event.id, bookmarksService.isBookmarked(userPubkey, repoAddress));
}
}
for (const event of events) {
const repoId = event.tags.find(t => t[0] === 'd')?.[1];
if (!repoId) continue;
@ -104,8 +421,30 @@ export const GET: RequestHandler = async (event) => { @@ -104,8 +421,30 @@ export const GET: RequestHandler = async (event) => {
let canView = false;
if (!isPrivate) {
canView = true; // Public repos are viewable by anyone
} else {
// Private repos require authentication
// Special case: if searching by pubkey and the resolved pubkey matches the logged-in user,
// show all their repos (public and private) regardless of who owns them
if (isSearchingOwnPubkey && resolvedPubkey) {
// Check if the logged-in user is the owner or maintainer of this repo
try {
// Check if user is owner (event.pubkey matches resolvedPubkey)
if (event.pubkey.toLowerCase() === resolvedPubkey.toLowerCase()) {
canView = true; // User owns this repo
} else {
// Check if user is a maintainer
const { maintainers } = await maintainerService.getMaintainers(event.pubkey, repoId);
if (maintainers.some(m => m.toLowerCase() === resolvedPubkey.toLowerCase())) {
canView = true; // User is a maintainer
}
}
} catch (err) {
logger.warn({ error: err, pubkey: event.pubkey, repo: repoId }, 'Failed to check maintainer status in pubkey search');
canView = false;
}
} else if (userPubkey) {
// Private repos require authentication - check if user owns, maintains, or has bookmarked
// Regular privacy check: check if logged-in user owns, maintains, or has bookmarked
try {
// Check if user is owner or maintainer
canView = await maintainerService.canView(userPubkey, event.pubkey, repoId);
@ -113,7 +452,7 @@ export const GET: RequestHandler = async (event) => { @@ -113,7 +452,7 @@ export const GET: RequestHandler = async (event) => {
// If not owner/maintainer, check if user has bookmarked it
if (!canView) {
const { BookmarksService } = await import('$lib/services/nostr/bookmarks-service.js');
const bookmarksService = new BookmarksService(DEFAULT_NOSTR_SEARCH_RELAYS);
const bookmarksService = new BookmarksService(relays);
const repoAddress = `${KIND.REPO_ANNOUNCEMENT}:${event.pubkey}:${repoId}`;
canView = await bookmarksService.isBookmarked(userPubkey, repoAddress);
}
@ -122,6 +461,8 @@ export const GET: RequestHandler = async (event) => { @@ -122,6 +461,8 @@ export const GET: RequestHandler = async (event) => {
canView = false;
}
}
// If no userPubkey and repo is private, canView remains false
}
// Only include repos the user can view
if (!canView) continue;
@ -131,21 +472,124 @@ export const GET: RequestHandler = async (event) => { @@ -131,21 +472,124 @@ export const GET: RequestHandler = async (event) => {
try {
const npub = nip19.npubEncode(event.pubkey);
// Determine relationship to user
const isOwned = !!(userPubkey && event.pubkey.toLowerCase() === userPubkey.toLowerCase());
let isMaintained = false;
let allMaintainers: Array<{ pubkey: string; isOwner: boolean }> = [];
// Fetch maintainers for this repo
try {
const { maintainers, owner } = await maintainerService.getMaintainers(event.pubkey, repoId);
// Build maintainers list with owner flag, owner first
// The maintainers array from getMaintainers always includes the owner as the first element
// Use a Set to track which pubkeys we've already added (case-insensitive)
const seenPubkeys = new Set<string>();
const ownerLower = owner.toLowerCase();
// Build the list: owner first, then other maintainers
allMaintainers = [];
// Process all maintainers, marking owner and deduplicating
for (const maintainer of maintainers) {
const maintainerLower = maintainer.toLowerCase();
// Skip if we've already added this pubkey (case-insensitive check)
if (seenPubkeys.has(maintainerLower)) {
continue;
}
// Mark as seen
seenPubkeys.add(maintainerLower);
// Determine if this is the owner
const isOwner = maintainerLower === ownerLower;
// Add to list
allMaintainers.push({
pubkey: maintainer,
isOwner
});
}
// Sort: owner first, then other maintainers
allMaintainers.sort((a, b) => {
if (a.isOwner && !b.isOwner) return -1;
if (!a.isOwner && b.isOwner) return 1;
return 0;
});
// Ensure owner is always included (in case they weren't in maintainers list)
const hasOwner = allMaintainers.some(m => m.pubkey.toLowerCase() === ownerLower);
if (!hasOwner) {
allMaintainers.unshift({ pubkey: owner, isOwner: true });
}
// Check if user is a maintainer (but not owner, since we already checked that)
if (userPubkey && !isOwned) {
isMaintained = maintainers.some(m => m.toLowerCase() === userPubkey.toLowerCase());
}
} catch (err) {
logger.warn({ error: err, pubkey: event.pubkey, repo: repoId }, 'Failed to fetch maintainers for search result');
// Fallback: just use owner
allMaintainers = [{ pubkey: event.pubkey, isOwner: true }];
}
// Check if bookmarked
let isBookmarked = false;
if (userPubkey && bookmarkChecks.has(event.id)) {
const bookmarkCheck = bookmarkChecks.get(event.id);
if (bookmarkCheck) {
isBookmarked = await bookmarkCheck;
}
}
// Store relationship for sorting
repoRelationships.set(event.id, {
isOwned,
isMaintained,
isBookmarked,
maintainers: allMaintainers
});
results.repos.push({
id: event.id,
name: name || repoId,
description: description || '',
owner: event.pubkey,
npub
npub,
maintainers: allMaintainers
});
} catch {
// Skip if npub encoding fails
}
}
// Sort by relevance (name matches first, then description)
// Note: NIP-50 compliant relays should already return results sorted by relevance
// Sort by user relationship priority, then by relevance
// Priority: owned > maintained > bookmarked > others
// Within each group, sort by relevance (name matches first, then description)
results.repos.sort((a, b) => {
const aRel = repoRelationships.get(a.id) || { isOwned: false, isMaintained: false, isBookmarked: false };
const bRel = repoRelationships.get(b.id) || { isOwned: false, isMaintained: false, isBookmarked: false };
// Priority 1: Owned repos first
if (aRel.isOwned && !bRel.isOwned) return -1;
if (!aRel.isOwned && bRel.isOwned) return 1;
// Priority 2: Maintained repos (but not owned)
if (!aRel.isOwned && !bRel.isOwned) {
if (aRel.isMaintained && !bRel.isMaintained) return -1;
if (!aRel.isMaintained && bRel.isMaintained) return 1;
}
// Priority 3: Bookmarked repos (but not owned or maintained)
if (!aRel.isOwned && !aRel.isMaintained && !bRel.isOwned && !bRel.isMaintained) {
if (aRel.isBookmarked && !bRel.isBookmarked) return -1;
if (!aRel.isBookmarked && bRel.isBookmarked) return 1;
}
// Priority 4: Relevance (name matches first, then description)
const aNameMatch = a.name.toLowerCase().includes(searchLower);
const bNameMatch = b.name.toLowerCase().includes(searchLower);
if (aNameMatch && !bNameMatch) return -1;
@ -160,95 +604,13 @@ export const GET: RequestHandler = async (event) => { @@ -160,95 +604,13 @@ export const GET: RequestHandler = async (event) => {
});
results.repos = results.repos.slice(0, limit);
}
// Search code (basic file content search)
if (type === 'code' || type === 'all') {
// Get all repos on this server
const allRepos: Array<{ npub: string; repo: string }> = [];
// This is a simplified search - in production, you'd want to index files
// For now, we'll search through known repos
try {
const repoDirs = await import('fs/promises').then(fs =>
fs.readdir(repoRoot, { withFileTypes: true })
);
for (const dir of repoDirs) {
if (dir.isDirectory()) {
const npub = dir.name;
try {
const repoFiles = await import('fs/promises').then(fs =>
fs.readdir(join(repoRoot, npub), { withFileTypes: true })
);
for (const repoFile of repoFiles) {
if (repoFile.isDirectory() && repoFile.name.endsWith('.git')) {
const repo = repoFile.name.replace('.git', '');
allRepos.push({ npub, repo });
}
}
} catch {
// Skip if can't read directory
}
}
}
} catch {
// If we can't list repos, skip code search
}
// Filter repos by privacy before searching code
const accessibleRepos: Array<{ npub: string; repo: string }> = [];
for (const { npub, repo } of allRepos.slice(0, 10)) { // Limit to 10 repos for performance
try {
// Decode npub to get pubkey
const decoded = nip19.decode(npub);
if (decoded.type !== 'npub') continue;
const repoOwnerPubkey = decoded.data as string;
// Check if user can view this repo
const canView = await maintainerService.canView(userPubkey, repoOwnerPubkey, repo);
if (canView) {
accessibleRepos.push({ npub, repo });
}
} catch {
// Skip if can't decode npub or check access
}
}
// Search in files (limited to avoid performance issues)
const searchLower = query.toLowerCase();
let codeResults: Array<{ repo: string; npub: string; file: string; matches: number }> = [];
for (const { npub, repo } of accessibleRepos) {
try {
const files = await fileManager.listFiles(npub, repo, 'HEAD', '');
for (const file of files.slice(0, 50)) { // Limit to 50 files per repo
if (file.type === 'file' && file.name.toLowerCase().includes(searchLower)) {
codeResults.push({
repo,
npub,
file: file.path,
matches: 1
});
}
}
} catch {
// Skip if can't access repo
}
}
results.code = codeResults.slice(0, limit);
}
return json({
query,
type,
results,
total: results.repos.length + results.code.length
total: results.repos.length
});
} catch (err) {
return handleApiError(err, { operation: 'search', query, type }, 'Failed to search');
return handleApiError(err, { operation: 'search', query }, 'Failed to search');
}
};

159
src/routes/repos/[npub]/[repo]/+page.svelte

@ -62,6 +62,38 @@ @@ -62,6 +62,38 @@
let activeTab = $state<'files' | 'history' | 'tags' | 'issues' | 'prs' | 'docs' | 'discussions'>('discussions');
let showRepoMenu = $state(false);
// Load maintainers when page data changes (only once per repo, with guard)
let lastRepoKey = $state<string | null>(null);
let maintainersEffectRan = $state(false);
// Reactive loader for the repo's maintainer list.
// Re-runs whenever `$page.data`, `npub`, or `repo` change; guards ensure
// loadAllMaintainers() is invoked at most once per repo key unless an
// error resets the flags.
$effect(() => {
const data = $page.data as typeof pageData;
// Composite key identifying the currently viewed repo.
const currentRepoKey = `${npub}/${repo}`;
// Reset flags if repo changed
if (currentRepoKey !== lastRepoKey) {
maintainersLoaded = false;
maintainersEffectRan = false;
lastRepoKey = currentRepoKey;
}
// Only load if:
// 1. We have page data
// 2. Effect hasn't run yet for this repo
// 3. We're not currently loading
if ((data.repoOwnerPubkey || (data.repoMaintainers && data.repoMaintainers.length > 0)) &&
!maintainersEffectRan &&
!loadingMaintainers) {
maintainersEffectRan = true; // Mark as ran to prevent re-running
maintainersLoaded = true; // Set flag before loading to prevent concurrent calls
// NOTE(review): loadAllMaintainers also checks loadingMaintainers itself,
// so the flags here are a second layer of protection against re-entry.
loadAllMaintainers().catch(err => {
maintainersLoaded = false; // Reset on error so we can retry
maintainersEffectRan = false; // Allow retry
console.warn('Failed to load maintainers:', err);
});
}
});
// Sync with userStore
$effect(() => {
const currentUser = $userStore;
@ -82,6 +114,11 @@ @@ -82,6 +114,11 @@
checkMaintainerStatus().catch(err => console.warn('Failed to reload maintainer status after login:', err));
loadBookmarkStatus().catch(err => console.warn('Failed to reload bookmark status after login:', err));
// Reset flags to allow reload
maintainersLoaded = false;
maintainersEffectRan = false;
lastRepoKey = null;
loadAllMaintainers().catch(err => console.warn('Failed to reload maintainers after login:', err));
// Recheck clone status after login (force refresh) - delay slightly to ensure auth headers are ready
setTimeout(() => {
checkCloneStatus(true).catch(err => console.warn('Failed to recheck clone status after login:', err));
@ -107,6 +144,11 @@ @@ -107,6 +144,11 @@
if (wasLoggedIn) {
checkMaintainerStatus().catch(err => console.warn('Failed to reload maintainer status after logout:', err));
loadBookmarkStatus().catch(err => console.warn('Failed to reload bookmark status after logout:', err));
// Reset flags to allow reload
maintainersLoaded = false;
maintainersEffectRan = false;
lastRepoKey = null;
loadAllMaintainers().catch(err => console.warn('Failed to reload maintainers after logout:', err));
// If repo is private and user logged out, reload to trigger access check
if (!loading && activeTab === 'files') {
loadFiles().catch(err => console.warn('Failed to reload files after logout:', err));
@ -146,6 +188,11 @@ @@ -146,6 +188,11 @@
let isMaintainer = $state(false);
let loadingMaintainerStatus = $state(false);
// All maintainers (including owner) for display
let allMaintainers = $state<Array<{ pubkey: string; isOwner: boolean }>>([]);
let loadingMaintainers = $state(false);
let maintainersLoaded = $state(false); // Guard to prevent repeated loads
// Clone status
let isRepoCloned = $state<boolean | null>(null); // null = unknown, true = cloned, false = not cloned
let checkingCloneStatus = $state(false);
@ -1377,6 +1424,7 @@ @@ -1377,6 +1424,7 @@
await loadTags();
await checkMaintainerStatus();
await loadBookmarkStatus();
await loadAllMaintainers();
// Check clone status (needed to disable write operations)
await checkCloneStatus();
@ -1598,6 +1646,77 @@ @@ -1598,6 +1646,77 @@
}
}
/**
 * Load the full maintainer list (owner + maintainers) for the current repo
 * from the server API and publish it via `allMaintainers`, owner first and
 * deduplicated case-insensitively.
 *
 * On any failure — network error, non-OK HTTP status, or JSON parse error —
 * it logs the error, resets `maintainersLoaded` so the reactive guard allows
 * a retry, and falls back to the owner/maintainers shipped in `pageData`.
 * (Previously a non-OK response was silently ignored, leaving the list empty
 * with no fallback and the retry guard permanently set.)
 */
async function loadAllMaintainers() {
	if (repoNotFound || loadingMaintainers) return;
	loadingMaintainers = true;
	try {
		const response = await fetch(`/api/repos/${npub}/${repo}/maintainers`);
		// Treat a non-OK response as an error so the catch-block fallback runs.
		if (!response.ok) {
			throw new Error(`Failed to load maintainers: HTTP ${response.status}`);
		}
		const data = await response.json();
		const owner: string | undefined = data.owner;
		const maintainers: string[] = data.maintainers || [];
		const allMaintainersList: Array<{ pubkey: string; isOwner: boolean }> = [];
		const seen = new Set<string>();
		// Normalized owner key; undefined when the API returned no owner.
		const ownerLower = owner ? owner.toLowerCase() : undefined;
		// Deduplicate case-insensitively, tagging the owner's entry.
		for (const maintainer of maintainers) {
			const maintainerLower = maintainer.toLowerCase();
			if (seen.has(maintainerLower)) {
				continue;
			}
			seen.add(maintainerLower);
			allMaintainersList.push({
				pubkey: maintainer,
				isOwner: maintainerLower === ownerLower
			});
		}
		// Owner first, then other maintainers (relative order preserved).
		allMaintainersList.sort((a, b) => {
			if (a.isOwner && !b.isOwner) return -1;
			if (!a.isOwner && b.isOwner) return 1;
			return 0;
		});
		// Ensure the owner appears even when absent from the maintainers list.
		if (owner && ownerLower && !seen.has(ownerLower)) {
			allMaintainersList.unshift({ pubkey: owner, isOwner: true });
		}
		allMaintainers = allMaintainersList;
	} catch (err) {
		console.error('Failed to load maintainers:', err);
		maintainersLoaded = false; // Reset flag on error so the guard retries
		// Fallback: derive the list from the data loaded with the page.
		if (pageData.repoOwnerPubkey) {
			const ownerPubkeyLower = pageData.repoOwnerPubkey.toLowerCase();
			const fallback: Array<{ pubkey: string; isOwner: boolean }> = [
				{ pubkey: pageData.repoOwnerPubkey, isOwner: true }
			];
			if (pageData.repoMaintainers) {
				for (const maintainer of pageData.repoMaintainers) {
					if (maintainer.toLowerCase() !== ownerPubkeyLower) {
						fallback.push({ pubkey: maintainer, isOwner: false });
					}
				}
			}
			allMaintainers = fallback;
		}
	} finally {
		loadingMaintainers = false;
	}
}
async function checkVerification() {
if (repoNotFound) return;
loadingVerification = true;
@ -3040,16 +3159,32 @@ @@ -3040,16 +3159,32 @@
<span class="fork-badge">Forked from <a href={`/repos/${forkInfo.originalRepo.npub}/${forkInfo.originalRepo.repo}`}>{forkInfo.originalRepo.repo}</a></span>
{/if}
</div>
{#if pageData.repoOwnerPubkey || (pageData.repoMaintainers && pageData.repoMaintainers.length > 0)}
{#if allMaintainers.length > 0 || pageData.repoOwnerPubkey}
<div class="repo-contributors">
<span class="contributors-label">Contributors:</span>
<span class="contributors-label">Owners & Maintainers:</span>
<div class="contributors-list">
{#if pageData.repoOwnerPubkey}
<a href={`/users/${npub}`} class="contributor-item">
{#if allMaintainers.length > 0}
{#each allMaintainers as maintainer}
{@const maintainerNpub = nip19.npubEncode(maintainer.pubkey)}
<a
href={`/users/${maintainerNpub}`}
class="contributor-item"
class:contributor-owner={maintainer.isOwner}
>
<UserBadge pubkey={maintainer.pubkey} />
{#if maintainer.isOwner}
<span class="contributor-badge owner">Owner</span>
{:else}
<span class="contributor-badge maintainer">Maintainer</span>
{/if}
</a>
{/each}
{:else if pageData.repoOwnerPubkey}
<!-- Fallback to pageData if maintainers not loaded yet -->
<a href={`/users/${npub}`} class="contributor-item contributor-owner">
<UserBadge pubkey={pageData.repoOwnerPubkey} />
<span class="contributor-badge owner">Owner</span>
</a>
{/if}
{#if pageData.repoMaintainers}
{#each pageData.repoMaintainers.filter(m => m !== pageData.repoOwnerPubkey) as maintainerPubkey}
<a href={`/users/${nip19.npubEncode(maintainerPubkey)}`} class="contributor-item">
@ -3058,6 +3193,7 @@ @@ -3058,6 +3193,7 @@
</a>
{/each}
{/if}
{/if}
</div>
</div>
{/if}
@ -5042,6 +5178,19 @@ @@ -5042,6 +5178,19 @@
background: var(--card-bg);
}
/* Highlight the repo owner's contributor chip with the theme accent color. */
.contributor-item.contributor-owner {
background: var(--accent-light);
border: 2px solid var(--accent);
font-weight: 600;
box-shadow: 0 0 0 1px var(--accent-light);
}
/* Keep the accent highlight on hover, with a stronger outline ring. */
.contributor-item.contributor-owner:hover {
background: var(--accent-light);
border-color: var(--accent-hover);
box-shadow: 0 0 0 2px var(--accent-light);
}
.contributor-badge {
padding: 0.25rem 0.5rem;
border-radius: 0.25rem;

237
src/routes/search/+page.svelte

@ -8,9 +8,9 @@ @@ -8,9 +8,9 @@
import { userStore } from '$lib/stores/user-store.js';
let query = $state('');
let searchType = $state<'repos' | 'code' | 'all'>('repos');
let loading = $state(false);
let userPubkeyHex = $state<string | null>(null);
let searchAbortController: AbortController | null = null;
// Sync with userStore
$effect(() => {
@ -35,8 +35,14 @@ @@ -35,8 +35,14 @@
}
});
let results = $state<{
repos: Array<{ id: string; name: string; description: string; owner: string; npub: string }>;
code: Array<{ repo: string; npub: string; file: string; matches: number }>;
repos: Array<{
id: string;
name: string;
description: string;
owner: string;
npub: string;
maintainers?: Array<{ pubkey: string; isOwner: boolean }>;
}>;
total: number;
} | null>(null);
let error = $state<string | null>(null);
@ -76,9 +82,19 @@ @@ -76,9 +82,19 @@
async function performSearch() {
if (!query.trim() || query.length < 2) {
results = null;
return;
}
// Cancel any ongoing search
if (searchAbortController) {
searchAbortController.abort();
}
// Create new abort controller for this search
searchAbortController = new AbortController();
const currentAbortController = searchAbortController;
loading = true;
error = null;
results = null; // Reset results
@ -89,18 +105,30 @@ @@ -89,18 +105,30 @@
headers['X-User-Pubkey'] = userPubkeyHex;
}
const response = await fetch(`/api/search?q=${encodeURIComponent(query)}&type=${searchType}`, {
headers
const response = await fetch(`/api/search?q=${encodeURIComponent(query.trim())}`, {
headers,
signal: currentAbortController.signal
});
// Check if request was aborted
if (currentAbortController.signal.aborted) {
return;
}
if (response.ok) {
const data = await response.json();
// The API returns { query, type, results: { repos, code }, total }
// Verify the response matches our current query (in case of race conditions)
if (data.query !== query.trim()) {
// Response is for a different query, ignore it
return;
}
// The API returns { query, results: { repos }, total }
// Extract the nested results structure
const apiResults = data.results || {};
results = {
repos: Array.isArray(apiResults.repos) ? apiResults.repos : [],
code: Array.isArray(apiResults.code) ? apiResults.code : [],
total: typeof data.total === 'number' ? data.total : (apiResults.repos?.length || 0) + (apiResults.code?.length || 0)
total: typeof data.total === 'number' ? data.total : (apiResults.repos?.length || 0)
};
} else {
const data = await response.json();
@ -108,12 +136,34 @@ @@ -108,12 +136,34 @@
results = null; // Clear results on error
}
} catch (err) {
// Ignore abort errors
if (err instanceof Error && err.name === 'AbortError') {
return;
}
error = err instanceof Error ? err.message : 'Search failed';
results = null; // Clear results on error
} finally {
// Only update loading state if this is still the current search
if (currentAbortController === searchAbortController) {
loading = false;
}
}
}
/**
 * Abort any in-flight search request and return the page to its idle,
 * no-results state (clears loading, results, and error).
 */
function cancelSearch() {
// Abort the pending request, if any, and drop the controller reference.
searchAbortController?.abort();
searchAbortController = null;
// Reset all search UI state.
loading = false;
error = null;
results = null;
}
// Clear the search input text, then abort any in-flight search and
// reset the loading/results/error state via cancelSearch().
function clearSearch() {
query = '';
cancelSearch();
}
function handleSearch(e: Event) {
e.preventDefault();
@ -131,21 +181,21 @@ @@ -131,21 +181,21 @@
<input
type="text"
bind:value={query}
placeholder="Search repositories or code... (NIP-50 search)"
placeholder="Search repositories by name, description, pubkey (hex/npub/nprofile/NIP-05), or clone URL..."
class="search-input"
/>
<div class="search-controls">
<select bind:value={searchType} class="search-type-select">
<option value="repos">Repositories</option>
<option value="code">Code</option>
<option value="all">All</option>
</select>
<button type="submit" disabled={loading || !query.trim()} class="search-button">
{loading ? 'Searching...' : 'Search'}
</button>
{#if loading || results || query.trim()}
<button type="button" onclick={clearSearch} class="cancel-button">
Cancel
</button>
{/if}
</div>
<div class="search-info">
<small>Using NIP-50 search across multiple relays for better results</small>
<small>Search repositories by name, description, pubkey (hex/npub/nprofile/NIP-05), or clone URL.</small>
</div>
</form>
@ -159,7 +209,7 @@ @@ -159,7 +209,7 @@
<h2>Results ({results.total || 0})</h2>
</div>
{#if (searchType === 'repos' || searchType === 'all') && results.repos && results.repos.length > 0}
{#if results.repos && results.repos.length > 0}
<section class="results-section">
<h3>Repositories ({results.repos.length})</h3>
<div class="repo-list">
@ -181,38 +231,34 @@ @@ -181,38 +231,34 @@
<p class="repo-description">{repo.description}</p>
{/if}
<div class="repo-meta">
<a href={`/users/${repo.npub}`} onclick={(e) => e.stopPropagation()}>
<UserBadge pubkey={repo.owner} />
{#if repo.maintainers && repo.maintainers.length > 0}
<div class="repo-contributors">
<span class="contributors-label">Owners & Maintainers:</span>
<div class="contributors-list">
{#each repo.maintainers as maintainer}
{@const maintainerNpub = nip19.npubEncode(maintainer.pubkey)}
<a
href={`/users/${maintainerNpub}`}
class="contributor-item"
class:contributor-owner={maintainer.isOwner}
onclick={(e) => e.stopPropagation()}
>
<UserBadge pubkey={maintainer.pubkey} />
{#if maintainer.isOwner}
<span class="contributor-badge owner">Owner</span>
{:else}
<span class="contributor-badge maintainer">Maintainer</span>
{/if}
</a>
</div>
</div>
{/each}
</div>
</section>
{/if}
{#if (searchType === 'code' || searchType === 'all') && results.code && results.code.length > 0}
<section class="results-section">
<h3>Code Files ({results.code.length})</h3>
<div class="code-list">
{#each results.code as file}
<div
class="code-item"
role="button"
tabindex="0"
onclick={() => goto(`/repos/${file.npub}/${file.repo}?file=${encodeURIComponent(file.file)}`)}
onkeydown={(e) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault();
goto(`/repos/${file.npub}/${file.repo}?file=${encodeURIComponent(file.file)}`);
}
}}
style="cursor: pointer;">
<div class="code-file-path">{file.file}</div>
<div class="code-repo">
<a href={`/repos/${file.npub}/${file.repo}`} onclick={(e) => e.stopPropagation()}>
{file.repo}
</div>
{:else}
<!-- Fallback: show owner if maintainers not available -->
<a href={`/users/${repo.npub}`} onclick={(e) => e.stopPropagation()}>
<UserBadge pubkey={repo.owner} />
</a>
{/if}
</div>
</div>
{/each}
@ -238,4 +284,107 @@ @@ -238,4 +284,107 @@
.search-info small {
color: inherit;
}
/* Row holding the search button and the cancel button; wraps on narrow screens. */
.search-controls {
display: flex;
gap: 0.5rem;
align-items: center;
flex-wrap: wrap;
}
/* Secondary "Cancel" button; fallback colors cover themes without these vars. */
.cancel-button {
padding: 0.5rem 1rem;
background: var(--bg-secondary, #e8e8e8);
color: var(--text-primary, #1a1a1a);
border: 1px solid var(--border-color, #ccc);
border-radius: 4px;
cursor: pointer;
font-size: 0.875rem;
transition: background-color 0.2s;
}
.cancel-button:hover {
background: var(--bg-tertiary, #d0d0d0);
}
.cancel-button:active {
background: var(--bg-quaternary, #b8b8b8);
}
/* "Owners & Maintainers" section on each search result card. */
.repo-contributors {
display: flex;
flex-direction: column;
gap: 0.5rem;
margin-top: 0.5rem;
}
.contributors-label {
font-size: 0.875rem;
color: var(--text-muted, #666);
font-weight: 500;
}
/* Wrapping row of contributor chips. */
.contributors-list {
display: flex;
flex-wrap: wrap;
align-items: center;
gap: 0.5rem;
}
/* One chip: avatar/badge link for a single owner or maintainer. */
.contributor-item {
display: inline-flex;
align-items: center;
gap: 0.5rem;
text-decoration: none;
padding: 0.25rem 0.5rem;
border-radius: 0.5rem;
background: var(--bg-secondary, #f0f0f0);
border: 1px solid var(--border-color, #ddd);
transition: all 0.2s ease;
}
.contributor-item:hover {
border-color: var(--accent, #8a2be2);
background: var(--card-bg, #fff);
}
/* Accent highlight for the owner's chip; falls back to neutral theme vars. */
.contributor-item.contributor-owner {
background: var(--accent-light, var(--bg-tertiary));
border: 2px solid var(--accent, var(--border-color));
font-weight: 600;
box-shadow: 0 0 0 1px var(--accent-light, transparent);
}
.contributor-item.contributor-owner:hover {
background: var(--accent-light, var(--bg-tertiary));
border-color: var(--accent, var(--border-color));
box-shadow: 0 0 0 2px var(--accent-light, transparent);
}
/* Small uppercase role label ("Owner" / "Maintainer") inside a chip. */
.contributor-badge {
padding: 0.25rem 0.5rem;
border-radius: 0.25rem;
font-size: 0.7rem;
font-weight: 700;
text-transform: uppercase;
white-space: nowrap;
letter-spacing: 0.05em;
border: 1px solid transparent;
min-height: 1.5rem;
display: inline-flex;
align-items: center;
justify-content: center;
}
.contributor-badge.owner {
background: var(--bg-tertiary);
color: var(--text-primary);
border-color: var(--border-color);
}
.contributor-badge.maintainer {
background: var(--success-bg);
color: var(--success-text);
border-color: var(--border-color);
}
</style>

Loading…
Cancel
Save