Browse Source

bug-fixes

Nostr-Signature: 9566e4e2964d0a7b80cce1889092c4db333f89843b5d68906b3c3c568e4ba57d 573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc 8cf9166c630a8dc21bbc3dfaea4330c80c93bf7bc9e8d5d3be182fb11a3b96ea2e5969f452d3e2b309103b3e7fea8fc1aa6e5908d499d0696e9bfcd3859a8e32
main
Silberengel 3 weeks ago
parent
commit
6aa7c54fcd
  1. 1
      nostr/commit-signatures.jsonl
  2. 30
      src/lib/components/RepoHeaderEnhanced.svelte
  3. 97
      src/lib/services/git/api-repo-fetcher.ts
  4. 36
      src/lib/styles/components.css
  5. 92
      src/lib/utils/api-repo-helper.ts
  6. 17
      src/routes/api/repos/[npub]/[repo]/branches/+server.ts
  7. 7
      src/routes/api/repos/[npub]/[repo]/tree/+server.ts
  8. 126
      src/routes/api/search/+server.ts
  9. 12
      src/routes/repos/[npub]/[repo]/+page.svelte
  10. 31
      src/routes/search/+page.svelte

1
nostr/commit-signatures.jsonl

@@ -61,3 +61,4 @@
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771838236,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","refactor repo manager"]],"content":"Signed commit: refactor repo manager","id":"d134c35516991f27e47ed8a4aa0d3f1d6e6be41c46c9cf3f6c982c1442b09b4b","sig":"cb699fae6a8e44a3b9123f215749f6fec0470c75a0401a94c37dfb8e572c07281b3941862e704b868663f943c573ab2ee9fec217e87f7be567cc6bb3514cacdb"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771840654,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"0580e0df8000275817f040bbd6c04dfdfbff08a366df7a1686f227d8b7310053","sig":"9a238266f989c0664dc5c9743675907477e2fcb5311e8edeb505dec97027f619f6dc6742ee5f3887ff6a864274b45005fc7dd4432f8e2772dfe0bb7e2d8a449c"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771840660,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"e96c955f550a94c9c6d1228d2a7e479ced331334aaa4eea84525b362b8484d6e","sig":"1218bd9e449404ccc56c5727e8bdff5db31e37c2053a2d91ba02d214c0988173ba480010e53401661cb439884308a575230a7a12124f8e6d8f058c8a804a42f6"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1771845583,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","fix search and relay connections"]],"content":"Signed commit: fix search and relay connections","id":"24db15027960b244eb4c8664a3642c64684ebfef8c200250093dd047cd119e7d","sig":"561d15ae39b3bf7a5b8a67539a5cfa19d53cbaca9f904589ab7cb69e568ddf056d0d83ced4830cdfdc0b386f13c4bab930264a0f6144cbb833b187b5d452c4ae"}

30
src/lib/components/RepoHeaderEnhanced.svelte

@@ -94,7 +94,7 @@
let moreMenuElement = $state<HTMLDivElement | null>(null);
let menuButtonElement = $state<HTMLButtonElement | null>(null);
// Adjust menu position to prevent overflow on the right side
// Adjust menu position to prevent overflow on the left side (menu opens to the left)
$effect(() => {
if (showMoreMenu && moreMenuElement && menuButtonElement) {
// Use double requestAnimationFrame to ensure DOM is fully rendered
@@ -107,25 +107,25 @@
const viewportWidth = window.innerWidth;
const padding = 16; // Padding from viewport edges
// Menu is positioned with left: 0, so its left edge aligns with button's left edge
// Calculate where the menu's right edge currently is
// Menu is positioned with right: 0, so its right edge aligns with button's right edge
// Calculate where the menu's left edge currently is
const menuWidth = menuRect.width || 280; // Fallback to min-width
const currentLeft = buttonRect.left;
const currentRight = currentLeft + menuWidth;
const currentRight = buttonRect.right;
const currentLeft = currentRight - menuWidth;
let transformX = 0;
// Check if menu overflows on the right
if (currentRight > viewportWidth - padding) {
// Menu would overflow on the right, shift it left
const rightOverflow = currentRight - (viewportWidth - padding);
transformX = -rightOverflow;
// Check if menu overflows on the left
if (currentLeft < padding) {
// Menu would overflow on the left, shift it right
const leftOverflow = padding - currentLeft;
transformX = leftOverflow;
// Re-check left after adjustment - ensure we don't go off left
const finalLeft = currentLeft + transformX;
if (finalLeft < padding) {
// If we'd go off left, position it at the left edge with padding
transformX = padding - currentLeft;
// Re-check right after adjustment - ensure we don't go off right
const finalRight = currentRight + transformX;
if (finalRight > viewportWidth - padding) {
// If we'd go off right, position it at the right edge with padding
transformX = (viewportWidth - padding) - currentRight;
}
}

97
src/lib/services/git/api-repo-fetcher.ts

@@ -93,17 +93,23 @@ export function isGraspUrl(url: string): boolean {
*/
export function parseGitUrl(url: string): { platform: GitPlatform; owner: string; repo: string; baseUrl: string } | null {
// Handle GRASP URLs - they use Gitea-compatible API but with npub as owner
if (isGraspUrl(url)) {
const isGrasp = isGraspUrl(url);
logger.info({ url, isGrasp }, 'Checking if URL is GRASP URL');
if (isGrasp) {
const graspMatch = url.match(/(https?:\/\/[^/]+)\/(npub1[a-z0-9]+)\/([^/]+?)(?:\.git)?\/?$/i);
if (graspMatch) {
const [, baseHost, npub, repo] = graspMatch;
return {
platform: 'grasp',
const parsed = {
platform: 'grasp' as const,
owner: npub,
repo: repo.replace(/\.git$/, ''),
baseUrl: `${baseHost}/api/v1`
};
logger.debug({ url, parsed }, 'Successfully parsed GRASP URL');
return parsed;
}
logger.warn({ url }, 'URL detected as GRASP but regex did not match');
return null;
}
@@ -442,12 +448,15 @@ async function fetchFromGitea(owner: string, repo: string, baseUrl: string): Pro
baseUrl,
new URLSearchParams()
);
logger.info({ repoUrl, owner, repo, baseUrl }, 'Fetching from Gitea API');
const repoResponse = await fetch(repoUrl);
if (!repoResponse.ok) {
if (repoResponse.status === 404) {
logger.warn({ status: repoResponse.status, repoUrl, owner, repo, baseUrl }, 'Gitea API returned 404 - repository not found');
return null;
}
logger.warn({ status: repoResponse.status, owner, repo }, 'Gitea API error');
const errorText = await repoResponse.text().catch(() => repoResponse.statusText);
logger.warn({ status: repoResponse.status, repoUrl, owner, repo, baseUrl, errorText }, 'Gitea API error');
return null;
}
const repoData = await repoResponse.json();
@@ -652,23 +661,25 @@ async function fetchFromGitea(owner: string, repo: string, baseUrl: string): Pro
/**
* Fetch repository metadata from GRASP
* GRASP repos use git protocol, so we can't easily fetch metadata via API
* For now, return minimal info indicating it's a GRASP repo
* GRASP servers use Gitea-compatible API, so we can use the same fetching logic as Gitea
*/
async function fetchFromGrasp(npub: string, repo: string, baseUrl: string, originalUrl: string): Promise<Partial<ApiRepoInfo> | null> {
// GRASP repos typically don't have REST APIs
// Full implementation would use git protocol (info/refs, git-upload-pack)
// For now, return basic structure
return {
name: repo,
description: undefined,
url: originalUrl,
defaultBranch: 'main',
branches: [],
commits: [],
files: [],
platform: 'grasp'
};
// GRASP servers use Gitea-compatible API endpoints
// The npub is the owner, and the repo name is the repo
// Use the same fetching logic as Gitea
logger.debug({ npub, repo, baseUrl, originalUrl }, 'Fetching from GRASP server using Gitea-compatible API');
try {
const result = await fetchFromGitea(npub, repo, baseUrl);
if (result) {
logger.info({ npub, repo, baseUrl, branchCount: result.branches?.length || 0, fileCount: result.files?.length || 0 }, 'Successfully fetched from GRASP server');
} else {
logger.warn({ npub, repo, baseUrl }, 'GRASP server returned null/empty metadata');
}
return result;
} catch (err) {
logger.error({ error: err instanceof Error ? err.message : String(err), errorStack: err instanceof Error ? err.stack : undefined, npub, repo, baseUrl }, 'Error fetching from GRASP server');
throw err;
}
}
/**
@@ -680,39 +691,53 @@ export async function fetchRepoMetadata(
npub: string,
repoName: string
): Promise<ApiRepoInfo | null> {
logger.info({ url, npub, repoName }, 'Parsing git URL for API fetch');
const parsed = parseGitUrl(url);
if (!parsed) {
logger.warn({ url }, 'Unable to parse git URL');
logger.warn({ url, npub, repoName, isGrasp: isGraspUrl(url) }, 'Unable to parse git URL - URL format not recognized');
return null;
}
const { platform, owner, repo, baseUrl } = parsed;
logger.info({ url, platform, owner, repo, baseUrl, npub, repoName }, 'Parsed git URL successfully');
const isCloned = await checkLocalRepo(npub, repoName);
let metadata: Partial<ApiRepoInfo> | null = null;
switch (platform) {
case 'github':
metadata = await fetchFromGitHub(owner, repo);
break;
case 'gitlab':
metadata = await fetchFromGitLab(owner, repo, baseUrl);
break;
case 'gitea':
metadata = await fetchFromGitea(owner, repo, baseUrl);
break;
case 'grasp':
metadata = await fetchFromGrasp(owner, repo, baseUrl, url);
break;
default:
logger.warn({ platform, url }, 'Unsupported platform');
return null;
try {
switch (platform) {
case 'github':
logger.debug({ url, owner, repo }, 'Fetching from GitHub API');
metadata = await fetchFromGitHub(owner, repo);
break;
case 'gitlab':
logger.debug({ url, owner, repo, baseUrl }, 'Fetching from GitLab API');
metadata = await fetchFromGitLab(owner, repo, baseUrl);
break;
case 'gitea':
logger.debug({ url, owner, repo, baseUrl }, 'Fetching from Gitea API');
metadata = await fetchFromGitea(owner, repo, baseUrl);
break;
case 'grasp':
logger.info({ url, owner, repo, baseUrl }, 'Fetching from GRASP server (Gitea-compatible API)');
metadata = await fetchFromGrasp(owner, repo, baseUrl, url);
break;
default:
logger.warn({ platform, url }, 'Unsupported platform');
return null;
}
} catch (err) {
logger.error({ error: err instanceof Error ? err.message : String(err), errorStack: err instanceof Error ? err.stack : undefined, url, platform, owner, repo }, 'Error fetching metadata from platform API');
return null;
}
if (!metadata) {
logger.warn({ url, platform, owner, repo }, 'Platform API returned null/empty metadata');
return null;
}
logger.debug({ url, platform, branchCount: metadata.branches?.length || 0, fileCount: metadata.files?.length || 0 }, 'Successfully fetched metadata from platform API');
return {
...metadata,
isCloned,

36
src/lib/styles/components.css

@@ -8,6 +8,9 @@
position: sticky;
top: 0;
z-index: 100;
display: block;
width: 100%;
box-sizing: border-box;
}
@media (max-width: 768px) {
@@ -16,17 +19,20 @@
}
}
/* First row: Name and actions */
.repo-header-top {
display: flex;
justify-content: space-between;
align-items: flex-start;
gap: 1rem;
margin-bottom: 0.5rem;
margin-bottom: 0;
flex-wrap: wrap;
width: 100%;
box-sizing: border-box;
}
@media (max-width: 768px) {
.repo-header-top {
margin-bottom: 0.25rem;
gap: 0.5rem;
}
}
@@ -139,16 +145,23 @@
opacity: 1 !important;
}
/* Second row: Description - MUST be on its own line */
.repo-description {
margin: 0.5rem 0;
display: block !important;
width: 100% !important;
margin: 0.75rem 0 0.5rem 0 !important;
padding: 0 !important;
font-size: 0.875rem;
color: var(--text-secondary, #666);
line-height: 1.5;
box-sizing: border-box;
clear: both;
float: none;
}
@media (max-width: 768px) {
.repo-description {
margin: 0.25rem 0;
margin: 0.5rem 0 0.25rem 0 !important;
}
}
@@ -415,7 +428,8 @@
.more-menu {
position: absolute;
top: calc(100% + 0.25rem);
left: 0;
right: 0;
left: auto;
background: var(--card-bg, #ffffff);
border: 1px solid var(--border-color, #e0e0e0);
border-radius: 0.375rem;
@@ -428,8 +442,8 @@
/* On mobile, ensure menu doesn't overflow screen */
@media (max-width: 768px) {
.repo-header-actions .more-menu {
left: 0;
right: auto;
right: 0;
left: auto;
max-width: calc(100vw - 1rem);
min-width: min(280px, calc(100vw - 1rem));
}
@@ -438,8 +452,8 @@
/* On very small screens, ensure menu doesn't get cut off */
@media (max-width: 480px) {
.repo-header-actions .more-menu {
left: 0;
right: auto;
right: 0;
left: auto;
max-width: calc(100vw - 0.5rem);
min-width: min(260px, calc(100vw - 0.5rem));
}
@@ -521,7 +535,9 @@ button.menu-item-danger:hover:not(:disabled) {
}
.repo-description {
font-size: 1rem;
font-size: 1rem !important;
margin-top: 0.75rem !important;
margin-bottom: 0.5rem !important;
}
}

92
src/lib/utils/api-repo-helper.ts

@@ -3,7 +3,8 @@
* Used by endpoints to fetch repo metadata without cloning
*/
import { fetchRepoMetadata, extractGitUrls, parseGitUrl } from '../services/git/api-repo-fetcher.js';
import { fetchRepoMetadata, parseGitUrl } from '../services/git/api-repo-fetcher.js';
import { extractCloneUrls } from './nostr-utils.js';
import type { NostrEvent } from '../types/nostr.js';
import logger from '../services/logger.js';
@@ -50,32 +51,103 @@ export async function tryApiFetch(
commits?: Array<{ sha: string; message: string; author: string; date: string }>;
} | null> {
try {
const cloneUrls = extractGitUrls(announcementEvent);
const cloneUrls = extractCloneUrls(announcementEvent);
if (cloneUrls.length === 0) {
logger.debug({ npub, repoName }, 'No clone URLs found for API fetch');
return null;
}
// Try each clone URL until one works
for (const url of cloneUrls) {
// Convert SSH URLs to HTTPS URLs for API fetching
const convertedUrls = cloneUrls.map(url => {
if (url.startsWith('git@')) {
// Convert SSH URL to HTTPS: git@host.com:owner/repo.git -> https://host.com/owner/repo.git
const sshMatch = url.match(/^git@([^:]+):(.+)$/);
if (sshMatch) {
const [, host, path] = sshMatch;
const httpsUrl = `https://${host}/${path}`;
logger.debug({ sshUrl: url, httpsUrl }, 'Converted SSH URL to HTTPS for API fetch');
return httpsUrl;
}
logger.warn({ url }, 'Unable to convert SSH URL to HTTPS, skipping');
return null;
}
return url;
}).filter((url): url is string => url !== null && (url.startsWith('http://') || url.startsWith('https://')));
if (convertedUrls.length === 0) {
logger.debug({ npub, repoName, totalUrls: cloneUrls.length, sshUrls: cloneUrls.filter(url => url.startsWith('git@')).length }, 'No usable clone URLs found for API fetch after conversion');
return null;
}
logger.debug({ npub, repoName, totalUrls: cloneUrls.length, convertedUrls: convertedUrls.length, originalHttpUrls: cloneUrls.filter(url => url.startsWith('http')).length, sshUrls: cloneUrls.filter(url => url.startsWith('git@')).length }, 'Converted clone URLs for API fetch');
// Prioritize GRASP servers (they use Gitea-compatible API)
// Sort URLs: GRASP URLs first, then others
const { isGraspUrl } = await import('../services/git/api-repo-fetcher.js');
const sortedUrls = [...convertedUrls].sort((a, b) => {
const aIsGrasp = isGraspUrl(a);
const bIsGrasp = isGraspUrl(b);
if (aIsGrasp && !bIsGrasp) return -1;
if (!aIsGrasp && bIsGrasp) return 1;
return 0;
});
logger.info({
npub,
repoName,
totalUrls: sortedUrls.length,
graspUrls: sortedUrls.filter(url => isGraspUrl(url)).length,
urls: sortedUrls.map((url, idx) => ({ index: idx + 1, url, isGrasp: isGraspUrl(url) }))
}, 'Starting API fetch attempts - will try each URL until one succeeds');
// Try each clone URL until one works (GRASP URLs first)
for (let i = 0; i < sortedUrls.length; i++) {
const url = sortedUrls[i];
try {
logger.info({ url, npub, repoName, isGrasp: isGraspUrl(url), attempt: i + 1, total: sortedUrls.length }, `[${i + 1}/${sortedUrls.length}] Attempting to fetch repo metadata from URL`);
const metadata = await fetchRepoMetadata(url, npub, repoName);
if (metadata) {
logger.info({
url,
npub,
repoName,
platform: metadata.platform,
branchCount: metadata.branches?.length || 0,
fileCount: metadata.files?.length || 0,
hasDefaultBranch: !!metadata.defaultBranch,
attempt: i + 1,
total: sortedUrls.length
}, 'Successfully fetched repo metadata via API');
// Return data even if some fields are empty (at least we got something)
return {
branches: metadata.branches,
defaultBranch: metadata.defaultBranch,
files: metadata.files,
commits: metadata.commits
branches: metadata.branches || [],
defaultBranch: metadata.defaultBranch || 'main',
files: metadata.files || [],
commits: metadata.commits || []
};
} else {
logger.warn({ url, npub, repoName, attempt: i + 1, total: sortedUrls.length }, `[${i + 1}/${sortedUrls.length}] fetchRepoMetadata returned null, trying next URL`);
}
} catch (err) {
logger.debug({ error: err, url, npub, repoName }, 'API fetch failed for URL, trying next');
logger.warn({
error: err instanceof Error ? err.message : String(err),
errorStack: err instanceof Error ? err.stack : undefined,
url,
npub,
repoName,
attempt: i + 1,
total: sortedUrls.length
}, `[${i + 1}/${sortedUrls.length}] API fetch threw error for URL, trying next`);
// Continue to next URL
continue;
}
}
logger.warn({ npub, repoName, totalUrls: sortedUrls.length, urls: sortedUrls }, 'All API fetch attempts failed for all clone URLs');
return null;
} catch (err) {
logger.warn({ error: err, npub, repoName }, 'Error attempting API fetch');
@@ -95,7 +167,7 @@ export async function tryApiFetchFile(
ref: string = 'main'
): Promise<{ content: string; encoding: string } | null> {
try {
const cloneUrls = extractGitUrls(announcementEvent);
const cloneUrls = extractCloneUrls(announcementEvent);
if (cloneUrls.length === 0) {
logger.debug({ npub, repoName, filePath }, 'No clone URLs found for API file fetch');

17
src/routes/api/repos/[npub]/[repo]/branches/+server.ts

@@ -74,17 +74,28 @@ export const GET: RequestHandler = createRepoGetHandler(
if (events.length > 0) {
// Try API-based fetching first (no cloning)
const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js');
const { extractCloneUrls } = await import('$lib/utils/nostr-utils.js');
const cloneUrls = extractCloneUrls(events[0]);
logger.debug({ npub: context.npub, repo: context.repo, cloneUrlCount: cloneUrls.length, cloneUrls }, 'Attempting API fallback for branches');
const apiData = await tryApiFetch(events[0], context.npub, context.repo);
if (apiData) {
if (apiData && apiData.branches && apiData.branches.length > 0) {
logger.debug({ npub: context.npub, repo: context.repo, branchCount: apiData.branches.length }, 'Successfully fetched branches via API fallback');
// Return API data directly without cloning
return json(apiData.branches);
}
// API fetch failed - repo is not cloned and API fetch didn't work
// Return 404 with helpful message suggesting to clone
// Check if we have clone URLs to provide better error message
const hasCloneUrls = cloneUrls.length > 0;
logger.warn({ npub: context.npub, repo: context.repo, hasCloneUrls, cloneUrlCount: cloneUrls.length }, 'API fallback failed for branches');
throw handleNotFoundError(
'Repository is not cloned locally and could not be fetched via API. Privileged users can clone this repository using the "Clone to Server" button.',
hasCloneUrls
? 'Repository is not cloned locally and could not be fetched via API from external clone URLs. Privileged users can clone this repository using the "Clone to Server" button.'
: 'Repository is not cloned locally and has no external clone URLs for API fallback. Privileged users can clone this repository using the "Clone to Server" button.',
{ operation: 'getBranches', npub: context.npub, repo: context.repo }
);
} else {

7
src/routes/api/repos/[npub]/[repo]/tree/+server.ts

@@ -34,9 +34,14 @@ export const GET: RequestHandler = createRepoGetHandler(
if (announcement) {
// Try API-based fetching first (no cloning)
const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js');
const { extractCloneUrls: extractCloneUrlsHelper } = await import('$lib/utils/nostr-utils.js');
const cloneUrlsForLogging = extractCloneUrlsHelper(announcement);
logger.debug({ npub: context.npub, repo: context.repo, cloneUrlCount: cloneUrlsForLogging.length, cloneUrls: cloneUrlsForLogging, path: context.path }, 'Attempting API fallback for tree');
const apiData = await tryApiFetch(announcement, context.npub, context.repo);
if (apiData && apiData.files) {
if (apiData && apiData.files && apiData.files.length > 0) {
logger.debug({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback');
// Return API data directly without cloning
const path = context.path || '';

126
src/routes/api/search/+server.ts

@@ -19,6 +19,53 @@ import { eventCache } from '$lib/services/nostr/event-cache.js';
import { decodeNostrAddress } from '$lib/services/nostr/nip19-utils.js';
import logger from '$lib/services/logger.js';
// Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
/**
* Check if an event is a parameterized replaceable event (NIP-33)
* Parameterized replaceable events have:
* - kind >= 10000 && kind < 20000 (replaceable range) with a 'd' tag, OR
* - kind >= 30000 && kind < 40000 (addressable range) with a 'd' tag
*/
function isParameterizedReplaceable(event: NostrEvent): boolean {
const hasDTag = event.tags.some(t => t[0] === 'd' && t[1]);
if (!hasDTag) return false;
// Replaceable range (NIP-33)
if (event.kind >= 10000 && event.kind < 20000) {
return true;
}
// Addressable range (NIP-34) - also parameterized replaceable
if (event.kind >= 30000 && event.kind < 40000) {
return true;
}
return false;
}
/**
* Get the deduplication key for an event
* For replaceable events: kind:pubkey
* For parameterized replaceable events: kind:pubkey:d-tag
* For regular events: event.id
*/
function getDeduplicationKey(event: NostrEvent): string {
if (REPLACEABLE_KINDS.includes(event.kind)) {
return `${event.kind}:${event.pubkey}`;
}
if (isParameterizedReplaceable(event)) {
const dTag = event.tags.find(t => t[0] === 'd')?.[1] || '';
return `${event.kind}:${event.pubkey}:${dTag}`;
}
// Special handling for gitrepublic-write-proof kind 24 events - treat as replaceable
if (event.kind === KIND.PUBLIC_MESSAGE && event.content && event.content.includes('gitrepublic-write-proof')) {
return `24:${event.pubkey}:write-proof`;
}
return event.id;
}
export const GET: RequestHandler = async (event) => {
const query = event.url.searchParams.get('q');
const limit = parseInt(event.url.searchParams.get('limit') || '20', 10);
@@ -136,31 +183,48 @@ export const GET: RequestHandler = async (event) => {
logger.debug({ error: err }, 'Failed to get relay results');
}
// Step 4 & 5: Deduplicate results (cached + relay)
// Step 4 & 5: Deduplicate results (cached + relay) using deduplication keys
// For replaceable/parameterized replaceable events, use kind:pubkey:d-tag as key
// For regular events, use event.id as key
const allResults = new Map<string, NostrEvent>();
// Add cached results first
cachedResults.forEach(r => allResults.set(r.id, r));
// Add cached results first, using deduplication keys
cachedResults.forEach(r => {
const key = getDeduplicationKey(r);
const existing = allResults.get(key);
if (!existing || r.created_at > existing.created_at) {
allResults.set(key, r);
}
});
// Add relay results (prefer newer events)
// Add relay results (prefer newer events), using deduplication keys
relayResults.forEach(r => {
const existing = allResults.get(r.id);
const key = getDeduplicationKey(r);
const existing = allResults.get(key);
if (!existing || r.created_at > existing.created_at) {
allResults.set(r.id, r);
allResults.set(key, r);
}
});
// Step 6: Update cache with ALL repos found from relays (not just filtered ones)
// This ensures everything discovered during the search is cached for future use
// Use deduplication keys to ensure only the newest event per kind:pubkey:d-tag is cached
if (allRelayRepos.length > 0) {
const repoMap = new Map<string, NostrEvent>();
// Start with cached repos
cachedRepos.forEach(r => repoMap.set(r.id, r));
// Add ALL repos found from relays (prefer newer events)
// Start with cached repos, using deduplication keys
cachedRepos.forEach(r => {
const key = getDeduplicationKey(r);
const existing = repoMap.get(key);
if (!existing || r.created_at > existing.created_at) {
repoMap.set(key, r);
}
});
// Add ALL repos found from relays (prefer newer events), using deduplication keys
allRelayRepos.forEach(r => {
const existing = repoMap.get(r.id);
const key = getDeduplicationKey(r);
const existing = repoMap.get(key);
if (!existing || r.created_at > existing.created_at) {
repoMap.set(r.id, r);
repoMap.set(key, r);
}
});
// Update cache with merged results
@@ -174,11 +238,18 @@
} else if (relayResults.length > 0) {
// Fallback: if we only have filtered results, cache those
const repoMap = new Map<string, NostrEvent>();
cachedRepos.forEach(r => repoMap.set(r.id, r));
cachedRepos.forEach(r => {
const key = getDeduplicationKey(r);
const existing = repoMap.get(key);
if (!existing || r.created_at > existing.created_at) {
repoMap.set(key, r);
}
});
relayResults.forEach(r => {
const existing = repoMap.get(r.id);
const key = getDeduplicationKey(r);
const existing = repoMap.get(key);
if (!existing || r.created_at > existing.created_at) {
repoMap.set(r.id, r);
repoMap.set(key, r);
}
});
eventCache.set(cacheKey, Array.from(repoMap.values()));
@@ -209,9 +280,17 @@ export const GET: RequestHandler = async (event) => {
announcement?: NostrEvent;
}> = [];
logger.debug({
mergedResultsCount: mergedResults.length,
processingCount: Math.min(mergedResults.length, limit * 2)
}, 'Processing merged results for privacy filtering');
for (const event of mergedResults.slice(0, limit * 2)) { // Get more to filter by privacy
const repoId = event.tags.find(t => t[0] === 'd')?.[1];
if (!repoId) continue;
if (!repoId) {
logger.debug({ eventId: event.id }, 'Skipping event without d-tag');
continue;
}
// Check privacy
const isPrivate = event.tags.some(t =>
@@ -231,7 +310,10 @@ export const GET: RequestHandler = async (event) => {
}
}
if (!canView) continue;
if (!canView) {
logger.debug({ eventId: event.id, repoId, isPrivate, hasUserPubkey: !!userPubkey }, 'Skipping event - cannot view');
continue;
}
const name = event.tags.find(t => t[0] === 'name')?.[1] || repoId;
const description = event.tags.find(t => t[0] === 'description')?.[1] || '';
@@ -276,8 +358,10 @@ export const GET: RequestHandler = async (event) => {
maintainers: allMaintainers,
announcement: event
});
} catch {
logger.debug({ eventId: event.id, repoId, name }, 'Added repo to results');
} catch (err) {
// Skip if npub encoding fails
logger.debug({ error: err, eventId: event.id, repoId }, 'Skipping event - npub encoding failed');
}
}
@@ -405,15 +489,17 @@ async function fetchFromRelays(
);
const resultsArrays = await Promise.allSettled(fetchPromises);
// Merge and deduplicate by event ID (all repos fetched)
// Merge and deduplicate using deduplication keys (all repos fetched)
// For replaceable/parameterized replaceable events, use kind:pubkey:d-tag as key
const allReposMap = new Map<string, NostrEvent>();
for (const result of resultsArrays) {
if (result.status === 'fulfilled') {
const results = result.value;
for (const event of results) {
const existing = allReposMap.get(event.id);
const key = getDeduplicationKey(event);
const existing = allReposMap.get(key);
if (!existing || event.created_at > existing.created_at) {
allReposMap.set(event.id, event);
allReposMap.set(key, event);
}
}
} else {

12
src/routes/repos/[npub]/[repo]/+page.svelte

@@ -4602,12 +4602,18 @@
{/if}
<main class="repo-view">
{#if isRepoCloned === false && canUseApiFallback}
{#if isRepoCloned === false && (canUseApiFallback || apiFallbackAvailable === null)}
<div class="read-only-banner">
<div class="banner-content">
<img src="/icons/alert-circle.svg" alt="Info" class="banner-icon" />
<span>This repository is displayed in <strong>read-only mode</strong> using data from external clone URLs. To enable editing and full features, clone this repository to the server.</span>
{#if hasUnlimitedAccess($userStore.userLevel)}
<span>
{#if apiFallbackAvailable === null}
Checking external clone URLs for read-only access...
{:else}
This repository is displayed in <strong>read-only mode</strong> using data from external clone URLs. To enable editing and full features, clone this repository to the server.
{/if}
</span>
{#if hasUnlimitedAccess($userStore.userLevel) && apiFallbackAvailable !== null}
<button
class="clone-button-banner"
onclick={cloneRepository}

31
src/routes/search/+page.svelte

@@ -244,7 +244,12 @@
}
}}
style="cursor: pointer;">
<h4>{repo.name}</h4>
<div class="repo-title-row">
<h4>{repo.name}</h4>
{#if repo.repoId}
<span class="repo-d-tag">{repo.repoId}</span>
{/if}
</div>
{#if repo.description}
<p class="repo-description">{repo.description}</p>
{/if}
@@ -405,4 +410,28 @@
color: var(--success-text);
border-color: var(--border-color);
}
.repo-title-row {
display: flex;
align-items: center;
gap: 0.75rem;
flex-wrap: wrap;
margin-bottom: 0.25rem;
}
.repo-title-row h4 {
margin: 0;
}
.repo-d-tag {
display: inline-block;
padding: 0.125rem 0.5rem;
background: var(--bg-secondary, #f0f0f0);
color: var(--text-secondary, #666);
border: 1px solid var(--border-color, #ddd);
border-radius: 0.25rem;
font-size: 0.75rem;
font-family: monospace;
font-weight: 500;
}
</style>

Loading…
Cancel
Save