Browse Source

bug-fixes

master
Silberengel 4 weeks ago
parent
commit
b0613a7698
  1. 14
      package-lock.json
  2. 2
      package.json
  3. 33
      src/lib/components/content/FileExplorer.svelte
  4. 101
      src/lib/services/content/git-repo-fetcher.ts
  5. 471
      src/lib/services/git/git-protocol-client.ts
  6. 49
      src/routes/api/gitea-proxy/[...path]/+server.ts
  7. 324
      src/routes/repos/[naddr]/+page.svelte

14
package-lock.json generated

@ -22,6 +22,7 @@
"@sveltejs/kit": "^2.0.0", "@sveltejs/kit": "^2.0.0",
"@sveltejs/vite-plugin-svelte": "^4.0.0-next.6", "@sveltejs/vite-plugin-svelte": "^4.0.0-next.6",
"@tanstack/svelte-virtual": "^3.0.0", "@tanstack/svelte-virtual": "^3.0.0",
"@types/pako": "^2.0.4",
"@types/ws": "^8.18.1", "@types/ws": "^8.18.1",
"asciidoctor": "3.0.x", "asciidoctor": "3.0.x",
"blurhash": "^2.0.5", "blurhash": "^2.0.5",
@ -34,6 +35,7 @@
"lucide-svelte": "^0.563.0", "lucide-svelte": "^0.563.0",
"marked": "^11.1.1", "marked": "^11.1.1",
"nostr-tools": "^2.22.1", "nostr-tools": "^2.22.1",
"pako": "^2.1.0",
"svelte": "^5.0.0", "svelte": "^5.0.0",
"unicode-emoji-json": "^0.8.0", "unicode-emoji-json": "^0.8.0",
"ws": "^8.19.0" "ws": "^8.19.0"
@ -3589,6 +3591,12 @@
"undici-types": "~7.16.0" "undici-types": "~7.16.0"
} }
}, },
"node_modules/@types/pako": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@types/pako/-/pako-2.0.4.tgz",
"integrity": "sha512-VWDCbrLeVXJM9fihYodcLiIv0ku+AlOa/TQ1SvYOaBuyrSKgEcro95LJyIsJ4vSo6BXIxOKxiJAat04CmST9Fw==",
"license": "MIT"
},
"node_modules/@types/resolve": { "node_modules/@types/resolve": {
"version": "1.20.2", "version": "1.20.2",
"resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz", "resolved": "https://registry.npmjs.org/@types/resolve/-/resolve-1.20.2.tgz",
@ -7216,6 +7224,12 @@
"dev": true, "dev": true,
"license": "BlueOak-1.0.0" "license": "BlueOak-1.0.0"
}, },
"node_modules/pako": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/pako/-/pako-2.1.0.tgz",
"integrity": "sha512-w+eufiZ1WuJYgPXbV/PO3NCMEc3xqylkKHzp8bxp1uW4qaSNQUkwmLLEc3kKsfz8lpV1F8Ht3U1Cm+9Srog2ug==",
"license": "(MIT AND Zlib)"
},
"node_modules/parent-module": { "node_modules/parent-module": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",

2
package.json

@ -36,6 +36,7 @@
"@sveltejs/kit": "^2.0.0", "@sveltejs/kit": "^2.0.0",
"@sveltejs/vite-plugin-svelte": "^4.0.0-next.6", "@sveltejs/vite-plugin-svelte": "^4.0.0-next.6",
"@tanstack/svelte-virtual": "^3.0.0", "@tanstack/svelte-virtual": "^3.0.0",
"@types/pako": "^2.0.4",
"@types/ws": "^8.18.1", "@types/ws": "^8.18.1",
"asciidoctor": "3.0.x", "asciidoctor": "3.0.x",
"blurhash": "^2.0.5", "blurhash": "^2.0.5",
@ -48,6 +49,7 @@
"lucide-svelte": "^0.563.0", "lucide-svelte": "^0.563.0",
"marked": "^11.1.1", "marked": "^11.1.1",
"nostr-tools": "^2.22.1", "nostr-tools": "^2.22.1",
"pako": "^2.1.0",
"svelte": "^5.0.0", "svelte": "^5.0.0",
"unicode-emoji-json": "^0.8.0", "unicode-emoji-json": "^0.8.0",
"ws": "^8.19.0" "ws": "^8.19.0"

33
src/lib/components/content/FileExplorer.svelte

@ -1,6 +1,8 @@
<script lang="ts"> <script lang="ts">
import type { GitFile, GitRepoInfo } from '../../services/content/git-repo-fetcher.js'; import type { GitFile, GitRepoInfo } from '../../services/content/git-repo-fetcher.js';
import { isGraspUrl } from '../../services/content/git-repo-fetcher.js';
import { fetchGitHubApi } from '../../services/github-api.js'; import { fetchGitHubApi } from '../../services/github-api.js';
import { fetchCommitTree, fetchGitObject, parseGitBlob } from '../../services/git/git-protocol-client.js';
import { browser } from '$app/environment'; import { browser } from '$app/environment';
// @ts-ignore - highlight.js default export works at runtime // @ts-ignore - highlight.js default export works at runtime
import hljs from 'highlight.js'; import hljs from 'highlight.js';
@ -110,6 +112,37 @@
try { try {
// Parse the repo URL to determine platform // Parse the repo URL to determine platform
const url = repoInfo.url; const url = repoInfo.url;
// For GRASP repositories, use git protocol to fetch files
if (isGraspUrl(url)) {
try {
// Get the HEAD commit SHA from the default branch
const defaultBranch = repoInfo.branches.find(b => b.name === repoInfo.defaultBranch);
const commitSha = defaultBranch?.commit?.sha || repoInfo.commits[0]?.sha;
if (commitSha) {
// Fetch the commit tree to get file SHA
const treeFiles = await fetchCommitTree(url, commitSha);
const fileEntry = treeFiles.find(f => f.path === file.path && f.type === 'file');
if (fileEntry && fileEntry.sha) {
// Fetch the blob object via git protocol
const blobObj = await fetchGitObject(url, fileEntry.sha);
if (blobObj && blobObj.type === 'blob') {
const content = parseGitBlob(blobObj);
if (content !== null) {
fileContent = content;
return;
}
}
}
}
} catch (graspError) {
console.warn('[FileExplorer] Failed to fetch file via git protocol, trying HTTP fallback:', graspError);
// Fall through to HTTP methods below
}
}
let apiUrl = ''; let apiUrl = '';
if (url.includes('github.com')) { if (url.includes('github.com')) {

101
src/lib/services/content/git-repo-fetcher.ts

@ -782,13 +782,54 @@ function parseInfoRefs(refsText: string): { branches: string[]; defaultBranch: s
let lineNumber = 0; let lineNumber = 0;
while (pos < refsText.length) { while (pos < refsText.length) {
lineNumber++; lineNumber++;
// Read length prefix (4 hex characters)
// Skip any whitespace (newlines, spaces, etc.) that might be between pkt-lines
while (pos < refsText.length && (refsText[pos] === '\n' || refsText[pos] === '\r' || refsText[pos] === ' ' || refsText[pos] === '\t')) {
pos++;
}
if (pos >= refsText.length) {
break;
}
// Read length prefix (always 4 hex characters in git pkt-line format)
// The length value includes the 4-byte prefix itself
if (pos + 4 > refsText.length) { if (pos + 4 > refsText.length) {
console.log(`[GRASP] Line ${lineNumber}: Not enough bytes for length prefix at pos ${pos}, remaining: ${refsText.length - pos}`); console.log(`[GRASP] Line ${lineNumber}: Not enough bytes for length prefix at pos ${pos}, remaining: ${refsText.length - pos}`);
break; break;
} }
const lengthHex = refsText.substring(pos, pos + 4);
const totalLength = parseInt(lengthHex, 16); // In git pkt-line format, length prefix is always 4 bytes
// If we see "0000", it might be:
// 1. End marker (0000)
// 2. Part of a longer hex sequence (e.g., "0000014e" where "014e" is the actual length)
// Check if "0000" is followed by more hex digits that form a valid length
let lengthHex = refsText.substring(pos, pos + 4);
let totalLength = parseInt(lengthHex, 16);
let prefixLength = 4; // Always 4 bytes in git protocol
// If we see "0000" and there are more hex digits, check if it's actually a longer length
if (totalLength === 0 && pos + 8 <= refsText.length) {
const next8Chars = refsText.substring(pos, pos + 8);
// Check if positions 4-7 form a valid length (skip the initial "0000")
const actualLengthHex = next8Chars.substring(4, 8);
const actualLength = parseInt(actualLengthHex, 16);
if (actualLength > 0 && actualLength < 65536) {
// The actual length is in positions 4-7, so we need to skip the initial "0000"
console.log(`[GRASP] Line ${lineNumber}: Found "0000" prefix, actual length at pos ${pos + 4}: "${actualLengthHex}" (${actualLength})`);
lengthHex = actualLengthHex;
totalLength = actualLength;
// Skip the initial "0000" - it's padding/marker, not part of the length
pos += 4; // Skip the "0000" prefix
}
}
// Log the actual bytes we're reading
const bytesAtPos = Array.from(refsText.substring(pos, pos + 8)).map(c => {
const code = c.charCodeAt(0);
return code < 32 || code > 126 ? `\\x${code.toString(16).padStart(2, '0')}` : c;
}).join('');
console.log(`[GRASP] Line ${lineNumber}: Reading length prefix at pos ${pos}: "${lengthHex}" (totalLength=${totalLength}, next 8 chars: "${bytesAtPos}")`);
if (totalLength === 0) { if (totalLength === 0) {
// End marker (0000) // End marker (0000)
@ -796,20 +837,28 @@ function parseInfoRefs(refsText: string): { branches: string[]; defaultBranch: s
break; break;
} }
// The length includes the 4-byte prefix, so data length is totalLength - 4 // The length includes the 4-byte prefix itself, so data length is totalLength - 4
const dataLength = totalLength - 4; const dataLength = totalLength - 4;
if (dataLength < 0 || pos + totalLength > refsText.length) { if (dataLength < 0 || pos + 4 + dataLength > refsText.length) {
console.warn(`[GRASP] Line ${lineNumber}: Invalid length at pos ${pos}, totalLength=${totalLength}, dataLength=${dataLength}, remaining=${refsText.length - pos}`); console.warn(`[GRASP] Line ${lineNumber}: Invalid length at pos ${pos}, totalLength=${totalLength}, dataLength=${dataLength}, remaining=${refsText.length - pos}`);
break; break;
} }
// Skip the 4-byte prefix and read the data // Skip the 4-byte prefix and read the data
pos += 4; const dataStartPos = pos + 4;
const line = refsText.substring(pos, pos + dataLength); const line = refsText.substring(dataStartPos, dataStartPos + dataLength);
pos += dataLength; pos = dataStartPos + dataLength; // Update position to end of this pkt-line
console.log(`[GRASP] Line ${lineNumber}: pos=${pos - dataLength - 4}, length=${totalLength}, dataLength=${dataLength}, first 60 chars:`, line.substring(0, 60).replace(/\0/g, '\\0')); console.log(`[GRASP] Line ${lineNumber}: dataStart=${dataStartPos}, dataEnd=${pos}, length=${totalLength}, dataLength=${dataLength}, first 60 chars:`, line.substring(0, 60).replace(/\0/g, '\\0'));
// Log what's at the next position for debugging
if (pos < refsText.length) {
const next8Chars = Array.from(refsText.substring(pos, pos + 8)).map(c => {
const code = c.charCodeAt(0);
return code < 32 || code > 126 ? `\\x${code.toString(16).padStart(2, '0')}` : c;
}).join('');
console.log(`[GRASP] Line ${lineNumber}: After reading, next 8 chars at pos ${pos}: "${next8Chars}"`);
}
// Skip service announcement // Skip service announcement
if (line.startsWith('# service=')) { if (line.startsWith('# service=')) {
@ -1168,13 +1217,28 @@ async function getDirectoryTreeAt(repoUrl: string, ref: string): Promise<GitFile
/** /**
* Fetch repository data from GRASP (Git Repository Access via Secure Protocol) * Fetch repository data from GRASP (Git Repository Access via Secure Protocol)
* GRASP servers use git protocol, not REST APIs * GRASP servers may use Gitea-compatible REST APIs or git protocol
* The npub is used as the owner identifier in the API calls
* Falls back to git protocol if API is not available
*/
/**
 * Resolve a GRASP repository by delegating straight to the git protocol.
 * GRASP servers expose git-upload-pack / info/refs rather than a
 * Gitea-style REST API, so no API probe is attempted (avoids 404 noise).
 *
 * @param npub        Owner identifier (nostr npub) used in the repo path.
 * @param repo        Repository name.
 * @param baseUrl     Server base URL; any '/api/v1' suffix is stripped.
 * @param originalUrl Exact URL supplied by the caller; preferred when present
 *                    because it preserves formatting (e.g. a '.git' suffix)
 *                    that matters for git protocol requests.
 */
async function fetchFromGrasp(npub: string, repo: string, baseUrl: string, originalUrl?: string): Promise<GitRepoInfo | null> {
  const serverRoot = baseUrl.replace('/api/v1', '');
  const repoUrl = originalUrl ? originalUrl : `${serverRoot}/${npub}/${repo}`;
  console.log('[GRASP] Fetching repo via git protocol (GRASP servers use git protocol, not REST API):', { npub, repo, repoUrl });
  return fetchFromGraspViaGitProtocol(npub, repo, repoUrl);
}
/**
* Fetch GRASP repository data using git protocol (fallback method)
*/ */
async function fetchFromGrasp(npub: string, repo: string, baseUrl: string): Promise<GitRepoInfo | null> { async function fetchFromGraspViaGitProtocol(npub: string, repo: string, repoUrl: string): Promise<GitRepoInfo | null> {
try { try {
// Construct the full repository URL console.log('[GRASP] Fetching repo via git protocol:', repoUrl);
const repoUrl = `${baseUrl.replace('/api/v1', '')}/${npub}/${repo}`;
console.log('[GRASP] Fetching repo:', repoUrl);
// Step 1: Get repository refs using git protocol // Step 1: Get repository refs using git protocol
console.log('[GRASP] Getting info/refs...'); console.log('[GRASP] Getting info/refs...');
@ -1307,7 +1371,7 @@ async function fetchFromGrasp(npub: string, repo: string, baseUrl: string): Prom
readme readme
}; };
console.log('[GRASP] Fetch complete:', { console.log('[GRASP] Fetch complete via git protocol:', {
name: result.name, name: result.name,
branches: result.branches.length, branches: result.branches.length,
commits: result.commits.length, commits: result.commits.length,
@ -1317,7 +1381,7 @@ async function fetchFromGrasp(npub: string, repo: string, baseUrl: string): Prom
return result; return result;
} catch (error) { } catch (error) {
console.error('[GRASP] Error fetching from GRASP:', error); console.error('[GRASP] Error fetching from GRASP via git protocol:', error);
return null; return null;
} }
} }
@ -1344,7 +1408,8 @@ export async function fetchGitRepo(url: string): Promise<GitRepoInfo | null> {
// OneDev uses a similar API structure to Gitea, so we can use the same handler // OneDev uses a similar API structure to Gitea, so we can use the same handler
return fetchFromGitea(owner, repo, baseUrl); return fetchFromGitea(owner, repo, baseUrl);
case 'grasp': case 'grasp':
return fetchFromGrasp(owner, repo, baseUrl); // Pass the original URL so we can use it directly for git protocol fallback
return fetchFromGrasp(owner, repo, baseUrl, url);
default: default:
console.error('Unsupported platform:', platform); console.error('Unsupported platform:', platform);
return null; return null;

471
src/lib/services/git/git-protocol-client.ts

@ -4,6 +4,8 @@
* Supports git-upload-pack protocol for fetching commits, trees, and blobs * Supports git-upload-pack protocol for fetching commits, trees, and blobs
*/ */
import { inflate, inflateRaw } from 'pako';
export interface GitObject { export interface GitObject {
type: 'commit' | 'tree' | 'blob' | 'tag'; type: 'commit' | 'tree' | 'blob' | 'tag';
sha: string; sha: string;
@ -43,13 +45,27 @@ export async function fetchGitObjects(
const cleanUrl = repoUrl.endsWith('.git') ? repoUrl : `${repoUrl}.git`; const cleanUrl = repoUrl.endsWith('.git') ? repoUrl : `${repoUrl}.git`;
const uploadPackUrl = `${cleanUrl}/git-upload-pack`; const uploadPackUrl = `${cleanUrl}/git-upload-pack`;
// Build git-upload-pack request // Build git-upload-pack request in pkt-line format
// Format: "want <sha>\n" for each object, then "done\n" // Pkt-line format: 4 hex chars (length including 4-char prefix) + data + newline
// Example: "0032want 72a544a5b98007d3ef640e020dd70fe2ed129bb0\n"
// The length includes the 4-char prefix itself
/**
 * Frame one command line in git pkt-line format.
 * The packet is prefixed by a 4-hex-digit length that counts the prefix
 * itself plus the payload (line text and trailing newline).
 * Example: "done" -> "0009done\n".
 */
function encodePktLine(line: string): string {
  const payload = `${line}\n`;
  // Total packet size = 4-char length prefix + payload.
  const packetLength = payload.length + 4;
  return packetLength.toString(16).padStart(4, '0') + payload;
}
let requestBody = ''; let requestBody = '';
// Send capabilities (optional but recommended)
const capabilities = 'multi_ack thin-pack side-band side-band-64k ofs-delta shallow deepen-since deepen-not deepen-relative no-progress include-tag';
for (const sha of wantShas) { for (const sha of wantShas) {
requestBody += `want ${sha}\n`; requestBody += encodePktLine(`want ${sha} ${capabilities}`);
} }
requestBody += 'done\n'; // Send "done" to indicate we're done with wants
requestBody += encodePktLine('done');
// Final "0000" to end the request
requestBody += '0000';
// Use proxy to avoid CORS // Use proxy to avoid CORS
const proxyUrl = `/api/gitea-proxy/git-upload-pack?url=${encodeURIComponent(uploadPackUrl)}`; const proxyUrl = `/api/gitea-proxy/git-upload-pack?url=${encodeURIComponent(uploadPackUrl)}`;
@ -68,16 +84,322 @@ export async function fetchGitObjects(
return objects; return objects;
} }
// Parse packfile response // Parse git-upload-pack response
// This is simplified - full packfile parsing is complex // The response is in pkt-line format initially, then the packfile
// Format: pkt-line messages (like "NAK\n" or "ACK <sha>\n"), then packfile
const arrayBuffer = await response.arrayBuffer(); const arrayBuffer = await response.arrayBuffer();
const data = new Uint8Array(arrayBuffer); const data = new Uint8Array(arrayBuffer);
// Basic packfile parsing (simplified) console.log(`[Git Protocol] Received ${data.length} bytes from git-upload-pack`);
// Real packfiles have a complex format with deltas, etc.
// For now, we'll use a simpler approach: fetch objects individually via HTTP
return objects; if (data.length === 0) {
console.warn('[Git Protocol] Empty response from git-upload-pack');
return objects;
}
// Log first 200 bytes for debugging
const firstBytes = Array.from(data.slice(0, Math.min(200, data.length)))
.map(b => b.toString(16).padStart(2, '0')).join(' ');
console.log(`[Git Protocol] First 200 bytes (hex): ${firstBytes}`);
// Find the packfile start (look for "PACK" header)
// Skip all pkt-line format messages first
const decoder = new TextDecoder();
let packfileStart = -1;
for (let i = 0; i < data.length - 4; i++) {
try {
const header = decoder.decode(data.slice(i, i + 4));
if (header === 'PACK') {
packfileStart = i;
break;
}
} catch {
// Invalid UTF-8, continue
}
}
// Check if it starts with PACK already
try {
const firstHeader = decoder.decode(data.slice(0, 4));
if (firstHeader === 'PACK') {
packfileStart = 0;
console.log('[Git Protocol] Packfile starts at beginning of response');
}
} catch {
// Not UTF-8, continue
}
// If not found at start, search for PACK header
if (packfileStart === -1) {
// Try to parse pkt-line format to find packfile
// Pkt-line format: 4 hex chars (length) + data
let pos = 0;
while (pos < data.length - 4) {
try {
const lengthHex = decoder.decode(data.slice(pos, pos + 4));
const length = parseInt(lengthHex, 16);
if (isNaN(length) || length < 0 || length > 65535) {
// Not a valid pkt-line, might be packfile start
try {
const header = decoder.decode(data.slice(pos, pos + 4));
if (header === 'PACK') {
packfileStart = pos;
console.log(`[Git Protocol] Found packfile at offset ${pos} after invalid pkt-line`);
break;
}
} catch {
// Not UTF-8, continue
}
pos++;
continue;
}
if (length === 0) {
// End marker (0000), packfile should follow
pos += 4;
if (pos < data.length - 4) {
try {
const header = decoder.decode(data.slice(pos, pos + 4));
if (header === 'PACK') {
packfileStart = pos;
console.log(`[Git Protocol] Found packfile at offset ${pos} after end marker`);
break;
}
} catch {
// Not UTF-8, continue
}
}
pos++;
continue;
}
// Valid pkt-line, skip it
pos += length;
} catch {
// Invalid UTF-8, try next position
pos++;
}
}
}
// If still not found, try binary search for "PACK"
if (packfileStart === -1) {
for (let i = 0; i < data.length - 4; i++) {
if (data[i] === 0x50 && data[i+1] === 0x41 && data[i+2] === 0x43 && data[i+3] === 0x4B) {
// "PACK" in ASCII
packfileStart = i;
console.log(`[Git Protocol] Found packfile at offset ${i} via binary search`);
break;
}
}
}
if (packfileStart === -1) {
// Try to decode as text to see what we got
try {
const textResponse = decoder.decode(data.slice(0, Math.min(500, data.length)));
console.warn('[Git Protocol] No packfile found in response. Response text:', textResponse);
} catch {
console.warn('[Git Protocol] No packfile found in response. Response is binary, first 100 bytes (hex):',
Array.from(data.slice(0, 100)).map((b: number) => b.toString(16).padStart(2, '0')).join(' '));
}
return objects;
}
// Extract packfile data
const packfileData = data.slice(packfileStart);
if (packfileData.length < 12) {
console.warn(`[Git Protocol] Packfile too small: ${packfileData.length} bytes (need at least 12)`);
return objects;
}
// Packfile format:
// Header: "PACK" (4 bytes) + version (4 bytes) + object count (4 bytes)
// Then objects in packfile format
// Footer: SHA1 checksum (20 bytes)
// Check packfile header
const header = new TextDecoder().decode(packfileData.slice(0, 4));
if (header !== 'PACK') {
console.warn('[Git Protocol] Invalid packfile header:', header);
return objects;
}
// Read version and object count (big-endian)
const view = new DataView(packfileData.buffer, packfileData.byteOffset);
const version = view.getUint32(4, false);
const objectCount = view.getUint32(8, false);
console.log(`[Git Protocol] Packfile found at offset ${packfileStart}, version: ${version}, objects: ${objectCount}`);
// Parse objects (simplified - only handles non-delta objects)
let pos = 12;
for (let i = 0; i < objectCount && pos < packfileData.length - 20; i++) {
try {
// Read object type and size (variable-length encoding)
let byte = packfileData[pos++];
const type = (byte >> 4) & 0x07; // 3 bits for type
let size = byte & 0x0F; // 4 bits for size start
let shift = 4;
// Continue reading size if MSB is set
while (byte & 0x80) {
if (pos >= packfileData.length) break;
byte = packfileData[pos++];
size |= (byte & 0x7F) << shift;
shift += 7;
}
// Packfile objects are zlib-compressed (raw deflate, no headers)
// We need to read the compressed data and decompress it
// The compressed size is variable, so we'll read incrementally
let objectData: Uint8Array | null = null;
let objectType: GitObject['type'];
const compressedStart = pos;
let compressedEnd = pos;
// Try to decompress chunks of data until we get the expected size
// Start with a reasonable estimate (compressed is usually 50-80% of uncompressed)
// We'll try progressively larger chunks until decompression succeeds with the right size
let compressedSize = Math.max(32, Math.ceil(size * 0.5)); // Start with 50% estimate, minimum 32 bytes
let attempts = 0;
const maxAttempts = 10; // Allow more attempts for finding the right compressed size
while (attempts < maxAttempts && pos + compressedSize <= packfileData.length) {
try {
const compressedChunk = packfileData.slice(pos, pos + compressedSize);
// Try to decompress using pako (raw deflate)
const decompressed = inflateRaw(compressedChunk);
if (decompressed.length === size) {
// Perfect match!
objectData = decompressed;
compressedEnd = pos + compressedSize;
break;
} else if (decompressed.length > size) {
// Got more than expected - this means we read too much compressed data
// Try a smaller chunk
compressedSize = Math.max(32, Math.floor(compressedSize * 0.8));
attempts++;
continue;
} else if (decompressed.length < size) {
// Need more compressed data - the stream wasn't complete
compressedSize = Math.min(Math.ceil(compressedSize * 1.3), data.length - pos);
attempts++;
continue;
}
} catch (decompressError) {
// Decompression failed - might need more data (incomplete stream) or wrong starting position
if (attempts < 3) {
// Early attempts - try reading more data (incomplete stream)
compressedSize = Math.min(Math.ceil(compressedSize * 1.5), packfileData.length - pos);
attempts++;
continue;
} else {
// Multiple failures - might be wrong format or corrupted
console.warn(`[Git Protocol] Failed to decompress object ${i} after ${attempts} attempts:`, decompressError);
// Skip this object - approximate position (assume ~60% compression)
pos += Math.min(Math.ceil(size * 0.6), packfileData.length - pos);
break;
}
}
}
if (!objectData) {
console.warn(`[Git Protocol] Could not decompress object ${i}, skipping`);
// Skip this object - approximate position
pos += Math.min(size / 2, packfileData.length - pos);
continue;
}
pos = compressedEnd; // Update position after reading compressed data
if (type === 1) { // commit
objectType = 'commit';
} else if (type === 2) { // tree
objectType = 'tree';
} else if (type === 3) { // blob
objectType = 'blob';
} else if (type === 4) { // tag
objectType = 'tag';
} else if (type === 6 || type === 7) { // OFS_DELTA or REF_DELTA - skip for now
console.warn(`[Git Protocol] Delta object at index ${i}, skipping (not implemented)`);
// Skip delta base reference and data
if (type === 7) { // REF_DELTA has 20-byte base SHA
pos += 20;
} else { // OFS_DELTA has variable-length offset
let offsetByte = packfileData[pos++];
let offset = offsetByte & 0x7F;
let shift = 7;
while (offsetByte & 0x80) {
offsetByte = packfileData[pos++];
offset = ((offset + 1) << 7) | (offsetByte & 0x7F);
}
}
// Skip delta data (compressed) - read size first
let deltaSize = 0;
let deltaByte = packfileData[pos++];
deltaSize = deltaByte & 0x7F;
let deltaShift = 7;
while (deltaByte & 0x80) {
deltaByte = packfileData[pos++];
deltaSize |= (deltaByte & 0x7F) << deltaShift;
deltaShift += 7;
}
// Skip the actual compressed delta data (approximate)
pos += Math.min(deltaSize * 2, packfileData.length - pos);
continue;
} else {
console.warn(`[Git Protocol] Unknown object type ${type} at index ${i}`);
continue;
}
pos = compressedEnd; // Update position
// Store object temporarily (we'll match by SHA after hashing)
const tempKey = `obj_${i}`;
objects.set(tempKey, {
type: objectType,
sha: tempKey, // Temporary, will be replaced after hashing
size: objectData.length,
data: objectData
});
} catch (error) {
console.warn(`[Git Protocol] Error parsing object ${i}:`, error);
break;
}
}
// Match objects to requested SHAs by hashing
// This is a simplified approach - we hash each object and match
const matchedObjects = new Map<string, GitObject>();
for (const [tempSha, obj] of objects.entries()) {
// Calculate actual SHA1 using Web Crypto API
try {
const header = `${obj.type} ${obj.size}\0`;
const headerBytes = new TextEncoder().encode(header);
const combined = new Uint8Array(headerBytes.length + obj.data.length);
combined.set(headerBytes, 0);
combined.set(obj.data, headerBytes.length);
const hashBuffer = await crypto.subtle.digest('SHA-1', combined);
const hashArray = Array.from(new Uint8Array(hashBuffer));
const actualSha = hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
if (wantShas.includes(actualSha)) {
matchedObjects.set(actualSha, { ...obj, sha: actualSha });
}
} catch (error) {
console.warn(`[Git Protocol] Error hashing object:`, error);
}
}
console.log(`[Git Protocol] Matched ${matchedObjects.size} of ${wantShas.length} requested objects`);
return matchedObjects;
} catch (error) { } catch (error) {
console.error('Error fetching git objects:', error); console.error('Error fetching git objects:', error);
return objects; return objects;
@ -86,7 +408,8 @@ export async function fetchGitObjects(
/** /**
* Fetch a single git object by SHA * Fetch a single git object by SHA
* Uses HTTP endpoint with decompression via proxy * Uses GRASP/git-natural-api HTTP endpoint: /objects/{prefix}/{suffix}
* Handles zlib decompression client-side (raw deflate format)
*/ */
export async function fetchGitObject( export async function fetchGitObject(
repoUrl: string, repoUrl: string,
@ -95,56 +418,150 @@ export async function fetchGitObject(
try { try {
const cleanUrl = repoUrl.endsWith('.git') ? repoUrl : `${repoUrl}.git`; const cleanUrl = repoUrl.endsWith('.git') ? repoUrl : `${repoUrl}.git`;
// Try HTTP endpoint with decompression (proxy handles zlib decompression) // GRASP/git-natural-api uses simple HTTP endpoint: /objects/{prefix}/{suffix}
const objectUrl = `${cleanUrl}/objects/${sha.substring(0, 2)}/${sha.substring(2)}`; const objectUrl = `${cleanUrl}/objects/${sha.substring(0, 2)}/${sha.substring(2)}`;
try { try {
// Always use proxy for GRASP URLs to avoid CORS issues
// The proxy returns raw compressed data that we decompress client-side
const proxyUrl = `/api/gitea-proxy/git-object?url=${encodeURIComponent(objectUrl)}`; const proxyUrl = `/api/gitea-proxy/git-object?url=${encodeURIComponent(objectUrl)}`;
const response = await fetch(proxyUrl); const response = await fetch(proxyUrl);
if (response.ok) {
const data = new Uint8Array(await response.arrayBuffer()); if (!response.ok) {
// Parse git object format (already decompressed by proxy) if (response.status === 404) {
return parseGitObject(data, sha); console.warn(`[Git Protocol] Object not found: ${sha} (404)`);
return null;
}
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
const data = new Uint8Array(await response.arrayBuffer());
// Git objects can be:
// 1. Zlib-compressed (with zlib headers) - when served via HTTP /objects/{prefix}/{suffix}
// 2. Raw deflate (no headers) - when in packfiles
// 3. Already decompressed - some servers serve them this way
//
// Strategy: Check format and decompress accordingly
// Check if data starts with zlib header (0x78 0x01, 0x78 0x9C, 0x78 0xDA, etc.)
const hasZlibHeader = data.length >= 2 && data[0] === 0x78 &&
(data[1] === 0x01 || data[1] === 0x9C || data[1] === 0xDA || data[1] === 0x5E);
// First, try parsing as already decompressed (fastest check)
const directParse = parseGitObject(data, sha);
if (directParse) {
console.log(`[Git Protocol] Object ${sha} was already decompressed`);
return directParse;
}
// If direct parse failed and data looks compressed, try decompression
let decompressed: Uint8Array;
try {
if (hasZlibHeader) {
// Zlib-compressed (with headers) - use inflate
decompressed = inflate(data);
console.log(`[Git Protocol] Successfully decompressed object ${sha} (zlib, ${data.length} -> ${decompressed.length} bytes)`);
} else {
// Raw deflate (no headers) - use inflateRaw
decompressed = inflateRaw(data);
console.log(`[Git Protocol] Successfully decompressed object ${sha} (raw deflate, ${data.length} -> ${decompressed.length} bytes)`);
}
} catch (decompressError) {
// Decompression failed - log for debugging
console.warn(`[Git Protocol] Decompression failed for ${sha}:`, decompressError);
console.warn(`[Git Protocol] Data length: ${data.length}, first 20 bytes (hex):`,
Array.from(data.slice(0, 20)).map(b => b.toString(16).padStart(2, '0')).join(' '));
console.warn(`[Git Protocol] Has zlib header: ${hasZlibHeader}`);
return null;
} }
// Parse decompressed git object format: "<type> <size>\0<data>"
return parseGitObject(decompressed, sha);
} catch (error) { } catch (error) {
console.warn(`Failed to fetch git object via HTTP: ${error}`); console.warn(`[Git Protocol] Failed to fetch git object via HTTP: ${error}`);
} }
// Fallback: use git-upload-pack for single object // Fallback: use git-upload-pack for single object (only if HTTP fails)
console.log(`[Git Protocol] Falling back to git-upload-pack for ${sha}`);
const objects = await fetchGitObjects(repoUrl, [sha]); const objects = await fetchGitObjects(repoUrl, [sha]);
return objects.get(sha) || null; return objects.get(sha) || null;
} catch (error) { } catch (error) {
console.error(`Error fetching git object ${sha}:`, error); console.error(`[Git Protocol] Error fetching git object ${sha}:`, error);
return null; return null;
} }
} }
/** /**
* Parse git object from raw data (already decompressed by proxy) * Parse git object from raw data (decompressed)
* Git objects have header: "<type> <size>\0<data>" * Git objects have header: "<type> <size>\0<data>"
* The null byte (\0) separates the header from the content
*/ */
function parseGitObject(data: Uint8Array, sha: string): GitObject | null { function parseGitObject(data: Uint8Array, sha: string): GitObject | null {
try { try {
// Data is already decompressed by proxy if (data.length === 0) {
// Format: "commit 1234\0<data>" or "tree 5678\0<data>" etc. console.warn(`[Git Protocol] Empty data for object ${sha}`);
const text = new TextDecoder().decode(data); return null;
const headerMatch = text.match(/^(\w+) (\d+)\0/); }
// Find the null byte that separates header from content
let nullByteIndex = -1;
for (let i = 0; i < Math.min(100, data.length); i++) {
if (data[i] === 0) {
nullByteIndex = i;
break;
}
}
if (nullByteIndex === -1) {
// No null byte found - this is not a valid git object format
// Log first bytes for debugging
const firstBytes = Array.from(data.slice(0, Math.min(50, data.length)))
.map(b => b.toString(16).padStart(2, '0')).join(' ');
const firstText = new TextDecoder('utf-8', { fatal: false }).decode(data.slice(0, Math.min(50, data.length)));
console.warn(`[Git Protocol] No null byte found in object ${sha}`);
console.warn(`[Git Protocol] First 50 bytes (hex): ${firstBytes}`);
console.warn(`[Git Protocol] First 50 bytes (text): ${firstText}`);
return null;
}
// Parse header: "<type> <size>\0"
const headerBytes = data.slice(0, nullByteIndex);
const headerText = new TextDecoder('utf-8', { fatal: false }).decode(headerBytes);
const headerMatch = headerText.match(/^(\w+) (\d+)$/);
if (!headerMatch) { if (!headerMatch) {
console.warn(`[Git Protocol] Invalid header format for ${sha}: "${headerText}"`);
console.warn(`[Git Protocol] Header bytes (hex):`,
Array.from(headerBytes).map(b => b.toString(16).padStart(2, '0')).join(' '));
return null; return null;
} }
const type = headerMatch[1] as GitObject['type']; const type = headerMatch[1] as GitObject['type'];
const size = parseInt(headerMatch[2], 10); const size = parseInt(headerMatch[2], 10);
const contentStart = headerMatch[0].length;
if (isNaN(size) || size < 0) {
console.warn(`[Git Protocol] Invalid size in header for ${sha}: "${headerMatch[2]}"`);
return null;
}
const contentStart = nullByteIndex + 1; // Skip the null byte
const content = data.slice(contentStart); const content = data.slice(contentStart);
// Verify we have enough data
if (content.length < size) {
console.warn(`[Git Protocol] Content size mismatch for ${sha}: expected ${size}, got ${content.length}`);
// Still return what we have, but log the issue
}
return { return {
type, type,
sha, sha,
size, size,
data: content data: content.slice(0, size) // Take only the expected size
}; };
} catch (error) { } catch (error) {
console.error('Error parsing git object:', error); console.error(`[Git Protocol] Error parsing git object ${sha}:`, error);
return null; return null;
} }
} }

49
src/routes/api/gitea-proxy/[...path]/+server.ts

@ -1,9 +1,4 @@
import type { RequestHandler } from '@sveltejs/kit'; import type { RequestHandler } from '@sveltejs/kit';
import { inflateRaw, gunzip } from 'zlib';
import { promisify } from 'util';
const inflateRawAsync = promisify(inflateRaw);
const gunzipAsync = promisify(gunzip);
/** /**
* Proxy endpoint for Git hosting API requests (Gitea, GitLab, OneDev, etc.) to avoid CORS issues * Proxy endpoint for Git hosting API requests (Gitea, GitLab, OneDev, etc.) to avoid CORS issues
@ -344,7 +339,9 @@ export const GET: RequestHandler = async ({ params, url }) => {
}); });
} }
// Special endpoint: git-object (for GRASP git object fetching with decompression) // Special endpoint: git-object (for GRASP git object fetching)
// Note: Decompression is handled client-side for better error handling
// This endpoint just proxies the raw compressed data
if (apiPath === 'git-object') { if (apiPath === 'git-object') {
const targetUrl = url.searchParams.get('url'); const targetUrl = url.searchParams.get('url');
if (!targetUrl) { if (!targetUrl) {
@ -363,32 +360,17 @@ export const GET: RequestHandler = async ({ params, url }) => {
return createErrorResponse(`Failed to fetch git object: ${response.status}`, response.status); return createErrorResponse(`Failed to fetch git object: ${response.status}`, response.status);
} }
const compressedData = Buffer.from(await response.arrayBuffer()); // Return raw compressed data - client will handle decompression
// This avoids "incorrect header check" errors from server-side decompression
const compressedData = await response.arrayBuffer();
try { return new Response(compressedData, {
// Git objects are zlib-compressed (deflate) status: 200,
// Try inflateRaw first (most common), then gunzip as fallback headers: {
let decompressed: Buffer; 'Content-Type': 'application/octet-stream',
try { ...CORS_HEADERS
decompressed = await inflateRawAsync(compressedData);
} catch {
// Fallback to gunzip if inflateRaw fails
decompressed = await gunzipAsync(compressedData);
} }
});
// Convert Buffer to Uint8Array for Response
const uint8Array = new Uint8Array(decompressed);
return new Response(uint8Array, {
status: 200,
headers: {
'Content-Type': 'application/octet-stream',
...CORS_HEADERS
}
});
} catch (error) {
console.error('Error decompressing git object:', error);
return createErrorResponse('Failed to decompress git object', 500);
}
} }
// Standard Gitea/GitLab API proxy handling // Standard Gitea/GitLab API proxy handling
@ -424,8 +406,11 @@ export const GET: RequestHandler = async ({ params, url }) => {
const isReadmeRequest = apiPath.includes('contents') && const isReadmeRequest = apiPath.includes('contents') &&
(apiPath.toLowerCase().includes('readme') || apiPath.toLowerCase().includes('readme')); (apiPath.toLowerCase().includes('readme') || apiPath.toLowerCase().includes('readme'));
if (response.status === 404 && isReadmeRequest) { // Skip logging 404s for GRASP repos - they don't support REST API, use git protocol instead
// Silently skip - expected when trying README.adoc, README.md, etc. const isGraspRepo = apiPath.includes('npub1') || targetUrl.includes('npub1');
if (response.status === 404 && (isReadmeRequest || isGraspRepo)) {
// Silently skip - expected for README attempts or GRASP repos (which use git protocol)
} else if (response.status === 404) { } else if (response.status === 404) {
// Log other 404s with context // Log other 404s with context
console.warn('[Gitea Proxy] 404 Not Found:', { console.warn('[Gitea Proxy] 404 Not Found:', {

324
src/routes/repos/[naddr]/+page.svelte

@ -287,6 +287,53 @@
} }
} }
// Helper function to filter issues that match this repo
function filterMatchingIssues(issueEvents: NostrEvent[]): NostrEvent[] {
if (!repoEvent) return [];
const gitUrls = extractGitUrls(repoEvent);
const dTag = repoEvent.tags.find(t => Array.isArray(t) && t[0] === 'd')?.[1] || '';
const repoATag = dTag ? `${repoEvent.kind}:${repoEvent.pubkey}:${dTag}` : null;
const repoEventId = repoEvent.id;
return issueEvents.filter(issue => {
// Check if issue references this repo via 'a' tag
if (repoATag) {
const aTags = issue.tags.filter(t => t[0] === 'a').map(t => t[1]);
if (aTags.includes(repoATag)) {
return true;
}
}
// Check if issue references this repo via 'e' tag
const eTags = issue.tags.filter(t => t[0] === 'e').map(t => t[1]);
if (eTags.includes(repoEventId)) {
return true;
}
// Check if issue references this repo via 'r' tag (git URLs)
const rTags = issue.tags.filter(t => t[0] === 'r').map(t => t[1]);
for (const gitUrl of gitUrls) {
if (rTags.includes(gitUrl)) {
return true;
}
}
return false;
});
}
// Helper function to merge new issues into existing list (deduplicate and sort)
function mergeIssues(newIssues: NostrEvent[]): void {
const existingIds = new Set(issues.map(i => i.id));
const uniqueNewIssues = newIssues.filter(i => !existingIds.has(i.id));
if (uniqueNewIssues.length > 0) {
issues = [...issues, ...uniqueNewIssues]
.sort((a, b) => b.created_at - a.created_at);
}
}
async function loadIssues() { async function loadIssues() {
if (!repoEvent) return; if (!repoEvent) return;
@ -295,7 +342,17 @@
const gitUrls = extractGitUrls(repoEvent); const gitUrls = extractGitUrls(repoEvent);
const relays = relayManager.getProfileReadRelays(); const relays = relayManager.getProfileReadRelays();
// Batch fetch all issues that reference this repo // Step 1: Load cached issues immediately (cache-first)
const cachedIssues = await getEventsByKind(KIND.ISSUE, 500); // Get more than needed to filter
const matchingCachedIssues = filterMatchingIssues(cachedIssues);
// Show cached issues immediately
if (matchingCachedIssues.length > 0) {
issues = matchingCachedIssues.sort((a, b) => b.created_at - a.created_at);
loadingIssues = false; // Show cached issues immediately, don't wait for relays
}
// Step 2: Fetch from relays in background and update progressively
const filters: any[] = []; const filters: any[] = [];
// Search for issues that reference the repo event ID // Search for issues that reference the repo event ID
@ -316,57 +373,31 @@
// Search for issues by the repo author (issues might be created by repo maintainers) // Search for issues by the repo author (issues might be created by repo maintainers)
filters.push({ authors: [repoEvent.pubkey], kinds: [KIND.ISSUE], limit: 100 }); filters.push({ authors: [repoEvent.pubkey], kinds: [KIND.ISSUE], limit: 100 });
// Batch fetch all issues in parallel with cache-first strategy // Fetch from relays in parallel, updating issues as they arrive
const issueEventsArrays = await Promise.all( const fetchPromises = filters.map(async (filter) => {
filters.map(filter => try {
nostrClient.fetchEvents([filter], relays, { const events = await nostrClient.fetchEvents([filter], relays, {
useCache: 'cache-first', // Prioritize cache for faster loading useCache: 'cache-first', // Still use cache, but we already showed cached above
cacheResults: true cacheResults: true
}) });
)
);
// Flatten and deduplicate
const issueEvents: NostrEvent[] = [];
for (const events of issueEventsArrays) {
issueEvents.push(...events);
}
// Deduplicate
const uniqueIssues = Array.from(new Map(issueEvents.map(e => [e.id, e])).values());
// Filter to only include issues that actually match this repo
// Check if issue has 'a' tag matching this repo, or 'e' tag matching repo event ID, or 'r' tag matching repo URLs
const repoATag = dTag ? `${repoEvent.kind}:${repoEvent.pubkey}:${dTag}` : null;
const repoEventId = repoEvent.id;
const matchingIssues = uniqueIssues.filter(issue => {
// Check if issue references this repo via 'a' tag
if (repoATag) {
const aTags = issue.tags.filter(t => t[0] === 'a').map(t => t[1]);
if (aTags.includes(repoATag)) {
return true;
}
}
// Check if issue references this repo via 'e' tag // Filter and merge new issues as they arrive
const eTags = issue.tags.filter(t => t[0] === 'e').map(t => t[1]); const matchingEvents = filterMatchingIssues(events);
if (eTags.includes(repoEventId)) { if (matchingEvents.length > 0) {
return true; mergeIssues(matchingEvents);
}
// Check if issue references this repo via 'r' tag (git URLs)
const rTags = issue.tags.filter(t => t[0] === 'r').map(t => t[1]);
for (const gitUrl of gitUrls) {
if (rTags.includes(gitUrl)) {
return true;
} }
} catch (error) {
// Individual filter failures are non-critical
console.warn('Failed to fetch issues for filter:', filter, error);
} }
return false;
}); });
issues = matchingIssues.sort((a, b) => b.created_at - a.created_at); // Wait for all fetches to complete, but UI already shows cached issues
loadingIssues = false; // Issues are loaded, show them immediately await Promise.allSettled(fetchPromises);
// Final sort after all fetches complete
issues = [...issues].sort((a, b) => b.created_at - a.created_at);
loadingIssues = false;
// Load statuses, comments, and profiles in background (don't wait) // Load statuses, comments, and profiles in background (don't wait)
// This allows the UI to show issues immediately // This allows the UI to show issues immediately
@ -391,64 +422,73 @@
if (issues.length === 0) return; if (issues.length === 0) return;
try { try {
const issueIds = issues.map(i => i.id);
const relays = relayManager.getProfileReadRelays(); const relays = relayManager.getProfileReadRelays();
// Status events are different kinds: 1630 (Open), 1631 (Applied/Merged/Resolved), 1632 (Closed), 1633 (Draft) // Sort issues by newest first (created_at descending)
// They have "e" tags pointing to issues with marker "root" const sortedIssues = [...issues].sort((a, b) => b.created_at - a.created_at);
const statuses = await nostrClient.fetchEvents(
[{
'#e': issueIds,
kinds: [KIND.STATUS_OPEN, KIND.STATUS_APPLIED, KIND.STATUS_CLOSED, KIND.STATUS_DRAFT],
limit: 200
}],
relays,
{ useCache: 'cache-first', cacheResults: true } // Prioritize cache
);
// Get the latest status for each issue (statuses are replaceable per pubkey) // Process in batches of 50, starting with newest issues first
// For each issue, get the latest status from each pubkey, then take the most recent overall const BATCH_SIZE = 50;
const statusMap = new Map<string, NostrEvent>(); const statusMap = new Map<string, NostrEvent>();
const statusesByIssue = new Map<string, Map<string, NostrEvent>>(); // issueId -> pubkey -> status const statusesByIssue = new Map<string, Map<string, NostrEvent>>(); // issueId -> pubkey -> status
for (const status of statuses) { for (let i = 0; i < sortedIssues.length; i += BATCH_SIZE) {
// Find the "e" tag with marker "root" (or just the first "e" tag if no marker) const batch = sortedIssues.slice(i, i + BATCH_SIZE);
const eTag = status.tags.find(t => t[0] === 'e' && (t.length < 3 || t[2] === 'root')); const issueIds = batch.map(issue => issue.id);
const fallbackETag = status.tags.find(t => t[0] === 'e');
const issueId = (eTag && eTag[1] && issueIds.includes(eTag[1])) // Status events are different kinds: 1630 (Open), 1631 (Applied/Merged/Resolved), 1632 (Closed), 1633 (Draft)
? eTag[1] // They have "e" tags pointing to issues with marker "root"
: (fallbackETag && fallbackETag[1] && issueIds.includes(fallbackETag[1])) const statuses = await nostrClient.fetchEvents(
? fallbackETag[1] [{
: null; '#e': issueIds,
kinds: [KIND.STATUS_OPEN, KIND.STATUS_APPLIED, KIND.STATUS_CLOSED, KIND.STATUS_DRAFT],
if (issueId) { limit: 200
// Group by issue and pubkey (replaceable events) }],
if (!statusesByIssue.has(issueId)) { relays,
statusesByIssue.set(issueId, new Map()); { useCache: 'cache-first', cacheResults: true } // Prioritize cache
} );
const pubkeyMap = statusesByIssue.get(issueId)!;
const existing = pubkeyMap.get(status.pubkey); // Process statuses for this batch
if (!existing || status.created_at > existing.created_at) { for (const status of statuses) {
pubkeyMap.set(status.pubkey, status); // Find the "e" tag with marker "root" (or just the first "e" tag if no marker)
// Handle both 'e' and 'E' tags (case-insensitive)
const eTag = status.tags.find(t => (t[0] === 'e' || t[0] === 'E') && (t.length < 3 || t[2] === 'root'));
const fallbackETag = status.tags.find(t => t[0] === 'e' || t[0] === 'E');
const issueId = (eTag && eTag[1] && issueIds.includes(eTag[1]))
? eTag[1]
: (fallbackETag && fallbackETag[1] && issueIds.includes(fallbackETag[1]))
? fallbackETag[1]
: null;
if (issueId) {
// Group by issue and pubkey (replaceable events)
if (!statusesByIssue.has(issueId)) {
statusesByIssue.set(issueId, new Map());
}
const pubkeyMap = statusesByIssue.get(issueId)!;
const existing = pubkeyMap.get(status.pubkey);
if (!existing || status.created_at > existing.created_at) {
pubkeyMap.set(status.pubkey, status);
}
} }
} }
}
// For each issue, get the most recent status from any pubkey // Update statusMap after each batch (so UI can show statuses progressively)
for (const [issueId, pubkeyMap] of statusesByIssue.entries()) { for (const [issueId, pubkeyMap] of statusesByIssue.entries()) {
let latestStatus: NostrEvent | null = null; let latestStatus: NostrEvent | null = null;
for (const status of pubkeyMap.values()) { for (const status of pubkeyMap.values()) {
if (!latestStatus || status.created_at > latestStatus.created_at) { if (!latestStatus || status.created_at > latestStatus.created_at) {
latestStatus = status; latestStatus = status;
}
}
if (latestStatus) {
statusMap.set(issueId, latestStatus);
} }
} }
if (latestStatus) {
statusMap.set(issueId, latestStatus);
}
}
issueStatuses = statusMap; // Update state after each batch so newest issues show statuses first
issueStatuses = new Map(statusMap);
}
} catch (error) { } catch (error) {
// Failed to load issue statuses // Failed to load issue statuses
} }
@ -507,19 +547,26 @@
// Cache the event immediately // Cache the event immediately
await cacheEvent(signedEvent); await cacheEvent(signedEvent);
// Publish to relays // Update local state immediately with the cached event (before publishing)
const relays = relayManager.getProfileReadRelays(); // This ensures the UI updates right away, even if publishing is slow
const result = await nostrClient.publish(signedEvent, { relays }); issueStatuses.set(issueId, signedEvent);
issueStatuses = new Map(issueStatuses); // Trigger reactivity
if (result.success.length > 0) {
// Update local state immediately with the cached event
issueStatuses.set(issueId, signedEvent);
// Also reload statuses to get any other updates from relays // Publish to relays in background (don't wait for it)
await loadIssueStatuses(); const relays = relayManager.getProfileReadRelays();
} else { signAndPublish(event, relays).then((result) => {
alert('Failed to publish status change. Please try again.'); if (result.success.length === 0 && result.failed.length > 0) {
} console.warn('Failed to publish status change to some relays:', result.failed);
// Don't show alert - event is cached and will sync eventually
}
// Reload statuses to get any other updates from relays
loadIssueStatuses().catch(() => {
// Non-critical - status is already updated locally
});
}).catch((error) => {
console.error('Error publishing status change:', error);
// Don't show alert - event is cached and will sync eventually
});
} catch (error) { } catch (error) {
// Failed to change issue status // Failed to change issue status
alert('Error changing issue status: ' + (error instanceof Error ? error.message : String(error))); alert('Error changing issue status: ' + (error instanceof Error ? error.message : String(error)));
@ -604,22 +651,23 @@
if (issues.length === 0) return; if (issues.length === 0) return;
try { try {
const issueIds = issues.map(i => i.id);
const relays = relayManager.getCommentReadRelays(); const relays = relayManager.getCommentReadRelays();
// Batch fetch all comments for all issues // Sort issues by newest first (created_at descending)
// Use cache-first to load comments faster const sortedIssues = [...issues].sort((a, b) => b.created_at - a.created_at);
const comments = await nostrClient.fetchEvents(
[{ '#e': issueIds, kinds: [KIND.COMMENT], limit: 500 }], // Get all issue IDs upfront for cache lookup
relays, const allIssueIds = new Set(sortedIssues.map(issue => issue.id));
{ useCache: 'cache-first', cacheResults: true } // Prioritize cache
);
// Group comments by issue ID // Step 1: Load ALL cached comments first (before any relay fetches)
const cachedComments = await getEventsByKind(KIND.COMMENT, 1000); // Get a large number to cover all comments
const commentsMap = new Map<string, NostrEvent[]>(); const commentsMap = new Map<string, NostrEvent[]>();
for (const comment of comments) {
const eTag = comment.tags.find(t => t[0] === 'e'); // Process cached comments immediately
if (eTag && eTag[1]) { for (const comment of cachedComments) {
// Check for both lowercase 'e' and uppercase 'E' tags
const eTag = comment.tags.find(t => t[0] === 'e' || t[0] === 'E');
if (eTag && eTag[1] && allIssueIds.has(eTag[1])) {
const issueId = eTag[1]; const issueId = eTag[1];
if (!commentsMap.has(issueId)) { if (!commentsMap.has(issueId)) {
commentsMap.set(issueId, []); commentsMap.set(issueId, []);
@ -628,7 +676,46 @@
} }
} }
issueComments = commentsMap; // Update state immediately with cached comments
issueComments = new Map(commentsMap);
// Step 2: Process in batches of 50, fetching from relays for any missing comments
const BATCH_SIZE = 50;
for (let i = 0; i < sortedIssues.length; i += BATCH_SIZE) {
const batch = sortedIssues.slice(i, i + BATCH_SIZE);
const issueIds = batch.map(issue => issue.id);
// Fetch comments for this batch from relays (will also check cache, but we already loaded cache above)
const comments = await nostrClient.fetchEvents(
[{ '#e': issueIds, kinds: [KIND.COMMENT], limit: 500 }],
relays,
{ useCache: 'cache-first', cacheResults: true } // Prioritize cache
);
// Merge new comments into the map (deduplicate by event ID)
const existingCommentIds = new Set(
Array.from(commentsMap.values()).flat().map(c => c.id)
);
for (const comment of comments) {
// Skip if we already have this comment from cache
if (existingCommentIds.has(comment.id)) continue;
// Check for both lowercase 'e' and uppercase 'E' tags
const eTag = comment.tags.find(t => t[0] === 'e' || t[0] === 'E');
if (eTag && eTag[1]) {
const issueId = eTag[1];
if (!commentsMap.has(issueId)) {
commentsMap.set(issueId, []);
}
commentsMap.get(issueId)!.push(comment);
}
}
// Update state after each batch so newest issues show comments first
issueComments = new Map(commentsMap);
}
} catch (error) { } catch (error) {
// Failed to load issue comments // Failed to load issue comments
} }
@ -1414,6 +1501,7 @@
{#each paginatedIssues as issue} {#each paginatedIssues as issue}
{@const currentStatus = getCurrentStatus(issue.id)} {@const currentStatus = getCurrentStatus(issue.id)}
{@const isChanging = changingStatus.get(issue.id) || false} {@const isChanging = changingStatus.get(issue.id) || false}
{@const isLoggedIn = !!sessionManager.getSession()}
<div class="issue-item"> <div class="issue-item">
<div class="issue-header"> <div class="issue-header">
<div class="issue-status-control"> <div class="issue-status-control">
@ -1427,7 +1515,7 @@
changeIssueStatus(issue.id, newStatus); changeIssueStatus(issue.id, newStatus);
} }
}} }}
disabled={isChanging} disabled={isChanging || !isLoggedIn}
class="status-select" class="status-select"
class:open={currentStatus === 'open'} class:open={currentStatus === 'open'}
class:closed={currentStatus === 'closed'} class:closed={currentStatus === 'closed'}

Loading…
Cancel
Save