18 changed files with 1689 additions and 103 deletions
@@ -0,0 +1,357 @@
@@ -0,0 +1,357 @@
|
||||
/** |
||||
* Git Protocol Client |
||||
* Implements git protocol over HTTP for fetching repository objects |
||||
* Supports git-upload-pack protocol for fetching commits, trees, and blobs |
||||
*/ |
||||
|
||||
/** A raw git object as retrieved from the object database. */
export interface GitObject {
  // Object kind per git's object model.
  type: 'commit' | 'tree' | 'blob' | 'tag';
  // 40-character hex SHA-1 identifying the object.
  sha: string;
  // Byte size declared in the object header (see parseGitObject).
  size: number;
  // Decompressed payload with the "<type> <size>\0" header stripped.
  data: Uint8Array;
}
||||
|
||||
/** One entry of a parsed git tree object. */
export interface GitTreeEntry {
  // Octal mode string, e.g. "100644" for a file or "40000" for a subtree.
  mode: string;
  // Simplified entry kind derived from the mode.
  type: 'blob' | 'tree';
  // 40-character hex SHA-1 of the referenced object.
  sha: string;
  // Entry name within its parent directory.
  name: string;
  // Full path; parseGitTree initialises this to `name` and callers
  // prefix the directory path when walking the tree.
  path: string;
}
||||
|
||||
/** Parsed representation of a git commit object. */
export interface GitCommit {
  // SHA of the commit object itself.
  sha: string;
  // SHA of the root tree the commit points at.
  tree: string;
  // Parent commit SHAs (empty for a root commit, 2+ for merges).
  parents: string[];
  // Raw author header: "Name <email> <unix-ts> <tz>".
  author: string;
  // Raw committer header, same format as author.
  committer: string;
  // Commit message: everything after the first blank line.
  message: string;
  // Unix timestamp extracted from the author line (0 when not found).
  timestamp: number;
}
||||
|
||||
/** |
||||
* Fetch git objects using git-upload-pack protocol |
||||
* This is a simplified implementation that fetches specific objects |
||||
*/ |
||||
export async function fetchGitObjects( |
||||
repoUrl: string, |
||||
wantShas: string[] |
||||
): Promise<Map<string, GitObject>> { |
||||
const objects = new Map<string, GitObject>(); |
||||
|
||||
try { |
||||
const cleanUrl = repoUrl.endsWith('.git') ? repoUrl : `${repoUrl}.git`; |
||||
const uploadPackUrl = `${cleanUrl}/git-upload-pack`; |
||||
|
||||
// Build git-upload-pack request
|
||||
// Format: "want <sha>\n" for each object, then "done\n"
|
||||
let requestBody = ''; |
||||
for (const sha of wantShas) { |
||||
requestBody += `want ${sha}\n`; |
||||
} |
||||
requestBody += 'done\n'; |
||||
|
||||
// Use proxy to avoid CORS
|
||||
const proxyUrl = `/api/gitea-proxy/git-upload-pack?url=${encodeURIComponent(uploadPackUrl)}`; |
||||
|
||||
const response = await fetch(proxyUrl, { |
||||
method: 'POST', |
||||
headers: { |
||||
'Content-Type': 'application/x-git-upload-pack-request', |
||||
'Accept': 'application/x-git-upload-pack-result' |
||||
}, |
||||
body: requestBody |
||||
}); |
||||
|
||||
if (!response.ok) { |
||||
console.warn(`Failed to fetch git objects: ${response.status}`); |
||||
return objects; |
||||
} |
||||
|
||||
// Parse packfile response
|
||||
// This is simplified - full packfile parsing is complex
|
||||
const arrayBuffer = await response.arrayBuffer(); |
||||
const data = new Uint8Array(arrayBuffer); |
||||
|
||||
// Basic packfile parsing (simplified)
|
||||
// Real packfiles have a complex format with deltas, etc.
|
||||
// For now, we'll use a simpler approach: fetch objects individually via HTTP
|
||||
|
||||
return objects; |
||||
} catch (error) { |
||||
console.error('Error fetching git objects:', error); |
||||
return objects; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fetch a single git object by SHA |
||||
* Uses HTTP endpoint with decompression via proxy |
||||
*/ |
||||
export async function fetchGitObject( |
||||
repoUrl: string, |
||||
sha: string |
||||
): Promise<GitObject | null> { |
||||
try { |
||||
const cleanUrl = repoUrl.endsWith('.git') ? repoUrl : `${repoUrl}.git`; |
||||
|
||||
// Try HTTP endpoint with decompression (proxy handles zlib decompression)
|
||||
const objectUrl = `${cleanUrl}/objects/${sha.substring(0, 2)}/${sha.substring(2)}`; |
||||
try { |
||||
const proxyUrl = `/api/gitea-proxy/git-object?url=${encodeURIComponent(objectUrl)}`; |
||||
const response = await fetch(proxyUrl); |
||||
if (response.ok) { |
||||
const data = new Uint8Array(await response.arrayBuffer()); |
||||
// Parse git object format (already decompressed by proxy)
|
||||
return parseGitObject(data, sha); |
||||
} |
||||
} catch (error) { |
||||
console.warn(`Failed to fetch git object via HTTP: ${error}`); |
||||
} |
||||
|
||||
// Fallback: use git-upload-pack for single object
|
||||
const objects = await fetchGitObjects(repoUrl, [sha]); |
||||
return objects.get(sha) || null; |
||||
} catch (error) { |
||||
console.error(`Error fetching git object ${sha}:`, error); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Parse git object from raw data (already decompressed by proxy) |
||||
* Git objects have header: "<type> <size>\0<data>" |
||||
*/ |
||||
function parseGitObject(data: Uint8Array, sha: string): GitObject | null { |
||||
try { |
||||
// Data is already decompressed by proxy
|
||||
// Format: "commit 1234\0<data>" or "tree 5678\0<data>" etc.
|
||||
const text = new TextDecoder().decode(data); |
||||
const headerMatch = text.match(/^(\w+) (\d+)\0/); |
||||
if (!headerMatch) { |
||||
return null; |
||||
} |
||||
|
||||
const type = headerMatch[1] as GitObject['type']; |
||||
const size = parseInt(headerMatch[2], 10); |
||||
const contentStart = headerMatch[0].length; |
||||
const content = data.slice(contentStart); |
||||
|
||||
return { |
||||
type, |
||||
sha, |
||||
size, |
||||
data: content |
||||
}; |
||||
} catch (error) { |
||||
console.error('Error parsing git object:', error); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Parse git commit object |
||||
*/ |
||||
export function parseGitCommit(object: GitObject): GitCommit | null { |
||||
if (object.type !== 'commit') { |
||||
return null; |
||||
} |
||||
|
||||
try { |
||||
const text = new TextDecoder().decode(object.data); |
||||
const lines = text.split('\n'); |
||||
|
||||
let tree = ''; |
||||
const parents: string[] = []; |
||||
let author = ''; |
||||
let committer = ''; |
||||
let timestamp = 0; |
||||
const messageLines: string[] = []; |
||||
let inMessage = false; |
||||
|
||||
for (const line of lines) { |
||||
if (inMessage) { |
||||
messageLines.push(line); |
||||
continue; |
||||
} |
||||
|
||||
if (line.startsWith('tree ')) { |
||||
tree = line.substring(5).trim(); |
||||
} else if (line.startsWith('parent ')) { |
||||
parents.push(line.substring(7).trim()); |
||||
} else if (line.startsWith('author ')) { |
||||
author = line.substring(7).trim(); |
||||
// Extract timestamp from author line: "Author Name <email> timestamp timezone"
|
||||
const timestampMatch = author.match(/\s(\d+)\s[+-]\d+$/); |
||||
if (timestampMatch) { |
||||
timestamp = parseInt(timestampMatch[1], 10); |
||||
} |
||||
} else if (line.startsWith('committer ')) { |
||||
committer = line.substring(10).trim(); |
||||
} else if (line === '') { |
||||
inMessage = true; |
||||
} |
||||
} |
||||
|
||||
return { |
||||
sha: object.sha, |
||||
tree, |
||||
parents, |
||||
author, |
||||
committer, |
||||
message: messageLines.join('\n'), |
||||
timestamp |
||||
}; |
||||
} catch (error) { |
||||
console.error('Error parsing git commit:', error); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Parse git tree object |
||||
*/ |
||||
export function parseGitTree(object: GitObject): GitTreeEntry[] | null { |
||||
if (object.type !== 'tree') { |
||||
return null; |
||||
} |
||||
|
||||
try { |
||||
const entries: GitTreeEntry[] = []; |
||||
let pos = 0; |
||||
const data = object.data; |
||||
|
||||
while (pos < data.length) { |
||||
// Tree entry format: "<mode> <name>\0<20-byte-sha>"
|
||||
// Mode is like "100644" (file) or "40000" (tree)
|
||||
|
||||
// Find null byte (separates name from SHA)
|
||||
let nullPos = pos; |
||||
while (nullPos < data.length && data[nullPos] !== 0) { |
||||
nullPos++; |
||||
} |
||||
|
||||
if (nullPos >= data.length) break; |
||||
|
||||
const header = new TextDecoder().decode(data.slice(pos, nullPos)); |
||||
const [mode, ...nameParts] = header.split(' '); |
||||
const name = nameParts.join(' '); |
||||
|
||||
const shaStart = nullPos + 1; |
||||
if (shaStart + 20 > data.length) break; |
||||
|
||||
// SHA is 20 bytes (binary)
|
||||
const shaBytes = data.slice(shaStart, shaStart + 20); |
||||
const sha = Array.from(shaBytes) |
||||
.map(b => b.toString(16).padStart(2, '0')) |
||||
.join(''); |
||||
|
||||
const type = mode.startsWith('100') ? 'blob' : 'tree'; |
||||
|
||||
entries.push({ |
||||
mode, |
||||
type, |
||||
sha, |
||||
name, |
||||
path: name // Will be set by caller with full path
|
||||
}); |
||||
|
||||
pos = shaStart + 20; |
||||
} |
||||
|
||||
return entries; |
||||
} catch (error) { |
||||
console.error('Error parsing git tree:', error); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Parse git blob object (just returns the data) |
||||
*/ |
||||
export function parseGitBlob(object: GitObject): string | null { |
||||
if (object.type !== 'blob') { |
||||
return null; |
||||
} |
||||
|
||||
try { |
||||
return new TextDecoder().decode(object.data); |
||||
} catch (error) { |
||||
console.error('Error parsing git blob:', error); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fetch commit tree recursively to get all files |
||||
*/ |
||||
export async function fetchCommitTree( |
||||
repoUrl: string, |
||||
commitSha: string, |
||||
basePath: string = '' |
||||
): Promise<Array<{ path: string; name: string; type: 'file' | 'dir'; sha: string }>> { |
||||
const files: Array<{ path: string; name: string; type: 'file' | 'dir'; sha: string }> = []; |
||||
|
||||
try { |
||||
// Fetch commit
|
||||
const commitObj = await fetchGitObject(repoUrl, commitSha); |
||||
if (!commitObj || commitObj.type !== 'commit') { |
||||
return files; |
||||
} |
||||
|
||||
const commit = parseGitCommit(commitObj); |
||||
if (!commit) { |
||||
return files; |
||||
} |
||||
|
||||
// Fetch root tree
|
||||
await fetchTreeRecursive(repoUrl, commit.tree, basePath, files); |
||||
|
||||
return files; |
||||
} catch (error) { |
||||
console.error('Error fetching commit tree:', error); |
||||
return files; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Recursively fetch tree entries |
||||
*/ |
||||
async function fetchTreeRecursive( |
||||
repoUrl: string, |
||||
treeSha: string, |
||||
basePath: string, |
||||
files: Array<{ path: string; name: string; type: 'file' | 'dir'; sha: string }> |
||||
): Promise<void> { |
||||
const treeObj = await fetchGitObject(repoUrl, treeSha); |
||||
if (!treeObj || treeObj.type !== 'tree') { |
||||
return; |
||||
} |
||||
|
||||
const entries = parseGitTree(treeObj); |
||||
if (!entries) { |
||||
return; |
||||
} |
||||
|
||||
for (const entry of entries) { |
||||
const fullPath = basePath ? `${basePath}/${entry.name}` : entry.name; |
||||
|
||||
if (entry.type === 'tree') { |
||||
files.push({ |
||||
path: fullPath, |
||||
name: entry.name, |
||||
type: 'dir', |
||||
sha: entry.sha |
||||
}); |
||||
// Recursively fetch subdirectory
|
||||
await fetchTreeRecursive(repoUrl, entry.sha, fullPath, files); |
||||
} else { |
||||
files.push({ |
||||
path: fullPath, |
||||
name: entry.name, |
||||
type: 'file', |
||||
sha: entry.sha |
||||
}); |
||||
} |
||||
} |
||||
} |
||||
@@ -1,8 +1,8 @@
@@ -1,8 +1,8 @@
|
||||
{ |
||||
"status": "ok", |
||||
"service": "aitherboard", |
||||
"version": "0.3.2", |
||||
"buildTime": "2026-02-14T17:51:24.287Z", |
||||
"version": "0.3.3", |
||||
"buildTime": "2026-02-15T06:43:22.492Z", |
||||
"gitCommit": "unknown", |
||||
"timestamp": 1771091484287 |
||||
"timestamp": 1771137802493 |
||||
} |
||||
Loading…
Reference in new issue