Browse Source
Nostr-Signature: d134c35516991f27e47ed8a4aa0d3f1d6e6be41c46c9cf3f6c982c1442b09b4b 573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc cb699fae6a8e44a3b9123f215749f6fec0470c75a0401a94c37dfb8e572c07281b3941862e704b868663f943c573ab2ee9fec217e87f7be567cc6bb3514cacdbmain
18 changed files with 3697 additions and 942 deletions
@ -0,0 +1,316 @@
@@ -0,0 +1,316 @@
|
||||
/** |
||||
* Announcement Manager |
||||
* Handles saving and retrieving repository announcements from repos |
||||
*/ |
||||
|
||||
import { existsSync } from 'fs'; |
||||
import { readFile } from 'fs/promises'; |
||||
import { join } from 'path'; |
||||
import { mkdir, writeFile, rm } from 'fs/promises'; |
||||
import simpleGit, { type SimpleGit } from 'simple-git'; |
||||
import logger from '../logger.js'; |
||||
import type { NostrEvent } from '../../types/nostr.js'; |
||||
import { validateAnnouncementEvent } from '../nostr/repo-verification.js'; |
||||
import { DEFAULT_NOSTR_RELAYS } from '../../config.js'; |
||||
import { NostrClient } from '../nostr/nostr-client.js'; |
||||
import { KIND } from '../../types/nostr.js'; |
||||
import { RepoUrlParser } from './repo-url-parser.js'; |
||||
|
||||
/** |
||||
* Announcement Manager |
||||
* Handles saving and retrieving repository announcements from repos |
||||
*/ |
||||
export class AnnouncementManager { |
||||
private urlParser: RepoUrlParser; |
||||
|
||||
constructor(repoRoot: string = '/repos', domain: string = 'localhost:6543') { |
||||
this.urlParser = new RepoUrlParser(repoRoot, domain); |
||||
} |
||||
|
||||
/** |
||||
* Check if an announcement event already exists in nostr/repo-events.jsonl |
||||
*/ |
||||
async hasAnnouncementInRepo(worktreePath: string, eventId?: string): Promise<boolean> { |
||||
try { |
||||
const jsonlFile = join(worktreePath, 'nostr', 'repo-events.jsonl'); |
||||
if (!existsSync(jsonlFile)) { |
||||
return false; |
||||
} |
||||
|
||||
const content = await readFile(jsonlFile, 'utf-8'); |
||||
const lines = content.trim().split('\n').filter(Boolean); |
||||
|
||||
for (const line of lines) { |
||||
try { |
||||
const entry = JSON.parse(line); |
||||
if (entry.type === 'announcement' && entry.event) { |
||||
// If eventId provided, check for exact match
|
||||
if (eventId) { |
||||
if (entry.event.id === eventId) { |
||||
return true; |
||||
} |
||||
} else { |
||||
// Just check if any announcement exists
|
||||
return true; |
||||
} |
||||
} |
||||
} catch { |
||||
// Skip invalid lines
|
||||
continue; |
||||
} |
||||
} |
||||
|
||||
return false; |
||||
} catch (err) { |
||||
logger.debug({ error: err, worktreePath }, 'Failed to check for announcement in repo'); |
||||
return false; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Read announcement event from nostr/repo-events.jsonl |
||||
*/ |
||||
async getAnnouncementFromRepo(worktreePath: string): Promise<NostrEvent | null> { |
||||
try { |
||||
const jsonlFile = join(worktreePath, 'nostr', 'repo-events.jsonl'); |
||||
if (!existsSync(jsonlFile)) { |
||||
return null; |
||||
} |
||||
|
||||
const content = await readFile(jsonlFile, 'utf-8'); |
||||
const lines = content.trim().split('\n').filter(Boolean); |
||||
|
||||
// Find the most recent announcement event
|
||||
let latestAnnouncement: NostrEvent | null = null; |
||||
let latestTimestamp = 0; |
||||
|
||||
for (const line of lines) { |
||||
try { |
||||
const entry = JSON.parse(line); |
||||
if (entry.type === 'announcement' && entry.event && entry.timestamp) { |
||||
if (entry.timestamp > latestTimestamp) { |
||||
latestTimestamp = entry.timestamp; |
||||
latestAnnouncement = entry.event; |
||||
} |
||||
} |
||||
} catch { |
||||
// Skip invalid lines
|
||||
continue; |
||||
} |
||||
} |
||||
|
||||
return latestAnnouncement; |
||||
} catch (err) { |
||||
logger.debug({ error: err, worktreePath }, 'Failed to read announcement from repo'); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Fetch announcement from relays and validate it |
||||
*/ |
||||
async fetchAnnouncementFromRelays( |
||||
repoOwnerPubkey: string, |
||||
repoName: string |
||||
): Promise<NostrEvent | null> { |
||||
try { |
||||
const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS); |
||||
const events = await nostrClient.fetchEvents([ |
||||
{ |
||||
kinds: [KIND.REPO_ANNOUNCEMENT], |
||||
authors: [repoOwnerPubkey], |
||||
'#d': [repoName], |
||||
limit: 1 |
||||
} |
||||
]); |
||||
|
||||
if (events.length === 0) { |
||||
return null; |
||||
} |
||||
|
||||
const event = events[0]; |
||||
|
||||
// Validate the event
|
||||
const validation = validateAnnouncementEvent(event, repoName); |
||||
if (!validation.valid) { |
||||
logger.warn({ error: validation.error, repoName }, 'Fetched announcement failed validation'); |
||||
return null; |
||||
} |
||||
|
||||
return event; |
||||
} catch (err) { |
||||
logger.debug({ error: err, repoOwnerPubkey, repoName }, 'Failed to fetch announcement from relays'); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Save a repo event (announcement or transfer) to nostr/repo-events.jsonl |
||||
* Only saves if not already present (for announcements) |
||||
* This provides a standard location for all repo-related Nostr events for easy analysis |
||||
*/ |
||||
async saveRepoEventToWorktree( |
||||
worktreePath: string, |
||||
event: NostrEvent, |
||||
eventType: 'announcement' | 'transfer', |
||||
skipIfExists: boolean = true |
||||
): Promise<boolean> { |
||||
try { |
||||
// For announcements, check if already exists
|
||||
if (eventType === 'announcement' && skipIfExists) { |
||||
const exists = await this.hasAnnouncementInRepo(worktreePath, event.id); |
||||
if (exists) { |
||||
logger.debug({ eventId: event.id, worktreePath }, 'Announcement already exists in repo, skipping'); |
||||
return false; |
||||
} |
||||
} |
||||
|
||||
// Create nostr directory in worktree
|
||||
const nostrDir = join(worktreePath, 'nostr'); |
||||
await mkdir(nostrDir, { recursive: true }); |
||||
|
||||
// Append to repo-events.jsonl with event type metadata
|
||||
const jsonlFile = join(nostrDir, 'repo-events.jsonl'); |
||||
const eventLine = JSON.stringify({ |
||||
type: eventType, |
||||
timestamp: event.created_at, |
||||
event |
||||
}) + '\n'; |
||||
await writeFile(jsonlFile, eventLine, { flag: 'a', encoding: 'utf-8' }); |
||||
return true; |
||||
} catch (err) { |
||||
logger.debug({ error: err, worktreePath, eventType }, 'Failed to save repo event to nostr/repo-events.jsonl'); |
||||
// Don't throw - this is a nice-to-have feature
|
||||
return false; |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Ensure announcement event is saved to nostr/repo-events.jsonl in the repository |
||||
* Only saves if not already present (avoids redundant entries) |
||||
*/ |
||||
async ensureAnnouncementInRepo(repoPath: string, event: NostrEvent, selfTransferEvent?: NostrEvent): Promise<void> { |
||||
try { |
||||
// Create a temporary working directory
|
||||
const repoName = this.urlParser.parseRepoPathForName(repoPath)?.repoName || 'temp'; |
||||
const workDir = join(repoPath, '..', `${repoName}.work`); |
||||
|
||||
// Clean up if exists
|
||||
if (existsSync(workDir)) { |
||||
await rm(workDir, { recursive: true, force: true }); |
||||
} |
||||
await mkdir(workDir, { recursive: true }); |
||||
|
||||
// Clone the bare repo
|
||||
const git: SimpleGit = simpleGit(); |
||||
await git.clone(repoPath, workDir); |
||||
|
||||
// Check if announcement already exists in nostr/repo-events.jsonl
|
||||
const hasAnnouncement = await this.hasAnnouncementInRepo(workDir, event.id); |
||||
|
||||
const filesToAdd: string[] = []; |
||||
|
||||
// Only save announcement if not already present
|
||||
if (!hasAnnouncement) { |
||||
const saved = await this.saveRepoEventToWorktree(workDir, event, 'announcement', false); |
||||
if (saved) { |
||||
filesToAdd.push('nostr/repo-events.jsonl'); |
||||
logger.info({ repoPath, eventId: event.id }, 'Saved announcement to nostr/repo-events.jsonl'); |
||||
} |
||||
} else { |
||||
logger.debug({ repoPath, eventId: event.id }, 'Announcement already exists in repo, skipping'); |
||||
} |
||||
|
||||
// Save transfer event if provided
|
||||
if (selfTransferEvent) { |
||||
const saved = await this.saveRepoEventToWorktree(workDir, selfTransferEvent, 'transfer', false); |
||||
if (saved) { |
||||
if (!filesToAdd.includes('nostr/repo-events.jsonl')) { |
||||
filesToAdd.push('nostr/repo-events.jsonl'); |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Only commit if we added files
|
||||
if (filesToAdd.length > 0) { |
||||
const workGit: SimpleGit = simpleGit(workDir); |
||||
await workGit.add(filesToAdd); |
||||
|
||||
// Use the event timestamp for commit date
|
||||
const commitDate = new Date(event.created_at * 1000).toISOString(); |
||||
const commitMessage = selfTransferEvent
|
||||
? 'Add Nostr repository announcement and initial ownership proof' |
||||
: 'Add Nostr repository announcement'; |
||||
|
||||
// Note: Initial commits are unsigned. The repository owner can sign their own commits
|
||||
// when they make changes. The server should never sign commits on behalf of users.
|
||||
|
||||
await workGit.commit(commitMessage, filesToAdd, { |
||||
'--author': `Nostr <${event.pubkey}@nostr>`, |
||||
'--date': commitDate |
||||
}); |
||||
|
||||
// Push back to bare repo
|
||||
// Use default branch from environment or try 'main' first, then 'master'
|
||||
const defaultBranch = process.env.DEFAULT_BRANCH || 'main'; |
||||
await workGit.push(['origin', defaultBranch]).catch(async () => { |
||||
// If default branch doesn't exist, try to create it
|
||||
try { |
||||
await workGit.checkout(['-b', defaultBranch]); |
||||
await workGit.push(['origin', defaultBranch]); |
||||
} catch { |
||||
// If default branch creation fails, try 'main' or 'master' as fallback
|
||||
const fallbackBranch = defaultBranch === 'main' ? 'master' : 'main'; |
||||
try { |
||||
await workGit.checkout(['-b', fallbackBranch]); |
||||
await workGit.push(['origin', fallbackBranch]); |
||||
} catch { |
||||
// If all fails, log but don't throw - announcement is saved
|
||||
logger.warn({ repoPath, defaultBranch, fallbackBranch }, 'Failed to push announcement to any branch'); |
||||
} |
||||
} |
||||
}); |
||||
} |
||||
|
||||
// Clean up
|
||||
await rm(workDir, { recursive: true, force: true }); |
||||
} catch (error) { |
||||
logger.error({ error, repoPath }, 'Failed to ensure announcement in repo'); |
||||
// Don't throw - announcement file creation is important but shouldn't block provisioning
|
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Check if a repository already has an announcement in nostr/repo-events.jsonl |
||||
* Used to determine if this is a truly new repo or an existing one being added |
||||
*/ |
||||
async hasAnnouncementInRepoFile(repoPath: string): Promise<boolean> { |
||||
if (!existsSync(repoPath)) { |
||||
return false; |
||||
} |
||||
|
||||
try { |
||||
const git: SimpleGit = simpleGit(); |
||||
const repoName = this.urlParser.parseRepoPathForName(repoPath)?.repoName || 'temp'; |
||||
const workDir = join(repoPath, '..', `${repoName}.check`); |
||||
|
||||
// Clean up if exists
|
||||
if (existsSync(workDir)) { |
||||
await rm(workDir, { recursive: true, force: true }); |
||||
} |
||||
await mkdir(workDir, { recursive: true }); |
||||
|
||||
// Try to clone and check for announcement in nostr/repo-events.jsonl
|
||||
await git.clone(repoPath, workDir); |
||||
const hasAnnouncement = await this.hasAnnouncementInRepo(workDir, undefined); |
||||
|
||||
// Clean up
|
||||
await rm(workDir, { recursive: true, force: true }); |
||||
|
||||
return hasAnnouncement; |
||||
} catch { |
||||
// If we can't check, assume it doesn't have one
|
||||
return false; |
||||
} |
||||
} |
||||
} |
||||
@ -0,0 +1,382 @@
@@ -0,0 +1,382 @@
|
||||
/** |
||||
* Git Remote Synchronization Service |
||||
* Handles syncing repositories to/from remote URLs |
||||
*/ |
||||
|
||||
import { spawn } from 'child_process'; |
||||
import simpleGit, { type SimpleGit } from 'simple-git'; |
||||
import logger from '../logger.js'; |
||||
import { shouldUseTor, getTorProxy } from '../../utils/tor.js'; |
||||
import { sanitizeError } from '../../utils/security.js'; |
||||
import { RepoUrlParser } from './repo-url-parser.js'; |
||||
|
||||
/** |
||||
* Execute git command with custom environment variables safely |
||||
* Uses spawn with argument arrays to prevent command injection |
||||
* Security: Only uses whitelisted environment variables, does not spread process.env |
||||
*/ |
||||
function execGitWithEnv( |
||||
repoPath: string, |
||||
args: string[], |
||||
env: Record<string, string> = {} |
||||
): Promise<{ stdout: string; stderr: string }> { |
||||
return new Promise((resolve, reject) => { |
||||
const gitProcess = spawn('git', args, { |
||||
cwd: repoPath, |
||||
// Security: Only use whitelisted env vars, don't spread process.env
|
||||
// The env parameter should already contain only safe, whitelisted variables
|
||||
env: env, |
||||
stdio: ['ignore', 'pipe', 'pipe'] |
||||
}); |
||||
|
||||
let stdout = ''; |
||||
let stderr = ''; |
||||
|
||||
gitProcess.stdout.on('data', (chunk: Buffer) => { |
||||
stdout += chunk.toString(); |
||||
}); |
||||
|
||||
gitProcess.stderr.on('data', (chunk: Buffer) => { |
||||
stderr += chunk.toString(); |
||||
}); |
||||
|
||||
gitProcess.on('close', (code) => { |
||||
if (code === 0) { |
||||
resolve({ stdout, stderr }); |
||||
} else { |
||||
reject(new Error(`Git command failed with code ${code}: ${stderr || stdout}`)); |
||||
} |
||||
}); |
||||
|
||||
gitProcess.on('error', (err) => { |
||||
reject(err); |
||||
}); |
||||
}); |
||||
} |
||||
|
||||
/**
 * Git Remote Synchronization Service
 *
 * Handles syncing repositories to and from external remote URLs, including
 * optional Tor proxying for .onion hosts and token injection for GitHub.
 * Per-remote failures are logged but tolerated (partial success is acceptable).
 */
export class GitRemoteSync {
  // Parses repo paths/URLs into components (project helper).
  private urlParser: RepoUrlParser;

  constructor(repoRoot: string = '/repos', domain: string = 'localhost:6543') {
    this.urlParser = new RepoUrlParser(repoRoot, domain);
  }

  /**
   * Build the environment for a git invocation targeting `url`.
   *
   * Security: only a small whitelist of variables is forwarded from
   * process.env; nothing else leaks into the child process. When the URL
   * should go over Tor, SOCKS proxy variables are added.
   *
   * @param url - The remote URL the git command will talk to.
   * @returns a complete environment map for execGitWithEnv.
   */
  getGitEnvForUrl(url: string): Record<string, string> {
    // Whitelist only necessary environment variables for security
    const env: Record<string, string> = {
      PATH: process.env.PATH || '/usr/bin:/bin',
      HOME: process.env.HOME || '/tmp',
      USER: process.env.USER || 'git',
      LANG: process.env.LANG || 'C.UTF-8',
      LC_ALL: process.env.LC_ALL || 'C.UTF-8',
    };

    // Add TZ if set (for consistent timestamps)
    if (process.env.TZ) {
      env.TZ = process.env.TZ;
    }

    if (shouldUseTor(url)) {
      const proxy = getTorProxy();
      if (proxy) {
        // Git uses GIT_PROXY_COMMAND for proxy support; the command receives
        // host and port as arguments. This builds a socat-based SOCKS5 relay
        // with an nc fallback.
        // NOTE(review): requires socat or netcat-openbsd on the host, and the
        // `|| sh -c ...` fallback chaining inside one command string looks
        // fragile — confirm it behaves as intended when socat is absent.
        const proxyCommand = `sh -c 'exec socat - SOCKS5:${proxy.host}:${proxy.port}:\\$1:\\$2' || sh -c 'exec nc -X 5 -x ${proxy.host}:${proxy.port} \\$1 \\$2'`;
        env.GIT_PROXY_COMMAND = proxyCommand;

        // Also set ALL_PROXY for git-remote-http
        env.ALL_PROXY = `socks5://${proxy.host}:${proxy.port}`;

        // For HTTP/HTTPS URLs, also set http_proxy and https_proxy
        try {
          const urlObj = new URL(url);
          if (urlObj.protocol === 'http:' || urlObj.protocol === 'https:') {
            env.http_proxy = `socks5://${proxy.host}:${proxy.port}`;
            env.https_proxy = `socks5://${proxy.host}:${proxy.port}`;
          }
        } catch {
          // URL parsing failed, skip proxy env vars
        }
      }
    }

    return env;
  }

  /**
   * Inject an authentication token into a git URL if needed.
   *
   * Currently supports GitHub via the GITHUB_TOKEN environment variable
   * (token becomes the URL username, empty password).
   *
   * @returns the URL with credentials injected, or the original URL when no
   *   token applies, the URL already has credentials, or parsing fails.
   */
  injectAuthToken(url: string): string {
    try {
      const urlObj = new URL(url);

      // If URL already has credentials, don't modify it
      if (urlObj.username) {
        return url;
      }

      // Check for GitHub token
      if (urlObj.hostname === 'github.com' || urlObj.hostname.endsWith('.github.com')) {
        const githubToken = process.env.GITHUB_TOKEN;
        if (githubToken) {
          // Inject token into URL: https://token@github.com/user/repo.git
          urlObj.username = githubToken;
          urlObj.password = ''; // GitHub uses token as username, password is empty
          return urlObj.toString();
        }
      }

      // Add support for other git hosting services here if needed
      // e.g., GitLab: GITLAB_TOKEN, Gitea: GITEA_TOKEN, etc.

      return url;
    } catch {
      // URL parsing failed, return original URL
      return url;
    }
  }

  /**
   * Fetch the latest state from one external remote into the local repo.
   *
   * The remote is registered as `remote-<index>`; if it already exists it is
   * removed and re-added so a changed URL takes effect.
   *
   * @throws re-throws fetch failures so syncFromRemotes can record them via
   *   Promise.allSettled.
   */
  private async syncFromSingleRemote(repoPath: string, url: string, index: number): Promise<void> {
    const remoteName = `remote-${index}`;
    const git = simpleGit(repoPath);
    // Inject authentication token if available (e.g., GITHUB_TOKEN)
    const authenticatedUrl = this.injectAuthToken(url);
    const gitEnv = this.getGitEnvForUrl(authenticatedUrl);

    try {
      // Add remote if not exists (ignore error if already exists).
      // Use authenticated URL so git can access private repos.
      try {
        await git.addRemote(remoteName, authenticatedUrl);
      } catch {
        // Remote might already exist, that's okay - try to update it
        try {
          await git.removeRemote(remoteName);
          await git.addRemote(remoteName, authenticatedUrl);
        } catch {
          // If update fails, continue - might be using old URL
        }
      }

      // Configure git proxy for this remote if it's a .onion address
      if (shouldUseTor(url)) {
        const proxy = getTorProxy();
        if (proxy) {
          try {
            // Use simple-git to set config (safer than exec)
            await git.addConfig(`http.${url}.proxy`, `socks5://${proxy.host}:${proxy.port}`, false, 'local');
          } catch {
            // Config might fail, continue anyway
          }
        }
      }

      // Fetch from remote with the whitelisted environment via spawn.
      // Note: 'git fetch <remote>' already fetches all branches from that
      // remote; --all is only for fetching from every remote at once.
      await execGitWithEnv(repoPath, ['fetch', remoteName], gitEnv);

      // Update the remote's default-branch pointer (refs/remotes/<name>/HEAD)
      try {
        await execGitWithEnv(repoPath, ['remote', 'set-head', remoteName, '-a'], gitEnv);
      } catch {
        // Ignore errors for set-head
      }
    } catch (error) {
      const sanitizedError = sanitizeError(error);
      logger.error({ error: sanitizedError, url, repoPath }, 'Failed to sync from remote');
      throw error; // Re-throw for Promise.allSettled handling
    }
  }

  /**
   * Sync repository from multiple remote URLs (parallelized for efficiency).
   * Individual remote failures are logged, not thrown.
   */
  async syncFromRemotes(repoPath: string, remoteUrls: string[]): Promise<void> {
    if (remoteUrls.length === 0) return;

    // Sync all remotes in parallel for better performance
    const results = await Promise.allSettled(
      remoteUrls.map((url, index) => this.syncFromSingleRemote(repoPath, url, index))
    );

    // Log any failures but don't throw (partial success is acceptable)
    results.forEach((result, index) => {
      if (result.status === 'rejected') {
        const sanitizedError = sanitizeError(result.reason);
        logger.warn({ error: sanitizedError, url: remoteUrls[index], repoPath }, 'Failed to sync from one remote (continuing with others)');
      }
    });
  }

  /**
   * Check if force push is safe (no divergent history).
   *
   * Safe when the local branch is ahead of (or equal to) the remote, or the
   * remote branch does not exist yet. Unsafe when the remote has commits the
   * local branch lacks (force push would overwrite remote history), or when
   * the check itself fails (defaults to unsafe).
   */
  private async canSafelyForcePush(repoPath: string, remoteName: string): Promise<boolean> {
    try {
      const git = simpleGit(repoPath);

      // Get current branch name
      const currentBranch = await git.revparse(['--abbrev-ref', 'HEAD']);
      if (!currentBranch) {
        return false; // Can't determine current branch
      }

      // Fetch latest remote state so the comparison is against fresh refs
      await git.fetch(remoteName);

      // Get remote branch reference
      const remoteBranch = `${remoteName}/${currentBranch}`;

      // Check if remote branch exists
      try {
        await git.revparse([`refs/remotes/${remoteBranch}`]);
      } catch {
        // Remote branch doesn't exist yet - safe to push (first push)
        return true;
      }

      // Get local and remote commit SHAs
      const localSha = await git.revparse(['HEAD']);
      const remoteSha = await git.revparse([`refs/remotes/${remoteBranch}`]);

      // If they're the same, it's safe (no-op)
      if (localSha === remoteSha) {
        return true;
      }

      // merge-base tells us the common ancestor of the two tips
      const mergeBase = await git.raw(['merge-base', localSha, remoteSha]);
      const mergeBaseSha = mergeBase.trim();

      // If merge base equals remote SHA, local is strictly ahead (safe)
      if (mergeBaseSha === remoteSha) {
        return true;
      }

      // If merge base equals local SHA, remote is ahead (unsafe to force push)
      if (mergeBaseSha === localSha) {
        return false;
      }

      // Merge base differs from both tips: branches have diverged (unsafe)
      return false;
    } catch (error) {
      // If we can't determine, default to false (safer)
      logger.warn({ error, repoPath, remoteName }, 'Failed to check branch divergence, defaulting to unsafe');
      return false;
    }
  }

  /**
   * Push all branches and tags to one remote, retrying with exponential
   * backoff (2^attempt seconds) up to maxRetries times.
   *
   * --force is added only when ALLOW_FORCE_PUSH=true or the force push is
   * provably safe per canSafelyForcePush.
   *
   * @throws the last error when every attempt fails.
   */
  private async syncToSingleRemote(repoPath: string, url: string, index: number, maxRetries: number = 3): Promise<void> {
    const remoteName = `remote-${index}`;
    const git = simpleGit(repoPath);
    const gitEnv = this.getGitEnvForUrl(url);

    let lastError: Error | null = null;

    for (let attempt = 1; attempt <= maxRetries; attempt++) {
      try {
        // Add remote if not exists
        // NOTE(review): unlike syncFromSingleRemote, a pre-existing remote is
        // NOT re-pointed at the new URL here — confirm whether stale URLs matter.
        try {
          await git.addRemote(remoteName, url);
        } catch {
          // Remote might already exist, that's okay
        }

        // Configure git proxy for this remote if it's a .onion address
        if (shouldUseTor(url)) {
          const proxy = getTorProxy();
          if (proxy) {
            try {
              await git.addConfig(`http.${url}.proxy`, `socks5://${proxy.host}:${proxy.port}`, false, 'local');
            } catch {
              // Config might fail, continue anyway
            }
          }
        }

        // Check if force push is safe (env override, else divergence check)
        const allowForce = process.env.ALLOW_FORCE_PUSH === 'true' || await this.canSafelyForcePush(repoPath, remoteName);
        const forceFlag = allowForce ? ['--force'] : [];

        // Push branches with the whitelisted environment using spawn
        await execGitWithEnv(repoPath, ['push', remoteName, '--all', ...forceFlag], gitEnv);

        // Push tags
        await execGitWithEnv(repoPath, ['push', remoteName, '--tags', ...forceFlag], gitEnv);

        // Success - return
        return;
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));
        const sanitizedError = sanitizeError(lastError);

        if (attempt < maxRetries) {
          // Exponential backoff: wait 2^attempt seconds
          const delayMs = Math.pow(2, attempt) * 1000;
          logger.warn({
            error: sanitizedError,
            url,
            repoPath,
            attempt,
            maxRetries,
            retryIn: `${delayMs}ms`
          }, 'Failed to sync to remote, retrying...');

          await new Promise(resolve => setTimeout(resolve, delayMs));
        } else {
          logger.error({ error: sanitizedError, url, repoPath, attempts: maxRetries }, 'Failed to sync to remote after all retries');
          throw lastError;
        }
      }
    }

    // Defensive: the loop either returns or throws above; this is unreachable
    // in practice but keeps the return type honest.
    throw lastError || new Error('Failed to sync to remote');
  }

  /**
   * Sync repository to multiple remote URLs after a push (parallelized with
   * retry). Individual remote failures are logged, not thrown.
   */
  async syncToRemotes(repoPath: string, remoteUrls: string[]): Promise<void> {
    if (remoteUrls.length === 0) return;

    // Sync all remotes in parallel for better performance
    const results = await Promise.allSettled(
      remoteUrls.map((url, index) => this.syncToSingleRemote(repoPath, url, index))
    );

    // Log any failures but don't throw (partial success is acceptable)
    results.forEach((result, index) => {
      if (result.status === 'rejected') {
        const sanitizedError = sanitizeError(result.reason);
        logger.warn({ error: sanitizedError, url: remoteUrls[index], repoPath }, 'Failed to sync to one remote (continuing with others)');
      }
    });
  }
}
||||
@ -0,0 +1,613 @@
@@ -0,0 +1,613 @@
|
||||
/** |
||||
* Repository manager for git repositories |
||||
* Handles repo provisioning, syncing, and NIP-34 integration |
||||
*
|
||||
* Refactored to use focused service classes: |
||||
* - RepoUrlParser: URL parsing and validation |
||||
* - GitRemoteSync: Remote syncing (to/from) |
||||
* - AnnouncementManager: Announcement handling in repos |
||||
* - RepoSizeChecker: Size checking |
||||
*/ |
||||
|
||||
import { existsSync, mkdirSync, accessSync, constants } from 'fs'; |
||||
import { join } from 'path'; |
||||
import { spawn } from 'child_process'; |
||||
import type { NostrEvent } from '../../types/nostr.js'; |
||||
import { GIT_DOMAIN } from '../../config.js'; |
||||
import { validateAnnouncementEvent } from '../nostr/repo-verification.js'; |
||||
import simpleGit from 'simple-git'; |
||||
import logger from '../logger.js'; |
||||
import { sanitizeError } from '../../utils/security.js'; |
||||
import { isPrivateRepo as checkIsPrivateRepo } from '../../utils/repo-privacy.js'; |
||||
import { RepoUrlParser, type RepoPath } from './repo-url-parser.js'; |
||||
import { GitRemoteSync } from './git-remote-sync.js'; |
||||
import { AnnouncementManager } from './announcement-manager.js'; |
||||
import { RepoSizeChecker } from './repo-size-checker.js'; |
||||
import { shouldUseTor, getTorProxy } from '../../utils/tor.js'; |
||||
|
||||
/** |
||||
* Check if a URL is a GRASP (Git Repository Access via Secure Protocol) URL |
||||
* GRASP URLs contain npub (Nostr public key) in the path: https://host/npub.../repo.git
|
||||
*/ |
||||
export function isGraspUrl(url: string): boolean { |
||||
// GRASP URLs have npub (starts with npub1) in the path
|
||||
return /\/npub1[a-z0-9]+/i.test(url); |
||||
} |
||||
|
||||
export { type RepoPath }; |
||||
|
||||
/** |
||||
* Repository Manager |
||||
* Main facade for repository operations |
||||
* Delegates to focused service classes for specific responsibilities |
||||
*/ |
||||
export class RepoManager { |
||||
private repoRoot: string; |
||||
private domain: string; |
||||
private urlParser: RepoUrlParser; |
||||
private remoteSync: GitRemoteSync; |
||||
private announcementManager: AnnouncementManager; |
||||
private sizeChecker: RepoSizeChecker; |
||||
|
||||
/**
 * Build a RepoManager rooted at `repoRoot` and serving `domain`.
 *
 * All heavy lifting is delegated to focused service objects constructed here:
 * URL parsing, remote synchronization, announcement persistence, and repo
 * size checking.
 */
constructor(repoRoot: string = '/repos', domain: string = GIT_DOMAIN) {
  this.repoRoot = repoRoot;
  this.domain = domain;

  // One collaborator per responsibility; all share the same root/domain.
  this.sizeChecker = new RepoSizeChecker();
  this.urlParser = new RepoUrlParser(repoRoot, domain);
  this.remoteSync = new GitRemoteSync(repoRoot, domain);
  this.announcementManager = new AnnouncementManager(repoRoot, domain);
}
||||
|
||||
/**
 * Parse a git domain URL into its npub and repo-name components.
 *
 * Thin delegation to RepoUrlParser; returns null when the URL does not
 * match the expected format.
 */
parseRepoUrl(url: string): RepoPath | null {
  const parsed = this.urlParser.parseRepoUrl(url);
  return parsed;
}
||||
|
||||
/**
 * Create a bare git repository from a NIP-34 repo announcement.
 *
 * Flow: resolve this server's clone URL from the event, validate the
 * announcement, create the bare repo if new (gated on user access level),
 * persist the announcement into the repo, and sync with any external clone
 * URLs listed in the event.
 *
 * @param event - The repo announcement event
 * @param selfTransferEvent - Optional self-transfer event to include in initial commit
 * @param isExistingRepo - Whether this is an existing repo being added to the server
 *   (skips the unlimited-access requirement for creation)
 * @throws when no URL for this domain is present, the URL is malformed, the
 *   announcement fails validation, or a new repo is requested without
 *   unlimited access.
 */
async provisionRepo(event: NostrEvent, selfTransferEvent?: NostrEvent, isExistingRepo: boolean = false): Promise<void> {
  const cloneUrls = this.urlParser.extractCloneUrls(event);
  // The announcement must reference this server's domain for us to host it
  const domainUrl = cloneUrls.find(url => url.includes(this.domain));

  if (!domainUrl) {
    throw new Error(`No ${this.domain} URL found in repo announcement`);
  }

  const repoPath = this.urlParser.parseRepoUrl(domainUrl);
  if (!repoPath) {
    throw new Error(`Invalid ${this.domain} URL format`);
  }

  // Create directory structure (one directory per owner npub)
  const repoDir = join(this.repoRoot, repoPath.npub);
  if (!existsSync(repoDir)) {
    mkdirSync(repoDir, { recursive: true });
  }

  // Check if repo already exists
  const repoExists = existsSync(repoPath.fullPath);

  // Security: Only allow new repo creation if user has unlimited access.
  // This prevents spam and abuse. (Dynamic imports avoid a circular
  // dependency at module load time — presumably; verify against the import graph.)
  const isNewRepo = !repoExists;
  if (isNewRepo && !isExistingRepo) {
    const { getCachedUserLevel } = await import('../security/user-level-cache.js');
    const { hasUnlimitedAccess } = await import('../../utils/user-access.js');
    const userLevel = getCachedUserLevel(event.pubkey);
    if (!hasUnlimitedAccess(userLevel?.level)) {
      throw new Error(`Repository creation requires unlimited access. User has level: ${userLevel?.level || 'none'}`);
    }
  }

  // If there are other clone URLs, sync from them first (for existing repos)
  // so we operate on the latest state before touching announcements.
  const otherUrls = cloneUrls.filter(url => !url.includes(this.domain));
  if (otherUrls.length > 0 && repoExists) {
    const remoteUrls = this.urlParser.prepareRemoteUrls(otherUrls);
    await this.remoteSync.syncFromRemotes(repoPath.fullPath, remoteUrls);
  }

  // Validate announcement event before proceeding
  const validation = validateAnnouncementEvent(event, repoPath.repoName);
  if (!validation.valid) {
    throw new Error(`Invalid announcement event: ${validation.error}`);
  }

  if (isNewRepo) {
    // Use simple-git to create the bare repo (safer than exec)
    const git = simpleGit();
    await git.init(['--bare', repoPath.fullPath]);

    // Ensure announcement event is saved to nostr/repo-events.jsonl in the repository
    await this.announcementManager.ensureAnnouncementInRepo(repoPath.fullPath, event, selfTransferEvent);

    // If there are other clone URLs, sync from them after creating the repo
    if (otherUrls.length > 0) {
      const remoteUrls = this.urlParser.prepareRemoteUrls(otherUrls);
      await this.remoteSync.syncFromRemotes(repoPath.fullPath, remoteUrls);
    } else {
      // No external URLs - this is a brand new repo, create initial branch and README
      await this.createInitialBranchAndReadme(repoPath.fullPath, repoPath.npub, repoPath.repoName, event);
    }
  } else {
    // For existing repos, check if announcement exists in repo.
    // If not, try to fetch from relays and save it.
    const hasAnnouncement = await this.announcementManager.hasAnnouncementInRepoFile(repoPath.fullPath);
    if (!hasAnnouncement) {
      // Try to fetch from relays
      const fetchedEvent = await this.announcementManager.fetchAnnouncementFromRelays(event.pubkey, repoPath.repoName);
      if (fetchedEvent) {
        // Save fetched announcement to repo
        await this.announcementManager.ensureAnnouncementInRepo(repoPath.fullPath, fetchedEvent, selfTransferEvent);
      } else {
        // Announcement not found in repo or relays - this is a problem
        logger.warn({ repoPath: repoPath.fullPath }, 'Existing repo has no announcement in repo or on relays');
      }
    }

    if (selfTransferEvent) {
      // Ensure self-transfer event is also saved.
      // NOTE(review): when the fetched-announcement branch above already ran,
      // this performs a second clone/commit cycle; it is skipped internally if
      // the events are already present, so behavior is idempotent but redundant.
      await this.announcementManager.ensureAnnouncementInRepo(repoPath.fullPath, event, selfTransferEvent);
    }
  }
}
||||
|
||||
/** |
||||
* Create initial branch and README.md for a new repository |
||||
*/ |
||||
private async createInitialBranchAndReadme( |
||||
repoPath: string, |
||||
npub: string, |
||||
repoName: string, |
||||
announcementEvent: NostrEvent |
||||
): Promise<void> { |
||||
try { |
||||
// Get default branch from environment or use 'master'
|
||||
const defaultBranch = process.env.DEFAULT_BRANCH || 'master'; |
||||
|
||||
// Get repo name from d-tag or use repoName from path
|
||||
const dTag = announcementEvent.tags.find(t => t[0] === 'd')?.[1] || repoName; |
||||
|
||||
// Get name tag for README title, fallback to d-tag
|
||||
const nameTag = announcementEvent.tags.find(t => t[0] === 'name')?.[1] || dTag; |
||||
|
||||
// Get author info from user profile (fetch from relays)
|
||||
const { fetchUserProfile, extractProfileData, getUserName, getUserEmail } = await import('../../utils/user-profile.js'); |
||||
const { nip19 } = await import('nostr-tools'); |
||||
const { DEFAULT_NOSTR_RELAYS } = await import('../../config.js'); |
||||
const userNpub = nip19.npubEncode(announcementEvent.pubkey); |
||||
|
||||
const profileEvent = await fetchUserProfile(announcementEvent.pubkey, DEFAULT_NOSTR_RELAYS); |
||||
const profile = extractProfileData(profileEvent); |
||||
const authorName = getUserName(profile, announcementEvent.pubkey, userNpub); |
||||
const authorEmail = getUserEmail(profile, announcementEvent.pubkey, userNpub); |
||||
|
||||
// Create README.md content
|
||||
const readmeContent = `# ${nameTag} |
||||
|
||||
Welcome to your new GitRepublic repo. |
||||
|
||||
You can use this read-me file to explain the purpose of this repo to everyone who looks at it. You can also make a ReadMe.adoc file and delete this one, if you prefer. GitRepublic supports both markups. |
||||
|
||||
Your commits will all be signed by your Nostr keys and saved to the event files in the ./nostr folder. |
||||
`;
|
||||
|
||||
// Use FileManager to create the initial branch and files
|
||||
const { FileManager } = await import('./file-manager.js'); |
||||
const fileManager = new FileManager(this.repoRoot); |
||||
|
||||
// For a new repo with no branches, we need to create an orphan branch first
|
||||
// Check if repo has any branches
|
||||
const git = simpleGit(repoPath); |
||||
let hasBranches = false; |
||||
try { |
||||
const branches = await git.branch(['-a']); |
||||
hasBranches = branches.all.length > 0; |
||||
} catch { |
||||
// No branches exist
|
||||
hasBranches = false; |
||||
} |
||||
|
||||
if (!hasBranches) { |
||||
// Create orphan branch first (pass undefined for fromBranch to create orphan)
|
||||
await fileManager.createBranch(npub, repoName, defaultBranch, undefined); |
||||
} |
||||
|
||||
// Create both README.md and announcement in the initial commit
|
||||
// We'll use a worktree to write both files and commit them together
|
||||
const workDir = await fileManager.getWorktree(repoPath, defaultBranch, npub, repoName); |
||||
const { writeFile: writeFileFs } = await import('fs/promises'); |
||||
const { join } = await import('path'); |
||||
|
||||
// Write README.md
|
||||
const readmePath = join(workDir, 'README.md'); |
||||
await writeFileFs(readmePath, readmeContent, 'utf-8'); |
||||
|
||||
// Save repo announcement event to nostr/repo-events.jsonl (only if not already present)
|
||||
const announcementSaved = await this.announcementManager.saveRepoEventToWorktree(workDir, announcementEvent, 'announcement', true); |
||||
|
||||
// Stage files
|
||||
const workGit = simpleGit(workDir); |
||||
const filesToAdd: string[] = ['README.md']; |
||||
if (announcementSaved) { |
||||
filesToAdd.push('nostr/repo-events.jsonl'); |
||||
} |
||||
await workGit.add(filesToAdd); |
||||
|
||||
// Commit files together
|
||||
await workGit.commit('Initial commit', filesToAdd, { |
||||
'--author': `${authorName} <${authorEmail}>` |
||||
}); |
||||
|
||||
// Clean up worktree
|
||||
await fileManager.removeWorktree(repoPath, workDir); |
||||
|
||||
logger.info({ npub, repoName, branch: defaultBranch }, 'Created initial branch and README.md'); |
||||
} catch (err) { |
||||
// Log but don't fail - initial README creation is nice-to-have
|
||||
const sanitizedErr = sanitizeError(err); |
||||
logger.warn({ error: sanitizedErr, repoPath, npub, repoName }, 'Failed to create initial branch and README, continuing anyway'); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Sync repository from multiple remote URLs (parallelized for efficiency) |
||||
*/ |
||||
async syncFromRemotes(repoPath: string, remoteUrls: string[]): Promise<void> { |
||||
await this.remoteSync.syncFromRemotes(repoPath, remoteUrls); |
||||
} |
||||
|
||||
/** |
||||
* Sync repository to multiple remote URLs after a push (parallelized with retry) |
||||
*/ |
||||
async syncToRemotes(repoPath: string, remoteUrls: string[]): Promise<void> { |
||||
await this.remoteSync.syncToRemotes(repoPath, remoteUrls); |
||||
} |
||||
|
||||
/** |
||||
* Check if a repository exists |
||||
*/ |
||||
repoExists(repoPath: string): boolean { |
||||
return existsSync(repoPath); |
||||
} |
||||
|
||||
  /**
   * Fetch repository on-demand from remote clone URLs
   * This allows displaying repositories that haven't been provisioned yet
   *
   * @param npub - Repository owner npub
   * @param repoName - Repository name
   * @param announcementEvent - The Nostr repo announcement event (optional, will fetch if not provided)
   * @returns result object: `success` means the repo is present locally;
   *   `needsAnnouncement` is set when no announcement could be located in the
   *   repo or on relays; `error`, `cloneUrls` and `remoteUrls` carry
   *   diagnostics when the fetch failed
   */
  async fetchRepoOnDemand(
    npub: string,
    repoName: string,
    announcementEvent?: NostrEvent
  ): Promise<{ success: boolean; needsAnnouncement?: boolean; announcement?: NostrEvent; error?: string; cloneUrls?: string[]; remoteUrls?: string[] }> {
    // Local bare-repo layout: {repoRoot}/{npub}/{repoName}.git
    const repoPath = join(this.repoRoot, npub, `${repoName}.git`);

    // If repo already exists, check if it has an announcement
    if (existsSync(repoPath)) {
      const hasAnnouncement = await this.announcementManager.hasAnnouncementInRepoFile(repoPath);
      if (hasAnnouncement) {
        return { success: true };
      }

      // Repo exists but no announcement - try to fetch from relays
      const { requireNpubHex: requireNpubHexUtil } = await import('../../utils/npub-utils.js');
      const repoOwnerPubkey = requireNpubHexUtil(npub);
      const fetchedAnnouncement = await this.announcementManager.fetchAnnouncementFromRelays(repoOwnerPubkey, repoName);
      if (fetchedAnnouncement) {
        // Save fetched announcement to repo
        await this.announcementManager.ensureAnnouncementInRepo(repoPath, fetchedAnnouncement);
        return { success: true, announcement: fetchedAnnouncement };
      }

      // Repo exists but no announcement found - needs announcement
      return { success: false, needsAnnouncement: true };
    }

    // If no announcement provided, try to fetch from relays.
    // After this guard, announcementEvent is always defined.
    if (!announcementEvent) {
      const { requireNpubHex: requireNpubHexUtil } = await import('../../utils/npub-utils.js');
      const repoOwnerPubkey = requireNpubHexUtil(npub);
      const fetchedAnnouncement = await this.announcementManager.fetchAnnouncementFromRelays(repoOwnerPubkey, repoName);
      if (fetchedAnnouncement) {
        announcementEvent = fetchedAnnouncement;
      } else {
        // No announcement found - needs announcement
        return { success: false, needsAnnouncement: true };
      }
    }

    // Check if repository is public
    const isPublic = !checkIsPrivateRepo(announcementEvent);

    // Security: For public repos, allow on-demand fetching regardless of owner's access level
    // For private repos, require owner to have unlimited access to prevent unauthorized creation
    if (!isPublic) {
      const { getCachedUserLevel } = await import('../security/user-level-cache.js');
      const { hasUnlimitedAccess } = await import('../../utils/user-access.js');
      const userLevel = getCachedUserLevel(announcementEvent.pubkey);
      if (!hasUnlimitedAccess(userLevel?.level)) {
        // Pubkey is truncated in logs to avoid noisy full-key output
        logger.warn({
          npub,
          repoName,
          pubkey: announcementEvent.pubkey.slice(0, 16) + '...',
          level: userLevel?.level || 'none'
        }, 'Skipping on-demand repo fetch: private repo requires owner with unlimited access');
        return { success: false, needsAnnouncement: false };
      }
    } else {
      logger.info({
        npub,
        repoName,
        pubkey: announcementEvent.pubkey.slice(0, 16) + '...'
      }, 'Allowing on-demand fetch for public repository');
    }

    // Extract clone URLs and prepare remote URLs
    const cloneUrls = this.urlParser.extractCloneUrls(announcementEvent);
    // Declared outside the try so the catch block can include it in diagnostics
    let remoteUrls: string[] = [];

    try {
      // Prepare remote URLs (filters out localhost/our domain, converts SSH to HTTPS)
      remoteUrls = this.urlParser.prepareRemoteUrls(cloneUrls);

      if (remoteUrls.length === 0) {
        logger.warn({ npub, repoName, cloneUrls, announcementEventId: announcementEvent.id }, 'No remote clone URLs found for on-demand fetch');
        return { success: false, needsAnnouncement: false };
      }

      logger.debug({ npub, repoName, cloneUrls, remoteUrls, isPublic }, 'On-demand fetch details');

      // Check if repoRoot exists and is writable
      if (!existsSync(this.repoRoot)) {
        try {
          mkdirSync(this.repoRoot, { recursive: true });
          logger.info({ repoRoot: this.repoRoot }, 'Created repos root directory');
        } catch (err) {
          const error = err instanceof Error ? err : new Error(String(err));
          logger.error({
            repoRoot: this.repoRoot,
            error: error.message
          }, 'Failed to create repos root directory');
          throw new Error(`Cannot create repos root directory at ${this.repoRoot}. Please check permissions: ${error.message}`);
        }
      } else {
        // Check if repoRoot is writable (fail fast with an actionable message)
        try {
          accessSync(this.repoRoot, constants.W_OK);
        } catch (err) {
          const error = err instanceof Error ? err : new Error(String(err));
          logger.error({
            repoRoot: this.repoRoot,
            error: error.message
          }, 'Repos root directory is not writable');
          throw new Error(`Repos root directory at ${this.repoRoot} is not writable. Please fix permissions (e.g., chmod 755 ${this.repoRoot} or chown to the correct user).`);
        }
      }

      // Create per-owner directory under the repo root
      const repoDir = join(this.repoRoot, npub);
      if (!existsSync(repoDir)) {
        try {
          mkdirSync(repoDir, { recursive: true });
        } catch (err) {
          const error = err instanceof Error ? err : new Error(String(err));
          // Permission failures get a tailored message; everything else is rethrown as-is
          if (error.message.includes('EACCES') || error.message.includes('permission denied')) {
            logger.error({
              npub,
              repoName,
              repoDir,
              repoRoot: this.repoRoot,
              error: error.message
            }, 'Permission denied when creating repository directory');
            throw new Error(`Permission denied: Cannot create repository directory at ${repoDir}. Please check that the server has write permissions to ${this.repoRoot}.`);
          }
          throw error;
        }
      }

      // Get git environment for URL (handles Tor proxy, etc.)
      // NOTE(review): only remoteUrls[0] is attempted; later URLs are not tried
      // as fallbacks on clone failure - confirm this is intentional.
      const gitEnv = this.getGitEnvForUrl(remoteUrls[0]);

      // Inject authentication token if available
      const authenticatedUrl = this.injectAuthToken(remoteUrls[0]);

      // Log if we're using authentication (but don't log the token - the
      // un-authenticated sourceUrl is what goes into the log record)
      const isAuthenticated = authenticatedUrl !== remoteUrls[0];
      logger.info({
        npub,
        repoName,
        sourceUrl: remoteUrls[0],
        cloneUrls,
        authenticated: isAuthenticated
      }, 'Fetching repository on-demand from remote');

      // Clone as bare repository; the spawn is wrapped in a Promise so the
      // surrounding try/catch sees clone failures as rejections
      await new Promise<void>((resolve, reject) => {
        const cloneProcess = spawn('git', ['clone', '--bare', authenticatedUrl, repoPath], {
          env: gitEnv,
          stdio: ['ignore', 'pipe', 'pipe']
        });

        // Capture both streams for diagnostics; git writes progress to stderr
        let stderr = '';
        let stdout = '';
        cloneProcess.stderr.on('data', (chunk: Buffer) => {
          stderr += chunk.toString();
        });
        cloneProcess.stdout.on('data', (chunk: Buffer) => {
          stdout += chunk.toString();
        });

        cloneProcess.on('close', (code) => {
          if (code === 0) {
            logger.info({ npub, repoName, sourceUrl: remoteUrls[0] }, 'Successfully cloned repository');
            resolve();
          } else {
            const errorMsg = `Git clone failed with code ${code}: ${stderr || stdout}`;
            logger.error({
              npub,
              repoName,
              sourceUrl: remoteUrls[0],
              code,
              stderr,
              stdout,
              authenticated: isAuthenticated
            }, 'Git clone failed');
            reject(new Error(errorMsg));
          }
        });

        // 'error' fires when the process could not be spawned at all
        cloneProcess.on('error', (err) => {
          logger.error({
            npub,
            repoName,
            sourceUrl: remoteUrls[0],
            error: err,
            authenticated: isAuthenticated
          }, 'Git clone process error');
          reject(err);
        });
      });

      // Verify the repository was actually created
      if (!existsSync(repoPath)) {
        throw new Error('Repository clone completed but repository path does not exist');
      }

      // Ensure announcement is saved to nostr/repo-events.jsonl (non-blocking - repo is usable without it)
      try {
        await this.announcementManager.ensureAnnouncementInRepo(repoPath, announcementEvent);
      } catch (verifyError) {
        // Announcement file creation is optional - log but don't fail
        logger.warn({ error: verifyError, npub, repoName }, 'Failed to ensure announcement in repo, but repository is usable');
      }

      logger.info({ npub, repoName }, 'Successfully fetched repository on-demand');
      return { success: true, announcement: announcementEvent };
    } catch (error) {
      const sanitizedError = sanitizeError(error);
      const errorMessage = error instanceof Error ? error.message : String(error);
      logger.error({
        error: sanitizedError,
        npub,
        repoName,
        cloneUrls,
        isPublic,
        remoteUrls,
        errorMessage
      }, 'Failed to fetch repository on-demand');
      return {
        success: false,
        needsAnnouncement: false,
        error: errorMessage,
        cloneUrls,
        remoteUrls
      };
    }
  }
||||
|
||||
/** |
||||
* Get repository size in bytes |
||||
* Returns the total size of the repository directory |
||||
*/ |
||||
async getRepoSize(repoPath: string): Promise<number> { |
||||
return this.sizeChecker.getRepoSize(repoPath); |
||||
} |
||||
|
||||
/** |
||||
* Check if repository size exceeds the maximum (2 GB) |
||||
*/ |
||||
async checkRepoSizeLimit(repoPath: string, maxSizeBytes: number = 2 * 1024 * 1024 * 1024): Promise<{ withinLimit: boolean; currentSize: number; maxSize: number; error?: string }> { |
||||
return this.sizeChecker.checkRepoSizeLimit(repoPath, maxSizeBytes); |
||||
} |
||||
|
||||
/** |
||||
* Get git environment variables with Tor proxy if needed for .onion addresses |
||||
* Security: Only whitelist necessary environment variables |
||||
*/ |
||||
private getGitEnvForUrl(url: string): Record<string, string> { |
||||
// Whitelist only necessary environment variables for security
|
||||
const env: Record<string, string> = { |
||||
PATH: process.env.PATH || '/usr/bin:/bin', |
||||
HOME: process.env.HOME || '/tmp', |
||||
USER: process.env.USER || 'git', |
||||
LANG: process.env.LANG || 'C.UTF-8', |
||||
LC_ALL: process.env.LC_ALL || 'C.UTF-8', |
||||
}; |
||||
|
||||
// Add TZ if set (for consistent timestamps)
|
||||
if (process.env.TZ) { |
||||
env.TZ = process.env.TZ; |
||||
} |
||||
|
||||
if (shouldUseTor(url)) { |
||||
const proxy = getTorProxy(); |
||||
if (proxy) { |
||||
// Git uses GIT_PROXY_COMMAND for proxy support
|
||||
const proxyCommand = `sh -c 'exec socat - SOCKS5:${proxy.host}:${proxy.port}:\\$1:\\$2' || sh -c 'exec nc -X 5 -x ${proxy.host}:${proxy.port} \\$1 \\$2'`; |
||||
env.GIT_PROXY_COMMAND = proxyCommand; |
||||
env.ALL_PROXY = `socks5://${proxy.host}:${proxy.port}`; |
||||
|
||||
// For HTTP/HTTPS URLs, also set http_proxy and https_proxy
|
||||
try { |
||||
const urlObj = new URL(url); |
||||
if (urlObj.protocol === 'http:' || urlObj.protocol === 'https:') { |
||||
env.http_proxy = `socks5://${proxy.host}:${proxy.port}`; |
||||
env.https_proxy = `socks5://${proxy.host}:${proxy.port}`; |
||||
} |
||||
} catch { |
||||
// URL parsing failed, skip proxy env vars
|
||||
} |
||||
} |
||||
} |
||||
|
||||
return env; |
||||
} |
||||
|
||||
/** |
||||
* Inject authentication token into a git URL if needed |
||||
* Supports GitHub tokens via GITHUB_TOKEN environment variable |
||||
* Returns the original URL if no token is needed or available |
||||
*/ |
||||
private injectAuthToken(url: string): string { |
||||
try { |
||||
const urlObj = new URL(url); |
||||
|
||||
// If URL already has credentials, don't modify it
|
||||
if (urlObj.username) { |
||||
return url; |
||||
} |
||||
|
||||
// Check for GitHub token
|
||||
if (urlObj.hostname === 'github.com' || urlObj.hostname.endsWith('.github.com')) { |
||||
const githubToken = process.env.GITHUB_TOKEN; |
||||
if (githubToken) { |
||||
// Inject token into URL: https://token@github.com/user/repo.git
|
||||
urlObj.username = githubToken; |
||||
urlObj.password = ''; // GitHub uses token as username, password is empty
|
||||
return urlObj.toString(); |
||||
} |
||||
} |
||||
|
||||
// Add support for other git hosting services here if needed
|
||||
// e.g., GitLab: GITLAB_TOKEN, Gitea: GITEA_TOKEN, etc.
|
||||
|
||||
return url; |
||||
} catch { |
||||
// URL parsing failed, return original URL
|
||||
return url; |
||||
} |
||||
} |
||||
} |
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,79 @@
@@ -0,0 +1,79 @@
|
||||
/** |
||||
* Repository Size Checker |
||||
* Handles checking repository sizes and enforcing limits |
||||
*/ |
||||
|
||||
import { existsSync, statSync } from 'fs';
import { readdir, stat } from 'fs/promises';
import { join } from 'path';
import logger from '../logger.js';
||||
|
||||
/** |
||||
* Repository Size Checker |
||||
* Handles checking repository sizes and enforcing limits |
||||
*/ |
||||
export class RepoSizeChecker { |
||||
/** |
||||
* Get repository size in bytes |
||||
* Returns the total size of the repository directory |
||||
*/ |
||||
async getRepoSize(repoPath: string): Promise<number> { |
||||
if (!existsSync(repoPath)) { |
||||
return 0; |
||||
} |
||||
|
||||
let totalSize = 0; |
||||
|
||||
async function calculateSize(dirPath: string): Promise<number> { |
||||
let size = 0; |
||||
try { |
||||
const entries = await readdir(dirPath, { withFileTypes: true }); |
||||
|
||||
for (const entry of entries) { |
||||
const fullPath = join(dirPath, entry.name); |
||||
|
||||
if (entry.isDirectory()) { |
||||
size += await calculateSize(fullPath); |
||||
} else if (entry.isFile()) { |
||||
try { |
||||
const stats = statSync(fullPath); |
||||
size += stats.size; |
||||
} catch { |
||||
// Ignore errors accessing files
|
||||
} |
||||
} |
||||
} |
||||
} catch { |
||||
// Ignore errors accessing directories
|
||||
} |
||||
return size; |
||||
} |
||||
|
||||
totalSize = await calculateSize(repoPath); |
||||
return totalSize; |
||||
} |
||||
|
||||
/** |
||||
* Check if repository size exceeds the maximum (2 GB) |
||||
*/ |
||||
async checkRepoSizeLimit(repoPath: string, maxSizeBytes: number = 2 * 1024 * 1024 * 1024): Promise<{ withinLimit: boolean; currentSize: number; maxSize: number; error?: string }> { |
||||
try { |
||||
const currentSize = await this.getRepoSize(repoPath); |
||||
const withinLimit = currentSize <= maxSizeBytes; |
||||
|
||||
return { |
||||
withinLimit, |
||||
currentSize, |
||||
maxSize: maxSizeBytes, |
||||
...(withinLimit ? {} : { error: `Repository size (${(currentSize / 1024 / 1024 / 1024).toFixed(2)} GB) exceeds maximum (${(maxSizeBytes / 1024 / 1024 / 1024).toFixed(2)} GB)` }) |
||||
}; |
||||
} catch (error) { |
||||
return { |
||||
withinLimit: false, |
||||
currentSize: 0, |
||||
maxSize: maxSizeBytes, |
||||
error: `Failed to check repository size: ${error instanceof Error ? error.message : String(error)}` |
||||
}; |
||||
} |
||||
} |
||||
} |
||||
@ -0,0 +1,152 @@
@@ -0,0 +1,152 @@
|
||||
/** |
||||
* Repository URL Parser |
||||
* Handles parsing and validation of repository URLs |
||||
*/ |
||||
|
||||
import { join } from 'path'; |
||||
import { GIT_DOMAIN } from '../../config.js'; |
||||
import { extractCloneUrls } from '../../utils/nostr-utils.js'; |
||||
import type { NostrEvent } from '../../types/nostr.js'; |
||||
|
||||
/**
 * Parsed on-disk location of a repository, derived from a clone URL.
 */
export interface RepoPath {
  // Bech32 npub of the repository owner (path segment under the repo root)
  npub: string;
  // Repository name without the trailing `.git`
  repoName: string;
  // Filesystem path: {repoRoot}/{npub}/{repoName}.git
  fullPath: string;
}
||||
|
||||
/** |
||||
* Check if a URL is a GRASP (Git Repository Access via Secure Protocol) URL |
||||
* GRASP URLs contain npub (Nostr public key) in the path: https://host/npub.../repo.git
|
||||
*/ |
||||
export function isGraspUrl(url: string): boolean { |
||||
// GRASP URLs have npub (starts with npub1) in the path
|
||||
return /\/npub1[a-z0-9]+/i.test(url); |
||||
} |
||||
|
||||
/** |
||||
* Repository URL Parser |
||||
* Handles parsing git domain URLs and extracting repository information |
||||
*/ |
||||
export class RepoUrlParser { |
||||
private repoRoot: string; |
||||
private domain: string; |
||||
|
||||
constructor(repoRoot: string = '/repos', domain: string = GIT_DOMAIN) { |
||||
this.repoRoot = repoRoot; |
||||
this.domain = domain; |
||||
} |
||||
|
||||
/** |
||||
* Parse git domain URL to extract npub and repo name |
||||
*/ |
||||
parseRepoUrl(url: string): RepoPath | null { |
||||
// Match: https://{domain}/{npub}/{repo-name}.git or http://{domain}/{npub}/{repo-name}.git
|
||||
// Escape domain for regex (replace dots with \.)
|
||||
const escapedDomain = this.domain.replace(/\./g, '\\.'); |
||||
const match = url.match(new RegExp(`${escapedDomain}\\/(npub[a-z0-9]+)\\/([^\\/]+)\\.git`)); |
||||
if (!match) return null; |
||||
|
||||
const [, npub, repoName] = match; |
||||
const fullPath = join(this.repoRoot, npub, `${repoName}.git`); |
||||
|
||||
return { npub, repoName, fullPath }; |
||||
} |
||||
|
||||
/** |
||||
* Extract clone URLs from a NIP-34 repo announcement |
||||
* Uses shared utility with normalization enabled |
||||
*/ |
||||
extractCloneUrls(event: NostrEvent): string[] { |
||||
return extractCloneUrls(event, true); |
||||
} |
||||
|
||||
/** |
||||
* Convert SSH URL to HTTPS URL if possible |
||||
* e.g., git@github.com:user/repo.git -> https://github.com/user/repo.git
|
||||
*/ |
||||
convertSshToHttps(url: string): string | null { |
||||
// Check if it's an SSH URL (git@host:path or ssh://)
|
||||
const sshMatch = url.match(/^git@([^:]+):(.+)$/); |
||||
if (sshMatch) { |
||||
const [, host, path] = sshMatch; |
||||
// Remove .git suffix if present, we'll add it back
|
||||
const cleanPath = path.replace(/\.git$/, ''); |
||||
return `https://${host}/${cleanPath}.git`; |
||||
} |
||||
|
||||
// Check for ssh:// URLs
|
||||
if (url.startsWith('ssh://')) { |
||||
const sshUrlMatch = url.match(/^ssh:\/\/([^/]+)\/(.+)$/); |
||||
if (sshUrlMatch) { |
||||
const [, host, path] = sshUrlMatch; |
||||
const cleanPath = path.replace(/\.git$/, ''); |
||||
return `https://${host}/${cleanPath}.git`; |
||||
} |
||||
} |
||||
|
||||
return null; |
||||
} |
||||
|
||||
/** |
||||
* Filter and prepare remote URLs from clone URLs |
||||
* Respects the repo owner's order in the clone list |
||||
*/ |
||||
prepareRemoteUrls(cloneUrls: string[]): string[] { |
||||
const httpsUrls: string[] = []; |
||||
const sshUrls: string[] = []; |
||||
|
||||
for (const url of cloneUrls) { |
||||
const lowerUrl = url.toLowerCase(); |
||||
|
||||
// Skip localhost and our own domain
|
||||
if (lowerUrl.includes('localhost') ||
|
||||
lowerUrl.includes('127.0.0.1') ||
|
||||
url.includes(this.domain)) { |
||||
continue; |
||||
} |
||||
|
||||
// Check if it's an SSH URL
|
||||
if (url.startsWith('git@') || url.startsWith('ssh://')) { |
||||
sshUrls.push(url); |
||||
// Try to convert to HTTPS (preserve original order by appending)
|
||||
const httpsUrl = this.convertSshToHttps(url); |
||||
if (httpsUrl) { |
||||
httpsUrls.push(httpsUrl); |
||||
} |
||||
} else { |
||||
// It's already HTTPS/HTTP - preserve original order
|
||||
httpsUrls.push(url); |
||||
} |
||||
} |
||||
|
||||
// Respect the repo owner's order: use HTTPS URLs in the order they appeared in clone list
|
||||
let remoteUrls = httpsUrls; |
||||
|
||||
// If no HTTPS URLs, try SSH URLs (but log a warning)
|
||||
if (remoteUrls.length === 0 && sshUrls.length > 0) { |
||||
remoteUrls = sshUrls; |
||||
} |
||||
|
||||
// If no external URLs, try any URL that's not our domain (preserve order)
|
||||
if (remoteUrls.length === 0) { |
||||
remoteUrls = cloneUrls.filter(url => !url.includes(this.domain)); |
||||
} |
||||
|
||||
// If still no remote URLs, but there are *any* clone URLs, try the first one
|
||||
// This handles cases where the only clone URL is our own domain, but the repo doesn't exist locally yet
|
||||
if (remoteUrls.length === 0 && cloneUrls.length > 0) { |
||||
remoteUrls.push(cloneUrls[0]); |
||||
} |
||||
|
||||
return remoteUrls; |
||||
} |
||||
|
||||
/** |
||||
* Parse repo path to extract repo name (helper for verification file creation) |
||||
*/ |
||||
parseRepoPathForName(repoPath: string): { repoName: string } | null { |
||||
const match = repoPath.match(/\/([^\/]+)\.git$/); |
||||
if (!match) return null; |
||||
return { repoName: match[1] }; |
||||
} |
||||
} |
||||
Loading…
Reference in new issue