Compare commits

...

5 Commits

Author SHA1 Message Date
Silberengel e831e1f5b9 bug-fixes 2 weeks ago
Silberengel 02d4fa85dd bug-fixes and fallback relay 2 weeks ago
Silberengel 4465a1074a api refactor part 2 2 weeks ago
Silberengel f9988077e6 refactor API 2 weeks ago
Silberengel a8c9850b19 polling update 2 weeks ago
  1. 90
      docs/api-and-cli.md
  2. 18
      docs/editing-repos.md
  3. 16
      docs/repo-operations.md
  4. 4
      nostr/commit-signatures.jsonl
  5. 219
      server-maintenance-commands.md
  6. 6
      src/hooks.server.ts
  7. 7
      src/lib/components/PRDetail.svelte
  8. 14
      src/lib/config.ts
  9. 203
      src/lib/services/nostr/nostr-client.ts
  10. 39
      src/lib/services/nostr/repo-polling.ts
  11. 19
      src/lib/services/service-registry.ts
  12. 30
      src/lib/utils/repo-poll-trigger.ts
  13. 22
      src/routes/+layout.svelte
  14. 329
      src/routes/api/code-search/+server.ts
  15. 2291
      src/routes/api/openapi.json/openapi.json
  16. 503
      src/routes/api/repos/[npub]/[repo]/+server.ts
  17. 22
      src/routes/api/repos/[npub]/[repo]/archive/+server.ts
  18. 7
      src/routes/api/repos/[npub]/[repo]/branches/default/+server.ts
  19. 125
      src/routes/api/repos/[npub]/[repo]/clone-urls/+server.ts
  20. 210
      src/routes/api/repos/[npub]/[repo]/code-search/+server.ts
  21. 6
      src/routes/api/repos/[npub]/[repo]/commits/[hash]/verification/+server.ts
  22. 12
      src/routes/api/repos/[npub]/[repo]/diffs/+server.ts
  23. 573
      src/routes/api/repos/[npub]/[repo]/file/+server.ts
  24. 1194
      src/routes/api/repos/[npub]/[repo]/files/+server.ts
  25. 543
      src/routes/api/repos/[npub]/[repo]/fork/+server.ts
  26. 457
      src/routes/api/repos/[npub]/[repo]/forks/+server.ts
  27. 371
      src/routes/api/repos/[npub]/[repo]/maintainers/+server.ts
  28. 23
      src/routes/api/repos/[npub]/[repo]/patches/[id]/application/+server.ts
  29. 111
      src/routes/api/repos/[npub]/[repo]/prs/merge/+server.ts
  30. 43
      src/routes/api/repos/[npub]/[repo]/prs/update/+server.ts
  31. 56
      src/routes/api/repos/[npub]/[repo]/pull-requests/+server.ts
  32. 136
      src/routes/api/repos/[npub]/[repo]/pull-requests/[id]/+server.ts
  33. 38
      src/routes/api/repos/[npub]/[repo]/pull-requests/[id]/merge/+server.ts
  34. 153
      src/routes/api/repos/[npub]/[repo]/raw/+server.ts
  35. 130
      src/routes/api/repos/[npub]/[repo]/transfers/+server.ts
  36. 327
      src/routes/api/repos/[npub]/[repo]/tree/+server.ts
  37. 56
      src/routes/api/repos/[npub]/[repo]/verification/+server.ts
  38. 33
      src/routes/api/repos/poll/+server.ts
  39. 328
      src/routes/api/search/+server.ts
  40. 7
      src/routes/api/user/level/+server.ts
  41. 48
      src/routes/docs/[slug]/+page.svelte
  42. 29
      src/routes/repos/+page.svelte
  43. 9
      src/routes/repos/[npub]/[repo]/+page.svelte
  44. 45
      src/routes/repos/[npub]/[repo]/components/DocsTab.svelte
  45. 2
      src/routes/repos/[npub]/[repo]/components/DocsViewer.svelte
  46. 2
      src/routes/repos/[npub]/[repo]/components/dialogs/CreateReleaseDialog.svelte
  47. 6
      src/routes/repos/[npub]/[repo]/hooks/use-repo-api.ts
  48. 2
      src/routes/repos/[npub]/[repo]/services/branch-operations.ts
  49. 5
      src/routes/repos/[npub]/[repo]/services/code-search-operations.ts
  50. 27
      src/routes/repos/[npub]/[repo]/services/commit-operations.ts
  51. 37
      src/routes/repos/[npub]/[repo]/services/file-operations.ts
  52. 2
      src/routes/repos/[npub]/[repo]/services/pr-operations.ts
  53. 12
      src/routes/repos/[npub]/[repo]/services/repo-operations.ts
  54. 2
      src/routes/repos/[npub]/[repo]/utils/download.ts
  55. 2
      src/routes/repos/[npub]/[repo]/utils/file-processing.ts

90
docs/api-and-cli.md

@@ -34,66 +34,80 @@ View interactive documentation at `/api/openapi.json` or use any OpenAPI viewer.
#### Repository Management
- `GET /api/repos/list` - List all repositories
- `GET /api/repos/local` - List local repositories
- `GET /api/repos/list?domain={domain}` - List all registered repositories (optionally filter by domain)
- `GET /api/repos/local` - List local repositories (cloned on this server)
- `GET /api/repos/{npub}/{repo}` - Get repository information (with optional `?include=settings,maintainers,access,verification`)
- `PUT /api/repos/{npub}/{repo}` - Replace repository (full update)
- `PATCH /api/repos/{npub}/{repo}` - Partial update repository
- `DELETE /api/repos/{npub}/{repo}` - Delete repository
- `GET /api/repos/{npub}/{repo}/settings` - Get repository settings
- `POST /api/repos/{npub}/{repo}/settings` - Update repository settings
- `GET /api/repos/{npub}/{repo}/maintainers` - Get maintainers
- `POST /api/repos/{npub}/{repo}/maintainers` - Add maintainer
- `DELETE /api/repos/{npub}/{repo}/maintainers` - Remove maintainer
- `POST /api/repos/{npub}/{repo}/fork` - Fork repository
- `GET /api/repos/{npub}/{repo}/maintainers` - List maintainers
- `POST /api/repos/{npub}/{repo}/maintainers` - Add maintainer (body: `{ maintainer: "npub..." }`)
- `DELETE /api/repos/{npub}/{repo}/maintainers/{npub}` - Remove maintainer
- `GET /api/repos/{npub}/{repo}/forks` - Get fork information
- `POST /api/repos/{npub}/{repo}/forks` - Fork repository
- `DELETE /api/repos/{npub}/{repo}/delete` - Delete repository
- `POST /api/repos/{npub}/{repo}/transfer` - Transfer ownership
- `POST /api/repos/{npub}/{repo}/clone` - Clone to server
- `GET /api/repos/{npub}/{repo}/transfers` - Get ownership transfer history
- `POST /api/repos/{npub}/{repo}/transfers` - Transfer ownership
- `POST /api/repos/{npub}/{repo}/clone` - Clone repository to server
- `GET /api/repos/{npub}/{repo}/verification` - Verify repository ownership
- `POST /api/repos/{npub}/{repo}/verification` - Save announcement to repository for verification
- `GET /api/repos/{npub}/{repo}/validate` - Validate repository announcement
- `GET /api/repos/{npub}/{repo}/access` - Get repository access information
- `GET /api/repos/{npub}/{repo}/releases` - List releases
- `POST /api/repos/{npub}/{repo}/releases` - Create release
#### File Operations
- `GET /api/repos/{npub}/{repo}/file` - Get file content
- `POST /api/repos/{npub}/{repo}/file` - Create/update/delete file
- `GET /api/repos/{npub}/{repo}/tree` - List files and directories
- `GET /api/repos/{npub}/{repo}/raw` - Get raw file content
- `GET /api/repos/{npub}/{repo}/readme` - Get README content
- `GET /api/repos/{npub}/{repo}/files?path={path}&ref={ref}` - Get file content (JSON format)
- `GET /api/repos/{npub}/{repo}/files?action=tree&path={path}&ref={ref}` - List files and directories
- `GET /api/repos/{npub}/{repo}/files?path={path}&format=raw&ref={ref}` - Get raw file content
- `POST /api/repos/{npub}/{repo}/files?path={path}` - Create file
- `PUT /api/repos/{npub}/{repo}/files?path={path}` - Update file (replace)
- `PATCH /api/repos/{npub}/{repo}/files?path={path}` - Partial update
- `DELETE /api/repos/{npub}/{repo}/files?path={path}` - Delete file
- `GET /api/repos/{npub}/{repo}/readme?ref={ref}` - Get README content
#### Git Operations
- `GET /api/repos/{npub}/{repo}/branches` - List branches
- `POST /api/repos/{npub}/{repo}/branches` - Create branch
- `POST /api/repos/{npub}/{repo}/branches` - Create branch (requires maintainer auth)
- `GET /api/repos/{npub}/{repo}/branches/default` - Get default branch
- `GET /api/repos/{npub}/{repo}/tags` - List tags
- `POST /api/repos/{npub}/{repo}/tags` - Create tag
- `POST /api/repos/{npub}/{repo}/tags` - Create tag (requires maintainer auth)
- `GET /api/repos/{npub}/{repo}/commits` - List commits
- `GET /api/repos/{npub}/{repo}/commits/{hash}/verify` - Verify commit signature
- `GET /api/repos/{npub}/{repo}/diff` - Get diff between commits
- `GET /api/repos/{npub}/{repo}/default-branch` - Get default branch
- `POST /api/repos/{npub}/{repo}/default-branch` - Set default branch
- `GET /api/repos/{npub}/{repo}/commits/{hash}/verification` - Verify commit signature
- `GET /api/repos/{npub}/{repo}/diffs?from={from}&to={to}&path={path}` - Get diff between commits
- `GET /api/repos/{npub}/{repo}/archive?format=zip|tar.gz&ref={ref}` - Download repository archive
#### Collaboration
- `GET /api/repos/{npub}/{repo}/prs` - List pull requests
- `POST /api/repos/{npub}/{repo}/prs` - Create pull request
- `PATCH /api/repos/{npub}/{repo}/prs` - Update PR status
- `POST /api/repos/{npub}/{repo}/prs/{prId}/merge` - Merge PR
- `GET /api/repos/{npub}/{repo}/pull-requests` - List pull requests
- `POST /api/repos/{npub}/{repo}/pull-requests` - Create pull request
- `GET /api/repos/{npub}/{repo}/pull-requests/{id}` - Get pull request
- `PATCH /api/repos/{npub}/{repo}/pull-requests/{id}` - Update PR status
- `POST /api/repos/{npub}/{repo}/pull-requests/{id}/merge` - Merge PR
- `GET /api/repos/{npub}/{repo}/issues` - List issues
- `POST /api/repos/{npub}/{repo}/issues` - Create issue
- `PATCH /api/repos/{npub}/{repo}/issues` - Update issue status
- `GET /api/repos/{npub}/{repo}/patches` - List patches
- `POST /api/repos/{npub}/{repo}/patches` - Create patch
- `PATCH /api/repos/{npub}/{repo}/patches` - Update patch status
- `POST /api/repos/{npub}/{repo}/patches/{patchId}/apply` - Apply patch
- `POST /api/repos/{npub}/{repo}/patches/{id}/application` - Apply patch
- `GET /api/repos/{npub}/{repo}/highlights` - List highlights/comments
- `POST /api/repos/{npub}/{repo}/highlights` - Create highlight/comment
#### Search and Discovery
- `GET /api/search` - Search repositories
- `GET /api/repos/{npub}/{repo}/code-search` - Search code in repository
- `GET /api/code-search` - Global code search
- `GET /api/repos/{npub}/{repo}/clone-urls/reachability` - Check clone URL reachability
- `GET /api/search?type=repos&q={query}` - Search repositories (default)
- `GET /api/search?type=code&q={query}&repo={npub}/{repo}` - Search code (optionally filter by repository)
- `GET /api/repos/{npub}/{repo}/clone-urls` - List clone URLs
- `POST /api/repos/{npub}/{repo}/clone-urls/reachability` - Check clone URL reachability
#### User Operations
- `GET /api/users/{npub}/profile` - Get user profile
- `GET /api/users/{npub}/repos` - Get user's repositories
- `GET /api/user/level` - Get user access level
- `POST /api/user/level` - Verify user access level (relay write access)
- `GET /api/user/git-dashboard` - Get git dashboard
- `GET /api/user/messaging-preferences` - Get messaging preferences
- `POST /api/user/messaging-preferences` - Update messaging preferences
@@ -102,6 +116,7 @@ View interactive documentation at `/api/openapi.json` or use any OpenAPI viewer.
- `GET /api/config` - Get server configuration
- `GET /api/tor/onion` - Get Tor .onion address
- `POST /api/repos/poll` - Trigger repository polling (provisions new repos from Nostr)
- `GET /api/transfers/pending` - Get pending ownership transfers
#### Git HTTP Backend
@@ -120,10 +135,19 @@ curl https://your-domain.com/api/repos/list
curl https://your-domain.com/api/repos/{npub}/{repo}/settings
# Create file (requires NIP-98 auth)
curl -X POST https://your-domain.com/api/repos/{npub}/{repo}/file \
curl -X POST "https://your-domain.com/api/repos/{npub}/{repo}/files?path=test.txt" \
-H "Authorization: Nostr <base64-event>" \
-H "Content-Type: application/json" \
-d '{"path": "test.txt", "content": "Hello", "commitMessage": "Add file", "branch": "main", "action": "write"}'
-d '{"content": "Hello", "commitMessage": "Add file", "branch": "main"}'
# Get file content
curl "https://your-domain.com/api/repos/{npub}/{repo}/files?path=test.txt&ref=main"
# List files (tree view)
curl "https://your-domain.com/api/repos/{npub}/{repo}/files?action=tree&ref=main"
# Get raw file content
curl "https://your-domain.com/api/repos/{npub}/{repo}/files?path=test.txt&format=raw&ref=main"
```
## Command Line Interface (CLI)

18
docs/editing-repos.md

@@ -42,9 +42,9 @@ git push origin feature/new-feature
### Default Branch
The default branch (usually `main`) can be changed via:
The default branch (usually `main`) can be viewed via:
- **Web Interface**: Repository settings
- **API**: `POST /api/repos/{npub}/{repo}/default-branch`
- **API**: `GET /api/repos/{npub}/{repo}/branches/default`
## File Management
@@ -60,7 +60,7 @@ The default branch (usually `main`) can be changed via:
#### Via API
```bash
GET /api/repos/{npub}/{repo}/file?path={file-path}&branch={branch}
GET /api/repos/{npub}/{repo}/files?path={file-path}&ref={branch}
```
#### Via CLI
@@ -84,13 +84,11 @@ gitrep file get <npub> <repo> <path> [branch]
#### Via API
```bash
POST /api/repos/{npub}/{repo}/file
POST /api/repos/{npub}/{repo}/files?path=file.txt
{
"path": "file.txt",
"content": "File content",
"commitMessage": "Add file",
"branch": "main",
"action": "write"
"branch": "main"
}
```
@@ -112,12 +110,10 @@ gitrep file put <npub> <repo> <path> [file] [message] [branch]
#### Via API
```bash
POST /api/repos/{npub}/{repo}/file
DELETE /api/repos/{npub}/{repo}/files?path=file.txt
{
"path": "file.txt",
"commitMessage": "Remove file",
"branch": "main",
"action": "delete"
"branch": "main"
}
```

16
docs/repo-operations.md

@@ -92,8 +92,22 @@ The transfer process:
### Via CLI
Ownership transfers are done via the API. Use the publish command to create an ownership transfer event:
```bash
gitrep publish ownership-transfer <repo> <new-owner-npub> [--self-transfer]
```
Or use the API directly:
```bash
gitrep repos transfer <npub> <repo> <new-owner-npub>
# Get transfer history
curl https://{domain}/api/repos/{npub}/{repo}/transfers
# Initiate transfer (requires NIP-98 auth)
curl -X POST https://{domain}/api/repos/{npub}/{repo}/transfers \
-H "Authorization: Nostr <base64-event>" \
-H "Content-Type: application/json" \
-d '{"transferEvent": {...}}'
```
**Important**: Ownership transfers are permanent and create a chain of ownership events. The new owner will have full control.

4
nostr/commit-signatures.jsonl

@@ -114,3 +114,7 @@
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772223624,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"99cb543f1e821f1b7df4bbde2b3da3ab3a09cda7a1e9a537fe1b8df79b19e8e8","sig":"762a7ea92457ce81cc5aae9bc644fb9d80f90c7500035fbb506f2f76a5942333b828cc8a59f7656b0e714b15a59158be0a671f51476be2e8eabe9731ced74bcb"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772226191,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"20be97351d2b05fa7ad9e161b2619e9babaaffc6a8090057c1a3ac50a0f08d6a","sig":"a174c7dd39f613dd88260ef5c111b943df381b0acae20d048596e11ef1a6b0e3c1bfb9a8858af3df0f8858c4c79d1e2d03ad248a0608ac5d5cded6a81e99af77"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772227102,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fix"]],"content":"Signed commit: bug-fix","id":"0f366a0cc7c003f74e375f40e7c322781746d12829943df1287bf67f36e1330a","sig":"167177ccfeb053cd645e50e7d00450b847ecd65c305165777bcfbe39fd3f48ccc86b57fdd183d2a4b138d94d27d11e4f1c121d702b295d94b9aee0a8dc81a744"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772261455,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","fix zombie spawning on polling\nmake announcement commits non-blocking on repo provision"]],"content":"Signed commit: fix zombie spawning on polling\nmake announcement commits non-blocking on repo provision","id":"b0da119e7477b46f5d82be831693a92e117f25379476488f19351e2bac8f88b8","sig":"b8ca18e8215a9f5b3fc877ce113936c582353d44f8d03cdccd9f9ee70fb3e6fdd64db7cc6a3ca15339fb21b9ca87ea8471a38b587721a594a189d97cc2964ad9"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772264490,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","polling update"]],"content":"Signed commit: polling update","id":"42c1a2a63a4568c65d82d78701451b3b4363bdf9c8c57e804535b5f3f0d7b6fc","sig":"8e5f32ecb79da876ac41eba04c3b1541b21d039ae50d1b9fefa630d35f31c97dd29af64e4b695742fa7d4eaec17db8f4a066b4db99ce628aed596971975d4a87"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772267611,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","refactor API"]],"content":"Signed commit: refactor API","id":"934f8809638cea0bc7b8158fca959bc60880e0cae9ab8ff653687313adcd2f57","sig":"c9d8e5b821ae8182f8d39599c50fd0a4db6040ead1d8d83730a608a1d94d5078770a6ccbfc525a98691e98fabd9f9d24f0298680fb564c6b76c2f34bed9889b5"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772269280,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","api refactor part 2"]],"content":"Signed commit: api refactor part 2","id":"ece894a60057bba46ebd4ac0dca2aca55ffce05e44671fe07b29516809fc86f6","sig":"176706a271659834e441ea5eab4bb1480667dad4468fe8315803284f4a183debf595523dd33d0d3cabe0c35013f4a72b9169b5f10afefaf8a82a721d8b0f3b08"}

219
server-maintenance-commands.md

@ -1,219 +0,0 @@ @@ -1,219 +0,0 @@
# Server Maintenance Commands
## 1. Investigate Zombie Processes (CRITICAL - 3300 zombies)
```bash
# Find processes with zombie children
ps aux | awk '$8 ~ /^Z/ { print $2, $11 }' | head -20
# Find parent processes that are creating zombies
ps aux | awk '$8 ~ /^Z/ { print $3 }' | sort | uniq -c | sort -rn | head -10
# Check for specific problematic processes
ps auxf | grep -E 'Z|defunct'
# Check systemd services that might be spawning zombies
systemctl status | grep -i failed
systemctl list-units --type=service --state=failed
```
## 2. Identify the Root Cause (Git Processes Detected)
Based on initial investigation, zombies are `[git]` processes. Run these commands:
```bash
# Check all git processes (including zombies)
ps aux | grep -E 'git|\[git\]' | head -30
# Find what's spawning git processes
ps auxf | grep -B 5 -A 5 git | head -50
# Check for web server processes that might spawn git
ps aux | grep -E 'node|nginx|apache|php-fpm|plesk' | head -20
# Check system logs for git-related errors
journalctl -p err -n 100 | grep -i git
journalctl -u nginx -n 50
journalctl -u apache2 -n 50
# Check for processes with many children (potential zombie creators)
ps aux --sort=-%cpu | head -20
ps aux --sort=-%mem | head -20
# Monitor zombie creation in real-time (run for 30 seconds)
watch -n 1 'ps aux | awk '\''$8 ~ /^Z/ { count++ } END { print "Zombies:", count+0 }'\'''
# Check if it's a GitRepublic application issue
ps aux | grep -E 'node.*gitrepublic|gitrepublic.*node'
systemctl status | grep -i gitrepublic
```
## 3. Apply Security Updates
```bash
# Update package lists
apt update
# See what security updates are available
apt list --upgradable | grep -i security
# Apply security updates
apt upgrade -y
# Or apply all updates (after investigating zombies)
apt upgrade
```
## 4. System Health Check
```bash
# Check disk space
df -h
# Check memory usage
free -h
# Check system load
uptime
top -bn1 | head -20
# Check for failed services
systemctl list-units --type=service --state=failed
# Check system logs
journalctl -p err -n 50
```
## 5. Plan System Restart
```bash
# Check what requires restart
cat /var/run/reboot-required.pkgs 2>/dev/null || echo "No reboot required file found"
# Schedule maintenance window and restart
# (Only after fixing zombie issue)
# reboot
```
## 6. Plesk-Specific Checks
```bash
# Check Plesk services
plesk repair all -y
# Check Plesk logs
tail -100 /var/log/plesk/panel.log
# Check for Plesk-related zombie processes
ps aux | grep -i plesk | grep -i defunct
```
## Root Cause Identified ✅
**Problem**: Node.js GitRepublic process (PID 330225, `node build`) is spawning git processes that aren't being properly reaped, creating zombies.
**Evidence**:
- All zombie processes are `[git] <defunct>` children of the Node.js process
- Active git process: `git remote set-head remote-0 -a` (from `git-remote-sync.ts`)
- Git spawns subprocesses like `git-remote-https` that can become zombies if not properly waited for
**Code Fix**: Updated `src/lib/services/git/git-remote-sync.ts` to:
- Add timeout handling (30 minutes)
- Properly clean up processes on exit
- Handle signals correctly
- Prevent zombie processes
## ⚠ URGENT: Restart Service IMMEDIATELY ⚠
**Zombie count is increasing rapidly (3300 → 5940). Restart NOW to stop the bleeding.**
**Option 1: Restart the GitRepublic service (RECOMMENDED)**
```bash
# Find the service/container
docker ps | grep gitrepublic
# or
systemctl list-units | grep -i gitrepublic
# or find the process
ps aux | grep "node build" | grep -v grep
# RESTART IT NOW (this will clean up zombies temporarily)
docker restart <container-id>
# or
systemctl restart <service-name>
# or if running directly
kill -TERM <pid> # Let systemd/docker restart it
```
**After restart, monitor zombie count:**
```bash
watch -n 2 'ps aux | awk '\''$8 ~ /^Z/ { count++ } END { print "Zombies:", count+0 }'\'''
```
**If zombies continue to increase after restart:**
- The code fix needs to be deployed
- Check if there are other services spawning git processes
**Option 2: Kill and let it restart (if managed by systemd/docker)**
```bash
# Find the process
ps aux | grep "node build" | grep -v grep
# Kill it (systemd/docker will restart it)
kill -TERM 330225
# Wait a moment, then check if it restarted
ps aux | grep "node build" | grep -v grep
```
**Option 3: Clean up zombies manually (temporary fix)**
```bash
# This won't fix the root cause but will clean up existing zombies
# The zombies will come back until the code is fixed
# Note: You can't kill zombies directly, but killing the parent will clean them up
```
## Recommended Action Plan
1. **IMMEDIATE**: Restart GitRepublic service to clean up existing zombies
2. **URGENT**: Deploy the code fix (updated `git-remote-sync.ts`)
3. **HIGH PRIORITY**: Apply security updates (section 3)
4. **MONITOR**: Watch for zombie process count after restart
5. **MAINTENANCE WINDOW**: Schedule system restart after deploying fix
## Common Causes of Zombie Processes
- Process spawning children without proper signal handling
- Systemd service not properly configured
- Application bugs (especially Node.js, Python, or long-running processes)
- Resource exhaustion causing process management issues
- Plesk or web server processes not reaping children
## Git-Specific Zombie Issues
Since zombies are `[git]` processes, likely causes:
- **Git operations not being properly waited for** - parent process exits before git finishes
- **Git HTTP backend issues** - web server spawning git processes that aren't reaped
- **GitRepublic application** - Node.js app spawning git commands without proper signal handling
- **Plesk Git integration** - Plesk's git features not properly managing child processes
- **Git hooks** - hooks spawning processes that become zombies
### Quick Fixes to Try
```bash
# Restart web server (if using nginx/apache)
systemctl restart nginx
# or
systemctl restart apache2
# Restart GitRepublic application (if running as service)
systemctl restart gitrepublic-web
# or find and restart the Node.js process
ps aux | grep node | grep gitrepublic
# Then restart it
# Check git-http-backend processes
ps aux | grep git-http-backend
# Kill any stuck git processes (CAREFUL - only if safe)
# pkill -9 git # Only if you're sure no important operations are running
```

6
src/hooks.server.ts

@@ -7,6 +7,7 @@ import type { Handle } from '@sveltejs/kit';
import { error } from '@sveltejs/kit';
import { RepoPollingService } from './lib/services/nostr/repo-polling.js';
import { GIT_DOMAIN, DEFAULT_NOSTR_RELAYS } from './lib/config.js';
import { setRepoPollingService } from './lib/services/service-registry.js';
import { rateLimiter } from './lib/services/security/rate-limiter.js';
import { auditLogger } from './lib/services/security/audit-logger.js';
import logger from './lib/services/logger.js';
@@ -30,6 +31,9 @@ if (typeof process !== 'undefined') {
pollingService = new RepoPollingService(DEFAULT_NOSTR_RELAYS, repoRoot, domain);
// Register with service registry so it can be accessed from API endpoints
setRepoPollingService(pollingService);
// Start polling - the initial poll will complete asynchronously
// The local repos endpoint will skip cache for the first 10 seconds after startup
pollingService.start().then(() => {
@@ -110,7 +114,7 @@ export const handle: Handle = async ({ event, resolve }) => {
let rateLimitType = 'api';
if (url.pathname.startsWith('/api/git/')) {
rateLimitType = 'git';
} else if (url.pathname.startsWith('/api/repos/') && url.pathname.includes('/file')) {
} else if (url.pathname.startsWith('/api/repos/') && url.pathname.includes('/files')) {
rateLimitType = 'file';
} else if (url.pathname.startsWith('/api/search')) {
rateLimitType = 'search';

7
src/lib/components/PRDetail.svelte

@@ -175,7 +175,7 @@
try {
// Load diff for the commit
const response = await fetch(
`/api/repos/${npub}/${repo}/diff?from=${pr.commitId}^&to=${pr.commitId}`
`/api/repos/${npub}/${repo}/diffs?from=${pr.commitId}^&to=${pr.commitId}`
);
if (response.ok) {
const data = await response.json();
@@ -356,11 +356,10 @@
error = null;
try {
const response = await fetch(`/api/repos/${npub}/${repo}/prs`, {
const response = await fetch(`/api/repos/${npub}/${repo}/pull-requests/${pr.id}`, {
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
prId: pr.id,
prAuthor: pr.author,
status
})
@@ -397,7 +396,7 @@
error = null;
try {
const response = await fetch(`/api/repos/${npub}/${repo}/prs/${pr.id}/merge`, {
const response = await fetch(`/api/repos/${npub}/${repo}/pull-requests/${pr.id}/merge`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({

14
src/lib/config.ts

@@ -25,6 +25,20 @@ export const DEFAULT_NOSTR_RELAYS =
'wss://nostr.land',
];
/**
 * Fallback Nostr relays to use when primary relays fail
 * Can be overridden by NOSTR_FALLBACK_RELAYS env var (comma-separated list)
 * These relays are automatically used when primary relays are unavailable
 */
export const FALLBACK_NOSTR_RELAYS =
// The `typeof process` guard keeps this module importable in browser/SSR
// contexts where `process` is undefined; `?.` guards a missing `env` object.
typeof process !== 'undefined' && process.env?.NOSTR_FALLBACK_RELAYS
// Split the env var on commas, trim surrounding whitespace, and drop empty
// entries (tolerates stray/trailing commas).
? process.env.NOSTR_FALLBACK_RELAYS.split(',').map(r => r.trim()).filter(r => r.length > 0)
// Built-in defaults used when no env override is present.
: [
'wss://orly-relay.imwald.eu',
'wss://nostr.sovbit.host',
'wss://nostr21.com',
];
/**
* Nostr relays to use for searching for repositories, profiles, or other events
* Can be overridden by NOSTR_SEARCH_RELAYS env var (comma-separated list)

203
src/lib/services/nostr/nostr-client.ts

@@ -9,6 +9,7 @@ import { isNIP07Available, getPublicKeyWithNIP07, signEventWithNIP07 } from './n
import { SimplePool, type Filter } from 'nostr-tools';
import { KIND } from '../../types/nostr.js';
import { isParameterizedReplaceable } from '../../utils/nostr-event-utils.js';
import { FALLBACK_NOSTR_RELAYS } from '../../config.js';
// Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
@@ -236,36 +237,140 @@ export class NostrClient {
return this.fetchAndMergeFromRelays(filters, []);
}
/**
 * Sanitize a filter to ensure all values are valid
 * Removes invalid authors (non-strings, null, undefined, non-hex)
 * Ensures all array fields contain only valid strings
 *
 * NOTE(review): when every entry of a provided array field (authors/ids/kinds/
 * tags) is invalid, the field is omitted entirely; an absent field matches
 * everything, so this BROADENS the query rather than matching nothing —
 * confirm this fallback is intended.
 */
private sanitizeFilter(filter: NostrFilter): Filter {
const sanitized: Filter = {};
// Sanitize authors - must be array of valid hex pubkeys (64 chars)
if (filter.authors) {
const validAuthors = filter.authors
.filter((author): author is string =>
typeof author === 'string' &&
author.length === 64 &&
// NOTE(review): the /i flag accepts uppercase hex, but NIP-01 specifies
// lowercase and the value is passed through unnormalized — verify relays
// tolerate mixed case, or lowercase before sending.
/^[0-9a-f]{64}$/i.test(author)
);
if (validAuthors.length > 0) {
sanitized.authors = validAuthors;
}
}
// Sanitize ids - must be array of valid hex strings (64 chars)
if (filter.ids) {
const validIds = filter.ids
.filter((id): id is string =>
typeof id === 'string' &&
id.length === 64 &&
// Same case-insensitivity caveat as for authors above.
/^[0-9a-f]{64}$/i.test(id)
);
if (validIds.length > 0) {
sanitized.ids = validIds;
}
}
// Sanitize kinds - must be array of numbers
if (filter.kinds) {
const validKinds = filter.kinds.filter((kind): kind is number => typeof kind === 'number');
if (validKinds.length > 0) {
sanitized.kinds = validKinds;
}
}
// Sanitize tag filters - must be arrays of strings
// Only these single-letter tag queries are forwarded; any other tag filter
// present on the input is silently dropped.
const tagFields = ['#e', '#p', '#d', '#a', '#E', '#K', '#P', '#A', '#I'] as const;
for (const tagField of tagFields) {
const value = filter[tagField];
if (value) {
// Keep only non-empty strings; tag values are not hex-validated here
// (e.g. '#d' identifiers are arbitrary strings).
const validValues = value.filter((v): v is string => typeof v === 'string' && v.length > 0);
if (validValues.length > 0) {
sanitized[tagField] = validValues;
}
}
}
// Copy other valid fields
if (filter.since !== undefined && typeof filter.since === 'number') {
sanitized.since = filter.since;
}
if (filter.until !== undefined && typeof filter.until === 'number') {
sanitized.until = filter.until;
}
// Non-positive limits are dropped rather than forwarded.
if (filter.limit !== undefined && typeof filter.limit === 'number' && filter.limit > 0) {
sanitized.limit = filter.limit;
}
// NIP-50 search term is forwarded verbatim when it is a non-empty string.
if (filter.search && typeof filter.search === 'string') {
sanitized.search = filter.search;
}
return sanitized;
}
/**
* Fetch events from relays and merge with existing events
* Never deletes valid events, only appends/integrates new ones
* Automatically falls back to fallback relays if primary relays fail
*/
private async fetchAndMergeFromRelays(filters: NostrFilter[], existingEvents: NostrEvent[]): Promise<NostrEvent[]> {
const events: NostrEvent[] = [];
// Sanitize all filters before sending to relays
const sanitizedFilters = filters.map(f => this.sanitizeFilter(f));
// Use nostr-tools SimplePool to fetch from all relays in parallel
// SimplePool handles connection management, retries, and error handling automatically
try {
// querySync takes a single filter, so we query each filter and combine results
// Wrap each query individually to catch errors from individual relays
const queryPromises = filters.map(filter =>
this.pool.querySync(this.relays, filter as Filter, { maxWait: 8000 })
const queryPromises = sanitizedFilters.map(filter =>
this.pool.querySync(this.relays, filter, { maxWait: 8000 })
.catch(err => {
// Log individual relay errors but don't fail the entire request
logger.debug({ error: err, filter }, 'Individual relay query failed');
logger.debug({ error: err, filter, relays: this.relays }, 'Primary relay query failed, trying fallback');
return []; // Return empty array for failed queries
})
);
const results = await Promise.allSettled(queryPromises);
let hasResults = false;
for (const result of results) {
if (result.status === 'fulfilled') {
if (result.status === 'fulfilled' && result.value.length > 0) {
events.push(...result.value);
} else {
hasResults = true;
} else if (result.status === 'rejected') {
// Log rejected promises (shouldn't happen since we catch above, but just in case)
logger.debug({ error: result.reason }, 'Query promise rejected');
}
}
// If no results from primary relays and we have fallback relays, try them
if (!hasResults && events.length === 0 && FALLBACK_NOSTR_RELAYS.length > 0) {
logger.debug({ primaryRelays: this.relays, fallbackRelays: FALLBACK_NOSTR_RELAYS }, 'No results from primary relays, trying fallback relays');
try {
const fallbackPromises = sanitizedFilters.map(filter =>
this.pool.querySync(FALLBACK_NOSTR_RELAYS, filter, { maxWait: 8000 })
.catch(err => {
logger.debug({ error: err, filter }, 'Fallback relay query failed');
return [];
})
);
const fallbackResults = await Promise.allSettled(fallbackPromises);
for (const result of fallbackResults) {
if (result.status === 'fulfilled') {
events.push(...result.value);
}
}
if (events.length > 0) {
logger.info({ fallbackRelays: FALLBACK_NOSTR_RELAYS, eventCount: events.length }, 'Successfully fetched events from fallback relays');
}
} catch (fallbackErr) {
logger.debug({ error: fallbackErr }, 'Fallback relay query failed completely');
}
}
} catch (err) {
logger.debug({ error: err, filters }, 'Pool querySync failed');
// Continue with empty events - will use cached events
@ -509,10 +614,90 @@ export class NostrClient { @@ -509,10 +614,90 @@ export class NostrClient {
}
});
} else {
// If publish failed or timed out, mark all as failed
targetRelays.forEach(relay => {
failed.push({ relay, error: 'Publish failed or timed out' });
});
// If publish failed or timed out to primary relays, try fallback relays
if (FALLBACK_NOSTR_RELAYS.length > 0) {
logger.debug({ primaryRelays: targetRelays, fallbackRelays: FALLBACK_NOSTR_RELAYS, eventId: event.id }, 'Primary relay publish failed, trying fallback relays');
try {
const fallbackPublishPromise = new Promise<string[]>((resolve, reject) => {
const timeout = setTimeout(() => {
reject(new Error('Fallback publish timeout after 30 seconds'));
}, 30000);
try {
const fallbackPublishPromises = this.pool.publish(FALLBACK_NOSTR_RELAYS, event);
Promise.all(fallbackPublishPromises)
.then((results) => {
clearTimeout(timeout);
resolve(results);
})
.catch((error: unknown) => {
clearTimeout(timeout);
const errorMessage = error instanceof Error ? error.message : String(error);
if (errorMessage.includes('restricted') ||
errorMessage.includes('Pay on') ||
errorMessage.includes('payment required') ||
errorMessage.includes('rate limit')) {
logger.debug({ error: errorMessage, eventId: event.id }, 'Fallback relay restriction encountered');
resolve([]);
} else {
reject(error);
}
});
} catch (syncError) {
clearTimeout(timeout);
reject(syncError);
}
});
const fallbackPublishedRelays: string[] = await Promise.race([
fallbackPublishPromise,
new Promise<string[]>((_, reject) =>
setTimeout(() => reject(new Error('Fallback publish timeout')), 30000)
)
]).catch((error: unknown): string[] => {
logger.debug({ error: error instanceof Error ? error.message : String(error), eventId: event.id }, 'Error publishing to fallback relays');
return [];
});
if (fallbackPublishedRelays && fallbackPublishedRelays.length > 0) {
success.push(...fallbackPublishedRelays);
logger.info({ fallbackRelays: FALLBACK_NOSTR_RELAYS, publishedCount: fallbackPublishedRelays.length, eventId: event.id }, 'Successfully published to fallback relays');
// Mark primary relays as failed
targetRelays.forEach(relay => {
failed.push({ relay, error: 'Primary relay failed, used fallback' });
});
// Mark fallback relays not in success as failed
FALLBACK_NOSTR_RELAYS.forEach(relay => {
if (!fallbackPublishedRelays.includes(relay)) {
failed.push({ relay, error: 'Fallback relay did not accept event' });
}
});
} else {
// Both primary and fallback failed
targetRelays.forEach(relay => {
failed.push({ relay, error: 'Publish failed or timed out' });
});
FALLBACK_NOSTR_RELAYS.forEach(relay => {
failed.push({ relay, error: 'Fallback relay publish failed or timed out' });
});
}
} catch (fallbackError) {
logger.debug({ error: fallbackError, eventId: event.id }, 'Fallback relay publish failed completely');
// Mark all relays as failed
targetRelays.forEach(relay => {
failed.push({ relay, error: 'Publish failed or timed out' });
});
FALLBACK_NOSTR_RELAYS.forEach(relay => {
failed.push({ relay, error: 'Fallback relay publish failed' });
});
}
} else {
// No fallback relays available, mark all primary relays as failed
targetRelays.forEach(relay => {
failed.push({ relay, error: 'Publish failed or timed out' });
});
}
}
} catch (error) {
// Catch any synchronous errors

39
src/lib/services/nostr/repo-polling.ts

@ -84,6 +84,14 @@ export class RepoPollingService { @@ -84,6 +84,14 @@ export class RepoPollingService {
}
}
/**
 * Force an immediate poll cycle, e.g. right after a user has been verified.
 * Resolves once the poll run completes.
 */
async triggerPoll(): Promise<void> {
  logger.info('Manual poll triggered');
  await this.poll();
}
/**
* Poll for new repo announcements and provision repos
*/
@ -106,9 +114,23 @@ export class RepoPollingService { @@ -106,9 +114,23 @@ export class RepoPollingService {
}
const cloneUrls = this.extractCloneUrls(event);
return cloneUrls.some(url => url.includes(this.domain));
const listsDomain = cloneUrls.some(url => url.includes(this.domain));
if (listsDomain) {
logger.debug({
eventId: event.id,
pubkey: event.pubkey.slice(0, 16) + '...',
cloneUrls: cloneUrls.slice(0, 3) // Log first 3 URLs
}, 'Found repo announcement that lists this domain');
}
return listsDomain;
});
logger.info({
totalEvents: events.length,
relevantEvents: relevantEvents.length,
domain: this.domain
}, 'Filtered repo announcements');
// Provision each repo
for (const event of relevantEvents) {
try {
@ -201,11 +223,22 @@ export class RepoPollingService { @@ -201,11 +223,22 @@ export class RepoPollingService {
if (!isExistingRepo) {
const userLevel = getCachedUserLevel(event.pubkey);
const { hasUnlimitedAccess } = await import('../../utils/user-access.js');
if (!hasUnlimitedAccess(userLevel?.level)) {
const hasAccess = hasUnlimitedAccess(userLevel?.level);
logger.debug({
eventId: event.id,
pubkey: event.pubkey.slice(0, 16) + '...',
cachedLevel: userLevel?.level || 'none',
hasAccess,
isExistingRepo
}, 'Checking user access for repo provisioning');
if (!hasAccess) {
logger.warn({
eventId: event.id,
pubkey: event.pubkey.slice(0, 16) + '...',
level: userLevel?.level || 'none'
level: userLevel?.level || 'none',
cacheExists: !!userLevel
}, 'Skipping repo provisioning: user does not have unlimited access');
continue;
}

19
src/lib/services/service-registry.ts

@ -15,7 +15,8 @@ import { ForkCountService } from './nostr/fork-count-service.js'; @@ -15,7 +15,8 @@ import { ForkCountService } from './nostr/fork-count-service.js';
import { PRsService } from './nostr/prs-service.js';
import { HighlightsService } from './nostr/highlights-service.js';
import { ReleasesService } from './nostr/releases-service.js';
import { DEFAULT_NOSTR_RELAYS, DEFAULT_NOSTR_SEARCH_RELAYS } from '../config.js';
import { RepoPollingService } from './nostr/repo-polling.js';
import { DEFAULT_NOSTR_RELAYS, DEFAULT_NOSTR_SEARCH_RELAYS, GIT_DOMAIN } from '../config.js';
// Get repo root from environment or use default
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
@ -35,6 +36,7 @@ let _forkCountService: ForkCountService | null = null; @@ -35,6 +36,7 @@ let _forkCountService: ForkCountService | null = null;
let _prsService: PRsService | null = null;
let _highlightsService: HighlightsService | null = null;
let _releasesService: ReleasesService | null = null;
let _repoPollingService: RepoPollingService | null = null;
/**
* Get singleton FileManager instance
@ -156,6 +158,21 @@ export function getReleasesService(): ReleasesService { @@ -156,6 +158,21 @@ export function getReleasesService(): ReleasesService {
return _releasesService;
}
/**
 * Get the singleton RepoPollingService instance, if one has been registered.
 *
 * The service is created and registered on startup (in hooks.server.ts) via
 * setRepoPollingService(); until that happens this returns null, so callers
 * must handle the null case.
 */
export function getRepoPollingService(): RepoPollingService | null {
  return _repoPollingService;
}
/**
 * Register the RepoPollingService singleton (called from hooks.server.ts on
 * startup). Subsequent calls replace the previously stored instance.
 */
export function setRepoPollingService(service: RepoPollingService): void {
  _repoPollingService = service;
}
// Convenience exports for direct access (common pattern)
export const fileManager = getFileManager();
export const repoManager = getRepoManager();

30
src/lib/utils/repo-poll-trigger.ts

@ -0,0 +1,30 @@ @@ -0,0 +1,30 @@
/**
* Shared utility for triggering repo polls
* This provides a consistent interface for triggering polls from anywhere in the codebase
*/
import { getRepoPollingService } from '../services/service-registry.js';
import logger from '../services/logger.js';
/**
* Trigger a repo poll
* This is the single source of truth for triggering polls
* @param context Optional context string for logging (e.g., 'user-verification', 'manual-refresh')
* @returns Promise that resolves when poll is triggered (not when it completes)
*/
export async function triggerRepoPoll(context?: string): Promise<void> {
const pollingService = getRepoPollingService();
if (!pollingService) {
logger.warn({ context }, 'Poll request received but polling service not initialized');
throw new Error('Polling service not available');
}
// Trigger poll asynchronously (non-blocking)
// The poll will complete in the background
pollingService.triggerPoll().catch((err) => {
logger.error({ error: err, context }, 'Failed to trigger poll');
});
logger.info({ context }, 'Repo poll triggered');
}

22
src/routes/+layout.svelte

@ -2,7 +2,7 @@ @@ -2,7 +2,7 @@
import '../app.css';
import { onMount, onDestroy, setContext } from 'svelte';
import { page } from '$app/stores';
import { goto } from '$app/navigation';
import { goto, beforeNavigate } from '$app/navigation';
import Footer from '$lib/components/Footer.svelte';
import NavBar from '$lib/components/NavBar.svelte';
import TransferNotification from '$lib/components/TransferNotification.svelte';
@ -447,6 +447,26 @@ @@ -447,6 +447,26 @@
}
});
// Intercept navigation to .md files and redirect to /docs/ route
beforeNavigate((navigation) => {
  if (!navigation.to || typeof window === 'undefined') return;
  // NavigationTarget can be a URL or a route object, get the pathname
  const toUrl = navigation.to instanceof URL ? navigation.to : new URL(navigation.to.url, 'http://localhost');
  const path = toUrl.pathname;
  // Check if path ends with .md and doesn't already start with /docs/
  if (path.endsWith('.md') && !path.startsWith('/docs/')) {
    // Extract filename without .md extension
    const filename = path.replace(/\.md$/, '').replace(/^\//, '');
    // Security: Only allow alphanumeric, hyphens, underscores
    if (/^[a-zA-Z0-9_-]+$/.test(filename)) {
      navigation.cancel();
      // Bug fix: interpolate the sanitized filename into the docs route —
      // previously a literal, non-interpolated string was passed to goto().
      goto(`/docs/${filename}`, { replaceState: true });
    }
  }
});
</script>
{#if !isSplashPage}

329
src/routes/api/code-search/+server.ts

@ -1,329 +0,0 @@ @@ -1,329 +0,0 @@
/**
* API endpoint for global code search across all repositories
* Searches file contents across multiple repositories
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { handleValidationError } from '$lib/utils/error-handler.js';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { KIND } from '$lib/types/nostr.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache } from '$lib/utils/nostr-utils.js';
import logger from '$lib/services/logger.js';
import { readdir, stat } from 'fs/promises';
import { join } from 'path';
import { existsSync } from 'fs';
import { simpleGit } from 'simple-git';
import { fileManager } from '$lib/services/service-registry.js';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
/** A single match produced by the global code search. */
export interface GlobalCodeSearchResult {
  repo: string;    // repository name (without the .git suffix)
  npub: string;    // owner directory name under repoRoot (npub or hex pubkey)
  file: string;    // file path relative to the repository root
  line: number;    // 1-based line number of the match
  content: string; // trimmed content of the matching line
  branch: string;  // branch that was searched ('HEAD' when unresolved)
}
/**
 * GET /api/code-search?q=...&repo=npub/name&limit=N
 *
 * Global code search across repositories on disk. When `repo` is supplied,
 * only that repository is searched; otherwise every repository under
 * repoRoot is scanned, skipping private repos the requester cannot view.
 *
 * @returns JSON array of GlobalCodeSearchResult, capped at `limit`
 * @throws validation error when the query is missing or shorter than 2 chars
 */
export const GET: RequestHandler = async (event) => {
  const query = event.url.searchParams.get('q');
  const repoFilter = event.url.searchParams.get('repo'); // Optional: filter by specific repo (npub/repo format)
  // Robustness fix: parseInt can return NaN (e.g. ?limit=abc), which made
  // `results.slice(0, NaN)` return [] after searching everything. Fall back
  // to the default and reject non-positive values.
  const parsedLimit = parseInt(event.url.searchParams.get('limit') || '100', 10);
  const limit = Number.isFinite(parsedLimit) && parsedLimit > 0 ? parsedLimit : 100;
  if (!query || query.trim().length < 2) {
    throw handleValidationError('Query must be at least 2 characters', { operation: 'globalCodeSearch' });
  }
  const requestContext = extractRequestContext(event);
  const results: GlobalCodeSearchResult[] = [];
  try {
    // If repo filter is specified, search only that repo
    if (repoFilter) {
      const [npub, repo] = repoFilter.split('/');
      if (npub && repo) {
        const repoPath = join(repoRoot, npub, `${repo}.git`);
        if (existsSync(repoPath)) {
          const repoResults = await searchInRepo(npub, repo, query, limit);
          results.push(...repoResults);
        }
      }
      return json(results);
    }
    // Search across all repositories, discovered from the filesystem layout
    // repoRoot/<user>/<repo>.git
    if (!existsSync(repoRoot)) {
      return json([]);
    }
    const users = await readdir(repoRoot);
    for (const user of users) {
      const userPath = join(repoRoot, user);
      const userStat = await stat(userPath);
      if (!userStat.isDirectory()) {
        continue;
      }
      const repos = await readdir(userPath);
      for (const repo of repos) {
        if (!repo.endsWith('.git')) {
          continue;
        }
        const repoName = repo.replace(/\.git$/, '');
        const repoPath = join(userPath, repo);
        const repoStat = await stat(repoPath);
        if (!repoStat.isDirectory()) {
          continue;
        }
        // Check access for private repos; on any error, skip rather than leak
        try {
          const { MaintainerService } = await import('$lib/services/nostr/maintainer-service.js');
          const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
          // Decode npub to hex (directory names may be npub or already hex)
          const { nip19 } = await import('nostr-tools');
          let repoOwnerPubkey: string;
          try {
            const decoded = nip19.decode(user);
            if (decoded.type === 'npub') {
              repoOwnerPubkey = decoded.data as string;
            } else {
              repoOwnerPubkey = user; // Assume it's already hex
            }
          } catch {
            repoOwnerPubkey = user; // Assume it's already hex
          }
          const canView = await maintainerService.canView(
            requestContext.userPubkeyHex || null,
            repoOwnerPubkey,
            repoName
          );
          if (!canView) {
            continue; // Skip private repos user can't access
          }
        } catch (accessErr) {
          logger.debug({ error: accessErr, user, repo: repoName }, 'Error checking access, skipping repo');
          continue;
        }
        // Search in this repo; a failing repo should not abort the whole scan
        try {
          const repoResults = await searchInRepo(user, repoName, query, limit - results.length);
          results.push(...repoResults);
          if (results.length >= limit) {
            break;
          }
        } catch (searchErr) {
          logger.debug({ error: searchErr, user, repo: repoName }, 'Error searching repo, continuing');
          continue;
        }
      }
      if (results.length >= limit) {
        break;
      }
    }
    return json(results.slice(0, limit));
  } catch (err) {
    logger.error({ error: err, query }, 'Error performing global code search');
    throw err;
  }
};
/**
 * Run `git grep` for `query` inside a single repository and return up to
 * `limit` matches.
 *
 * Strategy: resolve a branch name, then prefer searching a checked-out
 * worktree (via fileManager.getWorktree); when no worktree can be created,
 * fall back to `git grep <treeRef>` directly against the bare repository.
 *
 * @param npub  Repo owner directory name under repoRoot (npub or hex)
 * @param repo  Repository name without the .git suffix
 * @param query Search string passed to git grep
 * @param limit Maximum number of matches to collect
 * @returns Matches with file path, 1-based line number, and trimmed content;
 *          empty array when the repo is missing or the search fails
 */
async function searchInRepo(
  npub: string,
  repo: string,
  query: string,
  limit: number
): Promise<GlobalCodeSearchResult[]> {
  const repoPath = join(repoRoot, npub, `${repo}.git`);
  if (!existsSync(repoPath)) {
    return [];
  }
  const results: GlobalCodeSearchResult[] = [];
  const git = simpleGit(repoPath);
  try {
    // Get default branch
    let branch = 'HEAD';
    try {
      const branches = await git.branchLocal();
      branch = branches.current || 'HEAD';
      // If no current branch, try common defaults (main, master, first found)
      if (!branch || branch === 'HEAD') {
        const allBranches = branches.all.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, ''));
        branch = allBranches.find(b => b === 'main') || allBranches.find(b => b === 'master') || allBranches[0] || 'main';
      }
    } catch {
      branch = 'main';
    }
    // For bare repositories, we need to use a worktree or search the index
    let worktreePath: string | null = null;
    try {
      // Get the actual branch name (resolve HEAD if needed)
      let actualBranch = branch;
      if (branch === 'HEAD') {
        actualBranch = 'main';
      }
      // Get or create worktree
      worktreePath = await fileManager.getWorktree(repoPath, actualBranch, npub, repo);
    } catch (worktreeError) {
      logger.debug({ error: worktreeError, npub, repo, branch }, 'Could not create worktree, trying git grep with tree reference');
      // Fall back to searching the index
    }
    const searchQuery = query.trim();
    // If we have a worktree, search in the worktree
    if (worktreePath && existsSync(worktreePath)) {
      try {
        const worktreeGit = simpleGit(worktreePath);
        // -n: line numbers, -I: skip binaries, --break/--heading: group
        // matches under a filename heading so the parser below can track
        // which file the subsequent "line:content" rows belong to.
        const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery];
        const grepOutput = await worktreeGit.raw(gitArgs);
        if (!grepOutput || !grepOutput.trim()) {
          return [];
        }
        // Parse git grep output
        const lines = grepOutput.split('\n');
        let currentFile = '';
        for (const line of lines) {
          if (!line.trim()) {
            continue;
          }
          // Check if this is a filename (no colon)
          // NOTE(review): a filename containing ':' would be misparsed as a
          // match row here — acceptable for typical repos, but worth knowing.
          if (!line.includes(':')) {
            currentFile = line.trim();
            continue;
          }
          // Parse line:content format
          const colonIndex = line.indexOf(':');
          if (colonIndex > 0 && currentFile) {
            const lineNumber = parseInt(line.substring(0, colonIndex), 10);
            const content = line.substring(colonIndex + 1);
            if (!isNaN(lineNumber) && content) {
              // Make file path relative to repo root
              const relativeFile = currentFile.replace(worktreePath + '/', '').replace(/^\.\//, '');
              results.push({
                repo,
                npub,
                file: relativeFile,
                line: lineNumber,
                content: content.trim(),
                branch: branch === 'HEAD' ? 'HEAD' : branch
              });
              if (results.length >= limit) {
                break;
              }
            }
          }
        }
      } catch (grepError: any) {
        // git grep returns exit code 1 when no matches found
        if (grepError.message && grepError.message.includes('exit code 1')) {
          return [];
        }
        throw grepError;
      }
    } else {
      // Fallback: search in the index using git grep with tree reference
      try {
        // Get the tree for the branch
        let treeRef = branch;
        if (branch === 'HEAD') {
          try {
            const branchInfo = await git.branch(['-a']);
            treeRef = branchInfo.current || 'HEAD';
          } catch {
            treeRef = 'HEAD';
          }
        }
        // Use git grep with tree reference for bare repos
        const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery, treeRef];
        const grepOutput = await git.raw(gitArgs);
        if (!grepOutput || !grepOutput.trim()) {
          return [];
        }
        // Parse git grep output (same heading-then-matches format as above)
        const lines = grepOutput.split('\n');
        let currentFile = '';
        for (const line of lines) {
          if (!line.trim()) {
            continue;
          }
          // Check if this is a filename (no colon)
          if (!line.includes(':')) {
            currentFile = line.trim();
            continue;
          }
          // Parse line:content format
          const colonIndex = line.indexOf(':');
          if (colonIndex > 0 && currentFile) {
            const lineNumber = parseInt(line.substring(0, colonIndex), 10);
            const content = line.substring(colonIndex + 1);
            if (!isNaN(lineNumber) && content) {
              results.push({
                repo,
                npub,
                file: currentFile,
                line: lineNumber,
                content: content.trim(),
                branch: branch === 'HEAD' ? 'HEAD' : branch
              });
              if (results.length >= limit) {
                break;
              }
            }
          }
        }
      } catch (grepError: any) {
        // git grep returns exit code 1 when no matches found
        if (grepError.message && grepError.message.includes('exit code 1')) {
          return [];
        }
        throw grepError;
      }
    }
  } catch (err) {
    logger.debug({ error: err, npub, repo, query }, 'Error searching in repo');
    return [];
  }
  return results;
}

2291
src/routes/api/openapi.json/openapi.json

File diff suppressed because it is too large Load Diff

503
src/routes/api/repos/[npub]/[repo]/+server.ts

@ -0,0 +1,503 @@ @@ -0,0 +1,503 @@
/**
* RESTful Repository Resource Endpoint
*
* GET /api/repos/{npub}/{repo} # Get repository info (settings, metadata, access, verification)
* PUT /api/repos/{npub}/{repo} # Update repository (replace)
* PATCH /api/repos/{npub}/{repo} # Partial update (settings, description, etc.)
* DELETE /api/repos/{npub}/{repo} # Delete repository
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError, handleApiError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { nostrClient, maintainerService } from '$lib/services/service-registry.js';
import { getVisibility, getProjectRelays } from '$lib/utils/repo-visibility.js';
import { KIND } from '$lib/types/nostr.js';
import { nip19 } from 'nostr-tools';
import { getPublicKeyWithNIP07, signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js';
import { DEFAULT_NOSTR_RELAYS, combineRelays } from '$lib/config.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js';
import logger from '$lib/services/logger.js';
import { rm } from 'fs/promises';
import { join, resolve } from 'path';
import { existsSync } from 'fs';
import { auditLogger } from '$lib/services/security/audit-logger.js';
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js';
import { verifyRepositoryOwnership } from '$lib/services/nostr/repo-verification.js';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
// Admin pubkeys (can be set via environment variable)
const ADMIN_PUBKEYS = (typeof process !== 'undefined' && process.env?.ADMIN_PUBKEYS
? process.env.ADMIN_PUBKEYS.split(',').map(p => p.trim()).filter(p => p.length > 0)
: []) as string[];
function isAdmin(userPubkeyHex: string | null): boolean {
if (!userPubkeyHex) return false;
return ADMIN_PUBKEYS.some(adminPubkey => {
try {
const decoded = nip19.decode(adminPubkey);
if (decoded.type === 'npub') {
return decoded.data === userPubkeyHex;
}
} catch {
// Not an npub, compare as hex
}
return adminPubkey.toLowerCase() === userPubkeyHex.toLowerCase();
});
}
/**
 * True when the requesting user's hex pubkey matches the repository owner's
 * pubkey (case-insensitive comparison).
 */
function isOwner(userPubkeyHex: string | null, repoOwnerPubkey: string): boolean {
  if (!userPubkeyHex) {
    return false;
  }
  const requester = userPubkeyHex.toLowerCase();
  const owner = repoOwnerPubkey.toLowerCase();
  return requester === owner;
}
/**
* GET: Get repository info
* Query params: ?include=settings,maintainers,access,verification
*/
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
const url = new URL(event.request.url);
const include = url.searchParams.get('include')?.split(',') || ['settings', 'access'];
// Fetch repository announcement
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, context.repo);
const result: any = {
npub: context.npub,
repo: context.repo,
owner: context.npub
};
// Include settings
if (include.includes('settings') || include.includes('all')) {
if (announcement) {
result.description = announcement.tags.find(t => t[0] === 'description')?.[1] || '';
result.visibility = getVisibility(announcement);
result.projectRelays = getProjectRelays(announcement);
result.private = result.visibility === 'restricted' || result.visibility === 'private';
} else {
result.description = '';
result.visibility = 'public';
result.projectRelays = [];
result.private = false;
}
}
// Include maintainers
if (include.includes('maintainers') || include.includes('all')) {
const { maintainers, owner } = await maintainerService.getMaintainers(
context.repoOwnerPubkey,
context.repo
);
result.maintainers = maintainers.map(p => nip19.npubEncode(p));
result.owner = nip19.npubEncode(owner);
if (context.userPubkeyHex) {
result.isMaintainer = maintainers.includes(context.userPubkeyHex);
result.isOwner = context.userPubkeyHex === owner;
}
}
// Include access
if (include.includes('access') || include.includes('all')) {
const { isPrivate, maintainers, owner } = await maintainerService.getMaintainers(
context.repoOwnerPubkey,
context.repo
);
const canView = await maintainerService.canView(
context.userPubkeyHex || null,
context.repoOwnerPubkey,
context.repo
);
result.access = {
canView,
isPrivate,
isMaintainer: context.userPubkeyHex ? maintainers.includes(context.userPubkeyHex) : false,
isOwner: context.userPubkeyHex ? context.userPubkeyHex === owner : false
};
}
// Include verification
if (include.includes('verification') || include.includes('all')) {
// Simplified verification check - full verification is in /verification endpoint
const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
result.verification = {
exists: existsSync(repoPath),
announcementFound: !!announcement
};
}
return json(result);
},
{ operation: 'getRepo', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * PUT: Replace repository metadata (full update — unspecified fields are reset).
 * PATCH: Partial update (unspecified fields keep their current values).
 * Both delegate to updateRepository(), differing only in the full/partial flag.
 */
export const PUT: RequestHandler = createRepoPostHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    return updateRepository(context, event, true); // full update
  },
  { operation: 'updateRepo', requireRepoExists: false }
);
export const PATCH: RequestHandler = createRepoPostHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    return updateRepository(context, event, false); // partial update
  },
  { operation: 'updateRepo', requireRepoExists: false }
);
/**
 * Shared implementation for PUT (full) and PATCH (partial) repository updates.
 *
 * Rebuilds the repo announcement's tags from the request body, signs the
 * updated event, publishes it to the appropriate relays, and persists it
 * into the repository on disk.
 *
 * @param context      Resolved repo request context (npub/repo/owner pubkey)
 * @param event        Incoming request event carrying a JSON body
 * @param isFullUpdate true for PUT semantics (start from empty tags),
 *                     false for PATCH (start from the existing tags)
 * @throws validation errors for bad JSON, missing announcement, bad pubkey,
 *         invalid visibility, or missing project-relays; 403 for non-maintainers
 */
async function updateRepository(
  context: RepoRequestContext,
  event: RequestEvent,
  isFullUpdate: boolean
) {
  let body: {
    description?: string;
    visibility?: string;
    projectRelays?: string[];
    private?: boolean;
    branchProtection?: any;
  };
  try {
    body = await event.request.json();
  } catch {
    throw handleValidationError('Invalid JSON in request body', {
      operation: 'updateRepo',
      npub: context.npub,
      repo: context.repo
    });
  }
  // Fetch current announcement — the update is expressed as a new event based on it
  const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
  const announcement = findRepoAnnouncement(allEvents, context.repo);
  if (!announcement) {
    throw handleValidationError('Repository announcement not found', {
      operation: 'updateRepo',
      npub: context.npub,
      repo: context.repo
    });
  }
  // Get user's pubkey (required for signing)
  // NOTE(review): getPublicKeyWithNIP07/signEventWithNIP07 look like NIP-07
  // (browser extension) signer calls; confirm they are backed by a
  // server-compatible signer when invoked from this server route.
  const userPubkey = await getPublicKeyWithNIP07();
  let userPubkeyHex: string;
  if (typeof userPubkey === 'string' && userPubkey.length === 64) {
    // Already hex-encoded
    userPubkeyHex = userPubkey;
  } else {
    // Otherwise expect an npub and decode it to hex
    const decoded = nip19.decode(userPubkey) as { type: string; data: unknown };
    if (decoded.type === 'npub' && typeof decoded.data === 'string') {
      userPubkeyHex = decoded.data;
    } else {
      throw handleValidationError('Invalid user pubkey format', { operation: 'updateRepo', npub: context.npub, repo: context.repo });
    }
  }
  // Verify user is maintainer
  const isMaintainer = await maintainerService.isMaintainer(userPubkeyHex, context.repoOwnerPubkey, context.repo);
  if (!isMaintainer) {
    return error(403, 'Only maintainers can update repository');
  }
  // Build updated tags: PUT starts from scratch, PATCH from the existing tags
  const tags: string[][] = isFullUpdate ? [] : [...announcement.tags];
  // Update description
  if (body.description !== undefined || isFullUpdate) {
    const descIndex = tags.findIndex(t => t[0] === 'description');
    const descValue = body.description !== undefined ? body.description : (isFullUpdate ? '' : announcement.tags.find(t => t[0] === 'description')?.[1] || '');
    if (descIndex >= 0) {
      tags[descIndex] = ['description', descValue];
    } else if (descValue) {
      // Only add the tag when the description is non-empty
      tags.push(['description', descValue]);
    }
  }
  // Resolve new visibility: explicit `visibility` field wins, then the
  // legacy `private` boolean, then (for PUT) the default of 'public'
  let newVisibility: 'public' | 'unlisted' | 'restricted' | 'private' = getVisibility(announcement);
  if (body.visibility !== undefined) {
    const vis = body.visibility.toLowerCase();
    if (['public', 'unlisted', 'restricted', 'private'].includes(vis)) {
      newVisibility = vis as typeof newVisibility;
    } else {
      throw handleValidationError(`Invalid visibility: ${body.visibility}. Must be one of: public, unlisted, restricted, private`,
        { operation: 'updateRepo', npub: context.npub, repo: context.repo });
    }
  } else if (body.private !== undefined) {
    newVisibility = body.private ? 'restricted' : 'public';
  } else if (isFullUpdate) {
    newVisibility = 'public';
  }
  // Update visibility tag — 'public' is represented by the absence of the tag
  const visIndex = tags.findIndex(t => t[0] === 'visibility');
  if (newVisibility === 'public') {
    if (visIndex >= 0) {
      tags.splice(visIndex, 1);
    }
  } else {
    if (visIndex >= 0) {
      tags[visIndex] = ['visibility', newVisibility];
    } else {
      tags.push(['visibility', newVisibility]);
    }
  }
  // Update project-relay tags
  if (body.projectRelays !== undefined || isFullUpdate) {
    // Remove existing project-relay tags (splice in reverse index order so
    // removals do not shift the remaining positions)
    const projectRelayIndices: number[] = [];
    tags.forEach((tag, index) => {
      if (tag[0] === 'project-relay') {
        projectRelayIndices.push(index);
      }
    });
    for (let i = projectRelayIndices.length - 1; i >= 0; i--) {
      tags.splice(projectRelayIndices[i], 1);
    }
    // Add new project-relay tags (websocket URLs only)
    const relays = body.projectRelays || (isFullUpdate ? [] : getProjectRelays(announcement));
    for (const relay of relays) {
      if (relay && (relay.startsWith('ws://') || relay.startsWith('wss://'))) {
        tags.push(['project-relay', relay]);
      }
    }
  }
  // Validate: unlisted/restricted require at least one project-relay
  if ((newVisibility === 'unlisted' || newVisibility === 'restricted')) {
    const hasProjectRelay = tags.some(t => t[0] === 'project-relay');
    if (!hasProjectRelay) {
      throw handleValidationError(
        `Visibility '${newVisibility}' requires at least one project-relay. Please provide project-relays in the request.`,
        { operation: 'updateRepo', npub: context.npub, repo: context.repo }
      );
    }
  }
  // Preserve essential tags
  if (!isFullUpdate) {
    // Keep d-tag, name, clone tags, etc., carrying over any values not
    // already present in the rebuilt tag list
    const essentialTags = ['d', 'name', 'clone'];
    essentialTags.forEach(tagName => {
      announcement.tags.forEach(tag => {
        if (tag[0] === tagName && !tags.some(t => t[0] === tagName && t[1] === tag[1])) {
          tags.push(tag);
        }
      });
    });
  } else {
    // For full update, we need d-tag at minimum (it identifies the
    // replaceable announcement event)
    const dTag = announcement.tags.find(t => t[0] === 'd');
    if (dTag) {
      tags.unshift(dTag);
    }
  }
  // Remove old private tag if present (superseded by the visibility tag)
  const privateIndex = tags.findIndex(t => (t[0] === 'private' && t[1] === 'true') || (t[0] === 't' && t[1] === 'private'));
  if (privateIndex >= 0) {
    tags.splice(privateIndex, 1);
  }
  // Create updated event
  const updatedEvent = {
    kind: KIND.REPO_ANNOUNCEMENT,
    pubkey: userPubkeyHex,
    created_at: Math.floor(Date.now() / 1000),
    content: announcement.content || '',
    tags
  };
  // Sign with NIP-07
  const signedEvent = await signEventWithNIP07(updatedEvent);
  // Get user's relays for publishing — prefer outbox, then inbox, then defaults
  const allSearchRelays = Array.from(new Set([...DEFAULT_NOSTR_SEARCH_RELAYS, ...DEFAULT_NOSTR_RELAYS]));
  const fullRelayClient = new NostrClient(allSearchRelays);
  let userRelays: string[] = [];
  try {
    const { inbox, outbox } = await getUserRelays(userPubkeyHex, fullRelayClient);
    if (outbox.length > 0) {
      userRelays = combineRelays(outbox, DEFAULT_NOSTR_RELAYS);
    } else if (inbox.length > 0) {
      userRelays = combineRelays(inbox, DEFAULT_NOSTR_RELAYS);
    } else {
      userRelays = DEFAULT_NOSTR_RELAYS;
    }
  } catch (err) {
    logger.warn({ error: err }, 'Failed to fetch user relays, using defaults');
    userRelays = DEFAULT_NOSTR_RELAYS;
  }
  // Determine which relays to publish to based on visibility (an empty list
  // means the event is not broadcast at all)
  const { getRelaysForEventPublishing } = await import('$lib/utils/repo-visibility.js');
  const visibilityRelays = getRelaysForEventPublishing(signedEvent);
  const relaysToPublish = visibilityRelays.length > 0 ? combineRelays([...visibilityRelays, ...userRelays]) : [];
  // Publish to relays (if not private)
  if (relaysToPublish.length > 0) {
    const publishResult = await nostrClient.publishEvent(signedEvent, relaysToPublish);
    if (publishResult.failed.length > 0 && publishResult.success.length === 0) {
      logger.warn({ npub: context.npub, repo: context.repo }, 'Failed to publish update to all relays');
    }
  }
  // Save the signed announcement into the repository on disk (best effort —
  // a failure here is logged but does not fail the request)
  const { AnnouncementManager } = await import('$lib/services/git/announcement-manager.js');
  const repoPath = `${repoRoot}/${context.npub}/${context.repo}.git`;
  const announcementManager = new AnnouncementManager(repoRoot);
  try {
    await announcementManager.ensureAnnouncementInRepo(repoPath, signedEvent);
  } catch (err) {
    logger.error({ error: err, npub: context.npub, repo: context.repo }, 'Failed to save update to repository');
  }
  // Return updated repository
  return json({
    npub: context.npub,
    repo: context.repo,
    owner: context.npub,
    description: body.description !== undefined ? body.description : (announcement.tags.find(t => t[0] === 'description')?.[1] || ''),
    visibility: newVisibility,
    projectRelays: body.projectRelays !== undefined ? body.projectRelays : getProjectRelays(announcement),
    private: newVisibility === 'restricted' || newVisibility === 'private'
  });
}
/**
 * DELETE: Delete repository
 *
 * Permanently removes the bare repository from disk. Only the repository
 * owner or a server admin may do this; every outcome (denied, failure,
 * success) is recorded in the audit log.
 */
export const DELETE: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const { npub, repo, repoOwnerPubkey, userPubkeyHex, clientIp } = context;

    // Helper so every negative audit entry carries identical action/resource fields.
    const auditReject = (result: 'denied' | 'failure', errMsg: string) => {
      auditLogger.log({
        user: userPubkeyHex || undefined,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result,
        error: errMsg
      });
    };

    // Unauthenticated callers are rejected outright.
    if (!userPubkeyHex) {
      auditReject('denied', 'Authentication required');
      return handleAuthorizationError('Authentication required to delete repositories');
    }

    // Must be the repository owner or a server admin.
    const ownerMatch = isOwner(userPubkeyHex, repoOwnerPubkey);
    const adminMatch = isAdmin(userPubkeyHex);
    if (!ownerMatch && !adminMatch) {
      auditReject('denied', 'Insufficient permissions');
      return handleAuthorizationError('Only repository owners or admins can delete repositories');
    }

    // Resolve the on-disk path of the bare repository.
    const repoPath = join(repoRoot, npub, `${repo}.git`);

    // Security: refuse any path that escapes repoRoot (e.g. via `..` segments).
    const normalizedPath = resolve(repoPath).replace(/\\/g, '/');
    const normalizedRoot = resolve(repoRoot).replace(/\\/g, '/');
    if (!normalizedPath.startsWith(normalizedRoot + '/')) {
      auditReject('denied', 'Invalid repository path');
      return error(403, 'Invalid repository path');
    }

    if (!existsSync(repoPath)) {
      auditReject('failure', 'Repository not found');
      return error(404, 'Repository not found');
    }

    try {
      // Remove the repository directory, then drop its existence-cache entry.
      await rm(repoPath, { recursive: true, force: true });
      repoCache.delete(RepoCache.repoExistsKey(npub, repo));

      auditLogger.log({
        user: userPubkeyHex,
        ip: clientIp,
        action: 'repo.delete',
        resource: `${npub}/${repo}`,
        result: 'success',
        metadata: {
          isOwner: ownerMatch,
          isAdmin: adminMatch
        }
      });
      logger.info({
        user: userPubkeyHex,
        npub,
        repo,
        isOwner: ownerMatch,
        isAdmin: adminMatch
      }, 'Repository deleted');

      return json({
        success: true,
        message: 'Repository deleted successfully'
      });
    } catch (err) {
      const errorMessage = err instanceof Error ? err.message : 'Unknown error';
      auditReject('failure', errorMessage);
      return handleApiError(err, { operation: 'deleteRepo', npub, repo }, 'Failed to delete repository');
    }
  },
  {
    operation: 'deleteRepo',
    requireRepoExists: true,
    requireRepoAccess: false,
    requireMaintainer: false
  }
);

22
src/routes/api/repos/[npub]/[repo]/download/+server.ts → src/routes/api/repos/[npub]/[repo]/archive/+server.ts

@ -1,9 +1,15 @@ @@ -1,9 +1,15 @@
/**
* API endpoint for downloading repository as ZIP or TAR.GZ
* Refactored for better error handling and reliability
* RESTful Archive Endpoint
*
* GET /api/repos/{npub}/{repo}/archive?format=zip|tar.gz&ref=...
*
* Query parameters:
* - format - Archive format: 'zip' or 'tar.gz' (default: 'zip')
* - ref - Branch, tag, or commit hash (default: 'HEAD')
*/
import { error } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
@ -55,7 +61,7 @@ async function createTempClone( @@ -55,7 +61,7 @@ async function createTempClone(
return null;
}
logger.info({ npub: context.npub, repo: context.repo }, 'Creating temporary clone for download');
logger.info({ npub: context.npub, repo: context.repo }, 'Creating temporary clone for archive');
// Setup temp clone directory
const tempDir = resolve(join(repoRoot, '..', 'temp-clones'));
@ -275,7 +281,7 @@ function createTarGzArchive(workDir: string, archivePath: string): Promise<void> @@ -275,7 +281,7 @@ function createTarGzArchive(workDir: string, archivePath: string): Promise<void>
}
/**
* Main download handler
* Main archive handler
*/
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
@ -302,7 +308,7 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -302,7 +308,7 @@ export const GET: RequestHandler = createRepoGetHandler(
} else {
throw handleNotFoundError(
'Repository not found',
{ operation: 'download', npub: context.npub, repo: context.repo }
{ operation: 'archive', npub: context.npub, repo: context.repo }
);
}
}
@ -312,7 +318,7 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -312,7 +318,7 @@ export const GET: RequestHandler = createRepoGetHandler(
if (!existsSync(sourceRepoPath)) {
throw handleNotFoundError(
'Repository not found',
{ operation: 'download', npub: context.npub, repo: context.repo }
{ operation: 'archive', npub: context.npub, repo: context.repo }
);
}
@ -441,7 +447,7 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -441,7 +447,7 @@ export const GET: RequestHandler = createRepoGetHandler(
}
// Return archive
logger.info({ npub: context.npub, repo: context.repo, ref, format, size: archiveBuffer.length }, 'Download completed successfully');
logger.info({ npub: context.npub, repo: context.repo, ref, format, size: archiveBuffer.length }, 'Archive created successfully');
return new Response(archiveBuffer, {
headers: {
@ -490,5 +496,5 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -490,5 +496,5 @@ export const GET: RequestHandler = createRepoGetHandler(
throw error(500, `Failed to create archive: ${err instanceof Error ? err.message : String(err)}`);
}
},
{ operation: 'download', requireRepoExists: false, requireRepoAccess: true }
{ operation: 'archive', requireRepoExists: false, requireRepoAccess: true }
);

7
src/routes/api/repos/[npub]/[repo]/default-branch/+server.ts → src/routes/api/repos/[npub]/[repo]/branches/default/+server.ts

@ -1,8 +1,13 @@ @@ -1,8 +1,13 @@
/**
* API endpoint for getting the default branch of a repository
* RESTful Default Branch Endpoint
*
* GET /api/repos/{npub}/{repo}/branches/default
*
* Returns the default branch of the repository
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';

125
src/routes/api/repos/[npub]/[repo]/clone-urls/+server.ts

@ -0,0 +1,125 @@ @@ -0,0 +1,125 @@
/**
* RESTful Clone URLs Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/clone-urls # List clone URLs
* POST /api/repos/{npub}/{repo}/clone-urls # Check reachability (body: {urls: [...]})
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError } from '$lib/utils/error-handler.js';
import { getCloneUrlsReachability } from '$lib/services/git/clone-url-reachability.js';
import { extractCloneUrls } from '$lib/utils/nostr-utils.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { DEFAULT_NOSTR_RELAYS, DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { nostrClient } from '$lib/services/service-registry.js';
import logger from '$lib/services/logger.js';
/**
 * GET: List clone URLs
 * Query params:
 * - includeReachability: boolean (optional) - Include reachability status
 * - forceRefresh: boolean (optional) - Force refresh reachability cache
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    try {
      const query = new URL(event.request.url).searchParams;
      const includeReachability = query.get('includeReachability') === 'true';
      const forceRefresh = query.get('forceRefresh') === 'true';

      // Look up the repo announcement (case-insensitive) via the cached
      // default relays first; if nothing matched, widen to the union of
      // default and search relays.
      let events = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
      let announcement = findRepoAnnouncement(events, context.repo);
      if (!announcement) {
        const mergedRelays = [...new Set([...DEFAULT_NOSTR_RELAYS, ...DEFAULT_NOSTR_SEARCH_RELAYS])];
        if (mergedRelays.length > DEFAULT_NOSTR_RELAYS.length) {
          const widerClient = new NostrClient(mergedRelays);
          events = await fetchRepoAnnouncementsWithCache(widerClient, context.repoOwnerPubkey, eventCache);
          announcement = findRepoAnnouncement(events, context.repo);
        }
      }
      if (!announcement) {
        logger.warn({ npub: context.npub, repo: context.repo }, 'Repository announcement not found for clone URLs');
        return error(404, 'Repository announcement not found');
      }

      const cloneUrls = extractCloneUrls(announcement, false);
      if (!includeReachability) {
        return json({ cloneUrls, count: cloneUrls.length });
      }

      // Collect ws:// / wss:// entries from the announcement's `relays` tag
      // (used for proper GRASP server detection).
      const relayUrls = announcement.tags
        .filter((tag) => tag[0] === 'relays')
        .flatMap((tag) => tag.slice(1))
        .filter((value): value is string =>
          typeof value === 'string' && (value.startsWith('ws://') || value.startsWith('wss://'))
        );

      const reachability = await getCloneUrlsReachability(
        cloneUrls,
        5000,
        forceRefresh,
        relayUrls.length > 0 ? relayUrls : undefined
      );
      return json({
        cloneUrls,
        count: cloneUrls.length,
        reachability
      });
    } catch (err) {
      return handleApiError(err, { operation: 'getCloneUrls', npub: context.npub, repo: context.repo }, 'Failed to get clone URLs');
    }
  },
  { operation: 'getCloneUrls', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * POST: Check reachability of clone URLs
 * Body: { urls: string[], forceRefresh?: boolean }
 *
 * Probes each URL (5 s timeout per URL inside getCloneUrlsReachability)
 * and responds with `{ results }`.
 */
// Each URL probe can take up to 5 seconds, so an unbounded list would let a
// single request tie up the server — cap the batch size.
const MAX_REACHABILITY_URLS = 50;

export const POST: RequestHandler = createRepoPostHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    try {
      const body = await event.request.json();
      const { urls, forceRefresh = false } = body;
      if (!Array.isArray(urls) || urls.length === 0) {
        return error(400, 'urls must be a non-empty array');
      }
      // Robustness/DoS fix: reject oversized batches instead of probing them all.
      if (urls.length > MAX_REACHABILITY_URLS) {
        return error(400, `Too many URLs: at most ${MAX_REACHABILITY_URLS} may be checked per request`);
      }
      // Validate URLs are strings
      if (!urls.every((url) => typeof url === 'string')) {
        return error(400, 'All URLs must be strings');
      }
      // Coerce forceRefresh to a strict boolean so arbitrary truthy junk in
      // the body cannot silently bypass the reachability cache.
      const results = await getCloneUrlsReachability(urls, 5000, forceRefresh === true);
      return json({ results });
    } catch (err) {
      return handleApiError(err, { operation: 'checkReachability', npub: context.npub, repo: context.repo }, 'Failed to check clone URL reachability');
    }
  },
  { operation: 'checkReachability', requireRepoExists: false, requireRepoAccess: false }
);

210
src/routes/api/repos/[npub]/[repo]/code-search/+server.ts

@ -1,210 +0,0 @@ @@ -1,210 +0,0 @@
/**
* API endpoint for code search within repositories
* Searches file contents across repositories
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { fileManager, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError } from '$lib/utils/error-handler.js';
import { join } from 'path';
import { existsSync } from 'fs';
import logger from '$lib/services/logger.js';
import { simpleGit } from 'simple-git';
import { readFile } from 'fs/promises';
// Root directory for bare repositories; overridable via GIT_REPO_ROOT.
const repoRoot = (typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT) || '/repos';
/** One match from a repository code search (one line of `git grep` output). */
export interface CodeSearchResult {
// File path relative to the repository root.
file: string;
// 1-based line number of the match within the file.
line: number;
// Matched line content, trimmed of surrounding whitespace.
content: string;
// Branch label the search ran against ('HEAD' when no branch was given).
branch: string;
// Commit hash; not populated by this endpoint's handler — TODO confirm consumers.
commit?: string;
}
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
const query = event.url.searchParams.get('q');
const branch = event.url.searchParams.get('branch') || 'HEAD';
const limit = parseInt(event.url.searchParams.get('limit') || '100', 10);
if (!query || query.trim().length < 2) {
throw handleValidationError('Query must be at least 2 characters', { operation: 'codeSearch', npub: context.npub, repo: context.repo });
}
const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
// Check if repo exists
if (!existsSync(repoPath)) {
logger.debug({ npub: context.npub, repo: context.repo, query }, 'Code search requested for non-existent repo');
return json([]);
}
try {
const git = simpleGit(repoPath);
const results: CodeSearchResult[] = [];
// For bare repositories, we need to use a worktree or search the index
// First, try to get or create a worktree for the branch
let worktreePath: string | null = null;
try {
// Get the actual branch name (resolve HEAD if needed)
let actualBranch = branch;
if (branch === 'HEAD') {
try {
const branchInfo = await git.branch(['-a']);
actualBranch = branchInfo.current || 'main';
// If no current branch, try common defaults
if (!actualBranch || actualBranch === 'HEAD') {
const allBranches = branchInfo.all.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, ''));
actualBranch = allBranches.find(b => b === 'main') || allBranches.find(b => b === 'master') || allBranches[0] || 'main';
}
} catch {
actualBranch = 'main';
}
}
// Get or create worktree
worktreePath = await fileManager.getWorktree(repoPath, actualBranch, context.npub, context.repo);
} catch (worktreeError) {
logger.debug({ error: worktreeError, npub: context.npub, repo: context.repo, branch }, 'Could not create worktree, trying git grep with --cached');
// Fall back to searching the index
}
const searchQuery = query.trim();
// If we have a worktree, search in the worktree
if (worktreePath && existsSync(worktreePath)) {
try {
const worktreeGit = simpleGit(worktreePath);
const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery];
const grepOutput = await worktreeGit.raw(gitArgs);
if (!grepOutput || !grepOutput.trim()) {
return json([]);
}
// Parse git grep output
const lines = grepOutput.split('\n');
let currentFile = '';
for (const line of lines) {
if (!line.trim()) {
continue;
}
// Check if this is a filename (no colon)
if (!line.includes(':')) {
currentFile = line.trim();
continue;
}
// Parse line:content format
const colonIndex = line.indexOf(':');
if (colonIndex > 0 && currentFile) {
const lineNumber = parseInt(line.substring(0, colonIndex), 10);
const content = line.substring(colonIndex + 1);
if (!isNaN(lineNumber) && content) {
// Make file path relative to repo root
const relativeFile = currentFile.replace(worktreePath + '/', '').replace(/^\.\//, '');
results.push({
file: relativeFile,
line: lineNumber,
content: content.trim(),
branch: branch === 'HEAD' ? 'HEAD' : branch
});
if (results.length >= limit) {
break;
}
}
}
}
} catch (grepError: any) {
// git grep returns exit code 1 when no matches found
if (grepError.message && grepError.message.includes('exit code 1')) {
return json([]);
}
throw grepError;
}
} else {
// Fallback: search in the index using git grep --cached
try {
// Get the tree for the branch
let treeRef = branch;
if (branch === 'HEAD') {
try {
const branchInfo = await git.branch(['-a']);
treeRef = branchInfo.current || 'HEAD';
} catch {
treeRef = 'HEAD';
}
}
// Use git grep with --cached to search the index
// For bare repos, we can search a specific tree
const gitArgs = ['grep', '-n', '-I', '--break', '--heading', searchQuery, treeRef];
const grepOutput = await git.raw(gitArgs);
if (!grepOutput || !grepOutput.trim()) {
return json([]);
}
// Parse git grep output
const lines = grepOutput.split('\n');
let currentFile = '';
for (const line of lines) {
if (!line.trim()) {
continue;
}
// Check if this is a filename (no colon)
if (!line.includes(':')) {
currentFile = line.trim();
continue;
}
// Parse line:content format
const colonIndex = line.indexOf(':');
if (colonIndex > 0 && currentFile) {
const lineNumber = parseInt(line.substring(0, colonIndex), 10);
const content = line.substring(colonIndex + 1);
if (!isNaN(lineNumber) && content) {
results.push({
file: currentFile,
line: lineNumber,
content: content.trim(),
branch: branch === 'HEAD' ? 'HEAD' : branch
});
if (results.length >= limit) {
break;
}
}
}
}
} catch (grepError: any) {
// git grep returns exit code 1 when no matches found
if (grepError.message && grepError.message.includes('exit code 1')) {
return json([]);
}
throw grepError;
}
}
return json(results);
} catch (err) {
logger.error({ error: err, npub: context.npub, repo: context.repo, query }, 'Error performing code search');
throw err;
}
},
{ operation: 'codeSearch', requireRepoExists: false, requireRepoAccess: true }
);

6
src/routes/api/repos/[npub]/[repo]/commits/[hash]/verify/+server.ts → src/routes/api/repos/[npub]/[repo]/commits/[hash]/verification/+server.ts

@ -1,5 +1,9 @@ @@ -1,5 +1,9 @@
/**
* API endpoint for verifying commit signatures
* RESTful Commit Verification Endpoint
*
* GET /api/repos/{npub}/{repo}/commits/{hash}/verification
*
* Verifies the signature of a commit
*/
import { json } from '@sveltejs/kit';

12
src/routes/api/repos/[npub]/[repo]/diff/+server.ts → src/routes/api/repos/[npub]/[repo]/diffs/+server.ts

@ -1,8 +1,16 @@ @@ -1,8 +1,16 @@
/**
* API endpoint for getting diffs
* RESTful Diffs Endpoint
*
* GET /api/repos/{npub}/{repo}/diffs?from=...&to=...&path=...
*
* Query parameters:
* - from - Source ref (required)
* - to - Target ref (default: HEAD)
* - path - Optional file path to diff
*/
import { json, error } from '@sveltejs/kit';
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';

573
src/routes/api/repos/[npub]/[repo]/file/+server.ts

@ -1,573 +0,0 @@ @@ -1,573 +0,0 @@
/**
* API endpoint for reading and writing files in a repository
*/
import { json, error } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, repoManager, nostrClient } from '$lib/services/service-registry.js';
import { MaintainerService } from '$lib/services/nostr/maintainer-service.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { nip19 } from 'nostr-tools';
import { verifyNIP98Auth } from '$lib/services/nostr/nip98-auth.js';
import { auditLogger } from '$lib/services/security/audit-logger.js';
import logger from '$lib/services/logger.js';
import type { NostrEvent } from '$lib/types/nostr.js';
import { requireNpubHex, decodeNpubToHex } from '$lib/utils/npub-utils.js';
import { handleApiError, handleValidationError, handleNotFoundError } from '$lib/utils/error-handler.js';
import { KIND } from '$lib/types/nostr.js';
import { join } from 'path';
import { existsSync } from 'fs';
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { fetchUserEmail, fetchUserName } from '$lib/utils/user-profile.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
// Root directory for bare repositories; overridable via GIT_REPO_ROOT.
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
// Maintainer/permission lookups (canView etc.) go through the default relay set.
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
/**
 * GET /api/repos/{npub}/{repo}/file?path=...&ref=...
 *
 * Reads a single file from the repository at the given ref.
 * - If the repo is not cloned locally, attempts an on-demand API fetch based
 *   on the repo's Nostr announcement before returning 404.
 * - Validates a branch-style ref and falls back to the default branch, then
 *   HEAD, when it does not exist.
 * - Enforces repository privacy via maintainerService.canView and records
 *   every read attempt (success / denied / failure) in the audit log.
 */
export const GET: RequestHandler = async (event) => {
// NOTE(review): `request` is destructured but appears unused in this handler — confirm before removing.
const { params, url, request } = event;
const { npub, repo } = params;
const filePath = url.searchParams.get('path');
let ref = url.searchParams.get('ref') || 'HEAD';
// Extract user pubkey using the same method as other endpoints
const requestContext = extractRequestContext(event);
const userPubkey = requestContext.userPubkey;
const userPubkeyHex = requestContext.userPubkeyHex;
// Debug logging for file endpoint
logger.debug({
hasUserPubkey: !!userPubkey,
hasUserPubkeyHex: !!userPubkeyHex,
userPubkeyHex: userPubkeyHex ? userPubkeyHex.substring(0, 16) + '...' : null,
npub,
repo,
filePath
}, 'File endpoint - extracted user context');
if (!npub || !repo || !filePath) {
return error(400, 'Missing npub, repo, or path parameter');
}
try {
const repoPath = join(repoRoot, npub, `${repo}.git`);
// If repo doesn't exist, try to fetch it on-demand
if (!existsSync(repoPath)) {
try {
// Get repo owner pubkey
let repoOwnerPubkey: string;
try {
repoOwnerPubkey = requireNpubHex(npub);
} catch {
return error(400, 'Invalid npub format');
}
// Fetch repository announcement (case-insensitive) with caching
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, repo);
if (announcement) {
// Try API-based fetching first (no cloning)
try {
const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js');
const fileContent = await tryApiFetchFile(announcement, npub, repo, filePath, ref);
if (fileContent && fileContent.content) {
logger.debug({ npub, repo, filePath, ref }, 'Successfully fetched file via API fallback');
return json(fileContent);
}
} catch (apiErr) {
// Log the error but don't throw - we'll return a helpful error message below
logger.debug({ error: apiErr, npub, repo, filePath, ref }, 'API file fetch failed, will return 404');
}
// API fetch failed - repo is not cloned and API fetch didn't work
// Check if announcement has clone URLs to provide better error message
const { extractCloneUrls } = await import('$lib/utils/nostr-utils.js');
const cloneUrls = extractCloneUrls(announcement);
const hasCloneUrls = cloneUrls.length > 0;
logger.debug({ npub, repo, filePath, hasCloneUrls, cloneUrlCount: cloneUrls.length }, 'API fallback failed or no clone URLs available');
return error(404, hasCloneUrls
? 'Repository is not cloned locally and could not fetch file via API. Privileged users can clone this repository using the "Clone to Server" button.'
: 'Repository is not cloned locally and has no external clone URLs for API fallback. Privileged users can clone this repository using the "Clone to Server" button.');
} else {
return error(404, 'Repository announcement not found in Nostr');
}
} catch (err) {
logger.error({ error: err, npub, repo, filePath }, 'Error in on-demand file fetch');
// Check if repo was created by another concurrent request
if (existsSync(repoPath)) {
// Repo exists now, clear cache and continue with normal flow
repoCache.delete(RepoCache.repoExistsKey(npub, repo));
} else {
// If fetching fails, return 404
return error(404, 'Repository not found');
}
}
}
// Double-check repo exists (should be true if we got here)
if (!existsSync(repoPath)) {
return error(404, 'Repository not found');
}
// Get repo owner pubkey for access check (already validated above if we did on-demand fetch)
let repoOwnerPubkey: string;
try {
repoOwnerPubkey = requireNpubHex(npub);
} catch {
return error(400, 'Invalid npub format');
}
// If ref is a branch name, validate it exists or use default branch
if (ref !== 'HEAD' && !ref.startsWith('refs/')) {
try {
const branches = await fileManager.getBranches(npub, repo);
if (!branches.includes(ref)) {
// Branch doesn't exist, try to get default branch
try {
ref = await fileManager.getDefaultBranch(npub, repo);
logger.debug({ npub, repo, originalRef: url.searchParams.get('ref'), newRef: ref }, 'Branch not found, using default branch');
} catch (defaultBranchErr) {
// If we can't get default branch, fall back to HEAD
logger.warn({ error: defaultBranchErr, npub, repo, ref }, 'Could not get default branch, falling back to HEAD');
ref = 'HEAD';
}
}
} catch (branchErr) {
// If we can't get branches, fall back to HEAD
logger.warn({ error: branchErr, npub, repo, ref }, 'Could not get branches, falling back to HEAD');
ref = 'HEAD';
}
}
// Check repository privacy (repoOwnerPubkey already declared above)
logger.debug({
userPubkeyHex: userPubkeyHex ? userPubkeyHex.substring(0, 16) + '...' : null,
repoOwnerPubkey: repoOwnerPubkey.substring(0, 16) + '...',
repo
}, 'File endpoint - checking canView before access check');
const canView = await maintainerService.canView(userPubkeyHex || null, repoOwnerPubkey, repo);
logger.debug({
canView,
userPubkeyHex: userPubkeyHex ? userPubkeyHex.substring(0, 16) + '...' : null,
repoOwnerPubkey: repoOwnerPubkey.substring(0, 16) + '...',
repo
}, 'File endpoint - canView result');
// Deny: repository is private and the caller is neither owner nor maintainer.
if (!canView) {
auditLogger.logFileOperation(
userPubkeyHex || null,
requestContext.clientIp,
'read',
`${npub}/${repo}`,
filePath,
'denied',
'Insufficient permissions'
);
return error(403, 'This repository is private. Only owners and maintainers can view it.');
}
try {
// Log what we're trying to do
logger.debug({ npub, repo, filePath, ref }, 'Attempting to read file from cloned repository');
let fileContent;
try {
fileContent = await fileManager.getFileContent(npub, repo, filePath, ref);
} catch (firstErr) {
// If the first attempt fails and ref is not HEAD, try with HEAD as fallback
if (ref !== 'HEAD' && !ref.startsWith('refs/')) {
logger.warn({
error: firstErr,
npub,
repo,
filePath,
originalRef: ref
}, 'Failed to read file with specified ref, trying HEAD as fallback');
try {
fileContent = await fileManager.getFileContent(npub, repo, filePath, 'HEAD');
ref = 'HEAD'; // Update ref for logging
} catch (headErr) {
// If HEAD also fails, try API fallback before throwing
logger.debug({ error: headErr, npub, repo, filePath }, 'Failed to read file from local repo, attempting API fallback');
try {
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, repo);
if (announcement) {
const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js');
// Use the original ref, or 'main' as fallback
const apiRef = url.searchParams.get('ref') || 'main';
const apiFileContent = await tryApiFetchFile(announcement, npub, repo, filePath, apiRef);
if (apiFileContent && apiFileContent.content) {
logger.info({ npub, repo, filePath, ref: apiRef }, 'Successfully fetched file via API fallback for empty repo');
auditLogger.logFileOperation(
userPubkeyHex || null,
requestContext.clientIp,
'read',
`${npub}/${repo}`,
filePath,
'success'
);
return json(apiFileContent);
}
}
} catch (apiErr) {
logger.debug({ error: apiErr, npub, repo, filePath }, 'API fallback failed for file');
}
// If API fallback also fails, throw the original error
throw firstErr;
}
} else {
// Try API fallback before throwing
logger.debug({ error: firstErr, npub, repo, filePath }, 'Failed to read file from local repo, attempting API fallback');
try {
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, repo);
if (announcement) {
const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js');
const apiRef = ref === 'HEAD' ? 'main' : ref;
const apiFileContent = await tryApiFetchFile(announcement, npub, repo, filePath, apiRef);
if (apiFileContent && apiFileContent.content) {
logger.info({ npub, repo, filePath, ref: apiRef }, 'Successfully fetched file via API fallback for empty repo');
auditLogger.logFileOperation(
userPubkeyHex || null,
requestContext.clientIp,
'read',
`${npub}/${repo}`,
filePath,
'success'
);
return json(apiFileContent);
}
}
} catch (apiErr) {
logger.debug({ error: apiErr, npub, repo, filePath }, 'API fallback failed for file');
}
throw firstErr;
}
}
// Local read succeeded — audit and return.
auditLogger.logFileOperation(
userPubkeyHex || null,
requestContext.clientIp,
'read',
`${npub}/${repo}`,
filePath,
'success'
);
return json(fileContent);
} catch (err) {
const errorMessage = err instanceof Error ? err.message : String(err);
const errorLower = errorMessage.toLowerCase();
const errorStack = err instanceof Error ? err.stack : undefined;
logger.error({
error: err,
errorStack,
npub,
repo,
filePath,
ref,
repoExists: existsSync(repoPath),
errorMessage
}, 'Error reading file from cloned repository');
auditLogger.logFileOperation(
userPubkeyHex || null,
requestContext.clientIp,
'read',
`${npub}/${repo}`,
filePath,
'failure',
errorMessage
);
// If file not found or path doesn't exist, return 404 instead of 500
if (errorLower.includes('not found') ||
errorLower.includes('no such file') ||
errorLower.includes('does not exist') ||
errorLower.includes('fatal:') ||
errorMessage.includes('pathspec')) {
return error(404, `File not found: ${filePath} at ref ${ref}`);
}
// For other errors, return 500 with a more helpful message
return error(500, `Failed to read file: ${errorMessage}`);
}
} catch (err) {
// This catch block handles errors that occur outside the file reading try-catch
// (e.g., in branch validation, access checks, etc.)
// If it's already a Response (from error handlers), return it
if (err instanceof Response) {
return err;
}
// If it's a SvelteKit HttpError (from error() function), re-throw it
// SvelteKit errors have a status property and body property
if (err && typeof err === 'object' && 'status' in err && 'body' in err) {
throw err;
}
const errorMessage = err instanceof Error ? err.message : String(err);
const errorStack = err instanceof Error ? err.stack : undefined;
logger.error({
error: err,
errorStack,
npub,
repo,
filePath,
ref: url.searchParams.get('ref'),
errorMessage
}, 'Unexpected error in file endpoint (outside file reading block)');
// Check if it's a "not found" type error
const errorLower = errorMessage.toLowerCase();
if (errorLower.includes('not found') ||
errorLower.includes('repository not found')) {
return error(404, errorMessage);
}
return handleApiError(err, { operation: 'readFile', npub, repo, filePath }, 'Failed to read file');
}
};
/**
 * POST - Create, update, or delete a repository file and commit the change.
 *
 * Body: { path, content?, commitMessage, authorName?, authorEmail?, branch?,
 *         action?, userPubkey, useNIP07?, nsecKey?, commitSignatureEvent? }
 *
 * - action === 'delete' removes the file; otherwise content is written
 *   (both 'create' and plain updates go through fileManager.writeFile).
 * - Only repository maintainers may write; everyone else gets a 403.
 * - Commits may carry a Nostr signature via a pre-signed commitSignatureEvent
 *   (NIP-07, signed client-side) or a NIP-98 Authorization header.
 * - nsecKey sent by clients is deliberately ignored and audited as a
 *   security violation; it is never used for signing here.
 */
export const POST: RequestHandler = async ({ params, url, request }: { params: { npub?: string; repo?: string }; url: URL; request: Request }) => {
  const { npub, repo } = params;
  if (!npub || !repo) {
    return error(400, 'Missing npub or repo parameter');
  }
  // Declared outside try so the catch-side error context can include the path.
  let path: string | undefined;
  try {
    const body = await request.json();
    path = body.path;
    const { content, commitMessage, authorName, authorEmail, branch, action, userPubkey, useNIP07, nsecKey, commitSignatureEvent } = body;
    // Check for NIP-98 authentication (for git operations)
    const authHeader = request.headers.get('Authorization');
    let nip98Event = null;
    if (authHeader && authHeader.startsWith('Nostr ')) {
      // Rebuild the externally-visible URL (honoring reverse-proxy headers)
      // so the NIP-98 event's url/method binding can be verified.
      const requestUrl = `${request.headers.get('x-forwarded-proto') || (url.protocol === 'https:' ? 'https' : 'http')}://${request.headers.get('host') || url.host}${url.pathname}${url.search}`;
      const authResult = verifyNIP98Auth(authHeader, requestUrl, request.method);
      if (authResult.valid && authResult.event) {
        nip98Event = authResult.event;
      }
    }
    if (!path || !commitMessage) {
      return error(400, 'Missing required fields: path, commitMessage');
    }
    // Fetch authorName and authorEmail from kind 0 event if not provided
    let finalAuthorName = authorName;
    let finalAuthorEmail = authorEmail;
    if (!finalAuthorName || !finalAuthorEmail) {
      // NOTE(review): missing userPubkey yields 400 here but 401 further
      // below — consider unifying the status codes.
      if (!userPubkey) {
        return error(400, 'Missing userPubkey. Cannot fetch author information without userPubkey.');
      }
      const userPubkeyHexForProfile = decodeNpubToHex(userPubkey) || userPubkey;
      try {
        if (!finalAuthorName) {
          finalAuthorName = await fetchUserName(userPubkeyHexForProfile, userPubkey, DEFAULT_NOSTR_RELAYS);
        }
        if (!finalAuthorEmail) {
          finalAuthorEmail = await fetchUserEmail(userPubkeyHexForProfile, userPubkey, DEFAULT_NOSTR_RELAYS);
        }
      } catch (err) {
        logger.warn({ error: err, userPubkey }, 'Failed to fetch user profile for author info, using fallbacks');
        // Use fallbacks if fetch fails
        if (!finalAuthorName) {
          // NOTE(review): this local `npub` shadows the route param above;
          // it refers to the *author's* npub, not the repo owner's.
          const npub = userPubkey.startsWith('npub') ? userPubkey : nip19.npubEncode(userPubkeyHexForProfile);
          finalAuthorName = npub.substring(0, 20);
        }
        if (!finalAuthorEmail) {
          const npub = userPubkey.startsWith('npub') ? userPubkey : nip19.npubEncode(userPubkeyHexForProfile);
          finalAuthorEmail = `${npub.substring(0, 20)}@gitrepublic.web`;
        }
      }
    }
    if (!userPubkey) {
      return error(401, 'Authentication required. Please provide userPubkey.');
    }
    // Check if repo exists locally
    if (!fileManager.repoExists(npub, repo)) {
      // Try to fetch announcement to see if repo exists in Nostr
      let repoOwnerPubkey: string;
      try {
        repoOwnerPubkey = requireNpubHex(npub);
      } catch {
        return error(400, 'Invalid npub format');
      }
      // Fetch repository announcement (case-insensitive) with caching
      const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
      const announcement = findRepoAnnouncement(allEvents, repo);
      if (announcement) {
        // Repository exists in Nostr but is not cloned locally
        // For file editing, we need a local clone
        return error(404, 'Repository is not cloned locally. To edit files, the repository must be cloned to the server first. Please use the "Clone to Server" button if you have unlimited access, or contact a server administrator.');
      } else {
        return error(404, 'Repository not found');
      }
    }
    // Check if user is a maintainer
    let repoOwnerPubkey: string;
    try {
      repoOwnerPubkey = requireNpubHex(npub);
    } catch {
      return error(400, 'Invalid npub format');
    }
    // Convert userPubkey to hex if needed
    const userPubkeyHex = decodeNpubToHex(userPubkey) || userPubkey;
    const isMaintainer = await maintainerService.isMaintainer(userPubkeyHex, repoOwnerPubkey, repo);
    if (!isMaintainer) {
      return error(403, 'Only repository maintainers can edit files directly. Please submit a pull request instead.');
    }
    // Prepare signing options
    // NOTE: nsecKey is intentionally NOT supported from client requests for security reasons.
    // Clients should use NIP-07 (browser extension) or NIP-98 (HTTP auth) instead.
    // nsecKey is only for server-side use via environment variables.
    const signingOptions: {
      useNIP07?: boolean;
      nip98Event?: NostrEvent;
      nsecKey?: string;
      commitSignatureEvent?: NostrEvent;
    } = {};
    // If client sent a pre-signed commit signature event (from NIP-07), use it
    if (commitSignatureEvent && commitSignatureEvent.sig && commitSignatureEvent.id) {
      signingOptions.commitSignatureEvent = commitSignatureEvent;
    } else if (nip98Event) {
      signingOptions.nip98Event = nip98Event;
    }
    // Note: useNIP07 is no longer used since signing happens client-side
    // Explicitly ignore nsecKey from client requests - it's a security risk
    // Server-side signing is not recommended - commits should be signed by their authors
    if (nsecKey) {
      // Security: Log warning but never log the actual key value
      const clientIp = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown';
      logger.warn({ clientIp, npub, repo }, '[SECURITY] Client attempted to send nsecKey in request. This is not allowed for security reasons.');
      auditLogger.log({
        user: userPubkeyHex || undefined,
        ip: clientIp,
        action: 'auth_attempt',
        resource: 'file_operation',
        result: 'failure',
        error: 'Client attempted to send private key in request body',
        metadata: { reason: 'security_violation' }
      });
    }
    const clientIp = request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip') || 'unknown';
    if (action === 'delete') {
      try {
        // Get default branch if not provided
        const targetBranch = branch || await fileManager.getDefaultBranch(npub, repo);
        await fileManager.deleteFile(
          npub,
          repo,
          path,
          commitMessage,
          finalAuthorName,
          finalAuthorEmail,
          targetBranch,
          Object.keys(signingOptions).length > 0 ? signingOptions : undefined
        );
        auditLogger.logFileOperation(
          userPubkeyHex,
          clientIp,
          'delete',
          `${npub}/${repo}`,
          path,
          'success'
        );
        return json({ success: true, message: 'File deleted and committed' });
      } catch (err) {
        // Audit the failure, then rethrow so the outer handler maps it to an API error.
        auditLogger.logFileOperation(
          userPubkeyHex,
          clientIp,
          'delete',
          `${npub}/${repo}`,
          path,
          'failure',
          err instanceof Error ? err.message : String(err)
        );
        throw err;
      }
    } else if (action === 'create' || content !== undefined) {
      if (content === undefined) {
        return error(400, 'Content is required for create/update operations');
      }
      try {
        // Get default branch if not provided
        const targetBranch = branch || await fileManager.getDefaultBranch(npub, repo);
        await fileManager.writeFile(
          npub,
          repo,
          path,
          content,
          commitMessage,
          finalAuthorName,
          finalAuthorEmail,
          targetBranch,
          Object.keys(signingOptions).length > 0 ? signingOptions : undefined
        );
        auditLogger.logFileOperation(
          userPubkeyHex,
          clientIp,
          action === 'create' ? 'create' : 'write',
          `${npub}/${repo}`,
          path,
          'success'
        );
        return json({ success: true, message: 'File saved and committed' });
      } catch (err) {
        // Audit the failure, then rethrow so the outer handler maps it to an API error.
        auditLogger.logFileOperation(
          userPubkeyHex,
          clientIp,
          action === 'create' ? 'create' : 'write',
          `${npub}/${repo}`,
          path,
          'failure',
          err instanceof Error ? err.message : String(err)
        );
        throw err;
      }
    } else {
      return error(400, 'Invalid action or missing content');
    }
  } catch (err) {
    return handleApiError(err, { operation: 'writeFile', npub, repo, filePath: path }, 'Failed to write file');
  }
};

1194
src/routes/api/repos/[npub]/[repo]/files/+server.ts

File diff suppressed because it is too large Load Diff

543
src/routes/api/repos/[npub]/[repo]/fork/+server.ts

@@ -1,543 +0,0 @@
/**
* API endpoint for forking repositories
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { DEFAULT_NOSTR_RELAYS, combineRelays, getGitUrl } from '$lib/config.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { KIND, type NostrEvent } from '$lib/types/nostr.js';
import { getVisibility, getProjectRelays } from '$lib/utils/repo-visibility.js';
import { nip19 } from 'nostr-tools';
import { signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js';
import { requireNpubHex, decodeNpubToHex } from '$lib/utils/npub-utils.js';
import { OwnershipTransferService } from '$lib/services/nostr/ownership-transfer-service.js';
import { existsSync } from 'fs';
import { rm } from 'fs/promises';
import { join, resolve } from 'path';
import simpleGit from 'simple-git';
import { isValidBranchName, validateRepoPath } from '$lib/utils/security.js';
import { ResourceLimits } from '$lib/services/security/resource-limits.js';
import { auditLogger } from '$lib/services/security/audit-logger.js';
import { ForkCountService } from '$lib/services/nostr/fork-count-service.js';
import { getCachedUserLevel } from '$lib/services/security/user-level-cache.js';
import { hasUnlimitedAccess } from '$lib/utils/user-access.js';
import logger from '$lib/services/logger.js';
import { handleApiError, handleValidationError, handleNotFoundError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { repoManager, nostrClient, forkCountService } from '$lib/services/service-registry.js';
// Resolve GIT_REPO_ROOT to absolute path (handles both relative and absolute paths)
const repoRootEnv = process.env.GIT_REPO_ROOT || '/repos';
// Absolute root makes the validateRepoPath containment checks below reliable.
const repoRoot = resolve(repoRootEnv);
// Used below to gate fork creation via canCreateRepo() / invalidateCache().
const resourceLimits = new ResourceLimits(repoRoot);
/**
 * Retry publishing an event with exponential backoff.
 * Attempts up to `maxAttempts` times; delays double starting at 1s (1s, 2s, 4s, ...).
 *
 * @param event - Signed Nostr event to broadcast
 * @param relays - Relay URLs to publish to
 * @param eventName - Human-readable label used in log lines
 * @param maxAttempts - Maximum number of publish attempts (must be >= 1)
 * @param context - Optional log context; defaults to a short event-id tag
 * @returns The relay success/failure breakdown of the last attempt
 * @throws Error if maxAttempts is less than 1
 */
async function publishEventWithRetry(
  event: NostrEvent,
  relays: string[],
  eventName: string,
  maxAttempts: number = 3,
  context?: string
): Promise<{ success: string[]; failed: Array<{ relay: string; error: string }> }> {
  // Guard: the previous implementation returned `lastResult!`, which was a
  // type-system lie (actually null) whenever the loop never ran.
  if (maxAttempts < 1) {
    throw new Error(`publishEventWithRetry: maxAttempts must be >= 1 (got ${maxAttempts})`);
  }
  let lastResult: { success: string[]; failed: Array<{ relay: string; error: string }> } = { success: [], failed: [] };
  // Extract context from event if available (for better logging)
  const eventId = event.id.slice(0, 8);
  const logContext = context || `[event:${eventId}]`;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    logger.info({ logContext, eventName, attempt, maxAttempts }, `[Fork] Publishing ${eventName} - Attempt ${attempt}/${maxAttempts}...`);
    lastResult = await nostrClient.publishEvent(event, relays);
    if (lastResult.success.length > 0) {
      // At least one relay accepted the event — treat as success even if some failed.
      logger.info({ logContext, eventName, successCount: lastResult.success.length, relays: lastResult.success }, `[Fork] ${eventName} published successfully`);
      if (lastResult.failed.length > 0) {
        logger.warn({ logContext, eventName, failed: lastResult.failed }, `[Fork] Some relays failed`);
      }
      return lastResult;
    }
    if (attempt < maxAttempts) {
      const delayMs = Math.pow(2, attempt - 1) * 1000; // 1s, 2s, 4s, ...
      logger.warn({ logContext, eventName, attempt, delayMs, failed: lastResult.failed }, `[Fork] ${eventName} failed on attempt ${attempt}. Retrying...`);
      await new Promise(resolve => setTimeout(resolve, delayMs));
    }
  }
  // All attempts failed
  logger.error({ logContext, eventName, maxAttempts, failed: lastResult.failed }, `[Fork] ${eventName} failed after ${maxAttempts} attempts`);
  return lastResult;
}
/**
 * POST - Fork a repository
 * Body: { userPubkey, forkName?, localOnly? }
 *
 * Clones the original bare repo into the requesting user's namespace, then
 * (unless localOnly) publishes a kind-30617 fork announcement plus an initial
 * ownership-transfer event to the user's relays. If either publish fails after
 * retries the fork is rolled back (repo removed; a NIP-09 deletion request is
 * sent for a dangling announcement).
 */
export const POST: RequestHandler = async ({ params, request }) => {
  const { npub, repo } = params;
  if (!npub || !repo) {
    return error(400, 'Missing npub or repo parameter');
  }
  try {
    const body = await request.json();
    const { userPubkey, forkName, localOnly } = body;
    if (!userPubkey) {
      return error(401, 'Authentication required. Please provide userPubkey.');
    }
    // Validate localOnly parameter
    const isLocalOnly = localOnly === true;
    // Decode original repo owner npub
    let originalOwnerPubkey: string;
    try {
      originalOwnerPubkey = requireNpubHex(npub);
    } catch {
      return error(400, 'Invalid npub format');
    }
    // Decode user pubkey if needed (must be done before using it)
    const userPubkeyHex = decodeNpubToHex(userPubkey) || userPubkey;
    // Convert to npub for resource check and path construction
    const userNpub = nip19.npubEncode(userPubkeyHex);
    // Determine fork name (use original name if not specified)
    const forkRepoName = forkName || repo;
    // Check if user has unlimited access (required for storing repos locally)
    const userLevel = getCachedUserLevel(userPubkeyHex);
    if (!hasUnlimitedAccess(userLevel?.level)) {
      auditLogger.logRepoFork(
        userPubkeyHex,
        `${npub}/${repo}`,
        `${userNpub}/${forkRepoName}`,
        'failure',
        'User does not have unlimited access'
      );
      return error(403, 'Repository creation requires unlimited access. Please verify you can write to at least one default Nostr relay.');
    }
    // Check resource limits before forking
    const resourceCheck = await resourceLimits.canCreateRepo(userNpub);
    if (!resourceCheck.allowed) {
      auditLogger.logRepoFork(
        userPubkeyHex,
        `${npub}/${repo}`,
        `${userNpub}/${forkRepoName}`,
        'failure',
        resourceCheck.reason
      );
      return error(403, resourceCheck.reason || 'Resource limit exceeded');
    }
    // Check if original repo exists
    const originalRepoPath = join(repoRoot, npub, `${repo}.git`);
    // Security: Ensure resolved path is within repoRoot
    const originalPathValidation = validateRepoPath(originalRepoPath, repoRoot);
    if (!originalPathValidation.valid) {
      return error(403, originalPathValidation.error || 'Invalid repository path');
    }
    if (!existsSync(originalRepoPath)) {
      return error(404, 'Original repository not found');
    }
    // Get original repo announcement (case-insensitive) with caching
    const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, originalOwnerPubkey, eventCache);
    const originalAnnouncement = findRepoAnnouncement(allAnnouncements, repo);
    if (!originalAnnouncement) {
      return error(404, 'Original repository announcement not found');
    }
    // Check if fork already exists
    const forkRepoPath = join(repoRoot, userNpub, `${forkRepoName}.git`);
    // Security: Ensure resolved path is within repoRoot
    const forkPathValidation = validateRepoPath(forkRepoPath, repoRoot);
    if (!forkPathValidation.valid) {
      return error(403, forkPathValidation.error || 'Invalid fork repository path');
    }
    if (existsSync(forkRepoPath)) {
      return error(409, 'Fork already exists');
    }
    // Clone the repository using simple-git (safer than shell commands)
    const git = simpleGit();
    await git.clone(originalRepoPath, forkRepoPath, ['--bare']);
    // Audit success only once the bare clone has actually completed.
    // (Previously this was logged *before* cloning, so a failed clone still
    // left a misleading 'success' entry in the audit trail.)
    auditLogger.logRepoFork(
      userPubkeyHex,
      `${npub}/${repo}`,
      `${userNpub}/${forkRepoName}`,
      'success'
    );
    // Invalidate resource limit cache after creating repo
    resourceLimits.invalidateCache(userNpub);
    // Create fork announcement
    const gitDomain = process.env.GIT_DOMAIN || 'localhost:6543';
    const isLocalhost = gitDomain.startsWith('localhost') || gitDomain.startsWith('127.0.0.1');
    const protocol = isLocalhost ? 'http' : 'https';
    const forkGitUrl = `${protocol}://${gitDomain}/${userNpub}/${forkRepoName}.git`;
    // Get Tor .onion URL if available
    const { getTorGitUrl } = await import('$lib/services/tor/hidden-service.js');
    const torOnionUrl = await getTorGitUrl(userNpub, forkRepoName);
    // Extract original clone URLs and earliest unique commit
    const originalCloneUrls = originalAnnouncement.tags
      .filter(t => t[0] === 'clone')
      .flatMap(t => t.slice(1))
      .filter(url => url && typeof url === 'string')
      .filter(url => {
        // Exclude our domain and .onion URLs (we'll add our own if available)
        if (url.includes(gitDomain)) return false;
        if (url.includes('.onion')) return false;
        return true;
      }) as string[];
    const earliestCommitTag = originalAnnouncement.tags.find(t => t[0] === 'r' && t[2] === 'euc');
    const earliestCommit = earliestCommitTag?.[1];
    // Get original repo name and description
    const originalName = originalAnnouncement.tags.find(t => t[0] === 'name')?.[1] || repo;
    const originalDescription = originalAnnouncement.tags.find(t => t[0] === 'description')?.[1] || '';
    // Build clone URLs for fork - NEVER include localhost, only include public domain or Tor .onion
    const forkCloneUrls: string[] = [];
    // Add our domain URL only if it's NOT localhost (explicitly check the URL)
    if (!isLocalhost && !forkGitUrl.includes('localhost') && !forkGitUrl.includes('127.0.0.1')) {
      forkCloneUrls.push(forkGitUrl);
    }
    // Add Tor .onion URL if available
    if (torOnionUrl) {
      forkCloneUrls.push(torOnionUrl);
    }
    // Add original clone URLs
    forkCloneUrls.push(...originalCloneUrls);
    // Validate: If using localhost, require either Tor .onion URL or at least one other clone URL
    if (isLocalhost && !torOnionUrl && originalCloneUrls.length === 0) {
      return error(400, 'Cannot create fork with only localhost. The original repository must have at least one public clone URL, or you need to configure a Tor .onion address.');
    }
    // Preserve visibility and project-relay from original repo
    const originalVisibility = getVisibility(originalAnnouncement);
    const originalProjectRelays = getProjectRelays(originalAnnouncement);
    // Build fork announcement tags
    // Use standardized fork tag: ['fork', '30617:pubkey:d-tag']
    const originalRepoTag = `${KIND.REPO_ANNOUNCEMENT}:${originalOwnerPubkey}:${repo}`;
    const tags: string[][] = [
      ['d', forkRepoName],
      ['name', `${originalName} (fork)`],
      ['description', `Fork of ${originalName}${originalDescription ? `: ${originalDescription}` : ''}`],
      ['clone', ...forkCloneUrls],
      ['relays', ...DEFAULT_NOSTR_RELAYS],
      ['fork', originalRepoTag], // Standardized fork tag format
      ['p', originalOwnerPubkey], // Original owner
    ];
    // Local-only forks are always private and marked as synthetic
    if (isLocalOnly) {
      tags.push(['visibility', 'private']);
      tags.push(['local-only', 'true']); // Mark as synthetic/local-only
    } else {
      // Preserve visibility from original repo (defaults to public if not set)
      if (originalVisibility !== 'public') {
        tags.push(['visibility', originalVisibility]);
      }
    }
    // Preserve project-relay tags from original repo
    for (const relay of originalProjectRelays) {
      tags.push(['project-relay', relay]);
    }
    // Add earliest unique commit if available
    if (earliestCommit) {
      tags.push(['r', earliestCommit, 'euc']);
    }
    // Create fork announcement event
    const forkAnnouncementTemplate = {
      kind: KIND.REPO_ANNOUNCEMENT,
      pubkey: userPubkeyHex,
      created_at: Math.floor(Date.now() / 1000),
      content: '',
      tags
    };
    // Sign fork announcement
    const signedForkAnnouncement = await signEventWithNIP07(forkAnnouncementTemplate);
    // Security: Truncate npub in logs and create context (must be before use)
    const truncatedNpub = userNpub.length > 16 ? `${userNpub.slice(0, 12)}...` : userNpub;
    const truncatedOriginalNpub = npub.length > 16 ? `${npub.slice(0, 12)}...` : npub;
    // Fixed: the separator between source repo and fork was missing from the log context.
    const context = `[${truncatedOriginalNpub}/${repo} -> ${truncatedNpub}/${forkRepoName}]`;
    let publishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null;
    let ownershipPublishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null;
    let signedOwnershipEvent: NostrEvent | null = null;
    if (isLocalOnly) {
      // Local-only fork: Skip publishing to Nostr relays
      logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: true }, 'Creating local-only fork (not publishing to Nostr)');
      publishResult = { success: [], failed: [] };
      ownershipPublishResult = { success: [], failed: [] };
      // For local-only forks, create a synthetic ownership event (not published)
      const ownershipService = new OwnershipTransferService([]);
      const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName);
      signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent);
      logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Created synthetic ownership event for local-only fork');
    } else {
      // Public fork: Publish to Nostr relays
      const { outbox } = await getUserRelays(userPubkeyHex, nostrClient);
      const combinedRelays = combineRelays(outbox);
      logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, relayCount: combinedRelays.length, relays: combinedRelays }, 'Starting fork process');
      publishResult = await publishEventWithRetry(
        signedForkAnnouncement,
        combinedRelays,
        'fork announcement',
        3,
        context
      );
      if (publishResult.success.length === 0) {
        // Clean up repo if announcement failed
        logger.error({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: publishResult.failed }, 'Fork announcement failed after all retries. Cleaning up repository.');
        await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {});
        const errorDetails = `All relays failed: ${publishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}`;
        return json({
          success: false,
          error: 'Failed to publish fork announcement to relays after 3 attempts',
          details: errorDetails,
          eventName: 'fork announcement'
        }, { status: 500 });
      }
      // Create and publish initial ownership proof (self-transfer event)
      // This MUST succeed for the fork to be valid - without it, there's no proof of ownership on Nostr
      const ownershipService = new OwnershipTransferService(combinedRelays);
      const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName);
      signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent);
      ownershipPublishResult = await publishEventWithRetry(
        signedOwnershipEvent,
        combinedRelays,
        'ownership transfer event',
        3,
        context
      );
      if (ownershipPublishResult.success.length === 0) {
        // Clean up repo if ownership proof failed
        logger.error({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: ownershipPublishResult.failed }, 'Ownership transfer event failed after all retries. Cleaning up repository and publishing deletion request.');
        await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {});
        // Publish deletion request (NIP-09) for the announcement since it's invalid without ownership proof
        logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Publishing deletion request for invalid fork announcement...');
        const deletionRequest = {
          kind: KIND.DELETION_REQUEST, // NIP-09: Event Deletion Request
          pubkey: userPubkeyHex,
          created_at: Math.floor(Date.now() / 1000),
          content: 'Fork failed: ownership transfer event could not be published after 3 attempts. This announcement is invalid.',
          tags: [
            ['a', `${KIND.REPO_ANNOUNCEMENT}:${userPubkeyHex}:${forkRepoName}`], // Reference to the repo announcement
            ['k', KIND.REPO_ANNOUNCEMENT.toString()] // Kind of event being deleted
          ]
        };
        const signedDeletionRequest = await signEventWithNIP07(deletionRequest);
        const deletionResult = await publishEventWithRetry(
          signedDeletionRequest,
          combinedRelays,
          'deletion request',
          3,
          context
        );
        if (deletionResult.success.length > 0) {
          logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Deletion request published successfully');
        } else {
          logger.error({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: deletionResult.failed }, 'Failed to publish deletion request');
        }
        const errorDetails = `Fork is invalid without ownership proof. All relays failed: ${ownershipPublishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}. Deletion request ${deletionResult.success.length > 0 ? 'published' : 'failed to publish'}.`;
        return json({
          success: false,
          error: 'Failed to publish ownership transfer event to relays after 3 attempts',
          details: errorDetails,
          eventName: 'ownership transfer event'
        }, { status: 500 });
      }
    }
    // Provision the fork repo (this will create verification file and include self-transfer)
    logger.info({ operation: 'fork', originalRepo: `${npub}/${repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: isLocalOnly }, 'Provisioning fork repository...');
    await repoManager.provisionRepo(signedForkAnnouncement, signedOwnershipEvent || undefined, false);
    // Save fork announcement to repo (offline papertrail) in nostr/repo-events.jsonl
    try {
      const { fileManager } = await import('$lib/services/service-registry.js');
      // Save to repo if it exists locally (should exist after provisioning)
      if (fileManager.repoExists(userNpub, forkRepoName)) {
        // Get worktree to save to repo-events.jsonl
        const defaultBranch = await fileManager.getDefaultBranch(userNpub, forkRepoName).catch(() => 'main');
        const repoPath = fileManager.getRepoPath(userNpub, forkRepoName);
        const workDir = await fileManager.getWorktree(repoPath, defaultBranch, userNpub, forkRepoName);
        // Save to repo-events.jsonl
        await fileManager.saveRepoEventToWorktree(workDir, signedForkAnnouncement as NostrEvent, 'announcement').catch(err => {
          logger.debug({ error: err }, 'Failed to save fork announcement to repo-events.jsonl');
        });
        // Stage and commit the file
        const workGit = simpleGit(workDir);
        await workGit.add(['nostr/repo-events.jsonl']);
        await workGit.commit(
          `Add fork repository announcement: ${signedForkAnnouncement.id.slice(0, 16)}...`,
          ['nostr/repo-events.jsonl'],
          {
            '--author': `Nostr <${userPubkeyHex}@nostr>`
          }
        );
        // Clean up worktree
        await fileManager.removeWorktree(repoPath, workDir).catch(err => {
          logger.debug({ error: err }, 'Failed to remove worktree after saving fork announcement');
        });
      }
    } catch (err) {
      // Log but don't fail - publishing to relays is more important
      logger.warn({ error: err, npub: userNpub, repo: forkRepoName }, 'Failed to save fork announcement to repo');
    }
    logger.info({
      operation: 'fork',
      originalRepo: `${npub}/${repo}`,
      forkRepo: `${userNpub}/${forkRepoName}`,
      localOnly: isLocalOnly,
      announcementId: signedForkAnnouncement.id,
      ownershipTransferId: signedOwnershipEvent?.id,
      announcementRelays: publishResult?.success.length || 0,
      ownershipRelays: ownershipPublishResult?.success.length || 0
    }, 'Fork completed successfully');
    const message = isLocalOnly
      ? 'Local-only fork created successfully! This fork is private and only exists on this server.'
      : `Repository forked successfully! Published to ${publishResult?.success.length || 0} relay(s) for announcement and ${ownershipPublishResult?.success.length || 0} relay(s) for ownership proof.`;
    return json({
      success: true,
      fork: {
        npub: userNpub,
        repo: forkRepoName,
        url: forkGitUrl,
        localOnly: isLocalOnly,
        announcementId: signedForkAnnouncement.id,
        ownershipTransferId: signedOwnershipEvent?.id,
        publishedTo: isLocalOnly ? null : {
          announcement: publishResult?.success.length || 0,
          ownershipTransfer: ownershipPublishResult?.success.length || 0
        }
      },
      message
    });
  } catch (err) {
    return handleApiError(err, { operation: 'fork', npub, repo }, 'Failed to fork repository');
  }
};
/**
 * GET - Get fork information
 * Returns whether this is a fork and what it's forked from,
 * plus the fork count for non-fork (root) repositories.
 */
export const GET: RequestHandler = async ({ params }) => {
  const { npub, repo } = params;
  if (!npub || !repo) {
    return error(400, 'Missing npub or repo parameter');
  }
  try {
    // Decode repo owner npub
    let ownerPubkey: string;
    try {
      ownerPubkey = requireNpubHex(npub);
    } catch {
      return error(400, 'Invalid npub format');
    }
    // Get repo announcement (case-insensitive) with caching
    const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, ownerPubkey, eventCache);
    const announcement = findRepoAnnouncement(allAnnouncements, repo);
    if (!announcement) {
      return error(404, 'Repository announcement not found');
    }
    // Fork detection: the POST handler above writes the standardized
    // ['fork', '30617:pubkey:d-tag'] tag. Older announcements used a
    // ['t', 'fork'] marker plus an 'a' tag — recognize both for
    // backward compatibility.
    const forkTag = announcement.tags.find(t => t[0] === 'fork' && t[1]?.startsWith(`${KIND.REPO_ANNOUNCEMENT}:`));
    const isFork = !!forkTag || announcement.tags.some(t => t[0] === 't' && t[1] === 'fork');
    // Original repo reference: prefer the standardized 'fork' tag, fall back to the legacy 'a' tag
    const originalRepoTag = forkTag || announcement.tags.find(t => t[0] === 'a' && t[1]?.startsWith(`${KIND.REPO_ANNOUNCEMENT}:`));
    let originalRepo: { npub: string; repo: string } | null = null;
    if (originalRepoTag && originalRepoTag[1]) {
      const match = originalRepoTag[1].match(new RegExp(`^${KIND.REPO_ANNOUNCEMENT}:([a-f0-9]{64}):(.+)$`));
      if (match) {
        const [, originalOwnerPubkey, originalRepoName] = match;
        try {
          const originalNpub = nip19.npubEncode(originalOwnerPubkey);
          originalRepo = { npub: originalNpub, repo: originalRepoName };
        } catch {
          // Invalid pubkey — leave originalRepo null
        }
      }
    }
    // Get fork count for this repo (forks themselves don't report one)
    let forkCount = 0;
    if (!isFork && ownerPubkey && repo) {
      try {
        forkCount = await forkCountService.getForkCount(ownerPubkey, repo);
      } catch (err) {
        // Log but don't fail the request
        const context = npub && repo ? `[${npub}/${repo}]` : '[unknown]';
        logger.warn({ error: err, npub, repo }, `[Fork] ${context} Failed to get fork count`);
      }
    }
    return json({
      isFork,
      originalRepo,
      forkCount
    });
  } catch (err) {
    return handleApiError(err, { operation: 'getForkInfo', npub, repo }, 'Failed to get fork information');
  }
};

457
src/routes/api/repos/[npub]/[repo]/forks/+server.ts

@@ -0,0 +1,457 @@
/**
* RESTful Forks Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/forks # List forks / Get fork info
* POST /api/repos/{npub}/{repo}/forks # Create fork (fork this repo)
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError } from '$lib/utils/error-handler.js';
import { DEFAULT_NOSTR_RELAYS, combineRelays } from '$lib/config.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { KIND, type NostrEvent } from '$lib/types/nostr.js';
import { getVisibility, getProjectRelays } from '$lib/utils/repo-visibility.js';
import { nip19 } from 'nostr-tools';
import { signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js';
import { requireNpubHex, decodeNpubToHex } from '$lib/utils/npub-utils.js';
import { OwnershipTransferService } from '$lib/services/nostr/ownership-transfer-service.js';
import { existsSync } from 'fs';
import { rm } from 'fs/promises';
import { join, resolve } from 'path';
import simpleGit from 'simple-git';
import { validateRepoPath } from '$lib/utils/security.js';
import { ResourceLimits } from '$lib/services/security/resource-limits.js';
import { auditLogger } from '$lib/services/security/audit-logger.js';
import { ForkCountService } from '$lib/services/nostr/fork-count-service.js';
import { getCachedUserLevel } from '$lib/services/security/user-level-cache.js';
import { hasUnlimitedAccess } from '$lib/utils/user-access.js';
import logger from '$lib/services/logger.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { repoManager, nostrClient, forkCountService } from '$lib/services/service-registry.js';
// Resolve GIT_REPO_ROOT to absolute path
const repoRootEnv = process.env.GIT_REPO_ROOT || '/repos';
// Absolute root keeps the validateRepoPath containment checks below reliable.
const repoRoot = resolve(repoRootEnv);
// Used by the POST handler to gate fork creation via canCreateRepo().
const resourceLimits = new ResourceLimits(repoRoot);
/**
 * Retry publishing an event with exponential backoff.
 *
 * @param event - Signed Nostr event to broadcast
 * @param relays - Relay URLs to publish to
 * @param eventName - Human-readable label used in log lines
 * @param maxAttempts - Maximum publish attempts (must be >= 1; delays double from 1s)
 * @param context - Optional log context; defaults to a short event-id tag
 * @returns Relay success/failure breakdown from the last attempt
 * @throws Error if maxAttempts is less than 1
 */
async function publishEventWithRetry(
  event: NostrEvent,
  relays: string[],
  eventName: string,
  maxAttempts: number = 3,
  context?: string
): Promise<{ success: string[]; failed: Array<{ relay: string; error: string }> }> {
  // Guard against a non-positive attempt count; the previous implementation
  // returned `lastResult!` which would have been null in that case.
  if (maxAttempts < 1) {
    throw new Error(`publishEventWithRetry: maxAttempts must be >= 1 (got ${maxAttempts})`);
  }
  let lastResult: { success: string[]; failed: Array<{ relay: string; error: string }> } = { success: [], failed: [] };
  const eventId = event.id.slice(0, 8);
  const logContext = context || `[event:${eventId}]`;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    logger.info({ logContext, eventName, attempt, maxAttempts }, `[Fork] Publishing ${eventName} - Attempt ${attempt}/${maxAttempts}...`);
    lastResult = await nostrClient.publishEvent(event, relays);
    if (lastResult.success.length > 0) {
      // At least one relay accepted the event — that counts as success.
      logger.info({ logContext, eventName, successCount: lastResult.success.length }, `[Fork] ${eventName} published successfully`);
      return lastResult;
    }
    if (attempt < maxAttempts) {
      const delayMs = Math.pow(2, attempt - 1) * 1000; // 1s, 2s, 4s, ...
      logger.warn({ logContext, eventName, attempt, delayMs }, `[Fork] ${eventName} failed on attempt ${attempt}. Retrying...`);
      await new Promise(resolve => setTimeout(resolve, delayMs));
    }
  }
  logger.error({ logContext, eventName, maxAttempts }, `[Fork] ${eventName} failed after ${maxAttempts} attempts`);
  return lastResult;
}
/**
 * GET: Get fork information
 * Returns whether this repo is a fork and original repo info
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext) => {
    try {
      // Pull the owner's repo announcements (cached) and locate this repo's.
      const announcements = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
      const announcement = findRepoAnnouncement(announcements, context.repo);
      if (!announcement) {
        return error(404, 'Repository announcement not found');
      }
      // A standardized ['fork', '30617:pubkey:d-tag'] tag marks this repo as a fork.
      const forkTag = announcement.tags.find(t => t[0] === 'fork');
      const isFork = forkTag !== undefined;
      // Resolve the original repo coordinates from the fork tag, if parseable.
      let originalRepo: { npub: string; repo: string } | null = null;
      const forkRef = forkTag?.[1];
      if (forkRef) {
        const pattern = new RegExp(`^${KIND.REPO_ANNOUNCEMENT}:([a-f0-9]{64}):(.+)$`);
        const match = forkRef.match(pattern);
        if (match) {
          const [, ownerHex, originalName] = match;
          try {
            originalRepo = { npub: nip19.npubEncode(ownerHex), repo: originalName };
          } catch {
            // Invalid pubkey — leave originalRepo as null
          }
        }
      }
      // Root repos report their fork count (best effort); forks themselves don't.
      let forkCount = 0;
      if (!isFork && context.repoOwnerPubkey && context.repo) {
        try {
          forkCount = await forkCountService.getForkCount(context.repoOwnerPubkey, context.repo);
        } catch (err) {
          logger.warn({ error: err, npub: context.npub, repo: context.repo }, 'Failed to get fork count');
        }
      }
      return json({
        isFork,
        originalRepo,
        forkCount
      });
    } catch (err) {
      return handleApiError(err, { operation: 'getForkInfo', npub: context.npub, repo: context.repo }, 'Failed to get fork information');
    }
  },
  { operation: 'getForkInfo', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * POST: Create fork
 * Body: { userPubkey, forkName?, localOnly? }
 *
 * Clones the original bare repository into the requesting user's namespace.
 * Unless `localOnly` is true, a fork announcement (kind 30617) and an initial
 * ownership proof are published to Nostr relays with retries; if publishing
 * ultimately fails, the cloned repo is removed and (for the ownership step)
 * a NIP-09 deletion request is issued for the now-invalid announcement.
 */
export const POST: RequestHandler = createRepoPostHandler(
	async (context: RepoRequestContext, event: RequestEvent) => {
		try {
			const body = await event.request.json();
			const { userPubkey, forkName, localOnly } = body;
			if (!userPubkey) {
				return error(401, 'Authentication required. Please provide userPubkey.');
			}
			const isLocalOnly = localOnly === true;
			const originalOwnerPubkey = context.repoOwnerPubkey;
			// Decode user pubkey — accepts npub or raw hex (falls back to the raw value).
			const userPubkeyHex = decodeNpubToHex(userPubkey) || userPubkey;
			const userNpub = nip19.npubEncode(userPubkeyHex);
			// Fork keeps the original repo name unless the caller overrides it.
			const forkRepoName = forkName || context.repo;
			// Repo creation (forking included) is restricted to unlimited-access users.
			const userLevel = getCachedUserLevel(userPubkeyHex);
			if (!hasUnlimitedAccess(userLevel?.level)) {
				auditLogger.logRepoFork(
					userPubkeyHex,
					`${context.npub}/${context.repo}`,
					`${userNpub}/${forkRepoName}`,
					'failure',
					'User does not have unlimited access'
				);
				return error(403, 'Repository creation requires unlimited access. Please verify you can write to at least one default Nostr relay.');
			}
			// Enforce per-user resource limits before touching the filesystem.
			const resourceCheck = await resourceLimits.canCreateRepo(userNpub);
			if (!resourceCheck.allowed) {
				auditLogger.logRepoFork(
					userPubkeyHex,
					`${context.npub}/${context.repo}`,
					`${userNpub}/${forkRepoName}`,
					'failure',
					resourceCheck.reason
				);
				return error(403, resourceCheck.reason || 'Resource limit exceeded');
			}
			// Check that the original repo exists on disk (path-traversal safe).
			const originalRepoPath = join(repoRoot, context.npub, `${context.repo}.git`);
			const originalPathValidation = validateRepoPath(originalRepoPath, repoRoot);
			if (!originalPathValidation.valid) {
				return error(403, originalPathValidation.error || 'Invalid repository path');
			}
			if (!existsSync(originalRepoPath)) {
				return error(404, 'Original repository not found');
			}
			// Get original repo announcement (needed for clone URLs, name, visibility, etc.).
			const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, originalOwnerPubkey, eventCache);
			const originalAnnouncement = findRepoAnnouncement(allAnnouncements, context.repo);
			if (!originalAnnouncement) {
				return error(404, 'Original repository announcement not found');
			}
			// Check if a fork with this name already exists in the user's namespace.
			const forkRepoPath = join(repoRoot, userNpub, `${forkRepoName}.git`);
			const forkPathValidation = validateRepoPath(forkRepoPath, repoRoot);
			if (!forkPathValidation.valid) {
				return error(403, forkPathValidation.error || 'Invalid fork repository path');
			}
			if (existsSync(forkRepoPath)) {
				return error(409, 'Fork already exists');
			}
			// Clone the repository as a bare repo.
			// NOTE(review): the 'success' audit entry is written BEFORE the clone runs;
			// a clone failure below would still leave a success record — confirm intended.
			auditLogger.logRepoFork(
				userPubkeyHex,
				`${context.npub}/${context.repo}`,
				`${userNpub}/${forkRepoName}`,
				'success'
			);
			const git = simpleGit();
			await git.clone(originalRepoPath, forkRepoPath, ['--bare']);
			// Invalidate resource limit cache so the new repo counts immediately.
			resourceLimits.invalidateCache(userNpub);
			// Build the fork's primary clone URL from this server's configured domain.
			const gitDomain = process.env.GIT_DOMAIN || 'localhost:6543';
			const isLocalhost = gitDomain.startsWith('localhost') || gitDomain.startsWith('127.0.0.1');
			const protocol = isLocalhost ? 'http' : 'https';
			const forkGitUrl = `${protocol}://${gitDomain}/${userNpub}/${forkRepoName}.git`;
			// Get Tor .onion URL if a hidden service is configured.
			const { getTorGitUrl } = await import('$lib/services/tor/hidden-service.js');
			const torOnionUrl = await getTorGitUrl(userNpub, forkRepoName);
			// Carry over the original's clone URLs, excluding this server's own domain
			// and any .onion mirrors (the fork gets its own).
			const originalCloneUrls = originalAnnouncement.tags
				.filter(t => t[0] === 'clone')
				.flatMap(t => t.slice(1))
				.filter(url => url && typeof url === 'string')
				.filter(url => {
					if (url.includes(gitDomain)) return false;
					if (url.includes('.onion')) return false;
					return true;
				}) as string[];
			// Earliest-unique-commit ('euc') marker links forks to the same root history.
			const earliestCommitTag = originalAnnouncement.tags.find(t => t[0] === 'r' && t[2] === 'euc');
			const earliestCommit = earliestCommitTag?.[1];
			// Get original repo name and description for the fork's metadata.
			const originalName = originalAnnouncement.tags.find(t => t[0] === 'name')?.[1] || context.repo;
			const originalDescription = originalAnnouncement.tags.find(t => t[0] === 'description')?.[1] || '';
			// Build clone URLs for the fork: own URL (if public), Tor, then inherited URLs.
			const forkCloneUrls: string[] = [];
			if (!isLocalhost && !forkGitUrl.includes('localhost') && !forkGitUrl.includes('127.0.0.1')) {
				forkCloneUrls.push(forkGitUrl);
			}
			if (torOnionUrl) {
				forkCloneUrls.push(torOnionUrl);
			}
			forkCloneUrls.push(...originalCloneUrls);
			// Validate: a localhost-only deployment needs either a Tor URL or an inherited public URL.
			if (isLocalhost && !torOnionUrl && originalCloneUrls.length === 0) {
				return error(400, 'Cannot create fork with only localhost. The original repository must have at least one public clone URL, or you need to configure a Tor .onion address.');
			}
			// Preserve visibility and project-relay settings from the original repo.
			const originalVisibility = getVisibility(originalAnnouncement);
			const originalProjectRelays = getProjectRelays(originalAnnouncement);
			// Build fork announcement tags.
			const originalRepoTag = `${KIND.REPO_ANNOUNCEMENT}:${originalOwnerPubkey}:${context.repo}`;
			const tags: string[][] = [
				['d', forkRepoName],
				['name', `${originalName} (fork)`],
				['description', `Fork of ${originalName}${originalDescription ? `: ${originalDescription}` : ''}`],
				['clone', ...forkCloneUrls],
				['relays', ...DEFAULT_NOSTR_RELAYS],
				['fork', originalRepoTag],
				['p', originalOwnerPubkey],
			];
			// Local-only forks are always private.
			if (isLocalOnly) {
				tags.push(['visibility', 'private']);
				tags.push(['local-only', 'true']);
			} else {
				// Only emit an explicit visibility tag for non-public repos.
				if (originalVisibility !== 'public') {
					tags.push(['visibility', originalVisibility]);
				}
			}
			// Preserve project-relay tags.
			for (const relay of originalProjectRelays) {
				tags.push(['project-relay', relay]);
			}
			// Add earliest unique commit if available.
			if (earliestCommit) {
				tags.push(['r', earliestCommit, 'euc']);
			}
			// Create and sign the fork announcement event.
			const forkAnnouncementTemplate = {
				kind: KIND.REPO_ANNOUNCEMENT,
				pubkey: userPubkeyHex,
				created_at: Math.floor(Date.now() / 1000),
				content: '',
				tags
			};
			const signedForkAnnouncement = await signEventWithNIP07(forkAnnouncementTemplate);
			// Build a compact "[source -> destination]" log context with truncated npubs.
			const truncatedNpub = userNpub.length > 16 ? `${userNpub.slice(0, 12)}...` : userNpub;
			const truncatedOriginalNpub = context.npub.length > 16 ? `${context.npub.slice(0, 12)}...` : context.npub;
			// FIX: restore the separator between source and destination (was missing).
			const logContext = `[${truncatedOriginalNpub}/${context.repo} -> ${truncatedNpub}/${forkRepoName}]`;
			let publishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null;
			let ownershipPublishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null;
			let signedOwnershipEvent: NostrEvent | null = null;
			if (isLocalOnly) {
				// Local-only fork: skip publishing to Nostr relays entirely.
				logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: true }, 'Creating local-only fork (not publishing to Nostr)');
				publishResult = { success: [], failed: [] };
				ownershipPublishResult = { success: [], failed: [] };
				// Create a synthetic (unpublished) ownership event so provisioning still works.
				const ownershipService = new OwnershipTransferService([]);
				const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName);
				signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent);
			} else {
				// Public fork: publish announcement + ownership proof to the user's relays.
				const { outbox } = await getUserRelays(userPubkeyHex, nostrClient);
				const combinedRelays = combineRelays(outbox);
				logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, relayCount: combinedRelays.length }, 'Starting fork process');
				publishResult = await publishEventWithRetry(
					signedForkAnnouncement,
					combinedRelays,
					'fork announcement',
					3,
					logContext
				);
				if (publishResult.success.length === 0) {
					// Announcement never reached any relay: roll back the clone.
					logger.error({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: publishResult.failed }, 'Fork announcement failed after all retries. Cleaning up repository.');
					await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {});
					const errorDetails = `All relays failed: ${publishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}`;
					return json({
						success: false,
						error: 'Failed to publish fork announcement to relays after 3 attempts',
						details: errorDetails,
						eventName: 'fork announcement'
					}, { status: 500 });
				}
				// Create and publish the initial ownership proof.
				const ownershipService = new OwnershipTransferService(combinedRelays);
				const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName);
				signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent);
				ownershipPublishResult = await publishEventWithRetry(
					signedOwnershipEvent,
					combinedRelays,
					'ownership transfer event',
					3,
					logContext
				);
				if (ownershipPublishResult.success.length === 0) {
					// The announcement is live but unprovable: roll back the clone and
					// retract the announcement with a NIP-09 deletion request.
					logger.error({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: ownershipPublishResult.failed }, 'Ownership transfer event failed after all retries. Cleaning up repository.');
					await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {});
					// Publish deletion request (NIP-09).
					logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Publishing deletion request for invalid fork announcement...');
					const deletionRequest = {
						kind: KIND.DELETION_REQUEST,
						pubkey: userPubkeyHex,
						created_at: Math.floor(Date.now() / 1000),
						content: 'Fork failed: ownership transfer event could not be published after 3 attempts. This announcement is invalid.',
						tags: [
							['a', `${KIND.REPO_ANNOUNCEMENT}:${userPubkeyHex}:${forkRepoName}`],
							['k', KIND.REPO_ANNOUNCEMENT.toString()]
						]
					};
					const signedDeletionRequest = await signEventWithNIP07(deletionRequest);
					const deletionResult = await publishEventWithRetry(
						signedDeletionRequest,
						combinedRelays,
						'deletion request',
						3,
						logContext
					);
					const errorDetails = `Fork is invalid without ownership proof. All relays failed: ${ownershipPublishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}. Deletion request ${deletionResult.success.length > 0 ? 'published' : 'failed to publish'}.`;
					return json({
						success: false,
						error: 'Failed to publish ownership transfer event to relays after 3 attempts',
						details: errorDetails,
						eventName: 'ownership transfer event'
					}, { status: 500 });
				}
			}
			// Provision the fork repo (hooks, metadata) from the signed events.
			logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: isLocalOnly }, 'Provisioning fork repository...');
			await repoManager.provisionRepo(signedForkAnnouncement, signedOwnershipEvent || undefined, false);
			logger.info({
				operation: 'fork',
				originalRepo: `${context.npub}/${context.repo}`,
				forkRepo: `${userNpub}/${forkRepoName}`,
				localOnly: isLocalOnly,
				announcementId: signedForkAnnouncement.id,
				ownershipTransferId: signedOwnershipEvent?.id,
				announcementRelays: publishResult?.success.length || 0,
				ownershipRelays: ownershipPublishResult?.success.length || 0
			}, 'Fork completed successfully');
			const message = isLocalOnly
				? 'Local-only fork created successfully! This fork is private and only exists on this server.'
				: `Repository forked successfully! Published to ${publishResult?.success.length || 0} relay(s) for announcement and ${ownershipPublishResult?.success.length || 0} relay(s) for ownership proof.`;
			return json({
				success: true,
				fork: {
					npub: userNpub,
					repo: forkRepoName,
					url: forkGitUrl,
					localOnly: isLocalOnly,
					announcementId: signedForkAnnouncement.id,
					ownershipTransferId: signedOwnershipEvent?.id,
					publishedTo: isLocalOnly ? null : {
						announcement: publishResult?.success.length || 0,
						ownershipTransfer: ownershipPublishResult?.success.length || 0
					}
				},
				message
			});
		} catch (err) {
			return handleApiError(err, { operation: 'createFork', npub: context.npub, repo: context.repo }, 'Failed to fork repository');
		}
	},
	{ operation: 'createFork', requireRepoExists: false }
);

371
src/routes/api/repos/[npub]/[repo]/maintainers/+server.ts

@ -1,31 +1,380 @@ @@ -1,31 +1,380 @@
/**
 * RESTful Maintainers Resource Endpoint
 *
 * GET    /api/repos/{npub}/{repo}/maintainers         # List maintainers
 * POST   /api/repos/{npub}/{repo}/maintainers         # Add maintainer
 * DELETE /api/repos/{npub}/{repo}/maintainers/{npub}  # Remove maintainer
 */
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { maintainerService } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext } from '$lib/utils/api-context.js';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import { nip19 } from 'nostr-tools';
import { getPublicKeyWithNIP07, signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { nostrClient } from '$lib/services/service-registry.js';
import { KIND } from '$lib/types/nostr.js';
import { DEFAULT_NOSTR_RELAYS, combineRelays } from '$lib/config.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js';
import logger from '$lib/services/logger.js';
import { getRelaysForEventPublishing } from '$lib/utils/repo-visibility.js';
import { AnnouncementManager } from '$lib/services/git/announcement-manager.js';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
/**
 * GET: List maintainers
 *
 * Returns the repo's maintainers and owner as npubs. When the caller is
 * authenticated, also returns whether that caller is a maintainer — but
 * never echoes the caller's pubkey back.
 */
export const GET: RequestHandler = createRepoGetHandler(
	async (context: RepoRequestContext) => {
		const { maintainers, owner } = await maintainerService.getMaintainers(context.repoOwnerPubkey, context.repo);
		// Convert hex pubkeys to npubs for the response.
		const maintainerNpubs = maintainers.map(p => nip19.npubEncode(p));
		const ownerNpub = nip19.npubEncode(owner);
		// If userPubkey provided, check if they're a maintainer.
		// SECURITY: Do NOT leak userPubkey in response - only return boolean status.
		if (context.userPubkeyHex) {
			const isMaintainer = maintainers.includes(context.userPubkeyHex);
			return json({
				maintainers: maintainerNpubs,
				owner: ownerNpub,
				isMaintainer
			});
		}
		return json({
			maintainers: maintainerNpubs,
			owner: ownerNpub
		});
	},
	{ operation: 'getMaintainers', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * POST: Add maintainer
 * Body: { maintainer: "npub..." }
 *
 * Adds a pubkey to the repo's maintainer list by republishing the repo
 * announcement with a rebuilt 'maintainers' tag, then mirrors the signed
 * event into the repository on disk. Only existing maintainers may call this.
 */
export const POST: RequestHandler = createRepoPostHandler(
	async (context: RepoRequestContext, event: RequestEvent) => {
		if (!context.userPubkeyHex) {
			return error(401, 'Authentication required');
		}
		// Verify user is owner or maintainer
		const isMaintainer = await maintainerService.isMaintainer(context.userPubkeyHex, context.repoOwnerPubkey, context.repo);
		if (!isMaintainer) {
			return error(403, 'Only maintainers can add maintainers');
		}
		const body = await event.request.json();
		const { maintainer } = body;
		if (!maintainer) {
			throw handleValidationError('Missing maintainer in request body', {
				operation: 'addMaintainer',
				npub: context.npub,
				repo: context.repo
			});
		}
		// Decode maintainer npub to hex
		let maintainerHex: string;
		// Try as hex first (most common case)
		if (/^[0-9a-f]{64}$/i.test(maintainer)) {
			maintainerHex = maintainer.toLowerCase();
		} else {
			// Try decoding as npub
			try {
				const decoded = nip19.decode(maintainer) as { type: string; data: unknown };
				if (decoded.type !== 'npub' || typeof decoded.data !== 'string') {
					throw handleValidationError('Invalid maintainer format. Must be npub or hex pubkey', {
						operation: 'addMaintainer',
						npub: context.npub,
						repo: context.repo
					});
				}
				maintainerHex = decoded.data;
			} catch (err) {
				// Re-throw our own validation error; wrap nip19 decode failures in one.
				if (err instanceof Error && err.message.includes('Invalid maintainer format')) {
					throw err;
				}
				throw handleValidationError('Invalid maintainer format. Must be npub or hex pubkey', {
					operation: 'addMaintainer',
					npub: context.npub,
					repo: context.repo
				});
			}
		}
		// Get current announcement (needed as the base for the republished event)
		const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
		const announcement = findRepoAnnouncement(allEvents, context.repo);
		if (!announcement) {
			throw handleValidationError('Repository announcement not found', {
				operation: 'addMaintainer',
				npub: context.npub,
				repo: context.repo
			});
		}
		// Get current maintainers
		const { maintainers: currentMaintainers } = await maintainerService.getMaintainers(
			context.repoOwnerPubkey,
			context.repo
		);
		// Check if already a maintainer — idempotent success, no event published.
		if (currentMaintainers.includes(maintainerHex)) {
			return json({
				success: true,
				message: 'Maintainer already exists',
				maintainer: maintainer
			});
		}
		// Build updated tags: copy, strip every existing 'maintainers' tag,
		// then append a single rebuilt one.
		const tags: string[][] = [...announcement.tags];
		// Remove existing maintainers tags (collect indices, then splice in reverse
		// so earlier removals don't shift later indices).
		const maintainerTagIndices: number[] = [];
		tags.forEach((tag, index) => {
			if (tag[0] === 'maintainers') {
				maintainerTagIndices.push(index);
			}
		});
		for (let i = maintainerTagIndices.length - 1; i >= 0; i--) {
			tags.splice(maintainerTagIndices[i], 1);
		}
		// Add all maintainers (including new one)
		const allMaintainers = [...currentMaintainers, maintainerHex];
		if (allMaintainers.length > 0) {
			tags.push(['maintainers', ...allMaintainers]);
		}
		// Create updated event
		// NOTE(review): the republished announcement is signed under the acting
		// maintainer's pubkey, not the repo owner's — confirm relays/readers treat
		// this as an update rather than a separate announcement.
		const updatedEvent = {
			kind: KIND.REPO_ANNOUNCEMENT,
			pubkey: context.userPubkeyHex,
			created_at: Math.floor(Date.now() / 1000),
			content: announcement.content || '',
			tags
		};
		// Sign and publish
		const signedEvent = await signEventWithNIP07(updatedEvent);
		// Get user's relays: prefer outbox, then inbox, then defaults.
		const allSearchRelays = [...new Set([...DEFAULT_NOSTR_SEARCH_RELAYS, ...DEFAULT_NOSTR_RELAYS])];
		const fullRelayClient = new NostrClient(allSearchRelays);
		let userRelays: string[] = [];
		try {
			const { inbox, outbox } = await getUserRelays(context.userPubkeyHex, fullRelayClient);
			if (outbox.length > 0) {
				userRelays = combineRelays(outbox, DEFAULT_NOSTR_RELAYS);
			} else if (inbox.length > 0) {
				userRelays = combineRelays(inbox, DEFAULT_NOSTR_RELAYS);
			} else {
				userRelays = DEFAULT_NOSTR_RELAYS;
			}
		} catch (err) {
			logger.warn({ error: err }, 'Failed to fetch user relays, using defaults');
			userRelays = DEFAULT_NOSTR_RELAYS;
		}
		// Visibility rules decide whether the event may be published at all;
		// an empty visibility list suppresses publishing entirely.
		const visibilityRelays = getRelaysForEventPublishing(signedEvent);
		const relaysToPublish = visibilityRelays.length > 0 ? combineRelays([...visibilityRelays, ...userRelays]) : [];
		if (relaysToPublish.length > 0) {
			await nostrClient.publishEvent(signedEvent, relaysToPublish);
		}
		// Save to repository (best-effort; failure is logged, not surfaced).
		const repoPath = `${repoRoot}/${context.npub}/${context.repo}.git`;
		const announcementManager = new AnnouncementManager(repoRoot);
		try {
			await announcementManager.ensureAnnouncementInRepo(repoPath, signedEvent);
		} catch (err) {
			logger.error({ error: err }, 'Failed to save maintainer update to repository');
		}
		return json({
			success: true,
			maintainer: maintainer,
			message: 'Maintainer added successfully'
		});
	},
	{ operation: 'addMaintainer', requireRepoExists: false }
);
/**
* DELETE: Remove maintainer
* Path: /api/repos/{npub}/{repo}/maintainers/{maintainerNpub}
*/
export const DELETE: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
if (!context.userPubkeyHex) {
return error(401, 'Authentication required');
}
// Get maintainer npub from path
const url = new URL(event.request.url);
const pathParts = url.pathname.split('/');
const maintainerNpub = pathParts[pathParts.length - 1]; // Last part of path
if (!maintainerNpub || maintainerNpub === 'maintainers') {
throw handleValidationError('Missing maintainer npub in path', {
operation: 'removeMaintainer',
npub: context.npub,
repo: context.repo
});
}
// Verify user is owner or maintainer
const isMaintainer = await maintainerService.isMaintainer(context.userPubkeyHex, context.repoOwnerPubkey, context.repo);
if (!isMaintainer) {
return error(403, 'Only maintainers can remove maintainers');
}
// Decode maintainer npub to hex
let maintainerHex: string;
try {
const decoded = nip19.decode(maintainerNpub) as { type: string; data: unknown };
if (decoded.type !== 'npub' || typeof decoded.data !== 'string') {
throw handleValidationError('Invalid maintainer npub format', {
operation: 'removeMaintainer',
npub: context.npub,
repo: context.repo
});
}
maintainerHex = decoded.data;
} catch (err) {
if (err instanceof Error && err.message.includes('Invalid maintainer')) {
throw err;
}
throw handleValidationError('Invalid maintainer npub format', {
operation: 'removeMaintainer',
npub: context.npub,
repo: context.repo
});
}
// Get current maintainers
const { maintainers: currentMaintainers, owner } = await maintainerService.getMaintainers(
context.repoOwnerPubkey,
context.repo
);
// Cannot remove owner
if (maintainerHex === owner) {
return error(403, 'Cannot remove repository owner from maintainers');
}
// Check if maintainer exists
if (!currentMaintainers.includes(maintainerHex)) {
return json({
success: true,
message: 'Maintainer not found (may have already been removed)',
maintainer: maintainerNpub
});
}
// Get current announcement
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, context.repo);
if (!announcement) {
throw handleValidationError('Repository announcement not found', {
operation: 'removeMaintainer',
npub: context.npub,
repo: context.repo
});
}
// Build updated tags
const tags: string[][] = [...announcement.tags];
// Remove existing maintainers tags
const maintainerTagIndices: number[] = [];
tags.forEach((tag, index) => {
if (tag[0] === 'maintainers') {
maintainerTagIndices.push(index);
}
});
for (let i = maintainerTagIndices.length - 1; i >= 0; i--) {
tags.splice(maintainerTagIndices[i], 1);
}
// Add all maintainers except the one being removed
const remainingMaintainers = currentMaintainers.filter(m => m !== maintainerHex);
if (remainingMaintainers.length > 0) {
tags.push(['maintainers', ...remainingMaintainers]);
}
// Create updated event
const updatedEvent = {
kind: KIND.REPO_ANNOUNCEMENT,
pubkey: context.userPubkeyHex,
created_at: Math.floor(Date.now() / 1000),
content: announcement.content || '',
tags
};
// Sign and publish
const signedEvent = await signEventWithNIP07(updatedEvent);
// Get user's relays
const allSearchRelays = [...new Set([...DEFAULT_NOSTR_SEARCH_RELAYS, ...DEFAULT_NOSTR_RELAYS])];
const fullRelayClient = new NostrClient(allSearchRelays);
let userRelays: string[] = [];
try {
const { inbox, outbox } = await getUserRelays(context.userPubkeyHex, fullRelayClient);
if (outbox.length > 0) {
userRelays = combineRelays(outbox, DEFAULT_NOSTR_RELAYS);
} else if (inbox.length > 0) {
userRelays = combineRelays(inbox, DEFAULT_NOSTR_RELAYS);
} else {
userRelays = DEFAULT_NOSTR_RELAYS;
}
} catch (err) {
logger.warn({ error: err }, 'Failed to fetch user relays, using defaults');
userRelays = DEFAULT_NOSTR_RELAYS;
}
const visibilityRelays = getRelaysForEventPublishing(signedEvent);
const relaysToPublish = visibilityRelays.length > 0 ? combineRelays([...visibilityRelays, ...userRelays]) : [];
if (relaysToPublish.length > 0) {
await nostrClient.publishEvent(signedEvent, relaysToPublish);
}
// Save to repository
const repoPath = `${repoRoot}/${context.npub}/${context.repo}.git`;
const announcementManager = new AnnouncementManager(repoRoot);
try {
await announcementManager.ensureAnnouncementInRepo(repoPath, signedEvent);
} catch (err) {
logger.error({ error: err }, 'Failed to save maintainer update to repository');
}
return json({
success: true,
maintainer: maintainerNpub,
message: 'Maintainer removed successfully'
});
},
{ operation: 'getMaintainers', requireRepoExists: false, requireRepoAccess: false } // Maintainer list is public info, doesn't need repo to exist
{ operation: 'removeMaintainer', requireRepoExists: false }
);

23
src/routes/api/repos/[npub]/[repo]/patches/[patchId]/apply/+server.ts → src/routes/api/repos/[npub]/[repo]/patches/[id]/application/+server.ts

@ -1,9 +1,13 @@ @@ -1,9 +1,13 @@
/**
* API endpoint for applying patches
* Only maintainers and owners can apply patches
* RESTful Patch Application Endpoint
*
* POST /api/repos/{npub}/{repo}/patches/{id}/application
*
* Applies a patch to the repository. Only maintainers and owners can apply patches.
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, nostrClient } from '$lib/services/service-registry.js';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
@ -19,7 +23,6 @@ import { writeFile, unlink } from 'fs/promises'; @@ -19,7 +23,6 @@ import { writeFile, unlink } from 'fs/promises';
import { tmpdir } from 'os';
import { join as pathJoin } from 'path';
import { spawn } from 'child_process';
import { promisify } from 'util';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
@ -27,12 +30,12 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT @@ -27,12 +30,12 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
export const POST: RequestHandler = withRepoValidation(
async ({ repoContext, requestContext, event }) => {
const { patchId } = event.params;
const id = (event.params as any).id;
const body = await event.request.json();
const { branch = 'main', commitMessage } = body;
if (!patchId) {
throw handleValidationError('Missing patchId', { operation: 'applyPatch', npub: repoContext.npub, repo: repoContext.repo });
if (!id) {
throw handleValidationError('Missing patch ID', { operation: 'applyPatch', npub: repoContext.npub, repo: repoContext.repo });
}
// Check if user is maintainer or owner
@ -54,7 +57,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -54,7 +57,7 @@ export const POST: RequestHandler = withRepoValidation(
const patchEvents = await nostrClient.fetchEvents([
{
kinds: [KIND.PATCH],
ids: [patchId],
ids: [id],
limit: 1
}
]);
@ -71,7 +74,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -71,7 +74,7 @@ export const POST: RequestHandler = withRepoValidation(
}
// Create temporary patch file
const tmpPatchFile = pathJoin(tmpdir(), `patch-${patchId}-${Date.now()}.patch`);
const tmpPatchFile = pathJoin(tmpdir(), `patch-${id}-${Date.now()}.patch`);
await writeFile(tmpPatchFile, patchContent, 'utf-8');
try {
@ -132,7 +135,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -132,7 +135,7 @@ export const POST: RequestHandler = withRepoValidation(
await git.add('.');
// Commit the changes
const finalCommitMessage = commitMessage || `Apply patch ${patchId.substring(0, 8)}`;
const finalCommitMessage = commitMessage || `Apply patch ${id.substring(0, 8)}`;
await git.commit(finalCommitMessage);
// Get the commit hash
@ -152,7 +155,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -152,7 +155,7 @@ export const POST: RequestHandler = withRepoValidation(
}
}
} catch (err) {
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, patchId }, 'Error applying patch');
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, id }, 'Error applying patch');
throw err;
}
},

111
src/routes/api/repos/[npub]/[repo]/prs/merge/+server.ts

@ -1,111 +0,0 @@ @@ -1,111 +0,0 @@
/**
* API endpoint for merging Pull Requests
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext } from '$lib/utils/api-context.js';
import { handleValidationError, handleApiError } from '$lib/utils/error-handler.js';
import { prsService, repoManager, fileManager, maintainerService } from '$lib/services/service-registry.js';
import { simpleGit } from 'simple-git';
import { join } from 'path';
import { existsSync } from 'fs';
import logger from '$lib/services/logger.js';
import { isValidBranchName } from '$lib/utils/security.js';
import { validatePubkey } from '$lib/utils/input-validation.js';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
/**
 * POST: Merge a Pull Request into a target branch.
 *
 * Body: { prId, prAuthor, prCommitId, targetBranch?, mergeMessage? }
 * Only repository owners and maintainers may merge. Performs a --no-ff git
 * merge of the PR commit, then publishes a 'merged' PR status event.
 */
export const POST: RequestHandler = withRepoValidation(
	async ({ repoContext, requestContext, event }) => {
		const body = await event.request.json();
		const { prId, prAuthor, prCommitId, targetBranch = 'main', mergeMessage } = body;
		// Validate required fields: prId must be a 64-char hex Nostr event id.
		if (!prId || typeof prId !== 'string' || prId.length !== 64) {
			throw handleValidationError('Invalid prId: must be a 64-character hex string', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
		}
		if (!prAuthor || typeof prAuthor !== 'string') {
			throw handleValidationError('Invalid prAuthor: must be a string', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
		}
		// Validate pubkey format
		const pubkeyValidation = validatePubkey(prAuthor);
		if (!pubkeyValidation.valid) {
			throw handleValidationError(`Invalid prAuthor: ${pubkeyValidation.error}`, { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
		}
		// prCommitId must be a full 40-char git SHA-1 hash.
		if (!prCommitId || typeof prCommitId !== 'string' || prCommitId.length !== 40) {
			throw handleValidationError('Invalid prCommitId: must be a 40-character commit hash', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
		}
		// Validate branch name (rejects injection-prone refs)
		if (!isValidBranchName(targetBranch)) {
			throw handleValidationError(`Invalid branch name: ${targetBranch}`, { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
		}
		// Validate merge message if provided
		if (mergeMessage && (typeof mergeMessage !== 'string' || mergeMessage.length > 10000)) {
			throw handleValidationError('Invalid mergeMessage: must be a string with max 10000 characters', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
		}
		// Check if user is maintainer (owner is allowed even if not listed as maintainer)
		const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo);
		if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) {
			throw handleApiError(new Error('Only repository owners and maintainers can merge PRs'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
		}
		// Check if repo exists locally
		const repoPath = join(repoRoot, repoContext.npub, `${repoContext.repo}.git`);
		if (!existsSync(repoPath)) {
			throw handleApiError(new Error('Repository not cloned locally. Please clone the repository first.'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Repository not found');
		}
		// Get user info for commit; authorName/authorEmail are computed but not
		// passed to git below — the merge commit uses the repo's git config.
		const authorName = requestContext.userName || 'GitRepublic User';
		const authorEmail = requestContext.userEmail || `${requestContext.userPubkeyHex?.slice(0, 20)}@gitrepublic.web`;
		try {
			const git = simpleGit(repoPath);
			// Fetch latest changes
			await git.fetch(['origin']).catch(() => {}); // Ignore errors if no remote
			// Checkout target branch
			// NOTE(review): repoPath points at a '<repo>.git' directory; 'checkout'
			// fails on a truly bare repository — confirm these repos have worktrees.
			await git.checkout(targetBranch);
			// Merge the PR commit with --no-ff so a merge commit is always created.
			const mergeMessageText = mergeMessage || `Merge pull request ${prId.slice(0, 7)}`;
			await git.merge([prCommitId, '--no-ff', '-m', mergeMessageText]);
			// Get the merge commit ID
			const mergeCommitId = (await git.revparse(['HEAD'])).trim();
			// Update PR status to merged (publishes a status event via prsService)
			const statusEvent = await prsService.updatePRStatus(
				prId,
				prAuthor,
				repoContext.repoOwnerPubkey,
				repoContext.repo,
				'merged',
				mergeCommitId
			);
			return json({
				success: true,
				mergeCommitId,
				statusEvent
			});
		} catch (err) {
			logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, prId, prCommitId }, 'Error merging PR');
			throw handleApiError(err instanceof Error ? err : new Error('Failed to merge PR'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Failed to merge pull request');
		}
	},
	{ operation: 'mergePR', requireRepoAccess: true }
);

43
src/routes/api/repos/[npub]/[repo]/prs/update/+server.ts

@ -1,43 +0,0 @@ @@ -1,43 +0,0 @@
/**
* API endpoint for updating Pull Requests (kind 1619)
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext } from '$lib/utils/api-context.js';
import { handleValidationError, handleApiError } from '$lib/utils/error-handler.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { prsService } from '$lib/services/service-registry.js';
import { getGitUrl } from '$lib/config.js';
/**
 * POST: Update an existing Pull Request (kind 1619).
 *
 * Body: { prId, prAuthor, newCommitId, mergeBase? }
 * Only the PR author may update their own PR.
 */
export const POST: RequestHandler = withRepoValidation(
  async ({ repoContext, requestContext, event }) => {
    const body = await event.request.json();
    const { prId, prAuthor, newCommitId, mergeBase } = body;
    // Validate field formats, consistent with the merge endpoint
    // (64-char hex event id, 40-char commit hash).
    if (!prId || typeof prId !== 'string' || prId.length !== 64) {
      throw handleValidationError('Invalid prId: must be a 64-character hex string', { operation: 'updatePR', npub: repoContext.npub, repo: repoContext.repo });
    }
    if (!prAuthor || typeof prAuthor !== 'string') {
      throw handleValidationError('Invalid prAuthor: must be a string', { operation: 'updatePR', npub: repoContext.npub, repo: repoContext.repo });
    }
    if (!newCommitId || typeof newCommitId !== 'string' || newCommitId.length !== 40) {
      throw handleValidationError('Invalid newCommitId: must be a 40-character commit hash', { operation: 'updatePR', npub: repoContext.npub, repo: repoContext.repo });
    }
    // Only the PR author can update their PR.
    if (requestContext.userPubkeyHex !== prAuthor) {
      throw handleApiError(new Error('Only the PR author can update the PR'), { operation: 'updatePR', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
    }
    const cloneUrl = getGitUrl(repoContext.npub, repoContext.repo);
    const updateEvent = await prsService.updatePullRequest(
      prId,
      prAuthor,
      repoContext.repoOwnerPubkey,
      repoContext.repo,
      newCommitId,
      cloneUrl,
      mergeBase
    );
    return json({ success: true, event: updateEvent });
  },
  { operation: 'updatePR', requireRepoAccess: false }
);

56
src/routes/api/repos/[npub]/[repo]/prs/+server.ts → src/routes/api/repos/[npub]/[repo]/pull-requests/+server.ts

@ -1,5 +1,8 @@ @@ -1,5 +1,8 @@
/**
* API endpoint for Pull Requests (NIP-34 kind 1618)
* RESTful Pull Requests Collection Endpoint
*
* GET /api/repos/{npub}/{repo}/pull-requests # List pull requests
* POST /api/repos/{npub}/{repo}/pull-requests # Create pull request
*/
import { json } from '@sveltejs/kit';
@ -21,7 +24,7 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -21,7 +24,7 @@ export const GET: RequestHandler = createRepoGetHandler(
const prs = await prsService.getPullRequests(context.repoOwnerPubkey, context.repo);
return json(prs);
},
{ operation: 'getPRs', requireRepoExists: false, requireRepoAccess: false } // PRs are stored in Nostr, don't require local repo
{ operation: 'getPullRequests', requireRepoExists: false, requireRepoAccess: false } // PRs are stored in Nostr, don't require local repo
);
export const POST: RequestHandler = withRepoValidation(
@ -30,12 +33,12 @@ export const POST: RequestHandler = withRepoValidation( @@ -30,12 +33,12 @@ export const POST: RequestHandler = withRepoValidation(
const { event: prEvent } = body;
if (!prEvent) {
throw handleValidationError('Missing event in request body', { operation: 'createPR', npub: repoContext.npub, repo: repoContext.repo });
throw handleValidationError('Missing event in request body', { operation: 'createPullRequest', npub: repoContext.npub, repo: repoContext.repo });
}
// Verify the event is properly signed
if (!prEvent.sig || !prEvent.id) {
throw handleValidationError('Invalid event: missing signature or ID', { operation: 'createPR', npub: repoContext.npub, repo: repoContext.repo });
throw handleValidationError('Invalid event: missing signature or ID', { operation: 'createPullRequest', npub: repoContext.npub, repo: repoContext.repo });
}
// Get repository announcement to determine visibility and relay publishing
@ -51,7 +54,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -51,7 +54,7 @@ export const POST: RequestHandler = withRepoValidation(
: { success: [], failed: [] };
if (result.failed.length > 0 && result.success.length === 0) {
throw handleApiError(new Error('Failed to publish pull request to all relays'), { operation: 'createPR', npub: repoContext.npub, repo: repoContext.repo }, 'Failed to publish pull request to all relays');
throw handleApiError(new Error('Failed to publish pull request to all relays'), { operation: 'createPullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Failed to publish pull request to all relays');
}
// Forward to messaging platforms if user has unlimited access and preferences configured
@ -65,46 +68,5 @@ export const POST: RequestHandler = withRepoValidation( @@ -65,46 +68,5 @@ export const POST: RequestHandler = withRepoValidation(
return json({ success: true, event: prEvent, published: result });
},
{ operation: 'createPR', requireRepoAccess: false } // PRs can be created by anyone with access
);
export const PATCH: RequestHandler = withRepoValidation(
async ({ repoContext, requestContext, event }) => {
const body = await event.request.json();
const { prId, prAuthor, status, mergeCommitId } = body;
if (!prId || !prAuthor || !status) {
throw handleValidationError('Missing required fields: prId, prAuthor, status', { operation: 'updatePRStatus', npub: repoContext.npub, repo: repoContext.repo });
}
// Check if user is maintainer
const { MaintainerService } = await import('$lib/services/nostr/maintainer-service.js');
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo);
if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) {
throw handleApiError(new Error('Only repository owners and maintainers can update PR status'), { operation: 'updatePRStatus', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
}
// Get repository announcement to determine visibility and relay publishing
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoContext.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, repoContext.repo);
// Determine which relays to publish to based on visibility
const relaysToPublish = announcement ? getRelaysForEventPublishing(announcement) : DEFAULT_NOSTR_RELAYS;
// Update PR status with visibility-based relays
const statusEvent = await prsService.updatePRStatus(
prId,
prAuthor,
repoContext.repoOwnerPubkey,
repoContext.repo,
status,
mergeCommitId,
relaysToPublish
);
return json({ success: true, event: statusEvent });
},
{ operation: 'updatePRStatus', requireRepoAccess: false }
{ operation: 'createPullRequest', requireRepoAccess: false } // PRs can be created by anyone with access
);

136
src/routes/api/repos/[npub]/[repo]/pull-requests/[id]/+server.ts

@ -0,0 +1,136 @@ @@ -0,0 +1,136 @@
/**
* RESTful Pull Request Individual Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/pull-requests/{id} # Get pull request
* PATCH /api/repos/{npub}/{repo}/pull-requests/{id} # Update pull request status
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { prsService, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler, withRepoValidation } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError, handleApiError } from '$lib/utils/error-handler.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { getRelaysForEventPublishing } from '$lib/utils/repo-visibility.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { KIND } from '$lib/types/nostr.js';
/**
 * GET: Fetch a single pull request event (NIP-34 kind 1618) by event id.
 *
 * Returns the raw Nostr event, or a not-found error when no relay has it.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const id = (event.params as any).id;
    if (!id) {
      throw handleValidationError('Missing pull request ID', { operation: 'getPullRequest', npub: context.npub, repo: context.repo });
    }
    // Only wrap the relay fetch itself; the not-found case below must NOT be
    // caught here, otherwise our own 'Pull request not found' error would be
    // re-wrapped as a generic 'Failed to get pull request' failure.
    let prEvents;
    try {
      prEvents = await nostrClient.fetchEvents([
        {
          kinds: [KIND.PULL_REQUEST],
          ids: [id],
          limit: 1
        }
      ]);
    } catch (err) {
      throw handleApiError(err, { operation: 'getPullRequest', npub: context.npub, repo: context.repo }, 'Failed to get pull request');
    }
    if (prEvents.length === 0) {
      throw handleApiError(new Error('Pull request not found'), { operation: 'getPullRequest', npub: context.npub, repo: context.repo }, 'Pull request not found');
    }
    return json(prEvents[0]);
  },
  { operation: 'getPullRequest', requireRepoExists: false, requireRepoAccess: false }
);
/**
 * PATCH: Update a pull request.
 *
 * Two mutually exclusive update modes, selected by the body fields:
 *  - { status, mergeCommitId? }   — status change; repo owner/maintainers only.
 *  - { newCommitId, mergeBase? }  — commit update; PR author only.
 * If both are present, the status change takes precedence (matches the
 * original endpoint's behavior).
 */
export const PATCH: RequestHandler = withRepoValidation(
  async ({ repoContext, requestContext, event }) => {
    const id = (event.params as any).id;
    const body = await event.request.json();
    const { status, mergeCommitId, newCommitId, mergeBase } = body;
    if (!id) {
      throw handleValidationError('Missing pull request ID', { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo });
    }
    // Fail fast before touching relays when the request carries no update at all.
    if (status === undefined && newCommitId === undefined) {
      throw handleValidationError('Missing required field: status or newCommitId', { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo });
    }
    // Fetch the PR event so we know who authored it (needed for both modes).
    const prEvents = await nostrClient.fetchEvents([
      {
        kinds: [KIND.PULL_REQUEST],
        ids: [id],
        limit: 1
      }
    ]);
    if (prEvents.length === 0) {
      throw handleApiError(new Error('Pull request not found'), { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Pull request not found');
    }
    const prEvent = prEvents[0];
    const prAuthor = prEvent.pubkey;
    if (status !== undefined) {
      // Validate the value before the (expensive) maintainer lookup.
      if (!status) {
        throw handleValidationError('Missing required field: status', { operation: 'updatePullRequestStatus', npub: repoContext.npub, repo: repoContext.repo });
      }
      // Status changes are restricted to the repo owner and maintainers.
      const { MaintainerService } = await import('$lib/services/nostr/maintainer-service.js');
      const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
      const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo);
      if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) {
        throw handleApiError(new Error('Only repository owners and maintainers can update PR status'), { operation: 'updatePullRequestStatus', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
      }
      // Publish to the relays implied by the repo's visibility settings;
      // fall back to the defaults when no announcement is found.
      const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, repoContext.repoOwnerPubkey, eventCache);
      const announcement = findRepoAnnouncement(allEvents, repoContext.repo);
      const relaysToPublish = announcement ? getRelaysForEventPublishing(announcement) : DEFAULT_NOSTR_RELAYS;
      const statusEvent = await prsService.updatePRStatus(
        id,
        prAuthor,
        repoContext.repoOwnerPubkey,
        repoContext.repo,
        status,
        mergeCommitId,
        relaysToPublish
      );
      return json({ success: true, event: statusEvent });
    }
    // newCommitId mode: only the PR author may rewrite their PR's commit.
    if (requestContext.userPubkeyHex !== prAuthor) {
      throw handleApiError(new Error('Only the PR author can update the PR commit'), { operation: 'updatePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
    }
    const { getGitUrl } = await import('$lib/config.js');
    const cloneUrl = getGitUrl(repoContext.npub, repoContext.repo);
    const updateEvent = await prsService.updatePullRequest(
      id,
      prAuthor,
      repoContext.repoOwnerPubkey,
      repoContext.repo,
      newCommitId,
      cloneUrl,
      mergeBase
    );
    return json({ success: true, event: updateEvent });
  },
  { operation: 'updatePullRequest', requireRepoAccess: false }
);

38
src/routes/api/repos/[npub]/[repo]/prs/[prId]/merge/+server.ts → src/routes/api/repos/[npub]/[repo]/pull-requests/[id]/merge/+server.ts

@ -1,9 +1,13 @@ @@ -1,9 +1,13 @@
/**
* API endpoint for merging pull requests
* Only maintainers and owners can merge PRs
* RESTful Pull Request Merge Endpoint
*
* POST /api/repos/{npub}/{repo}/pull-requests/{id}/merge
*
* Merges a pull request. Only maintainers and owners can merge PRs.
*/
import { json } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { fileManager, nostrClient, prsService } from '$lib/services/service-registry.js';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
@ -23,12 +27,12 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT @@ -23,12 +27,12 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
export const POST: RequestHandler = withRepoValidation(
async ({ repoContext, requestContext, event }) => {
const { prId } = event.params;
const id = (event.params as any).id;
const body = await event.request.json();
const { targetBranch = 'main', mergeCommitMessage, mergeStrategy = 'merge' } = body;
if (!prId) {
throw handleValidationError('Missing prId', { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo });
if (!id) {
throw handleValidationError('Missing pull request ID', { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo });
}
// Check if user is maintainer or owner
@ -36,13 +40,13 @@ export const POST: RequestHandler = withRepoValidation( @@ -36,13 +40,13 @@ export const POST: RequestHandler = withRepoValidation(
const isMaintainer = await maintainerService.isMaintainer(requestContext.userPubkeyHex || '', repoContext.repoOwnerPubkey, repoContext.repo);
if (!isMaintainer && requestContext.userPubkeyHex !== repoContext.repoOwnerPubkey) {
throw handleApiError(new Error('Only repository owners and maintainers can merge pull requests'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
throw handleApiError(new Error('Only repository owners and maintainers can merge pull requests'), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Unauthorized');
}
const repoPath = join(repoRoot, repoContext.npub, `${repoContext.repo}.git`);
if (!existsSync(repoPath)) {
throw handleApiError(new Error('Repository not found locally'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Repository not found');
throw handleApiError(new Error('Repository not found locally'), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Repository not found');
}
try {
@ -50,13 +54,13 @@ export const POST: RequestHandler = withRepoValidation( @@ -50,13 +54,13 @@ export const POST: RequestHandler = withRepoValidation(
const prEvents = await nostrClient.fetchEvents([
{
kinds: [KIND.PULL_REQUEST],
ids: [prId],
ids: [id],
limit: 1
}
]);
if (prEvents.length === 0) {
throw handleApiError(new Error('Pull request not found'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Pull request not found');
throw handleApiError(new Error('Pull request not found'), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Pull request not found');
}
const prEvent = prEvents[0];
@ -64,14 +68,14 @@ export const POST: RequestHandler = withRepoValidation( @@ -64,14 +68,14 @@ export const POST: RequestHandler = withRepoValidation(
// Get commit ID from PR
const commitTag = prEvent.tags.find(t => t[0] === 'c');
if (!commitTag || !commitTag[1]) {
throw handleApiError(new Error('Pull request does not have a commit ID'), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Invalid pull request');
throw handleApiError(new Error('Pull request does not have a commit ID'), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Invalid pull request');
}
const commitId = commitTag[1];
// Get branch name if available
const branchTag = prEvent.tags.find(t => t[0] === 'branch-name');
const sourceBranch = branchTag?.[1] || `pr-${prId.substring(0, 8)}`;
const sourceBranch = branchTag?.[1] || `pr-${id.substring(0, 8)}`;
const git = simpleGit(repoPath);
@ -89,7 +93,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -89,7 +93,7 @@ export const POST: RequestHandler = withRepoValidation(
try {
await git.show([commitId]);
} catch (showErr) {
throw handleApiError(new Error(`Commit ${commitId} not found in repository`), { operation: 'mergePR', npub: repoContext.npub, repo: repoContext.repo }, 'Commit not found');
throw handleApiError(new Error(`Commit ${commitId} not found in repository`), { operation: 'mergePullRequest', npub: repoContext.npub, repo: repoContext.repo }, 'Commit not found');
}
let mergeCommitHash: string;
@ -99,7 +103,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -99,7 +103,7 @@ export const POST: RequestHandler = withRepoValidation(
await git.raw(['merge', '--squash', commitId]);
await git.add('.');
const finalMessage = mergeCommitMessage || `Merge PR ${prId.substring(0, 8)}\n\n${prEvent.content || ''}`;
const finalMessage = mergeCommitMessage || `Merge PR ${id.substring(0, 8)}\n\n${prEvent.content || ''}`;
await git.commit(finalMessage);
mergeCommitHash = (await git.revparse(['HEAD'])).trim();
@ -126,7 +130,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -126,7 +130,7 @@ export const POST: RequestHandler = withRepoValidation(
}
} else {
// Regular merge
const finalMessage = mergeCommitMessage || `Merge PR ${prId.substring(0, 8)}`;
const finalMessage = mergeCommitMessage || `Merge PR ${id.substring(0, 8)}`;
await git.merge([commitId, '-m', finalMessage]);
mergeCommitHash = (await git.revparse(['HEAD'])).trim();
}
@ -134,7 +138,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -134,7 +138,7 @@ export const POST: RequestHandler = withRepoValidation(
// Update PR status to merged
const prAuthor = prEvent.pubkey;
await prsService.updatePRStatus(
prId,
id,
prAuthor,
repoContext.repoOwnerPubkey,
repoContext.repo,
@ -148,9 +152,9 @@ export const POST: RequestHandler = withRepoValidation( @@ -148,9 +152,9 @@ export const POST: RequestHandler = withRepoValidation(
message: 'Pull request merged successfully'
});
} catch (err) {
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, prId }, 'Error merging pull request');
logger.error({ error: err, npub: repoContext.npub, repo: repoContext.repo, id }, 'Error merging pull request');
throw err;
}
},
{ operation: 'mergePR', requireRepoExists: true, requireRepoAccess: true }
{ operation: 'mergePullRequest', requireRepoExists: true, requireRepoAccess: true }
);

153
src/routes/api/repos/[npub]/[repo]/raw/+server.ts

@ -1,153 +0,0 @@ @@ -1,153 +0,0 @@
/**
* API endpoint for raw file access
*/
import type { RequestHandler } from './$types';
import { fileManager, repoManager } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleValidationError } from '$lib/utils/error-handler.js';
import { spawn } from 'child_process';
import { join } from 'path';
import { promisify } from 'util';
// Root directory holding the bare git repositories on disk.
// Overridable via the GIT_REPO_ROOT env var; defaults to '/repos'.
// The `typeof process` guard keeps this module importable outside Node.
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
// Check if a file extension is a binary image type
// Raster image extensions that must be served as raw bytes (via git cat-file).
// SVG is deliberately absent: it is XML text and goes through the text path.
function isBinaryImage(ext: string): boolean {
  switch (ext.toLowerCase()) {
    case 'png':
    case 'jpg':
    case 'jpeg':
    case 'gif':
    case 'webp':
    case 'bmp':
    case 'ico':
    case 'apng':
    case 'avif':
      return true;
    default:
      return false;
  }
}
/**
 * GET: Serve the raw contents of a single file at a given ref.
 *
 * Query/context params: `path` (required), `ref` (defaults to HEAD).
 * Binary images are streamed byte-for-byte via `git ls-tree` + `git cat-file`
 * so they are never corrupted by text decoding; everything else (including
 * SVG, which is XML text) goes through fileManager's text path.
 */
export const GET: RequestHandler = createRepoGetHandler(
  async (context: RepoRequestContext, event: RequestEvent) => {
    const filePath = context.path || event.url.searchParams.get('path');
    const ref = context.ref || event.url.searchParams.get('ref') || 'HEAD';
    if (!filePath) {
      throw handleValidationError('Missing path parameter', { operation: 'getRawFile', npub: context.npub, repo: context.repo });
    }
    // Map the file extension to a MIME type; unknown extensions fall back to text/plain.
    const ext = filePath.split('.').pop()?.toLowerCase();
    const contentTypeMap: Record<string, string> = {
      'js': 'application/javascript',
      'ts': 'application/typescript',
      'json': 'application/json',
      'css': 'text/css',
      'html': 'text/html',
      'xml': 'application/xml',
      'svg': 'image/svg+xml',
      'png': 'image/png',
      'jpg': 'image/jpeg',
      'jpeg': 'image/jpeg',
      'gif': 'image/gif',
      'webp': 'image/webp',
      'bmp': 'image/bmp',
      'ico': 'image/x-icon',
      'pdf': 'application/pdf',
      'txt': 'text/plain',
      'md': 'text/markdown',
      'yml': 'text/yaml',
      'yaml': 'text/yaml',
    };
    const contentType = contentTypeMap[ext || ''] || 'text/plain';
    // For binary image files, use git plumbing to obtain the raw blob bytes.
    if (ext && isBinaryImage(ext)) {
      const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
      return new Promise<Response>((resolve, reject) => {
        // Step 1: resolve the blob hash for the path with git ls-tree.
        // Args are passed as an array (no shell), so filePath cannot inject commands.
        const lsTreeProcess = spawn('git', ['ls-tree', ref, filePath], {
          cwd: repoPath,
          stdio: ['ignore', 'pipe', 'pipe']
        });
        let lsTreeOutput = '';
        let lsTreeError = '';
        lsTreeProcess.stdout.on('data', (data: Buffer) => {
          lsTreeOutput += data.toString();
        });
        lsTreeProcess.stderr.on('data', (data: Buffer) => {
          lsTreeError += data.toString();
        });
        lsTreeProcess.on('close', (code) => {
          if (code !== 0) {
            reject(new Error(`Failed to get file hash: ${lsTreeError || 'Unknown error'}`));
            return;
          }
          // ls-tree output format: "<mode> <type> <hash>\t<path>".
          // Accept 40-char (SHA-1) and 64-char (SHA-256 object format) hashes.
          const match = lsTreeOutput.match(/^\d+\s+\w+\s+([a-f0-9]{40,64})\s+/);
          if (!match) {
            reject(new Error('Failed to parse file hash from git ls-tree output'));
            return;
          }
          const blobHash = match[1];
          // Step 2: stream the raw blob bytes with git cat-file.
          const catFileProcess = spawn('git', ['cat-file', 'blob', blobHash], {
            cwd: repoPath,
            stdio: ['ignore', 'pipe', 'pipe']
          });
          const chunks: Buffer[] = [];
          let catFileError = '';
          catFileProcess.stdout.on('data', (data: Buffer) => {
            chunks.push(data);
          });
          catFileProcess.stderr.on('data', (data: Buffer) => {
            catFileError += data.toString();
          });
          catFileProcess.on('close', (code) => {
            if (code !== 0) {
              reject(new Error(`Failed to get file content: ${catFileError || 'Unknown error'}`));
              return;
            }
            const binaryContent = Buffer.concat(chunks);
            resolve(new Response(binaryContent, {
              headers: {
                'Content-Type': contentType,
                'Content-Disposition': `inline; filename="${filePath.split('/').pop()}"`,
                'Cache-Control': 'public, max-age=3600'
              }
            }));
          });
          catFileProcess.on('error', (err) => {
            reject(new Error(`Failed to execute git cat-file: ${err.message}`));
          });
        });
        lsTreeProcess.on('error', (err) => {
          reject(new Error(`Failed to execute git ls-tree: ${err.message}`));
        });
      });
    } else {
      // Text files (including SVG): decode through the regular file manager.
      const fileData = await fileManager.getFileContent(context.npub, context.repo, filePath, ref);
      return new Response(fileData.content, {
        headers: {
          'Content-Type': contentType,
          'Content-Disposition': `inline; filename="${filePath.split('/').pop()}"`,
          'Cache-Control': 'public, max-age=3600'
        }
      });
    }
  },
  { operation: 'getRawFile' }
);

130
src/routes/api/repos/[npub]/[repo]/transfer/+server.ts → src/routes/api/repos/[npub]/[repo]/transfers/+server.ts

@ -1,64 +1,71 @@ @@ -1,64 +1,71 @@
/**
* API endpoint for transferring repository ownership
* RESTful Transfers Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/transfers # Get transfer history
* POST /api/repos/{npub}/{repo}/transfers # Transfer ownership
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError, handleValidationError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import { verifyEvent } from 'nostr-tools';
import { KIND } from '$lib/types/nostr.js';
import { ownershipTransferService, nostrClient, fileManager } from '$lib/services/service-registry.js';
import { withRepoValidation } from '$lib/utils/api-handlers.js';
import { combineRelays } from '$lib/config.js';
import { KIND } from '$lib/types/nostr.js';
import { verifyEvent, nip19 } from 'nostr-tools';
import type { NostrEvent } from '$lib/types/nostr.js';
import { getUserRelays } from '$lib/services/nostr/user-relays.js';
import { createRepoGetHandler, withRepoValidation } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext } from '$lib/utils/api-context.js';
import type { RequestEvent } from '@sveltejs/kit';
import { handleApiError, handleValidationError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import type { NostrEvent } from '$lib/types/nostr.js';
import logger from '$lib/services/logger.js';
/**
* GET - Get current owner and transfer history
* GET: Get transfer history
*/
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext) => {
// Get current owner (may be different if transferred)
const currentOwner = await ownershipTransferService.getCurrentOwner(context.repoOwnerPubkey, context.repo);
// Fetch transfer events for history
const repoTag = `${KIND.REPO_ANNOUNCEMENT}:${context.repoOwnerPubkey}:${context.repo}`;
const transferEvents = await nostrClient.fetchEvents([
{
kinds: [KIND.OWNERSHIP_TRANSFER],
'#a': [repoTag],
limit: 100
}
]);
// Sort by created_at descending
transferEvents.sort((a, b) => b.created_at - a.created_at);
return json({
originalOwner: context.repoOwnerPubkey,
currentOwner,
transferred: currentOwner !== context.repoOwnerPubkey,
transfers: transferEvents.map(event => {
const pTag = event.tags.find(t => t[0] === 'p');
return {
eventId: event.id,
from: event.pubkey,
to: pTag?.[1] || 'unknown',
timestamp: event.created_at,
createdAt: new Date(event.created_at * 1000).toISOString()
};
})
});
try {
// Get current owner (may be different if transferred)
const currentOwner = await ownershipTransferService.getCurrentOwner(context.repoOwnerPubkey, context.repo);
// Fetch transfer events for history
const repoTag = `${KIND.REPO_ANNOUNCEMENT}:${context.repoOwnerPubkey}:${context.repo}`;
const transferEvents = await nostrClient.fetchEvents([
{
kinds: [KIND.OWNERSHIP_TRANSFER],
'#a': [repoTag],
limit: 100
}
]);
// Sort by created_at descending
transferEvents.sort((a, b) => b.created_at - a.created_at);
return json({
originalOwner: context.repoOwnerPubkey,
currentOwner,
transferred: currentOwner !== context.repoOwnerPubkey,
transfers: transferEvents.map(event => {
const pTag = event.tags.find(t => t[0] === 'p');
return {
eventId: event.id,
from: event.pubkey,
to: pTag?.[1] || 'unknown',
timestamp: event.created_at,
createdAt: new Date(event.created_at * 1000).toISOString()
};
})
});
} catch (err) {
return handleApiError(err, { operation: 'getTransferHistory', npub: context.npub, repo: context.repo }, 'Failed to get transfer history');
}
},
{ operation: 'getOwnership', requireRepoAccess: false } // Ownership info is public
{ operation: 'getTransferHistory', requireRepoExists: false, requireRepoAccess: false }
);
/**
* POST - Initiate ownership transfer
* Requires a pre-signed NIP-98 authenticated event from the current owner
* POST: Transfer ownership
* Body: { transferEvent }
*/
export const POST: RequestHandler = withRepoValidation(
async ({ repoContext, requestContext, event }) => {
@ -107,7 +114,7 @@ export const POST: RequestHandler = withRepoValidation( @@ -107,7 +114,7 @@ export const POST: RequestHandler = withRepoValidation(
const aTag = transferEvent.tags.find(t => t[0] === 'a');
const expectedRepoTag = `${KIND.REPO_ANNOUNCEMENT}:${repoContext.repoOwnerPubkey}:${repoContext.repo}`;
if (!aTag || aTag[1] !== expectedRepoTag) {
throw handleValidationError("Transfer event 'a' tag does not match this repository", { operation: 'transferOwnership', npub: repoContext.npub, repo: repoContext.repo });
throw handleValidationError(`Transfer event must reference this repository: ${expectedRepoTag}`, { operation: 'transferOwnership', npub: repoContext.npub, repo: repoContext.repo });
}
// Get user's relays and publish
@ -120,39 +127,19 @@ export const POST: RequestHandler = withRepoValidation( @@ -120,39 +127,19 @@ export const POST: RequestHandler = withRepoValidation(
throw handleApiError(new Error('Failed to publish transfer event to any relays'), { operation: 'transferOwnership', npub: repoContext.npub, repo: repoContext.repo }, 'Failed to publish transfer event to any relays');
}
// Save transfer event to repo (offline papertrail - step 1 requirement)
// Save transfer event to repo (offline papertrail)
try {
const transferEventContent = JSON.stringify(transferEvent, null, 2) + '\n';
// Use consistent filename pattern: .nostr-ownership-transfer-{eventId}.json
const transferFileName = `.nostr-ownership-transfer-${transferEvent.id}.json`;
// Save to repo if it exists locally
if (fileManager.repoExists(repoContext.npub, repoContext.repo)) {
// Get worktree to save to repo-events.jsonl
const defaultBranch = await fileManager.getDefaultBranch(repoContext.npub, repoContext.repo).catch(() => 'main');
const repoPath = fileManager.getRepoPath(repoContext.npub, repoContext.repo);
const workDir = await fileManager.getWorktree(repoPath, defaultBranch, repoContext.npub, repoContext.repo);
// Save to repo-events.jsonl (standard file for easy analysis)
// Save to repo-events.jsonl
await fileManager.saveRepoEventToWorktree(workDir, transferEvent as NostrEvent, 'transfer').catch(err => {
logger.debug({ error: err }, 'Failed to save transfer event to repo-events.jsonl');
});
// Also save individual transfer file
await fileManager.writeFile(
repoContext.npub,
repoContext.repo,
transferFileName,
transferEventContent,
`Add ownership transfer event: ${transferEvent.id.slice(0, 16)}...`,
'Nostr',
`${requestContext.userPubkeyHex}@nostr`,
defaultBranch
).catch(err => {
// Log but don't fail - publishing to relays is more important
logger.warn({ error: err, npub: repoContext.npub, repo: repoContext.repo }, 'Failed to save transfer event to repo');
});
// Clean up worktree
await fileManager.removeWorktree(repoPath, workDir).catch(err => {
logger.debug({ error: err }, 'Failed to remove worktree after saving transfer event');
@ -161,7 +148,6 @@ export const POST: RequestHandler = withRepoValidation( @@ -161,7 +148,6 @@ export const POST: RequestHandler = withRepoValidation(
logger.debug({ npub: repoContext.npub, repo: repoContext.repo }, 'Repo does not exist locally, skipping transfer event save to repo');
}
} catch (err) {
// Log but don't fail - publishing to relays is more important
logger.warn({ error: err, npub: repoContext.npub, repo: repoContext.repo }, 'Failed to save transfer event to repo');
}
@ -173,9 +159,11 @@ export const POST: RequestHandler = withRepoValidation( @@ -173,9 +159,11 @@ export const POST: RequestHandler = withRepoValidation(
event: transferEvent,
published: result,
message: 'Ownership transfer initiated successfully',
// Signal to client that page should refresh
refresh: true
transferEvent: {
id: transferEvent.id,
from: transferEvent.pubkey,
to: aTag[2] || 'unknown'
}
});
},
{ operation: 'transferOwnership', requireRepoAccess: false } // Override to check owner instead
}
);

327
src/routes/api/repos/[npub]/[repo]/tree/+server.ts

@ -1,327 +0,0 @@ @@ -1,327 +0,0 @@
/**
* API endpoint for listing files and directories in a repository
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { fileManager, repoManager, nostrClient } from '$lib/services/service-registry.js';
import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError, handleNotFoundError } from '$lib/utils/error-handler.js';
import { KIND } from '$lib/types/nostr.js';
import { join, resolve } from 'path';
import { existsSync } from 'fs';
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js';
import logger from '$lib/services/logger.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
// Resolve GIT_REPO_ROOT to an absolute path; falls back to /repos when the
// environment variable is unset or empty (handles relative and absolute paths).
const repoRootEnv =
  (typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT) || '/repos';
const repoRoot = resolve(repoRootEnv);
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext) => {
const repoPath = join(repoRoot, context.npub, `${context.repo}.git`);
// If repo doesn't exist, try to fetch it on-demand
if (!existsSync(repoPath)) {
try {
// Fetch repository announcement from Nostr (case-insensitive) with caching
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, context.repo);
if (announcement) {
// Try API-based fetching first (no cloning)
const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js');
const { extractCloneUrls: extractCloneUrlsHelper } = await import('$lib/utils/nostr-utils.js');
const cloneUrlsForLogging = extractCloneUrlsHelper(announcement);
logger.debug({ npub: context.npub, repo: context.repo, cloneUrlCount: cloneUrlsForLogging.length, cloneUrls: cloneUrlsForLogging, path: context.path }, 'Attempting API fallback for tree');
const apiData = await tryApiFetch(announcement, context.npub, context.repo);
if (apiData && apiData.files !== undefined) {
// Return empty array if no files (legitimate for empty repos)
// Only proceed if we have files to filter
if (apiData.files.length === 0) {
logger.debug({ npub: context.npub, repo: context.repo, path: context.path }, 'API fallback returned empty files array (repo may be empty)');
return json([]);
}
logger.debug({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback');
// Return API data directly without cloning
const path = context.path || '';
// Filter files by path if specified
let filteredFiles: typeof apiData.files;
if (path) {
// Normalize path: ensure it ends with / for directory matching
const normalizedPath = path.endsWith('/') ? path : `${path}/`;
// Filter files that are directly in this directory (not in subdirectories)
filteredFiles = apiData.files.filter(f => {
// File must start with the normalized path
if (!f.path.startsWith(normalizedPath)) {
return false;
}
// Get the relative path after the directory prefix
const relativePath = f.path.slice(normalizedPath.length);
// If relative path is empty, skip (this would be the directory itself)
if (!relativePath) {
return false;
}
// Remove trailing slash from relative path for directories
const cleanRelativePath = relativePath.endsWith('/') ? relativePath.slice(0, -1) : relativePath;
// Check if it's directly in this directory (no additional / in the relative path)
// This works for both files (e.g., "icon.svg") and directories (e.g., "subfolder")
return !cleanRelativePath.includes('/');
});
} else {
// Root directory: show only files and directories in root
filteredFiles = apiData.files.filter(f => {
// Remove trailing slash for directories
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path;
const pathParts = cleanPath.split('/');
// Include only items in root (single path segment)
return pathParts.length === 1;
});
}
// Normalize type: API returns 'dir' but frontend expects 'directory'
// Also update name to be just the filename/dirname for display
const normalizedFiles = filteredFiles.map(f => {
// Extract display name from path
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path;
const pathParts = cleanPath.split('/');
const displayName = pathParts[pathParts.length - 1] || f.name;
return {
name: displayName,
path: f.path,
type: (f.type === 'dir' ? 'directory' : 'file') as 'file' | 'directory',
size: f.size
};
});
return json(normalizedFiles);
}
// API fetch failed - repo is not cloned and API fetch didn't work
// Check if announcement has clone URLs to provide better error message
const { extractCloneUrls } = await import('$lib/utils/nostr-utils.js');
const cloneUrls = extractCloneUrls(announcement);
const hasCloneUrls = cloneUrls.length > 0;
logger.debug({ npub: context.npub, repo: context.repo, hasCloneUrls, cloneUrlCount: cloneUrls.length }, 'API fallback failed or no clone URLs available');
throw handleNotFoundError(
hasCloneUrls
? 'Repository is not cloned locally and could not be fetched via API. Privileged users can clone this repository using the "Clone to Server" button.'
: 'Repository is not cloned locally and has no external clone URLs for API fallback. Privileged users can clone this repository using the "Clone to Server" button.',
{ operation: 'listFiles', npub: context.npub, repo: context.repo }
);
} else {
throw handleNotFoundError(
'Repository announcement not found in Nostr',
{ operation: 'listFiles', npub: context.npub, repo: context.repo }
);
}
} catch (err) {
// Check if repo was created by another concurrent request
if (existsSync(repoPath)) {
// Repo exists now, clear cache and continue with normal flow
repoCache.delete(RepoCache.repoExistsKey(context.npub, context.repo));
} else {
// If fetching fails, return 404
throw handleNotFoundError(
'Repository not found',
{ operation: 'listFiles', npub: context.npub, repo: context.repo }
);
}
}
}
// Double-check repo exists (should be true if we got here)
if (!existsSync(repoPath)) {
throw handleNotFoundError(
'Repository not found',
{ operation: 'listFiles', npub: context.npub, repo: context.repo }
);
}
// Get default branch if no ref specified
let ref = context.ref || 'HEAD';
// If ref is a branch name, validate it exists or use default branch
if (ref !== 'HEAD' && !ref.startsWith('refs/')) {
try {
const branches = await fileManager.getBranches(context.npub, context.repo);
if (!branches.includes(ref)) {
// Branch doesn't exist, use default branch
ref = await fileManager.getDefaultBranch(context.npub, context.repo);
}
} catch {
// If we can't get branches, fall back to HEAD
ref = 'HEAD';
}
}
const path = context.path || '';
try {
const files = await fileManager.listFiles(context.npub, context.repo, ref, path);
// If repo exists but has no files (empty repo), try API fallback
if (files.length === 0) {
logger.debug({ npub: context.npub, repo: context.repo, path, ref }, 'Repo exists but is empty, attempting API fallback for tree');
try {
// Fetch repository announcement for API fallback
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, context.repo);
if (announcement) {
const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js');
const apiData = await tryApiFetch(announcement, context.npub, context.repo);
if (apiData && apiData.files && apiData.files.length > 0) {
logger.info({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback for empty repo');
// Filter files by path if specified (same logic as above)
let filteredFiles: typeof apiData.files;
if (path) {
const normalizedPath = path.endsWith('/') ? path : `${path}/`;
filteredFiles = apiData.files.filter(f => {
if (!f.path.startsWith(normalizedPath)) {
return false;
}
const relativePath = f.path.slice(normalizedPath.length);
if (!relativePath) {
return false;
}
const cleanRelativePath = relativePath.endsWith('/') ? relativePath.slice(0, -1) : relativePath;
return !cleanRelativePath.includes('/');
});
} else {
filteredFiles = apiData.files.filter(f => {
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path;
const pathParts = cleanPath.split('/');
return pathParts.length === 1;
});
}
// Normalize type and name
const normalizedFiles = filteredFiles.map(f => {
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path;
const pathParts = cleanPath.split('/');
const displayName = pathParts[pathParts.length - 1] || f.name;
return {
name: displayName,
path: f.path,
type: (f.type === 'dir' ? 'directory' : 'file') as 'file' | 'directory',
size: f.size
};
});
return json(normalizedFiles);
}
}
} catch (apiErr) {
logger.debug({ error: apiErr, npub: context.npub, repo: context.repo }, 'API fallback failed for empty repo, returning empty files');
}
}
// Debug logging to help diagnose missing files
logger.debug({
npub: context.npub,
repo: context.repo,
path,
ref,
fileCount: files.length,
files: files.map(f => ({ name: f.name, path: f.path, type: f.type }))
}, '[Tree] Returning files from fileManager.listFiles');
return json(files);
} catch (err) {
// If error occurs, try API fallback before giving up
logger.debug({ error: err, npub: context.npub, repo: context.repo }, '[Tree] Error listing files, attempting API fallback');
try {
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, context.repo);
if (announcement) {
const { tryApiFetch } = await import('$lib/utils/api-repo-helper.js');
const apiData = await tryApiFetch(announcement, context.npub, context.repo);
if (apiData && apiData.files && apiData.files.length > 0) {
logger.info({ npub: context.npub, repo: context.repo, fileCount: apiData.files.length }, 'Successfully fetched files via API fallback after error');
// Filter and normalize files (same logic as above)
const path = context.path || '';
let filteredFiles: typeof apiData.files;
if (path) {
const normalizedPath = path.endsWith('/') ? path : `${path}/`;
filteredFiles = apiData.files.filter(f => {
if (!f.path.startsWith(normalizedPath)) return false;
const relativePath = f.path.slice(normalizedPath.length);
if (!relativePath) return false;
const cleanRelativePath = relativePath.endsWith('/') ? relativePath.slice(0, -1) : relativePath;
return !cleanRelativePath.includes('/');
});
} else {
filteredFiles = apiData.files.filter(f => {
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path;
return cleanPath.split('/').length === 1;
});
}
const normalizedFiles = filteredFiles.map(f => {
const cleanPath = f.path.endsWith('/') ? f.path.slice(0, -1) : f.path;
const pathParts = cleanPath.split('/');
const displayName = pathParts[pathParts.length - 1] || f.name;
return {
name: displayName,
path: f.path,
type: (f.type === 'dir' ? 'directory' : 'file') as 'file' | 'directory',
size: f.size
};
});
return json(normalizedFiles);
}
}
} catch (apiErr) {
logger.debug({ error: apiErr, npub: context.npub, repo: context.repo }, 'API fallback failed after error');
}
// Log the actual error for debugging
logger.error({ error: err, npub: context.npub, repo: context.repo, path: context.path }, '[Tree] Error listing files');
// For optional paths (like "docs"), return empty array instead of 404
// This allows components to gracefully handle missing directories
const optionalPaths = ['docs'];
if (context.path && optionalPaths.includes(context.path.toLowerCase())) {
logger.debug({ npub: context.npub, repo: context.repo, path: context.path }, '[Tree] Optional path not found, returning empty array');
return json([]);
}
// Check if it's a "not found" error for the repo itself
if (err instanceof Error && (err.message.includes('Repository not found') || err.message.includes('not cloned'))) {
throw handleNotFoundError(
err.message,
{ operation: 'listFiles', npub: context.npub, repo: context.repo }
);
}
// For other errors with optional paths, return empty array
if (context.path && optionalPaths.includes(context.path.toLowerCase())) {
return json([]);
}
// Otherwise, it's a server error
throw handleApiError(
err,
{ operation: 'listFiles', npub: context.npub, repo: context.repo },
'Failed to list files'
);
}
},
{ operation: 'listFiles', requireRepoExists: false, requireRepoAccess: false } // Tree listing should be publicly accessible for public repos
);

56
src/routes/api/repos/[npub]/[repo]/verify/+server.ts → src/routes/api/repos/[npub]/[repo]/verification/+server.ts

@ -1,27 +1,27 @@ @@ -1,27 +1,27 @@
/**
* API endpoint for verifying repository ownership
* RESTful Verification Resource Endpoint
*
* GET /api/repos/{npub}/{repo}/verification # Get verification status
* POST /api/repos/{npub}/{repo}/verification # Save announcement to repo
*/
import { json, error } from '@sveltejs/kit';
// @ts-ignore - SvelteKit generates this type
import type { RequestHandler } from './$types';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError } from '$lib/utils/error-handler.js';
import { fileManager } from '$lib/services/service-registry.js';
import { verifyRepositoryOwnership } from '$lib/services/nostr/repo-verification.js';
import type { NostrEvent } from '$lib/types/nostr.js';
import { nostrClient } from '$lib/services/service-registry.js';
import { KIND } from '$lib/types/nostr.js';
import { existsSync } from 'fs';
import { join } from 'path';
import { decodeNpubToHex } from '$lib/utils/npub-utils.js';
import { createRepoGetHandler, createRepoPostHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext, RequestEvent } from '$lib/utils/api-context.js';
import { handleApiError, handleValidationError } from '$lib/utils/error-handler.js';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { MaintainerService } from '$lib/services/nostr/maintainer-service.js';
import { DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { AnnouncementManager } from '$lib/services/git/announcement-manager.js';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { fetchUserEmail, fetchUserName } from '$lib/utils/user-profile.js';
import simpleGit from 'simple-git';
import logger from '$lib/services/logger.js';
@ -30,11 +30,14 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT @@ -30,11 +30,14 @@ const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
const announcementManager = new AnnouncementManager(repoRoot);
/**
* GET: Get verification status
*/
export const GET: RequestHandler = createRepoGetHandler(
async (context: RepoRequestContext) => {
// Check if repository exists - verification doesn't require the repo to be cloned locally
// We can verify ownership from Nostr events alone
// Fetch the repository announcement (case-insensitive) with caching
const allEvents = await fetchRepoAnnouncementsWithCache(nostrClient, context.repoOwnerPubkey, eventCache);
const announcement = findRepoAnnouncement(allEvents, context.repo);
@ -61,7 +64,6 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -61,7 +64,6 @@ export const GET: RequestHandler = createRepoGetHandler(
}
// Verify ownership for each clone separately
// Ownership is determined by the most recent announcement file checked into each clone
const cloneVerifications: Array<{ url: string; verified: boolean; ownerPubkey: string | null; error?: string }> = [];
// First, verify the local GitRepublic clone (if it exists)
@ -74,14 +76,11 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -74,14 +76,11 @@ export const GET: RequestHandler = createRepoGetHandler(
if (repoExists) {
// Repo is cloned - verify the announcement file matches
try {
// Get current owner from the most recent announcement file in the repo
localOwner = await fileManager.getCurrentOwnerFromRepo(context.npub, context.repo);
if (localOwner) {
// Verify the announcement in nostr/repo-events.jsonl matches the announcement event
try {
const repoEventsFile = await fileManager.getFileContent(context.npub, context.repo, 'nostr/repo-events.jsonl', 'HEAD');
// Parse repo-events.jsonl and find the most recent announcement
const lines = repoEventsFile.content.trim().split('\n').filter(Boolean);
let repoAnnouncement: NostrEvent | null = null;
let latestTimestamp = 0;
@ -124,7 +123,6 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -124,7 +123,6 @@ export const GET: RequestHandler = createRepoGetHandler(
}
} else {
// Repo is not cloned yet - verify from Nostr announcement alone
// The announcement pubkey must match the repo owner
if (announcement.pubkey === context.repoOwnerPubkey) {
localVerified = true;
localOwner = context.repoOwnerPubkey;
@ -147,9 +145,6 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -147,9 +145,6 @@ export const GET: RequestHandler = createRepoGetHandler(
});
}
// For other clones (GitHub, GitLab, etc.), we'd need to fetch them first to check their announcement files
// This is a future enhancement - for now we only verify the local GitRepublic clone
// Overall verification: at least one clone must be verified
const overallVerified = cloneVerifications.some(cv => cv.verified);
const verifiedClones = cloneVerifications.filter(cv => cv.verified);
@ -185,12 +180,12 @@ export const GET: RequestHandler = createRepoGetHandler( @@ -185,12 +180,12 @@ export const GET: RequestHandler = createRepoGetHandler(
});
}
},
{ operation: 'verifyRepo', requireRepoExists: false, requireRepoAccess: false } // Verification is public, doesn't need repo to exist
{ operation: 'getVerification', requireRepoExists: false, requireRepoAccess: false }
);
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
const announcementManager = new AnnouncementManager(repoRoot);
/**
* POST: Save announcement to repo
*/
export const POST: RequestHandler = createRepoPostHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
const requestContext = extractRequestContext(event);
@ -225,28 +220,24 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -225,28 +220,24 @@ export const POST: RequestHandler = createRepoPostHandler(
// Check if repository has any commits
const git = simpleGit(repoPath);
let hasCommits = false;
// Use same default branch logic as repo-manager (master, or from env)
let defaultBranch = process.env.DEFAULT_BRANCH || 'master';
try {
const commitCount = await git.raw(['rev-list', '--count', '--all']);
hasCommits = parseInt(commitCount.trim(), 10) > 0;
} catch {
// If we can't check, assume no commits
hasCommits = false;
}
// If repository has commits, get the default branch
if (hasCommits) {
try {
defaultBranch = await fileManager.getDefaultBranch(context.npub, context.repo);
} catch {
// Fallback to default if getDefaultBranch fails
defaultBranch = process.env.DEFAULT_BRANCH || 'master';
}
}
// Get worktree for the default branch (worktree manager will create branch if needed)
// Get worktree for the default branch
logger.info({ npub: context.npub, repo: context.repo, branch: defaultBranch, hasCommits }, 'Getting worktree for announcement commit');
const worktreePath = await fileManager.getWorktree(repoPath, defaultBranch, context.npub, context.repo);
@ -254,7 +245,6 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -254,7 +245,6 @@ export const POST: RequestHandler = createRepoPostHandler(
const hasAnnouncement = await announcementManager.hasAnnouncementInRepo(worktreePath, announcement.id);
if (hasAnnouncement) {
// Announcement already exists, but we'll update it anyway to ensure it's the latest
logger.debug({ npub: context.npub, repo: context.repo, eventId: announcement.id }, 'Announcement already exists, updating anyway');
}
@ -290,14 +280,11 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -290,14 +280,11 @@ export const POST: RequestHandler = createRepoPostHandler(
// For empty repositories, ensure the branch is set up in the worktree
if (!hasCommits) {
try {
// Check if branch exists in worktree
const currentBranch = await workGit.revparse(['--abbrev-ref', 'HEAD']).catch(() => null);
if (!currentBranch || currentBranch === 'HEAD') {
// Branch doesn't exist, create orphan branch in worktree
logger.debug({ npub: context.npub, repo: context.repo, branch: defaultBranch }, 'Creating orphan branch in worktree');
await workGit.raw(['checkout', '--orphan', defaultBranch]);
} else if (currentBranch !== defaultBranch) {
// Switch to the correct branch
logger.debug({ npub: context.npub, repo: context.repo, currentBranch, targetBranch: defaultBranch }, 'Switching to target branch in worktree');
await workGit.checkout(defaultBranch);
}
@ -322,7 +309,6 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -322,7 +309,6 @@ export const POST: RequestHandler = createRepoPostHandler(
try {
await workGit.push('origin', defaultBranch);
} catch (pushErr) {
// Push might fail if there's no remote, that's okay
logger.debug({ error: pushErr, npub: context.npub, repo: context.repo }, 'Push failed (may not have remote)');
}
@ -336,8 +322,8 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -336,8 +322,8 @@ export const POST: RequestHandler = createRepoPostHandler(
});
} catch (err) {
logger.error({ error: err, npub: context.npub, repo: context.repo }, 'Failed to commit announcement for verification');
return handleApiError(err, { operation: 'verifyRepoCommit', npub: context.npub, repo: context.repo }, 'Failed to commit announcement');
return handleApiError(err, { operation: 'saveAnnouncement', npub: context.npub, repo: context.repo }, 'Failed to commit announcement');
}
},
{ operation: 'verifyRepoCommit', requireRepoExists: true, requireRepoAccess: true }
{ operation: 'saveAnnouncement', requireRepoExists: true, requireRepoAccess: true }
);

33
src/routes/api/repos/poll/+server.ts

@ -0,0 +1,33 @@ @@ -0,0 +1,33 @@
/**
* API endpoint for manually triggering a repo poll
* This allows users to refresh the repo list and trigger provisioning of new repos
*
* This is the public API interface for triggering polls.
* All poll triggers should go through this endpoint or the shared triggerRepoPoll utility.
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { triggerRepoPoll } from '$lib/utils/repo-poll-trigger.js';
import { extractRequestContext } from '$lib/utils/api-context.js';
/**
 * POST /api/repos/poll
 *
 * Manually triggers a repo poll so clients can refresh the repo list and
 * provision newly announced repos. All poll triggers should go through this
 * endpoint or the shared triggerRepoPoll utility.
 *
 * Responses:
 *  - 200 { success: true }  poll triggered
 *  - 503                    polling service reports it is not available
 *  - 500                    any other failure
 */
export const POST: RequestHandler = async () => {
  try {
    await triggerRepoPoll('api-endpoint');
    return json({
      success: true,
      message: 'Poll triggered successfully'
    });
  } catch (err) {
    const errorMessage = err instanceof Error ? err.message : String(err);
    // 503 when the underlying polling service is unavailable, 500 otherwise.
    const status = err instanceof Error && errorMessage.includes('not available') ? 503 : 500;
    return json({ success: false, error: errorMessage }, { status });
  }
};

328
src/routes/api/search/+server.ts

@ -7,7 +7,7 @@ import { json } from '@sveltejs/kit'; @@ -7,7 +7,7 @@ import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { MaintainerService } from '$lib/services/nostr/maintainer-service.js';
import { DEFAULT_NOSTR_SEARCH_RELAYS } from '$lib/config.js';
import { DEFAULT_NOSTR_SEARCH_RELAYS, DEFAULT_NOSTR_RELAYS } from '$lib/config.js';
import { KIND } from '$lib/types/nostr.js';
import type { NostrEvent, NostrFilter } from '$lib/types/nostr.js';
import { nip19 } from 'nostr-tools';
@ -19,6 +19,11 @@ import { eventCache } from '$lib/services/nostr/event-cache.js'; @@ -19,6 +19,11 @@ import { eventCache } from '$lib/services/nostr/event-cache.js';
import { decodeNostrAddress } from '$lib/services/nostr/nip19-utils.js';
import logger from '$lib/services/logger.js';
import { isParameterizedReplaceable } from '$lib/utils/nostr-event-utils.js';
import { readdir, stat } from 'fs/promises';
import { join } from 'path';
import { existsSync } from 'fs';
import { simpleGit } from 'simple-git';
import { fileManager } from '$lib/services/service-registry.js';
// Replaceable event kinds (only latest per pubkey matters)
const REPLACEABLE_KINDS = [0, 3, 10002]; // Profile, Contacts, Relay List
@ -46,7 +51,9 @@ function getDeduplicationKey(event: NostrEvent): string { @@ -46,7 +51,9 @@ function getDeduplicationKey(event: NostrEvent): string {
export const GET: RequestHandler = async (event) => {
const query = event.url.searchParams.get('q');
const limit = parseInt(event.url.searchParams.get('limit') || '20', 10);
const type = event.url.searchParams.get('type') || 'repos'; // Default to repos search
const limit = parseInt(event.url.searchParams.get('limit') || (type === 'code' ? '100' : '20'), 10);
const repoFilter = event.url.searchParams.get('repo'); // For code search: filter by specific repo (npub/repo format)
// Extract user pubkey for privacy filtering
const requestContext = extractRequestContext(event);
@ -60,6 +67,13 @@ export const GET: RequestHandler = async (event) => { @@ -60,6 +67,13 @@ export const GET: RequestHandler = async (event) => {
return handleValidationError('Query must be at least 2 characters', { operation: 'search', query });
}
// If type is 'code', delegate to code search logic
if (type === 'code') {
return handleCodeSearch(event, query, limit, repoFilter, requestContext);
}
// Otherwise, continue with repository search (type === 'repos' or default)
try {
// Collect all available relays - prioritize DEFAULT_NOSTR_SEARCH_RELAYS
const allRelays = new Set<string>();
@ -634,3 +648,313 @@ function filterRepos( @@ -634,3 +648,313 @@ function filterRepos(
return false;
});
}
/**
* Handle code search (type=code)
*/
/** Decode an npub to a hex pubkey; on failure or non-npub input, assume it is already hex. */
function decodePubkeyToHex(npubOrHex: string): string {
  try {
    const decoded = nip19.decode(npubOrHex);
    if (decoded.type === 'npub') {
      return decoded.data as string;
    }
  } catch {
    // Not a valid NIP-19 string; fall through.
  }
  return npubOrHex;
}

/**
 * Handle code search (type=code).
 *
 * Searches either a single repo (`repoFilter` in npub/repo form) or every
 * repo under GIT_REPO_ROOT. In both cases the requester must pass the
 * maintainer-service `canView` check, so private repos are never searchable
 * merely because their path is known.
 */
async function handleCodeSearch(
  event: { url: URL; request: Request },
  query: string,
  limit: number,
  repoFilter: string | null,
  requestContext: ReturnType<typeof extractRequestContext>
) {
  const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
    ? process.env.GIT_REPO_ROOT
    : '/repos';

  interface CodeSearchResult {
    repo: string;
    npub: string;
    file: string;
    line: number;
    content: string;
    branch: string;
  }

  const results: CodeSearchResult[] = [];
  // One service instance for the whole request (was previously re-created per repo).
  const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);

  /** Access check shared by both search paths; failures are treated as "no access". */
  const canViewRepo = async (user: string, repoName: string): Promise<boolean> => {
    try {
      return await maintainerService.canView(
        requestContext.userPubkeyHex || null,
        decodePubkeyToHex(user),
        repoName
      );
    } catch (accessErr) {
      logger.debug({ error: accessErr, user, repo: repoName }, 'Error checking access, skipping repo');
      return false;
    }
  };

  try {
    // If a repo filter is specified, search only that repo.
    if (repoFilter) {
      const [npub, repo] = repoFilter.split('/');
      if (npub && repo) {
        const repoPath = join(repoRoot, npub, `${repo}.git`);
        // Enforce the same access control as the all-repos path below.
        if (existsSync(repoPath) && (await canViewRepo(npub, repo))) {
          const repoResults = await searchInRepoForCode(npub, repo, query, limit);
          results.push(...repoResults);
        }
      }
      return json(results);
    }

    // Search across all repositories found on the filesystem.
    if (!existsSync(repoRoot)) {
      return json([]);
    }
    const users = await readdir(repoRoot);
    outer: for (const user of users) {
      const userPath = join(repoRoot, user);
      const userStat = await stat(userPath);
      if (!userStat.isDirectory()) {
        continue;
      }
      const repos = await readdir(userPath);
      for (const repoDir of repos) {
        if (!repoDir.endsWith('.git')) {
          continue;
        }
        const repoName = repoDir.replace(/\.git$/, '');
        const repoPath = join(userPath, repoDir);
        const repoStat = await stat(repoPath);
        if (!repoStat.isDirectory()) {
          continue;
        }
        // Skip private repos the requester cannot view.
        if (!(await canViewRepo(user, repoName))) {
          continue;
        }
        try {
          const repoResults = await searchInRepoForCode(user, repoName, query, limit - results.length);
          results.push(...repoResults);
        } catch (searchErr) {
          logger.debug({ error: searchErr, user, repo: repoName }, 'Error searching repo, continuing');
          continue;
        }
        if (results.length >= limit) {
          break outer;
        }
      }
    }
    return json(results.slice(0, limit));
  } catch (err) {
    logger.error({ error: err, query }, 'Error performing code search');
    throw handleApiError(err, { operation: 'codeSearch' }, 'Failed to perform code search');
  }
}
/**
 * Search for a text query inside a single repository using `git grep`.
 *
 * Strategy: resolve the branch to search, then prefer grepping a checked-out
 * worktree (via `fileManager.getWorktree`); when a worktree cannot be created
 * (e.g. bare repo restrictions), fall back to `git grep <query> <treeRef>`
 * against the bare repository itself.
 *
 * @param npub  - Repository owner (npub), used to locate the repo on disk.
 * @param repo  - Repository name (without the `.git` suffix).
 * @param query - Raw search string; trimmed and passed verbatim as a git-grep
 *                pattern (args are passed as an array, so no shell expansion).
 * @param limit - Maximum number of result rows to return.
 * @returns Matching lines (at most `limit`). Returns an empty array when the
 *          repo directory is missing, the query has no matches, or any git
 *          operation fails (failures are logged at debug level, not thrown).
 */
async function searchInRepoForCode(
  npub: string,
  repo: string,
  query: string,
  limit: number
): Promise<Array<{ repo: string; npub: string; file: string; line: number; content: string; branch: string }>> {
  const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
    ? process.env.GIT_REPO_ROOT
    : '/repos';
  const repoPath = join(repoRoot, npub, `${repo}.git`);
  if (!existsSync(repoPath)) {
    return [];
  }
  const git = simpleGit(repoPath);
  try {
    // Resolve the branch to search: current branch if set, else main/master,
    // else the first local branch, else literal 'main'.
    let branch = 'HEAD';
    try {
      const branches = await git.branchLocal();
      branch = branches.current || 'HEAD';
      if (!branch || branch === 'HEAD') {
        const allBranches = branches.all.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, ''));
        branch = allBranches.find(b => b === 'main') || allBranches.find(b => b === 'master') || allBranches[0] || 'main';
      }
    } catch {
      branch = 'main';
    }

    // Bare repositories have no checkout, so try to obtain a worktree to grep in.
    let worktreePath: string | null = null;
    try {
      const actualBranch = branch === 'HEAD' ? 'main' : branch;
      worktreePath = await fileManager.getWorktree(repoPath, actualBranch, npub, repo);
    } catch (worktreeError) {
      logger.debug({ error: worktreeError, npub, repo, branch }, 'Could not create worktree, trying git grep with tree reference');
      // Fall through to the tree-reference search below.
    }

    const searchQuery = query.trim();
    let matches: Array<{ file: string; line: number; content: string }>;

    if (worktreePath && existsSync(worktreePath)) {
      // Search the worktree checkout directly; strip the worktree prefix so
      // file paths are relative to the repo root.
      matches = await runGitGrepForCode(
        simpleGit(worktreePath),
        ['grep', '-n', '-I', '--break', '--heading', searchQuery],
        limit,
        worktreePath
      );
    } else {
      // Fallback for bare repos: grep against a tree reference.
      let treeRef = branch;
      if (branch === 'HEAD') {
        try {
          const branchInfo = await git.branch(['-a']);
          treeRef = branchInfo.current || 'HEAD';
        } catch {
          treeRef = 'HEAD';
        }
      }
      matches = await runGitGrepForCode(
        git,
        ['grep', '-n', '-I', '--break', '--heading', searchQuery, treeRef],
        limit,
        null
      );
    }

    // Note: the previous `branch === 'HEAD' ? 'HEAD' : branch` was a no-op.
    return matches.map(m => ({ repo, npub, file: m.file, line: m.line, content: m.content, branch }));
  } catch (err) {
    logger.debug({ error: err, npub, repo, query }, 'Error searching in repo');
    return [];
  }
}

/**
 * Run `git grep` with the given args and parse its `--heading` output into
 * structured matches. With `--heading`, output alternates between a filename
 * line and `line:content` lines for that file.
 *
 * @param gitInstance - simple-git instance rooted where the grep should run.
 * @param gitArgs     - Full raw argument list (starting with 'grep').
 * @param limit       - Stop after this many matches.
 * @param stripPrefix - When set, removed from file paths (worktree absolute
 *                      prefix) so results are relative to the repo root.
 * @returns Parsed matches; exit code 1 (no matches) yields an empty array.
 * @throws Re-throws any git error other than the "no matches" exit code 1.
 */
async function runGitGrepForCode(
  gitInstance: ReturnType<typeof simpleGit>,
  gitArgs: string[],
  limit: number,
  stripPrefix: string | null
): Promise<Array<{ file: string; line: number; content: string }>> {
  let grepOutput: string;
  try {
    grepOutput = await gitInstance.raw(gitArgs);
  } catch (grepError: unknown) {
    // git grep returns exit code 1 when no matches are found — not an error.
    if (grepError instanceof Error && grepError.message.includes('exit code 1')) {
      return [];
    }
    throw grepError;
  }
  if (!grepOutput || !grepOutput.trim()) {
    return [];
  }
  const matches: Array<{ file: string; line: number; content: string }> = [];
  let currentFile = '';
  for (const line of grepOutput.split('\n')) {
    if (!line.trim()) {
      continue;
    }
    // A line without a colon is a --heading filename (assumes file names
    // themselves contain no ':' — same assumption as the original parser).
    if (!line.includes(':')) {
      currentFile = line.trim();
      continue;
    }
    const colonIndex = line.indexOf(':');
    if (colonIndex > 0 && currentFile) {
      const lineNumber = parseInt(line.substring(0, colonIndex), 10);
      const content = line.substring(colonIndex + 1);
      if (!isNaN(lineNumber) && content) {
        const file = stripPrefix
          ? currentFile.replace(stripPrefix + '/', '').replace(/^\.\//, '')
          : currentFile;
        matches.push({ file, line: lineNumber, content: content.trim() });
        if (matches.length >= limit) {
          return matches;
        }
      }
    }
  }
  return matches;
}

7
src/routes/api/user/level/+server.ts

@ -17,6 +17,7 @@ import { extractRequestContext } from '$lib/utils/api-context.js'; @@ -17,6 +17,7 @@ import { extractRequestContext } from '$lib/utils/api-context.js';
import { sanitizeError } from '$lib/utils/security.js';
import { verifyEvent } from 'nostr-tools';
import logger from '$lib/services/logger.js';
import { triggerRepoPoll } from '$lib/utils/repo-poll-trigger.js';
export const POST: RequestHandler = async (event) => {
const requestContext = extractRequestContext(event);
@ -143,6 +144,12 @@ export const POST: RequestHandler = async (event) => { @@ -143,6 +144,12 @@ export const POST: RequestHandler = async (event) => {
// Cache the successful verification
cacheUserLevel(userPubkeyHex, 'unlimited');
// Trigger a repo poll to provision repos now that user is verified
// This is non-blocking - we don't wait for it to complete
triggerRepoPoll('user-verification').catch((err) => {
logger.warn({ error: err, userPubkeyHex }, 'Failed to trigger poll after user verification (non-blocking)');
});
auditLogger.logAuth(
userPubkeyHex,
clientIp,

48
src/routes/docs/[slug]/+page.svelte

@ -45,9 +45,29 @@ @@ -45,9 +45,29 @@
});
// Convert relative markdown links to docs routes
rendered = rendered.replace(/<a href="\.\/([^"]+\.md)"/g, (match, file) => {
const slug = file.replace('.md', '');
return `<a href="/docs/${slug}"`;
// Handle various link formats:
// - ./file.md -> /docs/file
// - file.md -> /docs/file
// - /file.md -> /docs/file (though this shouldn't happen in markdown)
rendered = rendered.replace(/<a href="([^"]*\.md)"/g, (match, file) => {
// Remove leading ./ or / if present
const cleanFile = file.replace(/^\.\//, '').replace(/^\//, '');
const slug = cleanFile.replace(/\.md$/, '');
// Only process if it's a relative link (not already starting with /docs or http)
if (!slug.startsWith('docs/') && !slug.startsWith('http')) {
return `<a href="/docs/${slug}"`;
}
return match; // Return original if already processed or external
});
// Also handle links with anchors: ./file.md#section -> /docs/file#section
rendered = rendered.replace(/<a href="([^"]*\.md)(#[^"]*)"/g, (match, file, anchor) => {
const cleanFile = file.replace(/^\.\//, '').replace(/^\//, '');
const slug = cleanFile.replace(/\.md$/, '');
if (!slug.startsWith('docs/') && !slug.startsWith('http')) {
return `<a href="/docs/${slug}${anchor}"`;
}
return match;
});
content = rendered;
@ -66,15 +86,31 @@ @@ -66,15 +86,31 @@
if (markdownContent) {
markdownContent.addEventListener('click', (e) => {
const target = e.target as HTMLElement;
if (target.tagName === 'A' && target.getAttribute('href')?.startsWith('#')) {
const id = target.getAttribute('href')?.substring(1);
if (id) {
if (target.tagName === 'A') {
const href = target.getAttribute('href');
if (!href) return;
// Handle anchor links
if (href.startsWith('#')) {
const id = href.substring(1);
const element = document.getElementById(id);
if (element) {
e.preventDefault();
element.scrollIntoView({ behavior: 'smooth', block: 'start' });
window.history.pushState(null, '', `#${id}`);
}
return;
}
// Handle .md file links that weren't converted properly
if (href.endsWith('.md') && !href.startsWith('/docs/') && !href.startsWith('http')) {
e.preventDefault();
// Remove leading ./ or / if present, then remove .md extension
const cleanHref = href.replace(/^\.\//, '').replace(/^\//, '');
const slug = cleanHref.replace(/\.md$/, '');
// Navigate to docs route
window.location.href = `/docs/${slug}`;
return;
}
}
});

29
src/routes/repos/+page.svelte

@ -308,7 +308,7 @@ @@ -308,7 +308,7 @@
}
}
async function loadRepos() {
async function loadRepos(triggerPoll = false) {
loading = true;
error = null;
@ -357,6 +357,31 @@ @@ -357,6 +357,31 @@
loadForkCounts(registeredRepos.map(r => r.event)).catch(err => {
console.warn('[RepoList] Failed to load some fork counts:', err);
});
// If triggerPoll is true, trigger a poll and then refresh the list
if (triggerPoll) {
try {
// Trigger poll (non-blocking)
const pollResponse = await fetch('/api/repos/poll', {
method: 'POST',
headers: userPubkeyHex ? {
'X-User-Pubkey': userPubkeyHex
} : {}
});
if (pollResponse.ok) {
// Wait a bit for the poll to process (lazy - don't wait for full completion)
// Give it 2-3 seconds to provision repos
await new Promise(resolve => setTimeout(resolve, 2500));
// Refresh the list after poll
await loadRepos(false);
}
} catch (pollErr) {
// Don't fail the whole operation if poll fails
console.warn('[RepoList] Failed to trigger poll:', pollErr);
}
}
} catch (e) {
error = String(e);
console.error('[RepoList] Failed to load repos:', e);
@ -777,7 +802,7 @@ @@ -777,7 +802,7 @@
<div class="repos-header">
<h2>Repositories on {$page.data.gitDomain || 'localhost:6543'}</h2>
<button onclick={loadRepos} disabled={loading}>
<button onclick={() => loadRepos(true)} disabled={loading}>
{loading ? 'Loading...' : 'Refresh'}
</button>
</div>

9
src/routes/repos/[npub]/[repo]/+page.svelte

@ -407,7 +407,7 @@ @@ -407,7 +407,7 @@
const loadCloneUrlReachability = (forceRefresh = false) => loadCloneUrlReachabilityService(forceRefresh, state, repoCloneUrls);
const loadForkInfo = async () => {
try {
const response = await fetch(`/api/repos/${state.npub}/${state.repo}/fork`, { headers: buildApiHeaders() });
const response = await fetch(`/api/repos/${state.npub}/${state.repo}/forks`, { headers: buildApiHeaders() });
if (response.ok) state.fork.info = await response.json();
} catch (err) {
console.error('Error loading fork info:', err);
@ -1603,9 +1603,9 @@ @@ -1603,9 +1603,9 @@
// Pre-fill download URL with full URL
if (typeof window !== 'undefined') {
const origin = window.location.origin;
state.forms.release.downloadUrl = `${origin}/api/repos/${state.npub}/${state.repo}/download?ref=${encodeURIComponent(tagName)}&format=zip`;
state.forms.release.downloadUrl = `${origin}/api/repos/${state.npub}/${state.repo}/archive?ref=${encodeURIComponent(tagName)}&format=zip`;
} else {
state.forms.release.downloadUrl = `/api/repos/${state.npub}/${state.repo}/download?ref=${encodeURIComponent(tagName)}&format=zip`;
state.forms.release.downloadUrl = `/api/repos/${state.npub}/${state.repo}/archive?ref=${encodeURIComponent(tagName)}&format=zip`;
}
state.openDialog = 'createRelease';
}}
@ -1704,11 +1704,10 @@ @@ -1704,11 +1704,10 @@
}
try {
const response = await fetch(`/api/repos/${state.npub}/${state.repo}/prs`, {
const response = await fetch(`/api/repos/${state.npub}/${state.repo}/pull-requests/${id}`, {
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
prId: id,
prAuthor: pr.author,
status
})

45
src/routes/repos/[npub]/[repo]/components/DocsTab.svelte

@ -65,6 +65,13 @@ @@ -65,6 +65,13 @@
}
});
// Ensure README is always selected by default if available
$effect(() => {
if (hasReadme && !selectedDoc) {
selectedDoc = 'README.md';
}
});
async function loadDocumentation() {
loading = true;
loadingDocs = true;
@ -78,7 +85,8 @@ @@ -78,7 +85,8 @@
try {
logger.operation('Loading documentation', { npub, repo, branch: currentBranch });
// Load README FIRST and display immediately
// ALWAYS load README FIRST and display immediately if available
// README is standard documentation and should always be shown
try {
const readmeResponse = await fetch(`/api/repos/${npub}/${repo}/readme?ref=${currentBranch || 'HEAD'}`);
if (readmeResponse.ok) {
@ -96,9 +104,9 @@ @@ -96,9 +104,9 @@
logger.debug({ error: readmeErr, npub, repo }, 'No README found');
}
// Now check for docs folder in the background
// Now check for docs folder in the background (but don't replace README)
try {
const response = await fetch(`/api/repos/${npub}/${repo}/tree?ref=${currentBranch || 'HEAD'}&path=docs`);
const response = await fetch(`/api/repos/${npub}/${repo}/files?action=tree&ref=${currentBranch || 'HEAD'}&path=docs`);
if (response.ok) {
const data = await response.json();
const docsFiles = Array.isArray(data) ? data : (data.files || []);
@ -121,6 +129,7 @@ @@ -121,6 +129,7 @@
}
// Load Nostr documentation events (kind 30818, 30041, 30817, 30023)
// These are additional docs, but README should still be shown if available
await loadNostrDocumentation();
} catch (err) {
@ -141,7 +150,7 @@ @@ -141,7 +150,7 @@
documentationTitle = null;
indexEvent = null;
const response = await fetch(`/api/repos/${npub}/${repo}/raw?path=${encodeURIComponent(path)}&ref=${currentBranch || 'HEAD'}`);
const response = await fetch(`/api/repos/${npub}/${repo}/files?path=${encodeURIComponent(path)}&format=raw&ref=${currentBranch || 'HEAD'}`);
if (response.ok) {
const content = await response.text();
documentationContent = content;
@ -165,6 +174,30 @@ @@ -165,6 +174,30 @@
}
} catch (err) {
logger.warn({ error: err, path }, 'Failed to load doc file');
// If loading a doc file fails and we have a README, fall back to README
if (hasReadme) {
await loadReadme();
}
}
}
// Helper function to reload README
async function loadReadme() {
try {
const readmeResponse = await fetch(`/api/repos/${npub}/${repo}/readme?ref=${currentBranch || 'HEAD'}`);
if (readmeResponse.ok) {
const readmeData = await readmeResponse.json();
if (readmeData.content) {
documentationContent = readmeData.content;
documentationKind = readmeData.type || 'markdown';
selectedDoc = 'README.md';
documentationTitle = null;
indexEvent = null;
logger.debug({ npub, repo }, 'README reloaded');
}
}
} catch (readmeErr) {
logger.debug({ error: readmeErr, npub, repo }, 'Failed to reload README');
}
}
@ -305,7 +338,7 @@ @@ -305,7 +338,7 @@
// Clear any Nostr doc state first
documentationTitle = null;
indexEvent = null;
loadDocumentation();
loadReadme();
}}
>
README.md
@ -342,7 +375,7 @@ @@ -342,7 +375,7 @@
</li>
{/each}
{/if}
{#if !hasReadme && docFiles.length === 0 && nostrDocs.length === 0}
{#if !hasReadme && docFiles.length === 0 && nostrDocs.length === 0 && !indexEvent}
<div class="empty-sidebar">
<p>No documentation files found</p>
</div>

2
src/routes/repos/[npub]/[repo]/components/DocsViewer.svelte

@ -54,7 +54,7 @@ @@ -54,7 +54,7 @@
imagePath = normalizedPath.join('/');
// Build API URL
const apiUrl = `/api/repos/${npub}/${repo}/raw?path=${encodeURIComponent(imagePath)}&ref=${encodeURIComponent(branch)}`;
const apiUrl = `/api/repos/${npub}/${repo}/files?path=${encodeURIComponent(imagePath)}&format=raw&ref=${encodeURIComponent(branch)}`;
return `<img${before} src="${apiUrl}"${after}>`;
});

2
src/routes/repos/[npub]/[repo]/components/dialogs/CreateReleaseDialog.svelte

@ -27,7 +27,7 @@ @@ -27,7 +27,7 @@
</label>
<label>
Download URL (optional):
<input type="url" bind:value={state.forms.release.downloadUrl} placeholder="/api/repos/.../download?ref=..." />
<input type="url" bind:value={state.forms.release.downloadUrl} placeholder="/api/repos/.../archive?ref=..." />
<small class="field-hint">Pre-filled with the ZIP download URL for this tag. You can change it if needed.</small>
</label>
<label>

6
src/routes/repos/[npub]/[repo]/hooks/use-repo-api.ts

@ -29,7 +29,7 @@ export async function loadFiles(options: LoadFilesOptions): Promise<Array<{ name @@ -29,7 +29,7 @@ export async function loadFiles(options: LoadFilesOptions): Promise<Array<{ name
try {
logger.operation('Loading files', { npub, repo, branch, path });
const url = `/api/repos/${npub}/${repo}/tree?ref=${branch}${path ? `&path=${encodeURIComponent(path)}` : ''}`;
const url = `/api/repos/${npub}/${repo}/files?action=tree&ref=${branch}${path ? `&path=${encodeURIComponent(path)}` : ''}`;
const response = await fetch(url, {
headers: buildApiHeaders()
});
@ -56,7 +56,7 @@ export async function loadFile(options: LoadFileOptions): Promise<{ content: str @@ -56,7 +56,7 @@ export async function loadFile(options: LoadFileOptions): Promise<{ content: str
try {
logger.operation('Loading file', { npub, repo, branch, filePath });
const url = `/api/repos/${npub}/${repo}/raw?path=${encodeURIComponent(filePath)}&ref=${branch}`;
const url = `/api/repos/${npub}/${repo}/files?path=${encodeURIComponent(filePath)}&format=raw&ref=${branch}`;
const response = await fetch(url, {
headers: buildApiHeaders()
});
@ -162,7 +162,7 @@ export async function loadPRs(npub: string, repo: string): Promise<Array<any>> { @@ -162,7 +162,7 @@ export async function loadPRs(npub: string, repo: string): Promise<Array<any>> {
try {
logger.operation('Loading PRs', { npub, repo });
const response = await fetch(`/api/repos/${npub}/${repo}/prs`, {
const response = await fetch(`/api/repos/${npub}/${repo}/pull-requests`, {
headers: buildApiHeaders()
});

2
src/routes/repos/[npub]/[repo]/services/branch-operations.ts

@ -131,7 +131,7 @@ export async function loadBranches( @@ -131,7 +131,7 @@ export async function loadBranches(
// Fetch the actual default branch from the API
try {
const defaultBranchData = await apiRequest<{ defaultBranch?: string; branch?: string }>(
`/api/repos/${state.npub}/${state.repo}/default-branch`
`/api/repos/${state.npub}/${state.repo}/branches/default`
);
state.git.defaultBranch = defaultBranchData.defaultBranch || defaultBranchData.branch || null;
} catch (err) {

5
src/routes/repos/[npub]/[repo]/services/code-search-operations.ts

@ -27,9 +27,8 @@ export async function performCodeSearch( @@ -27,9 +27,8 @@ export async function performCodeSearch(
: '';
// For "All Repositories", don't pass repo filter - let it search all repos
const url = state.codeSearch.scope === 'repo'
? `/api/repos/${state.npub}/${state.repo}/code-search?q=${encodeURIComponent(state.codeSearch.query.trim())}${branchParam}`
: `/api/code-search?q=${encodeURIComponent(state.codeSearch.query.trim())}`;
const repoParam = state.codeSearch.scope === 'repo' ? `&repo=${encodeURIComponent(`${state.npub}/${state.repo}`)}` : '';
const url = `/api/search?type=code&q=${encodeURIComponent(state.codeSearch.query.trim())}${repoParam}${branchParam}`;
const data = await apiRequest<Array<any>>(url);
state.codeSearch.results = Array.isArray(data) ? data : [];

27
src/routes/repos/[npub]/[repo]/services/commit-operations.ts

@ -25,16 +25,33 @@ export async function loadCommitHistory( @@ -25,16 +25,33 @@ export async function loadCommitHistory(
const url = `/api/repos/${state.npub}/${state.repo}/commits?branch=${encodeURIComponent(branch)}&limit=50`;
console.log('[loadCommitHistory] Fetching commits:', { url, branch, currentBranch: state.git.currentBranch, defaultBranch: state.git.defaultBranch });
const data = await apiRequest<Array<{
const response = await apiRequest<Array<{
hash?: string;
sha?: string;
message?: string;
author?: string;
date?: string;
files?: string[];
}>>(url);
}> | { commitCount?: number; data?: Array<any> }>(url);
console.log('[loadCommitHistory] Received data:', { commitCount: data?.length || 0, data });
// Handle both array and object response formats
// API should return array, but handle object wrappers like { data: [] } or { commits: [] }
let data: Array<any>;
if (Array.isArray(response)) {
data = response;
} else if (response && typeof response === 'object') {
// Try common wrapper formats
data = (response as any).data || (response as any).commits || [];
} else {
data = [];
}
console.log('[loadCommitHistory] Received response:', {
responseType: Array.isArray(response) ? 'array' : typeof response,
responseKeys: typeof response === 'object' && response !== null ? Object.keys(response) : [],
commitCount: data?.length || 0,
data
});
// Normalize commits: API-based commits use 'sha', local commits use 'hash'
state.git.commits = data.map((commit: any) => ({
@ -85,7 +102,7 @@ export async function verifyCommit( @@ -85,7 +102,7 @@ export async function verifyCommit(
authorEmail?: string;
timestamp?: number;
eventId?: string;
}>(`/api/repos/${state.npub}/${state.repo}/commits/${commitHash}/verify`);
}>(`/api/repos/${state.npub}/${state.repo}/commits/${commitHash}/verification`);
// Only update verification if there's actually a signature
// If hasSignature is false or undefined, don't set verification at all
@ -127,7 +144,7 @@ export async function viewDiff( @@ -127,7 +144,7 @@ export async function viewDiff(
additions: number;
deletions: number;
diff: string;
}>>(`/api/repos/${state.npub}/${state.repo}/diff?from=${parentHash}&to=${commitHash}`);
}>>(`/api/repos/${state.npub}/${state.repo}/diffs?from=${parentHash}&to=${commitHash}`);
state.git.diffData = diffData;
state.git.showDiff = true;

37
src/routes/repos/[npub]/[repo]/services/file-operations.ts

@ -72,8 +72,7 @@ export async function saveFile( @@ -72,8 +72,7 @@ export async function saveFile(
}
}
await apiPost(`/api/repos/${state.npub}/${state.repo}/file`, {
path: state.files.currentFile,
await apiPost(`/api/repos/${state.npub}/${state.repo}/files?path=${encodeURIComponent(state.files.currentFile)}`, {
content: state.files.editedContent,
commitMessage: state.forms.commit.message.trim(),
authorName: authorName,
@ -150,14 +149,12 @@ export async function createFile( @@ -150,14 +149,12 @@ export async function createFile(
}
}
await apiPost(`/api/repos/${state.npub}/${state.repo}/file`, {
path: filePath,
await apiPost(`/api/repos/${state.npub}/${state.repo}/files?path=${encodeURIComponent(filePath)}`, {
content: state.forms.file.content,
commitMessage: commitMsg,
authorName: authorName,
authorEmail: authorEmail,
branch: state.git.currentBranch,
action: 'create',
userPubkey: state.user.pubkey,
commitSignatureEvent: commitSignatureEvent
});
@ -232,15 +229,16 @@ export async function deleteFile( @@ -232,15 +229,16 @@ export async function deleteFile(
}
}
await apiPost(`/api/repos/${state.npub}/${state.repo}/file`, {
path: filePath,
commitMessage: commitMsg,
authorName: authorName,
authorEmail: authorEmail,
branch: state.git.currentBranch,
action: 'delete',
userPubkey: state.user.pubkey,
commitSignatureEvent: commitSignatureEvent
await apiRequest(`/api/repos/${state.npub}/${state.repo}/files?path=${encodeURIComponent(filePath)}`, {
method: 'DELETE',
body: JSON.stringify({
commitMessage: commitMsg,
authorName: authorName,
authorEmail: authorEmail,
branch: state.git.currentBranch,
userPubkey: state.user.pubkey,
commitSignatureEvent: commitSignatureEvent
})
});
// Clear current file if it was deleted
@ -423,7 +421,7 @@ export async function loadFiles( @@ -423,7 +421,7 @@ export async function loadFiles(
}
const data = await apiRequest<Array<{ name: string; path: string; type: 'file' | 'directory'; size?: number }>>(
`/api/repos/${state.npub}/${state.repo}/tree?ref=${encodeURIComponent(branchName)}&path=${encodeURIComponent(path)}`
`/api/repos/${state.npub}/${state.repo}/files?action=tree&ref=${encodeURIComponent(branchName)}&path=${encodeURIComponent(path)}`
);
state.files.list = data;
@ -555,7 +553,7 @@ export async function loadFile( @@ -555,7 +553,7 @@ export async function loadFile(
if (state.preview.file.isImage) {
// For image files, construct the raw file URL and skip loading text content
state.preview.file.imageUrl = `/api/repos/${state.npub}/${state.repo}/raw?path=${encodeURIComponent(filePath)}&ref=${encodeURIComponent(branchName)}`;
state.preview.file.imageUrl = `/api/repos/${state.npub}/${state.repo}/files?path=${encodeURIComponent(filePath)}&format=raw&ref=${encodeURIComponent(branchName)}`;
state.files.content = ''; // Clear content for images
state.files.editedContent = ''; // Clear edited content for images
state.preview.file.html = ''; // Clear HTML for images
@ -568,7 +566,7 @@ export async function loadFile( @@ -568,7 +566,7 @@ export async function loadFile(
state.preview.file.imageUrl = null;
const data = await apiRequest<{ content: string }>(
`/api/repos/${state.npub}/${state.repo}/file?path=${encodeURIComponent(filePath)}&ref=${encodeURIComponent(branchName)}`
`/api/repos/${state.npub}/${state.repo}/files?path=${encodeURIComponent(filePath)}&ref=${encodeURIComponent(branchName)}`
);
state.files.content = data.content;
@ -712,10 +710,9 @@ export async function autoSaveFile( @@ -712,10 +710,9 @@ export async function autoSaveFile(
}
}
await apiPost(`/api/repos/${state.npub}/${state.repo}/file`, {
path: state.files.currentFile,
await apiPost(`/api/repos/${state.npub}/${state.repo}/files?path=${encodeURIComponent(state.files.currentFile)}`, {
content: state.files.editedContent,
message: autoCommitMessage,
commitMessage: autoCommitMessage,
authorName: authorName,
authorEmail: authorEmail,
branch: state.git.currentBranch,

2
src/routes/repos/[npub]/[repo]/services/pr-operations.ts

@ -34,7 +34,7 @@ export async function loadPRs( @@ -34,7 +34,7 @@ export async function loadPRs(
created_at: number;
commitId?: string;
kind?: number;
}>>(`/api/repos/${state.npub}/${state.repo}/prs`);
}>>(`/api/repos/${state.npub}/${state.repo}/pull-requests`);
state.prs = data.map((pr) => ({
id: pr.id,

12
src/routes/repos/[npub]/[repo]/services/repo-operations.ts

@ -263,7 +263,7 @@ export async function forkRepository( @@ -263,7 +263,7 @@ export async function forkRepository(
error?: string;
details?: string;
eventName?: string;
}>(`/api/repos/${state.npub}/${state.repo}/fork`, {
}>(`/api/repos/${state.npub}/${state.repo}/forks`, {
userPubkey: state.user.pubkey,
localOnly
});
@ -463,7 +463,7 @@ export async function checkVerification( @@ -463,7 +463,7 @@ export async function checkVerification(
error?: string;
message?: string;
cloneVerifications?: Array<{ url: string; verified: boolean; ownerPubkey: string | null; error?: string }>;
}>(`/api/repos/${state.npub}/${state.repo}/verify`);
}>(`/api/repos/${state.npub}/${state.repo}/verification`);
console.log('[Verification] Response:', data);
state.verification.status = {
@ -576,7 +576,7 @@ export async function loadForkInfo( @@ -576,7 +576,7 @@ export async function loadForkInfo(
npub: string;
repo: string;
};
}>(`/api/repos/${state.npub}/${state.repo}/fork`);
}>(`/api/repos/${state.npub}/${state.repo}/forks`);
if (data.isFork && data.originalRepo) {
state.fork.info = {
@ -787,8 +787,8 @@ export async function saveAnnouncementToRepo( @@ -787,8 +787,8 @@ export async function saveAnnouncementToRepo(
state.creating.announcement = true;
state.error = null;
// Use the existing verify endpoint which saves and commits the announcement
const data = await apiRequest<{ message?: string; announcementId?: string }>(`/api/repos/${state.npub}/${state.repo}/verify`, {
// Use the existing verification endpoint which saves and commits the announcement
const data = await apiRequest<{ message?: string; announcementId?: string }>(`/api/repos/${state.npub}/${state.repo}/verification`, {
method: 'POST'
} as RequestInit);
@ -845,7 +845,7 @@ export async function verifyCloneUrl( @@ -845,7 +845,7 @@ export async function verifyCloneUrl(
state.error = null;
try {
const data = await apiRequest<{ message?: string }>(`/api/repos/${state.npub}/${state.repo}/verify`, {
const data = await apiRequest<{ message?: string }>(`/api/repos/${state.npub}/${state.repo}/verification`, {
method: 'POST'
} as RequestInit);

2
src/routes/repos/[npub]/[repo]/utils/download.ts

@ -71,7 +71,7 @@ export async function downloadRepository(options: DownloadOptions): Promise<void @@ -71,7 +71,7 @@ export async function downloadRepository(options: DownloadOptions): Promise<void
params.set('ref', ref);
}
params.set('format', 'zip');
const downloadUrl = `/api/repos/${npub}/${repo}/download?${params.toString()}`;
const downloadUrl = `/api/repos/${npub}/${repo}/archive?${params.toString()}`;
logger.info({ url: downloadUrl, ref }, '[Download] Starting download');

2
src/routes/repos/[npub]/[repo]/utils/file-processing.ts

@ -181,7 +181,7 @@ export function rewriteImagePaths( @@ -181,7 +181,7 @@ export function rewriteImagePaths(
// Build API URL if npub, repo, and branch are provided
if (npub && repo) {
const ref = branch || 'HEAD';
const apiUrl = `/api/repos/${npub}/${repo}/raw?path=${encodeURIComponent(imagePath)}&ref=${encodeURIComponent(ref)}`;
const apiUrl = `/api/repos/${npub}/${repo}/files?path=${encodeURIComponent(imagePath)}&format=raw&ref=${encodeURIComponent(ref)}`;
const before = beforeAttrs ? beforeAttrs.trim() : '';
return `<img${before ? ' ' + before : ''} src="${apiUrl}"${afterAttrs}>`;
}

Loading…
Cancel
Save