Browse Source

bug-fixes

Nostr-Signature: e4db3dfc316a2ae2edebbe25fef26a917710b9066df4ee168f83f3342359cdb5 573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc a3f3ab6208ef1438778ffb399fdb40e0d8317812987e1360f896b106ab2a948b2d9878d23033c8d3c2a4fe70c5f2fa215ed81962162d94c1ec79767d5a4ebca5
main
Silberengel 2 weeks ago
parent
commit
f93c409355
  1. 1
      nostr/commit-signatures.jsonl
  2. 251
      package-lock.json
  3. 37
      src/app.css
  4. 154
      src/lib/components/CodeEditor.svelte
  5. 38
      src/lib/services/git/repo-manager.ts
  6. 15
      src/lib/services/nostr/maintainer-service.ts
  7. 1
      src/lib/styles/repo.css
  8. 48
      src/lib/utils/input-validation.ts
  9. 138
      src/routes/api/repos/[npub]/[repo]/clone/+server.ts
  10. 23
      src/routes/api/repos/[npub]/[repo]/delete/+server.ts
  11. 466
      src/routes/api/repos/[npub]/[repo]/forks/+server.ts
  12. 175
      src/routes/api/repos/list/+server.ts
  13. 164
      src/routes/api/repos/local/+server.ts
  14. 39
      src/routes/repos/+page.svelte
  15. 27
      src/routes/repos/[npub]/[repo]/+page.svelte
  16. 49
      src/routes/repos/[npub]/[repo]/+page.ts
  17. 4
      src/routes/repos/[npub]/[repo]/components/TabLayout.svelte
  18. 11
      src/routes/repos/[npub]/[repo]/hooks/use-repo-effects.ts
  19. 109
      src/routes/repos/[npub]/[repo]/read-announcement-from-fs.ts
  20. 67
      src/routes/repos/[npub]/[repo]/services/repo-operations.ts
  21. 192
      src/routes/signup/+page.svelte
  22. 2
      vite.config.ts

1
nostr/commit-signatures.jsonl

@@ -130,3 +130,4 @@
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772302842,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"5c4b680a04363718d8de6aa05b824d30417221a9095be57bb9a7c2cf01c5af59","sig":"51ffa554e83a6a3c4ca97cffc7eca67e770ca822e43e9e78692bafcd63401c4df84e1fe030592e63982b509d3cfa8bfbd57c6b4257661b0f43adedef335c7575"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772303976,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fix"]],"content":"Signed commit: bug-fix","id":"a8e7a4f38f815abaa8cc807e43da842cc4715ff41e722ee6657cae57915b753e","sig":"9c427e839796099f8fdfc0dc4a6f4500ecb1835dbf62cf0b1d3dbe8f98c98a1bc7b28266cbc7a993a18f905ec6e57c610e10492c88e7f863319cefb2eab58fdc"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772305338,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","allow forks"]],"content":"Signed commit: allow forks","id":"47f1aa9a47f4488a9babf752466bb2e4cb7974bd67aa827a4b70c57bac839750","sig":"3652f31ee120f894f7dbb04bb2e625dc2f97758b9fe99fa24aa0efe23872851aceb6f460d2c479bbfc9aa495b5c6d993b2ec7d65e21c90ee8ca6a9f23bfac498"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772305971,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","correct search page"]],"content":"Signed commit: correct search page","id":"2a93ec13a9ae177dfd3f4b59cfc7341e9a3a073367b43976ea161802efc76c44","sig":"7821315a6b3b5761c8938fd9b247db0f3344336fbf706b1fb5921ee5645b0f77298cb41a96cd5d1afa8e05c25eb7d64d69d42de585cdd016ad1425ca5b1f4772"}

251
package-lock.json generated

@@ -1358,9 +1358,9 @@
}
},
"node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz",
"integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz",
"integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==",
"cpu": [
"arm"
],
@@ -1371,9 +1371,9 @@
]
},
"node_modules/@rollup/rollup-android-arm64": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz",
"integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz",
"integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==",
"cpu": [
"arm64"
],
@@ -1384,9 +1384,9 @@
]
},
"node_modules/@rollup/rollup-darwin-arm64": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.1.tgz",
"integrity": "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz",
"integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==",
"cpu": [
"arm64"
],
@@ -1397,9 +1397,9 @@
]
},
"node_modules/@rollup/rollup-darwin-x64": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz",
"integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz",
"integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==",
"cpu": [
"x64"
],
@@ -1410,9 +1410,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-arm64": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz",
"integrity": "sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz",
"integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==",
"cpu": [
"arm64"
],
@@ -1423,9 +1423,9 @@
]
},
"node_modules/@rollup/rollup-freebsd-x64": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz",
"integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz",
"integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==",
"cpu": [
"x64"
],
@@ -1436,9 +1436,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-gnueabihf": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz",
"integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz",
"integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==",
"cpu": [
"arm"
],
@@ -1449,9 +1449,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm-musleabihf": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz",
"integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz",
"integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==",
"cpu": [
"arm"
],
@@ -1462,9 +1462,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-gnu": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz",
"integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz",
"integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==",
"cpu": [
"arm64"
],
@@ -1475,9 +1475,9 @@
]
},
"node_modules/@rollup/rollup-linux-arm64-musl": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz",
"integrity": "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz",
"integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==",
"cpu": [
"arm64"
],
@@ -1488,9 +1488,9 @@
]
},
"node_modules/@rollup/rollup-linux-loong64-gnu": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz",
"integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz",
"integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==",
"cpu": [
"loong64"
],
@@ -1501,9 +1501,9 @@
]
},
"node_modules/@rollup/rollup-linux-loong64-musl": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz",
"integrity": "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz",
"integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==",
"cpu": [
"loong64"
],
@@ -1514,9 +1514,9 @@
]
},
"node_modules/@rollup/rollup-linux-ppc64-gnu": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz",
"integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz",
"integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==",
"cpu": [
"ppc64"
],
@@ -1527,9 +1527,9 @@
]
},
"node_modules/@rollup/rollup-linux-ppc64-musl": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz",
"integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz",
"integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==",
"cpu": [
"ppc64"
],
@@ -1540,9 +1540,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-gnu": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz",
"integrity": "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz",
"integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==",
"cpu": [
"riscv64"
],
@@ -1553,9 +1553,9 @@
]
},
"node_modules/@rollup/rollup-linux-riscv64-musl": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz",
"integrity": "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz",
"integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==",
"cpu": [
"riscv64"
],
@@ -1566,9 +1566,9 @@
]
},
"node_modules/@rollup/rollup-linux-s390x-gnu": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz",
"integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz",
"integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==",
"cpu": [
"s390x"
],
@@ -1579,9 +1579,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-gnu": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz",
"integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz",
"integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==",
"cpu": [
"x64"
],
@@ -1592,9 +1592,9 @@
]
},
"node_modules/@rollup/rollup-linux-x64-musl": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz",
"integrity": "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz",
"integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==",
"cpu": [
"x64"
],
@@ -1605,9 +1605,9 @@
]
},
"node_modules/@rollup/rollup-openbsd-x64": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz",
"integrity": "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz",
"integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==",
"cpu": [
"x64"
],
@@ -1618,9 +1618,9 @@
]
},
"node_modules/@rollup/rollup-openharmony-arm64": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz",
"integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz",
"integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==",
"cpu": [
"arm64"
],
@@ -1631,9 +1631,9 @@
]
},
"node_modules/@rollup/rollup-win32-arm64-msvc": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz",
"integrity": "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz",
"integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==",
"cpu": [
"arm64"
],
@@ -1644,9 +1644,9 @@
]
},
"node_modules/@rollup/rollup-win32-ia32-msvc": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz",
"integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz",
"integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==",
"cpu": [
"ia32"
],
@@ -1657,9 +1657,9 @@
]
},
"node_modules/@rollup/rollup-win32-x64-gnu": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz",
"integrity": "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz",
"integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==",
"cpu": [
"x64"
],
@@ -1670,9 +1670,9 @@
]
},
"node_modules/@rollup/rollup-win32-x64-msvc": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz",
"integrity": "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz",
"integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==",
"cpu": [
"x64"
],
@@ -1750,9 +1750,9 @@
}
},
"node_modules/@sveltejs/kit": {
"version": "2.52.0",
"resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.52.0.tgz",
"integrity": "sha512-zG+HmJuSF7eC0e7xt2htlOcEMAdEtlVdb7+gAr+ef08EhtwUsjLxcAwBgUCJY3/5p08OVOxVZti91WfXeuLvsg==",
"version": "2.53.4",
"resolved": "https://registry.npmjs.org/@sveltejs/kit/-/kit-2.53.4.tgz",
"integrity": "sha512-iAIPEahFgDJJyvz8g0jP08KvqnM6JvdW8YfsygZ+pMeMvyM2zssWMltcsotETvjSZ82G3VlitgDtBIvpQSZrTA==",
"license": "MIT",
"dependencies": {
"@standard-schema/spec": "^1.0.0",
@@ -1760,12 +1760,11 @@
"@types/cookie": "^0.6.0",
"acorn": "^8.14.1",
"cookie": "^0.6.0",
"devalue": "^5.6.2",
"devalue": "^5.6.3",
"esm-env": "^1.2.2",
"kleur": "^4.1.5",
"magic-string": "^0.30.5",
"mrmime": "^2.0.0",
"sade": "^1.8.1",
"set-cookie-parser": "^3.0.0",
"sirv": "^3.0.0"
},
@@ -1777,10 +1776,10 @@
},
"peerDependencies": {
"@opentelemetry/api": "^1.0.0",
"@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0",
"@sveltejs/vite-plugin-svelte": "^3.0.0 || ^4.0.0-next.1 || ^5.0.0 || ^6.0.0-next.0 || ^7.0.0",
"svelte": "^4.0.0 || ^5.0.0-next.0",
"typescript": "^5.3.3",
"vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0"
"vite": "^5.0.3 || ^6.0.0 || ^7.0.0-beta.0 || ^8.0.0"
},
"peerDependenciesMeta": {
"@opentelemetry/api": {
@@ -2219,9 +2218,9 @@
"license": "Python-2.0"
},
"node_modules/aria-query": {
"version": "5.3.2",
"resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.2.tgz",
"integrity": "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw==",
"version": "5.3.1",
"resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.1.tgz",
"integrity": "sha512-Z/ZeOgVl7bcSYZ/u/rh0fOpvEpq//LZmdbkXyc7syVzjPAhfOa9ebsdTSjEBDU4vs5nC98Kfduj1uFo0qyET3g==",
"license": "Apache-2.0",
"engines": {
"node": ">= 0.4"
@@ -2652,9 +2651,9 @@
}
},
"node_modules/devalue": {
"version": "5.6.2",
"resolved": "https://registry.npmjs.org/devalue/-/devalue-5.6.2.tgz",
"integrity": "sha512-nPRkjWzzDQlsejL1WVifk5rvcFi/y1onBRxjaFMjZeR9mFpqu2gmAZ9xUB9/IEanEP/vBtGeGganC/GO1fmufg==",
"version": "5.6.3",
"resolved": "https://registry.npmjs.org/devalue/-/devalue-5.6.3.tgz",
"integrity": "sha512-nc7XjUU/2Lb+SvEFVGcWLiKkzfw8+qHI7zn8WYXKkLMgfGSHbgCEaR6bJpev8Cm6Rmrb19Gfd/tZvGqx9is3wg==",
"license": "MIT"
},
"node_modules/dir-glob": {
@@ -3903,15 +3902,15 @@
}
},
"node_modules/minimatch": {
"version": "10.2.1",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.1.tgz",
"integrity": "sha512-MClCe8IL5nRRmawL6ib/eT4oLyeKMGCghibcDWK+J0hh0Q8kqSdia6BvbRMVk6mPa6WqUa5uR2oxt6C5jd533A==",
"version": "10.2.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
"integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
"license": "BlueOak-1.0.0",
"dependencies": {
"brace-expansion": "^5.0.2"
},
"engines": {
"node": "20 || >=22"
"node": "18 || 20 || >=22"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
@@ -3943,6 +3942,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz",
"integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=4"
@@ -4657,9 +4657,9 @@
}
},
"node_modules/rollup": {
"version": "4.57.1",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz",
"integrity": "sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==",
"version": "4.59.0",
"resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz",
"integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==",
"license": "MIT",
"dependencies": {
"@types/estree": "1.0.8"
@@ -4672,31 +4672,31 @@
"npm": ">=8.0.0"
},
"optionalDependencies": {
"@rollup/rollup-android-arm-eabi": "4.57.1",
"@rollup/rollup-android-arm64": "4.57.1",
"@rollup/rollup-darwin-arm64": "4.57.1",
"@rollup/rollup-darwin-x64": "4.57.1",
"@rollup/rollup-freebsd-arm64": "4.57.1",
"@rollup/rollup-freebsd-x64": "4.57.1",
"@rollup/rollup-linux-arm-gnueabihf": "4.57.1",
"@rollup/rollup-linux-arm-musleabihf": "4.57.1",
"@rollup/rollup-linux-arm64-gnu": "4.57.1",
"@rollup/rollup-linux-arm64-musl": "4.57.1",
"@rollup/rollup-linux-loong64-gnu": "4.57.1",
"@rollup/rollup-linux-loong64-musl": "4.57.1",
"@rollup/rollup-linux-ppc64-gnu": "4.57.1",
"@rollup/rollup-linux-ppc64-musl": "4.57.1",
"@rollup/rollup-linux-riscv64-gnu": "4.57.1",
"@rollup/rollup-linux-riscv64-musl": "4.57.1",
"@rollup/rollup-linux-s390x-gnu": "4.57.1",
"@rollup/rollup-linux-x64-gnu": "4.57.1",
"@rollup/rollup-linux-x64-musl": "4.57.1",
"@rollup/rollup-openbsd-x64": "4.57.1",
"@rollup/rollup-openharmony-arm64": "4.57.1",
"@rollup/rollup-win32-arm64-msvc": "4.57.1",
"@rollup/rollup-win32-ia32-msvc": "4.57.1",
"@rollup/rollup-win32-x64-gnu": "4.57.1",
"@rollup/rollup-win32-x64-msvc": "4.57.1",
"@rollup/rollup-android-arm-eabi": "4.59.0",
"@rollup/rollup-android-arm64": "4.59.0",
"@rollup/rollup-darwin-arm64": "4.59.0",
"@rollup/rollup-darwin-x64": "4.59.0",
"@rollup/rollup-freebsd-arm64": "4.59.0",
"@rollup/rollup-freebsd-x64": "4.59.0",
"@rollup/rollup-linux-arm-gnueabihf": "4.59.0",
"@rollup/rollup-linux-arm-musleabihf": "4.59.0",
"@rollup/rollup-linux-arm64-gnu": "4.59.0",
"@rollup/rollup-linux-arm64-musl": "4.59.0",
"@rollup/rollup-linux-loong64-gnu": "4.59.0",
"@rollup/rollup-linux-loong64-musl": "4.59.0",
"@rollup/rollup-linux-ppc64-gnu": "4.59.0",
"@rollup/rollup-linux-ppc64-musl": "4.59.0",
"@rollup/rollup-linux-riscv64-gnu": "4.59.0",
"@rollup/rollup-linux-riscv64-musl": "4.59.0",
"@rollup/rollup-linux-s390x-gnu": "4.59.0",
"@rollup/rollup-linux-x64-gnu": "4.59.0",
"@rollup/rollup-linux-x64-musl": "4.59.0",
"@rollup/rollup-openbsd-x64": "4.59.0",
"@rollup/rollup-openharmony-arm64": "4.59.0",
"@rollup/rollup-win32-arm64-msvc": "4.59.0",
"@rollup/rollup-win32-ia32-msvc": "4.59.0",
"@rollup/rollup-win32-x64-gnu": "4.59.0",
"@rollup/rollup-win32-x64-msvc": "4.59.0",
"fsevents": "~2.3.2"
}
},
@@ -4728,6 +4728,7 @@
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz",
"integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==",
"dev": true,
"license": "MIT",
"dependencies": {
"mri": "^1.1.0"
@@ -5029,9 +5030,9 @@
}
},
"node_modules/svelte": {
"version": "5.51.2",
"resolved": "https://registry.npmjs.org/svelte/-/svelte-5.51.2.tgz",
"integrity": "sha512-AqApqNOxVS97V4Ko9UHTHeSuDJrwauJhZpLDs1gYD8Jk48ntCSWD7NxKje+fnGn5Ja1O3u2FzQZHPdifQjXe3w==",
"version": "5.53.6",
"resolved": "https://registry.npmjs.org/svelte/-/svelte-5.53.6.tgz",
"integrity": "sha512-lP5DGF3oDDI9fhHcSpaBiJEkFLuS16h92DhM1L5K1lFm0WjOmUh1i2sNkBBk8rkxJRpob0dBE75jRfUzGZUOGA==",
"license": "MIT",
"dependencies": {
"@jridgewell/remapping": "^2.3.4",
@@ -5040,10 +5041,10 @@
"@types/estree": "^1.0.5",
"@types/trusted-types": "^2.0.7",
"acorn": "^8.12.1",
"aria-query": "^5.3.1",
"aria-query": "5.3.1",
"axobject-query": "^4.1.0",
"clsx": "^2.1.1",
"devalue": "^5.6.2",
"devalue": "^5.6.3",
"esm-env": "^1.2.1",
"esrap": "^2.2.2",
"is-reference": "^3.0.3",

37
src/app.css

@@ -2118,3 +2118,40 @@ html[data-theme="light"] .swagger-ui .responses-wrapper pre.microlight {
background: #0f172a !important; /* Darker slate background for light mode (slate-900) */
color: #e2e8f0 !important; /* Light text for dark background */
}
/* Fix scrolling issue on signup page - ensure page can scroll to bottom */
/* Override any height constraints that might prevent scrolling */
/* Apply to all screen sizes, not just mobile */
html {
overflow-y: auto !important;
height: auto !important;
min-height: 100%;
}
body {
overflow-y: auto !important;
height: auto !important;
min-height: 100vh;
position: relative;
}
/* Ensure container allows full scrolling on all pages */
.container,
.container-narrow,
.container-wide {
min-height: auto !important;
height: auto !important;
max-height: none !important;
overflow: visible !important;
padding-bottom: 3rem; /* Extra padding at bottom to ensure content is accessible */
}
.container main,
.container-narrow main,
.container-wide main {
min-height: auto !important;
height: auto !important;
max-height: none !important;
overflow: visible !important;
padding-bottom: 2rem;
}

154
src/lib/components/CodeEditor.svelte

@@ -102,10 +102,22 @@
switch (language) {
case 'markdown':
// markdown() already includes syntax highlighting - don't add defaultHighlightStyle
return [markdown()];
// Wrap in try-catch to handle parser errors gracefully
try {
return [markdown()];
} catch (err) {
console.warn('Error initializing markdown parser, falling back to plain text:', err);
// Fall back to plain text if markdown parser fails
return [];
}
case 'asciidoc':
// StreamLanguage includes its own highlighting - don't use defaultHighlightStyle with it
return [StreamLanguage.define(asciidoc)];
try {
return [StreamLanguage.define(asciidoc)];
} catch (err) {
console.warn('Error initializing asciidoc parser, falling back to plain text:', err);
return [];
}
default:
// Plain text - no syntax highlighting needed
return [];
@@ -156,15 +168,95 @@
}
onMount(() => {
const state = EditorState.create({
doc: content,
extensions: createExtensions()
});
// Ensure content is always a string
const safeContent = typeof content === 'string' ? content : '';
// Create extensions without language first to avoid parser errors during initialization
const baseExtensions: Extension[] = [
history(),
closeBrackets(),
autocompletion(),
highlightSelectionMatches(),
highlightField,
EditorView.lineWrapping,
keymap.of([
...closeBracketsKeymap,
...defaultKeymap,
...searchKeymap,
...historyKeymap,
...completionKeymap
]),
editableCompartment.of(EditorView.editable.of(!readOnly)),
EditorView.updateListener.of((update) => {
if (update.docChanged) {
const newContent = update.state.doc.toString();
onChange(newContent);
}
if (update.selectionSet) {
const selection = update.state.selection.main;
if (!selection.empty) {
const selectedText = update.state.doc.sliceString(selection.from, selection.to);
const startLine = update.state.doc.lineAt(selection.from);
const endLine = update.state.doc.lineAt(selection.to);
onSelection(
selectedText,
startLine.number,
endLine.number,
selection.from,
selection.to
);
}
}
})
];
try {
// Initialize with base extensions first (no language parser)
const state = EditorState.create({
doc: safeContent,
extensions: baseExtensions
});
editorView = new EditorView({
state,
parent: editorElement
});
editorView = new EditorView({
state,
parent: editorElement
});
// Now try to add language extension after editor is created
// This way if the parser fails, the editor still works
try {
const langExtensions = getLanguageExtension();
if (langExtensions.length > 0) {
editorView.dispatch({
effects: languageCompartment.reconfigure(langExtensions)
});
}
} catch (langErr) {
console.warn('Error adding language extension, using plain text:', langErr);
// Editor still works without syntax highlighting
}
} catch (err) {
console.error('Error initializing CodeMirror editor:', err);
// Try to initialize with minimal extensions if everything fails
try {
const state = EditorState.create({
doc: safeContent,
extensions: [
EditorView.lineWrapping,
EditorView.editable.of(!readOnly)
]
});
editorView = new EditorView({
state,
parent: editorElement
});
} catch (fallbackErr) {
console.error('Error initializing CodeMirror editor (fallback):', fallbackErr);
}
}
return () => {
editorView?.destroy();
@@ -179,15 +271,21 @@
$effect(() => {
if (!editorView) return;
const currentContent = editorView.state.doc.toString();
if (content !== currentContent) {
editorView.dispatch({
changes: {
from: 0,
to: editorView.state.doc.length,
insert: content
}
});
try {
const currentContent = editorView.state.doc.toString();
const safeContent = typeof content === 'string' ? content : '';
if (safeContent !== currentContent) {
editorView.dispatch({
changes: {
from: 0,
to: editorView.state.doc.length,
insert: safeContent
}
});
}
} catch (err) {
console.error('Error updating editor content:', err);
}
});
@@ -195,9 +293,21 @@
$effect(() => {
if (!editorView) return;
editorView.dispatch({
effects: languageCompartment.reconfigure(getLanguageExtension())
});
try {
editorView.dispatch({
effects: languageCompartment.reconfigure(getLanguageExtension())
});
} catch (err) {
console.error('Error updating language extension:', err);
// Fall back to plain text if language extension fails
try {
editorView.dispatch({
effects: languageCompartment.reconfigure([])
});
} catch (fallbackErr) {
console.error('Error falling back to plain text:', fallbackErr);
}
}
});
// Update editable state when readOnly prop changes

38
src/lib/services/git/repo-manager.ts

@@ -578,28 +578,40 @@ Your commits will all be signed by your Nostr keys and saved to the event files
const hasCommits = !isNaN(commitCount) && commitCount > 0;
if (hasCommits) {
// Repo has commits, check if it has an announcement
// If an announcement was explicitly provided (e.g., private fork), always save it
// even if the repo already has an announcement (the provided one might be different/updated)
if (announcementEvent) {
try {
await this.announcementManager.ensureAnnouncementInRepo(repoPath, announcementEvent);
logger.info({ repoPath, eventId: announcementEvent.id }, 'Saved provided announcement to existing repo');
return { success: true, announcement: announcementEvent };
} catch (err) {
logger.error({ error: err, repoPath, eventId: announcementEvent?.id },
'Failed to save provided announcement to repo');
// Don't fail the request - repo exists and can be accessed
return { success: true, announcement: announcementEvent };
}
}
// No announcement provided - check if repo has an announcement
const hasAnnouncement = await this.announcementManager.hasAnnouncementInRepoFile(repoPath);
if (hasAnnouncement) {
return { success: true };
}
// Repo has commits but no announcement - use provided announcement or try to fetch from relays
let announcementToUse: NostrEvent | null | undefined = announcementEvent;
if (!announcementToUse) {
const { requireNpubHex: requireNpubHexUtil } = await import('../../utils/npub-utils.js');
const repoOwnerPubkey = requireNpubHexUtil(npub);
announcementToUse = await this.announcementManager.fetchAnnouncementFromRelays(repoOwnerPubkey, repoName);
}
// Repo has commits but no announcement - try to fetch from relays
const { requireNpubHex: requireNpubHexUtil } = await import('../../utils/npub-utils.js');
const repoOwnerPubkey = requireNpubHexUtil(npub);
const fetchedAnnouncement = await this.announcementManager.fetchAnnouncementFromRelays(repoOwnerPubkey, repoName);
if (announcementToUse) {
// Save announcement to repo asynchronously (non-blocking)
this.announcementManager.ensureAnnouncementInRepo(repoPath, announcementToUse)
if (fetchedAnnouncement) {
// Fetched from relays - save asynchronously (non-blocking)
this.announcementManager.ensureAnnouncementInRepo(repoPath, fetchedAnnouncement)
.catch((err) => {
logger.warn({ error: err, repoPath, eventId: announcementToUse?.id },
logger.warn({ error: err, repoPath, eventId: fetchedAnnouncement?.id },
'Failed to save announcement to repo (non-blocking, announcement available from relays)');
});
return { success: true, announcement: announcementToUse };
return { success: true, announcement: fetchedAnnouncement };
}
// Repo has commits but no announcement found - needs announcement

15
src/lib/services/nostr/maintainer-service.ts

@ -102,8 +102,19 @@ export class MaintainerService { @@ -102,8 +102,19 @@ export class MaintainerService {
// Ownership is determined by what's checked into the git repository, not Nostr events
const { nip19 } = await import('nostr-tools');
const npub = nip19.npubEncode(announcement.pubkey);
const { fileManager } = await import('../../services/service-registry.js');
const currentOwner = await fileManager.getCurrentOwnerFromRepo(npub, repoId) || announcement.pubkey;
let currentOwner = announcement.pubkey; // Default to announcement pubkey
try {
const { fileManager } = await import('../../services/service-registry.js');
const ownerFromRepo = await fileManager.getCurrentOwnerFromRepo(npub, repoId);
if (ownerFromRepo) {
currentOwner = ownerFromRepo;
}
} catch (err) {
// If repo doesn't exist or can't read owner, use announcement pubkey
// This is expected for repos that haven't been cloned yet
const logger = await getLogger();
logger.debug({ error: err, npub, repoId }, 'Could not get current owner from repo, using announcement pubkey');
}
const maintainers: string[] = [currentOwner]; // Current owner is always a maintainer
const contributors: string[] = []; // Contributors can view but not modify

1
src/lib/styles/repo.css

@ -178,6 +178,7 @@ @@ -178,6 +178,7 @@
display: flex;
flex-direction: column;
overflow: hidden;
min-height: 0; /* Allow flex shrinking */
}
.repo-layout {

48
src/lib/utils/input-validation.ts

@ -3,6 +3,54 @@ @@ -3,6 +3,54 @@
* Prevents injection attacks, path traversal, and other security issues
*/
/**
* Sanitize repository name for filesystem use
* Converts to lowercase, removes illegal characters, and normalizes
*/
export function sanitizeRepoNameForFilesystem(name: string): string {
if (!name || typeof name !== 'string') {
return '';
}
// Remove leading/trailing whitespace
let sanitized = name.trim();
// Convert to lowercase for filesystem compatibility
sanitized = sanitized.toLowerCase();
// Replace spaces and other illegal characters with hyphens
sanitized = sanitized.replace(/[\s_]+/g, '-');
// Remove any characters that aren't alphanumeric, hyphens, or dots
sanitized = sanitized.replace(/[^a-z0-9.-]/g, '');
// Remove consecutive dots and hyphens
sanitized = sanitized.replace(/\.{2,}/g, '.');
sanitized = sanitized.replace(/-{2,}/g, '-');
// Remove leading/trailing dots and hyphens
sanitized = sanitized.replace(/^[.-]+|[.-]+$/g, '');
// Ensure it doesn't start with a number (git convention)
if (/^[0-9]/.test(sanitized)) {
sanitized = 'repo-' + sanitized;
}
// Ensure minimum length
if (sanitized.length === 0) {
sanitized = 'repository';
}
// Truncate to max length
if (sanitized.length > 100) {
sanitized = sanitized.substring(0, 100);
// Remove trailing dots/hyphens after truncation
sanitized = sanitized.replace(/[.-]+$/, '');
}
return sanitized;
}
/**
* Validate and sanitize repository name
* Repository names should be alphanumeric with hyphens and underscores

138
src/routes/api/repos/[npub]/[repo]/clone/+server.ts

@ -51,8 +51,9 @@ export const POST: RequestHandler = async (event) => { @@ -51,8 +51,9 @@ export const POST: RequestHandler = async (event) => {
hasUnlimitedAccess: userLevel ? hasUnlimitedAccess(userLevel.level) : false
}, 'Checking user access level for clone operation');
// Extract defaultBranch from request body if present (before body is consumed)
// Extract defaultBranch and announcementEvent from request body if present (before body is consumed)
let preferredDefaultBranch: string | undefined;
let providedAnnouncementEvent: NostrEvent | undefined;
const contentType = event.request.headers.get('content-type') || '';
if (contentType.includes('application/json')) {
try {
@ -66,8 +67,13 @@ export const POST: RequestHandler = async (event) => { @@ -66,8 +67,13 @@ export const POST: RequestHandler = async (event) => {
preferredDefaultBranch = body.defaultBranch;
logger.debug({ preferredDefaultBranch }, 'Extracted defaultBranch from request body');
}
// Allow passing announcement event directly (useful for private repos not on relays)
if (body.announcementEvent && typeof body.announcementEvent === 'object') {
providedAnnouncementEvent = body.announcementEvent as NostrEvent;
logger.debug({ eventId: providedAnnouncementEvent.id }, 'Extracted announcementEvent from request body');
}
} catch {
// Not valid JSON or missing defaultBranch - continue
// Not valid JSON or missing fields - continue
}
}
} catch {
@ -214,8 +220,13 @@ export const POST: RequestHandler = async (event) => { @@ -214,8 +220,13 @@ export const POST: RequestHandler = async (event) => {
isEmpty = true;
}
// If repo is empty, we should still try to commit the announcement
if (!isEmpty) {
// If repo is not empty and we have a provided announcement event (e.g., private fork),
// we should still save the announcement to the repo
if (!isEmpty && providedAnnouncementEvent) {
logger.info({ npub, repo }, 'Repository exists but announcement provided - will save announcement to existing repo');
// Continue to save the announcement (don't return early)
} else if (!isEmpty && !providedAnnouncementEvent) {
// Repo exists and is not empty, and no announcement provided - return early
return json({
success: true,
message: 'Repository already exists locally',
@ -223,63 +234,80 @@ export const POST: RequestHandler = async (event) => { @@ -223,63 +234,80 @@ export const POST: RequestHandler = async (event) => {
});
}
// If empty, continue to fetch announcement and commit it
logger.info({ npub, repo }, 'Repository exists but is empty, will commit announcement');
if (isEmpty) {
logger.info({ npub, repo }, 'Repository exists but is empty, will commit announcement');
}
}
// Fetch repository announcement (case-insensitive)
// Note: Nostr d-tag filters are case-sensitive, so we fetch all announcements by the author
// and filter case-insensitively in JavaScript
logger.debug({ npub, repo, repoOwnerPubkey: repoOwnerPubkey.slice(0, 16) + '...' }, 'Fetching repository announcement from Nostr (case-insensitive)');
// Use provided announcement event if available (e.g., for private repos not on relays)
// Otherwise, fetch from Nostr relays
let announcementEvent: NostrEvent | null = null;
let authorAnnouncements: NostrEvent[];
try {
authorAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
logger.debug({
npub,
repo,
authorAnnouncementCount: authorAnnouncements.length,
eventIds: authorAnnouncements.map(e => e.id)
}, 'Fetched repository announcements by author');
} catch (err) {
logger.error({
error: err,
npub,
repo,
repoOwnerPubkey: repoOwnerPubkey.slice(0, 16) + '...'
}, 'Error fetching repository announcement from Nostr');
throw handleApiError(
err instanceof Error ? err : new Error(String(err)),
{ operation: 'cloneRepo', npub, repo },
'Failed to fetch repository announcement from Nostr relays. Please check that the repository exists and the relays are accessible.'
);
if (providedAnnouncementEvent) {
// Validate the provided announcement event
const dTag = providedAnnouncementEvent.tags.find(t => t[0] === 'd')?.[1];
if (dTag && dTag.toLowerCase() === repo.toLowerCase() && providedAnnouncementEvent.pubkey === repoOwnerPubkey) {
announcementEvent = providedAnnouncementEvent;
logger.info({ npub, repo, eventId: announcementEvent.id }, 'Using provided announcement event (likely private repo)');
} else {
logger.warn({ npub, repo, dTag, pubkey: providedAnnouncementEvent.pubkey, expectedPubkey: repoOwnerPubkey }, 'Provided announcement event does not match repo, will fetch from relays');
}
}
// Find the matching repo announcement (case-insensitive)
const announcementEvent = findRepoAnnouncement(authorAnnouncements, repo);
// If no valid announcement provided, fetch from Nostr relays
if (!announcementEvent) {
const dTags = authorAnnouncements
.map(e => e.tags.find(t => t[0] === 'd')?.[1])
.filter(Boolean);
logger.warn({
npub,
repo,
repoOwnerPubkey: repoOwnerPubkey.slice(0, 16) + '...',
authorAnnouncementCount: authorAnnouncements.length,
authorRepos: dTags,
searchedRepo: repo
}, 'Repository announcement not found in Nostr (case-insensitive search)');
logger.debug({ npub, repo, repoOwnerPubkey: repoOwnerPubkey.slice(0, 16) + '...' }, 'Fetching repository announcement from Nostr (case-insensitive)');
const errorMessage = authorAnnouncements.length > 0
? `Repository announcement not found in Nostr for ${npub}/${repo}. Found ${authorAnnouncements.length} other repository announcement(s) by this author. Please verify the repository name is correct.`
: `Repository announcement not found in Nostr for ${npub}/${repo}. Please verify that the repository exists and has been announced on Nostr relays.`;
throw handleValidationError(
errorMessage,
{ operation: 'cloneRepo', npub, repo }
);
let authorAnnouncements: NostrEvent[];
try {
authorAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, repoOwnerPubkey, eventCache);
logger.debug({
npub,
repo,
authorAnnouncementCount: authorAnnouncements.length,
eventIds: authorAnnouncements.map(e => e.id)
}, 'Fetched repository announcements by author');
} catch (err) {
logger.error({
error: err,
npub,
repo,
repoOwnerPubkey: repoOwnerPubkey.slice(0, 16) + '...'
}, 'Error fetching repository announcement from Nostr');
throw handleApiError(
err instanceof Error ? err : new Error(String(err)),
{ operation: 'cloneRepo', npub, repo },
'Failed to fetch repository announcement from Nostr relays. Please check that the repository exists and the relays are accessible.'
);
}
// Find the matching repo announcement (case-insensitive)
announcementEvent = findRepoAnnouncement(authorAnnouncements, repo);
if (!announcementEvent) {
const dTags = authorAnnouncements
.map(e => e.tags.find(t => t[0] === 'd')?.[1])
.filter(Boolean);
logger.warn({
npub,
repo,
repoOwnerPubkey: repoOwnerPubkey.slice(0, 16) + '...',
authorAnnouncementCount: authorAnnouncements.length,
authorRepos: dTags,
searchedRepo: repo
}, 'Repository announcement not found in Nostr (case-insensitive search)');
const errorMessage = authorAnnouncements.length > 0
? `Repository announcement not found in Nostr for ${npub}/${repo}. Found ${authorAnnouncements.length} other repository announcement(s) by this author. Please verify the repository name is correct.`
: `Repository announcement not found in Nostr for ${npub}/${repo}. Please verify that the repository exists and has been announced on Nostr relays.`;
throw handleValidationError(
errorMessage,
{ operation: 'cloneRepo', npub, repo }
);
}
}
// Extract and log clone URLs for debugging

23
src/routes/api/repos/[npub]/[repo]/delete/+server.ts

@ -15,6 +15,9 @@ import { auditLogger } from '$lib/services/security/audit-logger.js'; @@ -15,6 +15,9 @@ import { auditLogger } from '$lib/services/security/audit-logger.js';
import logger from '$lib/services/logger.js';
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js';
import { isAdmin } from '$lib/utils/admin-check.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { KIND } from '$lib/types/nostr.js';
import type { NostrFilter } from '$lib/types/nostr.js';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
@ -92,10 +95,28 @@ export const DELETE: RequestHandler = createRepoGetHandler( @@ -92,10 +95,28 @@ export const DELETE: RequestHandler = createRepoGetHandler(
}
try {
// Invalidate event cache for this repository announcement
// We invalidate by filter and pubkey to ensure all cache entries are cleared
// This prevents deleted repos from appearing in search results
// Invalidate by filter to catch any cache entries that might contain this repo
const repoFilter: NostrFilter = {
kinds: [KIND.REPO_ANNOUNCEMENT],
authors: [repoOwnerPubkey],
'#d': [repo]
};
eventCache.invalidate([repoFilter]);
logger.debug({ npub, repo, pubkey: repoOwnerPubkey.substring(0, 16) + '...' }, 'Invalidated event cache by filter');
// Also invalidate all events for this pubkey to be thorough
// (in case the repo name doesn't match exactly due to case sensitivity)
eventCache.invalidatePubkey(repoOwnerPubkey);
logger.debug({ npub, repo, pubkey: repoOwnerPubkey.substring(0, 16) + '...' }, 'Invalidated event cache for pubkey');
// Delete the repository directory
await rm(repoPath, { recursive: true, force: true });
// Clear cache
// Clear repo cache
repoCache.delete(RepoCache.repoExistsKey(npub, repo));
// Log successful deletion

466
src/routes/api/repos/[npub]/[repo]/forks/+server.ts

@ -19,16 +19,19 @@ import { nip19 } from 'nostr-tools'; @@ -19,16 +19,19 @@ import { nip19 } from 'nostr-tools';
import { signEventWithNIP07 } from '$lib/services/nostr/nip07-signer.js';
import { requireNpubHex, decodeNpubToHex } from '$lib/utils/npub-utils.js';
import { OwnershipTransferService } from '$lib/services/nostr/ownership-transfer-service.js';
import { existsSync } from 'fs';
import { existsSync, statSync } from 'fs';
import { rm } from 'fs/promises';
import { join, resolve } from 'path';
import simpleGit from 'simple-git';
import { validateRepoPath } from '$lib/utils/security.js';
import { sanitizeRepoNameForFilesystem } from '$lib/utils/input-validation.js';
import { ResourceLimits } from '$lib/services/security/resource-limits.js';
import { auditLogger } from '$lib/services/security/audit-logger.js';
import { ForkCountService } from '$lib/services/nostr/fork-count-service.js';
import { getCachedUserLevel } from '$lib/services/security/user-level-cache.js';
import { getCachedUserLevel, cacheUserLevel } from '$lib/services/security/user-level-cache.js';
import { hasUnlimitedAccess } from '$lib/utils/user-access.js';
import { verifyRelayWriteProof } from '$lib/services/nostr/relay-write-proof.js';
import { verifyEvent } from 'nostr-tools';
import logger from '$lib/services/logger.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
@ -140,7 +143,7 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -140,7 +143,7 @@ export const POST: RequestHandler = createRepoPostHandler(
async (context: RepoRequestContext, event: RequestEvent) => {
try {
const body = await event.request.json();
const { userPubkey, forkName, localOnly } = body;
const { userPubkey, forkName, localOnly, proofEvent, forkAnnouncementEvent, ownershipTransferEvent } = body;
if (!userPubkey) {
return error(401, 'Authentication required. Please provide userPubkey.');
@ -153,21 +156,112 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -153,21 +156,112 @@ export const POST: RequestHandler = createRepoPostHandler(
const userPubkeyHex = decodeNpubToHex(userPubkey) || userPubkey;
const userNpub = nip19.npubEncode(userPubkeyHex);
// Determine fork name
const forkRepoName = forkName || context.repo;
// Determine fork name and sanitize it for filesystem use
// The d-tag (repo identifier) must be filesystem-safe (lowercase, no spaces, etc.)
const rawForkName = forkName || context.repo;
const forkRepoName = sanitizeRepoNameForFilesystem(rawForkName);
if (!forkRepoName || forkRepoName.length === 0) {
return error(400, 'Invalid fork repository name. Please use only alphanumeric characters, hyphens, and dots.');
}
// Check if user has unlimited access
const userLevel = getCachedUserLevel(userPubkeyHex);
let userLevel = getCachedUserLevel(userPubkeyHex);
logger.debug({
userPubkeyHex: userPubkeyHex.substring(0, 16) + '...',
hasCachedLevel: !!userLevel,
cachedLevel: userLevel?.level,
cachedAt: userLevel?.cachedAt ? new Date(userLevel.cachedAt).toISOString() : null,
expiresAt: userLevel?.expiresAt ? new Date(userLevel.expiresAt).toISOString() : null,
hasProofEvent: !!proofEvent
}, '[Fork] Checking user level');
// If cache is empty and proof event is provided, try to verify and cache it
if (!userLevel && proofEvent) {
logger.info({
userPubkeyHex: userPubkeyHex.substring(0, 16) + '...',
proofEventKind: proofEvent.kind
}, '[Fork] Cache empty, attempting to verify proof event');
// Validate proof event structure
if (!proofEvent.kind || !proofEvent.pubkey || !proofEvent.created_at || !proofEvent.id) {
logger.warn({ userPubkeyHex: userPubkeyHex.substring(0, 16) + '...' }, '[Fork] Invalid proof event structure');
return error(400, 'Invalid proof event structure');
}
// Validate proof event signature
if (!verifyEvent(proofEvent)) {
logger.warn({ userPubkeyHex: userPubkeyHex.substring(0, 16) + '...' }, '[Fork] Invalid proof event signature');
return error(400, 'Invalid proof event signature');
}
// Verify pubkey matches
if (proofEvent.pubkey !== userPubkeyHex) {
logger.warn({
userPubkeyHex: userPubkeyHex.substring(0, 16) + '...',
proofPubkey: proofEvent.pubkey.substring(0, 16) + '...'
}, '[Fork] Proof event pubkey does not match user pubkey');
return error(400, 'Proof event pubkey does not match user pubkey');
}
// Verify relay write proof
const verification = await verifyRelayWriteProof(
proofEvent,
userPubkeyHex,
DEFAULT_NOSTR_RELAYS
);
if (verification.valid) {
// Cache the successful verification
cacheUserLevel(userPubkeyHex, 'unlimited');
userLevel = getCachedUserLevel(userPubkeyHex);
logger.info({
userPubkeyHex: userPubkeyHex.substring(0, 16) + '...',
relay: verification.relay
}, '[Fork] Proof verified, user level cached as unlimited');
} else if (verification.relayDown) {
logger.warn({
userPubkeyHex: userPubkeyHex.substring(0, 16) + '...',
error: verification.error
}, '[Fork] Relays down, cannot verify proof event');
return error(503, 'Relays are temporarily unavailable. Please try again later or verify your access first.');
} else {
// User is logged in but no write access - cache as rate_limited
cacheUserLevel(userPubkeyHex, 'rate_limited');
userLevel = getCachedUserLevel(userPubkeyHex);
logger.info({
userPubkeyHex: userPubkeyHex.substring(0, 16) + '...',
error: verification.error
}, '[Fork] Proof verification failed, user level cached as rate_limited');
}
}
if (!hasUnlimitedAccess(userLevel?.level)) {
const clientIp = event.request.headers.get('x-forwarded-for') || event.request.headers.get('x-real-ip') || 'unknown';
const reason = !userLevel
? 'User level not cached - please verify write access first'
: `User level is ${userLevel.level}, not unlimited`;
logger.warn({
userPubkeyHex: userPubkeyHex.substring(0, 16) + '...',
reason,
cachedLevel: userLevel?.level,
hadProofEvent: !!proofEvent
}, '[Fork] Access denied - user does not have unlimited access');
auditLogger.logRepoFork(
userPubkeyHex,
`${context.npub}/${context.repo}`,
`${userNpub}/${forkRepoName}`,
'failure',
'User does not have unlimited access'
reason
);
return error(403, 'Repository creation requires unlimited access. Please verify you can write to at least one default Nostr relay.');
const errorMessage = !userLevel
? 'Repository creation requires unlimited access. Please verify you can write to at least one default Nostr relay by visiting your profile or the signup page first, or provide a proof event in the request.'
: `Repository creation requires unlimited access. Your current access level is ${userLevel.level}. Please verify you can write to at least one default Nostr relay.`;
return error(403, errorMessage);
}
// Check resource limits
@ -184,17 +278,7 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -184,17 +278,7 @@ export const POST: RequestHandler = createRepoPostHandler(
return error(403, resourceCheck.reason || 'Resource limit exceeded');
}
// Check if original repo exists
const originalRepoPath = join(repoRoot, context.npub, `${context.repo}.git`);
const originalPathValidation = validateRepoPath(originalRepoPath, repoRoot);
if (!originalPathValidation.valid) {
return error(403, originalPathValidation.error || 'Invalid repository path');
}
if (!existsSync(originalRepoPath)) {
return error(404, 'Original repository not found');
}
// Get original repo announcement
// Get original repo announcement first (needed for clone URLs)
const allAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, originalOwnerPubkey, eventCache);
const originalAnnouncement = findRepoAnnouncement(allAnnouncements, context.repo);
@ -202,15 +286,93 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -202,15 +286,93 @@ export const POST: RequestHandler = createRepoPostHandler(
return error(404, 'Original repository announcement not found');
}
// Extract clone URLs from announcement
const { extractCloneUrls } = await import('$lib/utils/nostr-utils.js');
const allOriginalCloneUrls = extractCloneUrls(originalAnnouncement);
if (allOriginalCloneUrls.length === 0) {
return error(400, 'Original repository has no clone URLs available');
}
// Check if original repo exists locally (preferred for faster cloning)
const originalRepoPath = join(repoRoot, context.npub, `${context.repo}.git`);
const originalPathValidation = validateRepoPath(originalRepoPath, repoRoot);
const originalRepoExistsLocally = originalPathValidation.valid && existsSync(originalRepoPath);
logger.debug({
originalRepoExistsLocally,
cloneUrlCount: allOriginalCloneUrls.length,
npub: context.npub,
repo: context.repo
}, '[Fork] Checking original repository availability');
// Check if fork already exists
const forkRepoPath = join(repoRoot, userNpub, `${forkRepoName}.git`);
const forkPathValidation = validateRepoPath(forkRepoPath, repoRoot);
if (!forkPathValidation.valid) {
return error(403, forkPathValidation.error || 'Invalid fork repository path');
}
if (existsSync(forkRepoPath)) {
// Check if directory exists and is actually a directory (not a file)
let forkDirExists = false;
try {
if (existsSync(forkRepoPath)) {
const stats = statSync(forkRepoPath);
forkDirExists = stats.isDirectory();
if (!forkDirExists) {
logger.warn({
forkRepoPath,
userNpub,
forkRepoName
}, '[Fork] Path exists but is not a directory - removing and allowing fork creation');
// Remove the file and allow fork creation
await rm(forkRepoPath, { force: true, recursive: true });
}
}
} catch (err) {
logger.warn({
error: err,
forkRepoPath,
userNpub,
forkRepoName
}, '[Fork] Error checking fork directory existence');
// If we can't check, assume it doesn't exist and proceed
forkDirExists = false;
}
// If directory exists, fork already exists
if (forkDirExists) {
logger.warn({
forkRepoPath,
userNpub,
forkRepoName
}, '[Fork] Fork directory already exists');
return error(409, 'Fork already exists');
}
// Check if fork announcement exists but directory doesn't (orphaned announcement)
// In this case, we'll allow the fork to be created again
try {
const allForkAnnouncements = await fetchRepoAnnouncementsWithCache(nostrClient, userPubkeyHex, eventCache);
const existingForkAnnouncement = findRepoAnnouncement(allForkAnnouncements, forkRepoName);
if (existingForkAnnouncement && !forkDirExists) {
logger.info({
userNpub,
forkRepoName,
announcementId: existingForkAnnouncement.id
}, '[Fork] Fork announcement exists but directory is missing - allowing fork creation to proceed');
// Allow fork creation to proceed - the directory will be created
}
} catch (err) {
logger.warn({
error: err,
userNpub,
forkRepoName
}, '[Fork] Failed to check for existing fork announcement, proceeding with fork creation');
// Continue with fork creation even if announcement check fails
}
// Clone the repository
const clientIp = event.request.headers.get('x-forwarded-for') || event.request.headers.get('x-real-ip') || 'unknown';
@ -222,7 +384,87 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -222,7 +384,87 @@ export const POST: RequestHandler = createRepoPostHandler(
);
const git = simpleGit();
await git.clone(originalRepoPath, forkRepoPath, ['--bare']);
// Clone from local repo if available, otherwise clone from remote URL
if (originalRepoExistsLocally) {
logger.info({
source: 'local',
originalRepoPath,
forkRepoPath: forkRepoPath
}, '[Fork] Cloning from local repository');
await git.clone(originalRepoPath, forkRepoPath, ['--bare']);
} else {
// Clone from the first available clone URL
// Prefer HTTPS URLs, then SSH, then others
const httpsUrls = allOriginalCloneUrls.filter(url => url.startsWith('https://'));
const sshUrls = allOriginalCloneUrls.filter(url => url.startsWith('git@') || url.startsWith('ssh://'));
const otherUrls = allOriginalCloneUrls.filter(url => !url.startsWith('https://') && !url.startsWith('git@') && !url.startsWith('ssh://'));
const preferredUrls = [...httpsUrls, ...sshUrls, ...otherUrls];
const cloneUrl = preferredUrls[0];
if (!cloneUrl) {
return error(400, 'No valid clone URL available for the original repository');
}
logger.info({
source: 'remote',
cloneUrl,
forkRepoPath,
totalUrls: allOriginalCloneUrls.length
}, '[Fork] Cloning from remote repository');
try {
await git.clone(cloneUrl, forkRepoPath, ['--bare']);
} catch (cloneError) {
logger.error({
error: cloneError,
cloneUrl,
forkRepoPath
}, '[Fork] Failed to clone from remote URL');
// If first URL failed, try other URLs
let cloned = false;
for (let i = 1; i < preferredUrls.length && !cloned; i++) {
try {
logger.info({
attempt: i + 1,
cloneUrl: preferredUrls[i]
}, '[Fork] Trying alternative clone URL');
await git.clone(preferredUrls[i], forkRepoPath, ['--bare']);
cloned = true;
} catch (altError) {
logger.warn({
error: altError,
cloneUrl: preferredUrls[i]
}, '[Fork] Alternative clone URL also failed');
}
}
if (!cloned) {
return error(500, `Failed to clone repository from any available URL. Please ensure the repository is accessible and you have the necessary permissions.`);
}
}
}
// Get the HEAD commit from the cloned fork repository (this is the current last commit at fork time)
let forkHeadCommit: string | null = null;
try {
const forkGit = simpleGit(forkRepoPath);
const headCommit = await forkGit.revparse(['HEAD']);
if (headCommit && /^[0-9a-f]{40}$/i.test(headCommit.trim())) {
forkHeadCommit = headCommit.trim();
logger.info({
forkRepoPath,
headCommit: forkHeadCommit
}, '[Fork] Retrieved HEAD commit from cloned fork repository');
}
} catch (err) {
logger.warn({
error: err,
forkRepoPath
}, '[Fork] Failed to get HEAD commit from fork repository, will use original earliest commit');
}
// Invalidate resource limit cache
resourceLimits.invalidateCache(userNpub);
@ -237,19 +479,21 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -237,19 +479,21 @@ export const POST: RequestHandler = createRepoPostHandler(
const { getTorGitUrl } = await import('$lib/services/tor/hidden-service.js');
const torOnionUrl = await getTorGitUrl(userNpub, forkRepoName);
// Extract original clone URLs
const originalCloneUrls = originalAnnouncement.tags
.filter(t => t[0] === 'clone')
.flatMap(t => t.slice(1))
.filter(url => url && typeof url === 'string')
// Extract original clone URLs (excluding our domain and Tor URLs)
const originalCloneUrlsForFork = allOriginalCloneUrls
.filter(url => {
if (url.includes(gitDomain)) return false;
if (url.includes('.onion')) return false;
return true;
}) as string[];
});
const earliestCommitTag = originalAnnouncement.tags.find(t => t[0] === 'r' && t[2] === 'euc');
const earliestCommit = earliestCommitTag?.[1];
// For forks, use the HEAD commit of the forked repository as the earliest commit
// This represents the state of the repo at the time of forking
const earliestCommit = forkHeadCommit || (() => {
// Fallback to original earliest commit if we couldn't get HEAD
const earliestCommitTag = originalAnnouncement.tags.find(t => t[0] === 'r' && t[2] === 'euc');
return earliestCommitTag?.[1];
})();
// Get original repo name and description
const originalName = originalAnnouncement.tags.find(t => t[0] === 'name')?.[1] || context.repo;
@ -266,10 +510,10 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -266,10 +510,10 @@ export const POST: RequestHandler = createRepoPostHandler(
forkCloneUrls.push(torOnionUrl);
}
forkCloneUrls.push(...originalCloneUrls);
forkCloneUrls.push(...originalCloneUrlsForFork);
// Validate: If using localhost, require either Tor .onion URL or at least one other clone URL
if (isLocalhost && !torOnionUrl && originalCloneUrls.length === 0) {
if (isLocalhost && !torOnionUrl && originalCloneUrlsForFork.length === 0) {
return error(400, 'Cannot create fork with only localhost. The original repository must have at least one public clone URL, or you need to configure a Tor .onion address.');
}
@ -281,7 +525,7 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -281,7 +525,7 @@ export const POST: RequestHandler = createRepoPostHandler(
const originalRepoTag = `${KIND.REPO_ANNOUNCEMENT}:${originalOwnerPubkey}:${context.repo}`;
const tags: string[][] = [
['d', forkRepoName],
['name', `${originalName} (fork)`],
['name', originalName], // Don't append "(fork)" to the name
['description', `Fork of ${originalName}${originalDescription ? `: ${originalDescription}` : ''}`],
['clone', ...forkCloneUrls],
['relays', ...DEFAULT_NOSTR_RELAYS],
@ -309,146 +553,52 @@ export const POST: RequestHandler = createRepoPostHandler( @@ -309,146 +553,52 @@ export const POST: RequestHandler = createRepoPostHandler(
tags.push(['r', earliestCommit, 'euc']);
}
// Create fork announcement event
const forkAnnouncementTemplate = {
kind: KIND.REPO_ANNOUNCEMENT,
pubkey: userPubkeyHex,
created_at: Math.floor(Date.now() / 1000),
content: '',
tags
};
// Sign fork announcement
const signedForkAnnouncement = await signEventWithNIP07(forkAnnouncementTemplate);
const truncatedNpub = userNpub.length > 16 ? `${userNpub.slice(0, 12)}...` : userNpub;
const truncatedOriginalNpub = context.npub.length > 16 ? `${context.npub.slice(0, 12)}...` : context.npub;
const logContext = `[${truncatedOriginalNpub}/${context.repo}${truncatedNpub}/${forkRepoName}]`;
let publishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null;
let ownershipPublishResult: { success: string[]; failed: Array<{ relay: string; error: string }> } | null = null;
let signedOwnershipEvent: NostrEvent | null = null;
// Redirect to signup page with fork information pre-filled
// The signup page will handle signing and publishing the fork announcement
const signupUrl = new URL('/signup', event.url.origin);
signupUrl.searchParams.set('npub', userNpub);
signupUrl.searchParams.set('repo', forkRepoName);
signupUrl.searchParams.set('fork', 'true');
signupUrl.searchParams.set('forkOriginalRepo', originalRepoTag);
signupUrl.searchParams.set('forkName', originalName); // Don't append "(fork)" to the name
signupUrl.searchParams.set('forkDescription', `Fork of ${originalName}${originalDescription ? `: ${originalDescription}` : ''}`);
if (isLocalOnly) {
// Local-only fork: Skip publishing to Nostr relays
logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: true }, 'Creating local-only fork (not publishing to Nostr)');
publishResult = { success: [], failed: [] };
ownershipPublishResult = { success: [], failed: [] };
// Create synthetic ownership event
const ownershipService = new OwnershipTransferService([]);
const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName);
signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent);
} else {
// Public fork: Publish to Nostr relays
const { outbox } = await getUserRelays(userPubkeyHex, nostrClient);
const combinedRelays = combineRelays(outbox);
logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, relayCount: combinedRelays.length }, 'Starting fork process');
publishResult = await publishEventWithRetry(
signedForkAnnouncement,
combinedRelays,
'fork announcement',
3,
logContext
);
if (publishResult.success.length === 0) {
logger.error({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: publishResult.failed }, 'Fork announcement failed after all retries. Cleaning up repository.');
await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {});
const errorDetails = `All relays failed: ${publishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}`;
return json({
success: false,
error: 'Failed to publish fork announcement to relays after 3 attempts',
details: errorDetails,
eventName: 'fork announcement'
}, { status: 500 });
}
// Create and publish initial ownership proof
const ownershipService = new OwnershipTransferService(combinedRelays);
const initialOwnershipEvent = ownershipService.createInitialOwnershipEvent(userPubkeyHex, forkRepoName);
signedOwnershipEvent = await signEventWithNIP07(initialOwnershipEvent);
ownershipPublishResult = await publishEventWithRetry(
signedOwnershipEvent,
combinedRelays,
'ownership transfer event',
3,
logContext
);
if (ownershipPublishResult.success.length === 0) {
logger.error({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, failed: ownershipPublishResult.failed }, 'Ownership transfer event failed after all retries. Cleaning up repository.');
await rm(forkRepoPath, { recursive: true, force: true }).catch(() => {});
// Publish deletion request (NIP-09)
logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}` }, 'Publishing deletion request for invalid fork announcement...');
const deletionRequest = {
kind: KIND.DELETION_REQUEST,
pubkey: userPubkeyHex,
created_at: Math.floor(Date.now() / 1000),
content: 'Fork failed: ownership transfer event could not be published after 3 attempts. This announcement is invalid.',
tags: [
['a', `${KIND.REPO_ANNOUNCEMENT}:${userPubkeyHex}:${forkRepoName}`],
['k', KIND.REPO_ANNOUNCEMENT.toString()]
]
};
const signedDeletionRequest = await signEventWithNIP07(deletionRequest);
const deletionResult = await publishEventWithRetry(
signedDeletionRequest,
combinedRelays,
'deletion request',
3,
logContext
);
const errorDetails = `Fork is invalid without ownership proof. All relays failed: ${ownershipPublishResult.failed.map(f => `${f.relay}: ${f.error}`).join('; ')}. Deletion request ${deletionResult.success.length > 0 ? 'published' : 'failed to publish'}.`;
return json({
success: false,
error: 'Failed to publish ownership transfer event to relays after 3 attempts',
details: errorDetails,
eventName: 'ownership transfer event'
}, { status: 500 });
}
signupUrl.searchParams.set('localOnly', 'true');
}
// Provision the fork repo
logger.info({ operation: 'fork', originalRepo: `${context.npub}/${context.repo}`, forkRepo: `${userNpub}/${forkRepoName}`, localOnly: isLocalOnly }, 'Provisioning fork repository...');
await repoManager.provisionRepo(signedForkAnnouncement, signedOwnershipEvent || undefined, false);
// Add clone URLs as comma-separated list
if (forkCloneUrls.length > 0) {
signupUrl.searchParams.set('cloneUrls', forkCloneUrls.join(','));
}
// Add visibility
if (isLocalOnly || originalVisibility !== 'public') {
signupUrl.searchParams.set('visibility', isLocalOnly ? 'private' : originalVisibility);
}
// Add project relays
if (originalProjectRelays.length > 0) {
signupUrl.searchParams.set('projectRelays', originalProjectRelays.join(','));
}
// Add earliest commit
if (earliestCommit) {
signupUrl.searchParams.set('earliestCommit', earliestCommit);
}
logger.info({
operation: 'fork',
originalRepo: `${context.npub}/${context.repo}`,
forkRepo: `${userNpub}/${forkRepoName}`,
localOnly: isLocalOnly,
announcementId: signedForkAnnouncement.id,
ownershipTransferId: signedOwnershipEvent?.id,
announcementRelays: publishResult?.success.length || 0,
ownershipRelays: ownershipPublishResult?.success.length || 0
}, 'Fork completed successfully');
const message = isLocalOnly
? 'Local-only fork created successfully! This fork is private and only exists on this server.'
: `Repository forked successfully! Published to ${publishResult?.success.length || 0} relay(s) for announcement and ${ownershipPublishResult?.success.length || 0} relay(s) for ownership proof.`;
signupUrl: signupUrl.toString()
}, 'Fork repository cloned, redirecting to signup page for announcement publishing');
return json({
success: true,
redirect: signupUrl.toString(),
fork: {
npub: userNpub,
repo: forkRepoName,
url: forkGitUrl,
localOnly: isLocalOnly,
announcementId: signedForkAnnouncement.id,
ownershipTransferId: signedOwnershipEvent?.id,
publishedTo: isLocalOnly ? null : {
announcement: publishResult?.success.length || 0,
ownershipTransfer: ownershipPublishResult?.success.length || 0
}
localOnly: isLocalOnly
},
message
message: 'Fork repository created! Please sign and publish the fork announcement on the next page.'
});
} catch (err) {
return handleApiError(err, { operation: 'createFork', npub: context.npub, repo: context.repo }, 'Failed to fork repository');

175
src/routes/api/repos/list/+server.ts

@ -16,7 +16,11 @@ import logger from '$lib/services/logger.js'; @@ -16,7 +16,11 @@ import logger from '$lib/services/logger.js';
import type { NostrEvent } from '$lib/types/nostr.js';
import type { RequestEvent } from '@sveltejs/kit';
import { existsSync } from 'fs';
import { readdir, stat } from 'fs/promises';
import { join, resolve } from 'path';
import { eventCache } from '$lib/services/nostr/event-cache.js';
import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/utils/nostr-utils.js';
import { simpleGit } from 'simple-git';
const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS);
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
@ -32,20 +36,98 @@ interface RepoListItem { @@ -32,20 +36,98 @@ interface RepoListItem {
isRegistered: boolean; // Has this domain in clone URLs
}
/**
 * Read the latest repository announcement for `expectedPubkey` out of the
 * repo's committed `nostr/repo-events.jsonl` file.
 * Returns null when the repo, the file, or a valid announcement is missing.
 */
async function readAnnouncementFromFilesystem(npub: string, repoName: string, expectedPubkey: string): Promise<NostrEvent | null> {
  try {
    const repoPath = join(repoRoot, npub, `${repoName}.git`);
    if (!existsSync(repoPath)) {
      return null;
    }

    const git = simpleGit(repoPath);

    // List every commit that touched repo-events.jsonl, oldest first; the
    // final hash in the list is therefore the most recent one.
    const logOutput = await git.raw(['log', '--all', '--format=%H', '--reverse', '--', 'nostr/repo-events.jsonl']).catch(() => '');
    const hashes = logOutput.trim().split('\n').filter(Boolean);
    if (hashes.length === 0) {
      return null;
    }
    const newestCommit = hashes[hashes.length - 1];

    // Pull the file content straight out of git (bare repos have no worktree).
    const fileContent = await git.show([`${newestCommit}:nostr/repo-events.jsonl`]).catch(() => null);
    if (!fileContent) {
      return null;
    }

    // Walk the JSONL entries, keeping the newest announcement authored by the
    // expected pubkey. Malformed lines are skipped.
    let announcementEvent: NostrEvent | null = null;
    let latestTimestamp = 0;
    try {
      for (const line of fileContent.trim().split('\n').filter(Boolean)) {
        let entry;
        try {
          entry = JSON.parse(line);
        } catch {
          continue;
        }
        if (entry.type !== 'announcement' || !entry.event || !entry.timestamp) continue;
        if (entry.event.pubkey !== expectedPubkey) continue;
        if (entry.timestamp > latestTimestamp) {
          latestTimestamp = entry.timestamp;
          announcementEvent = entry.event;
        }
      }
    } catch (parseError) {
      logger.debug({ error: parseError, npub, repoName }, 'Failed to parse repo-events.jsonl');
      return null;
    }

    if (!announcementEvent) {
      return null;
    }

    // Reject announcements that do not pass the shared validation rules.
    const { validateAnnouncementEvent } = await import('$lib/services/nostr/repo-verification.js');
    const validation = validateAnnouncementEvent(announcementEvent, repoName);
    if (!validation.valid) {
      logger.debug({ error: validation.error, npub, repoName }, 'Announcement validation failed');
      return null;
    }

    return announcementEvent;
  } catch (error) {
    logger.debug({ error, npub, repoName }, 'Error reading announcement from filesystem');
    return null;
  }
}
export const GET: RequestHandler = async (event) => {
try {
const requestContext = extractRequestContext(event);
const userPubkey = requestContext.userPubkeyHex || null;
const gitDomain = event.url.searchParams.get('domain') || GIT_DOMAIN;
// Fetch all repository announcements
// Fetch all repository announcements from Nostr relays
const events = await nostrClient.fetchEvents([
{ kinds: [KIND.REPO_ANNOUNCEMENT], limit: 100 }
]);
const repos: RepoListItem[] = [];
const processedRepos = new Set<string>(); // Track processed repos to avoid duplicates
// Process each announcement
// Process each announcement from Nostr
for (const event of events) {
const cloneUrls = event.tags
.filter(t => t[0] === 'clone')
@ -110,6 +192,95 @@ export const GET: RequestHandler = async (event) => { @@ -110,6 +192,95 @@ export const GET: RequestHandler = async (event) => {
repoName: dTag,
isRegistered: hasDomain
});
processedRepos.add(`${npub}/${dTag}`);
}
// Also check filesystem for repos that might not be in Nostr (e.g., private forks)
// Scan local repos and check if they have announcements
if (existsSync(repoRoot)) {
try {
const userDirs = await readdir(repoRoot);
for (const userDir of userDirs) {
if (!userDir.startsWith('npub') || userDir.length < 60) continue;
try {
const userPath = join(repoRoot, userDir);
const stats = await stat(userPath);
if (!stats.isDirectory()) continue;
const repoFiles = await readdir(userPath);
for (const repoFile of repoFiles) {
if (!repoFile.endsWith('.git')) continue;
const repoName = repoFile.replace(/\.git$/, '');
const repoKey = `${userDir}/${repoName}`;
// Skip if already processed from Nostr
if (processedRepos.has(repoKey)) continue;
try {
// Decode npub to get pubkey
const decoded = nip19.decode(userDir);
if (decoded.type !== 'npub') continue;
const ownerPubkey = decoded.data as string;
// Try to read announcement from filesystem
const announcementFromRepo = await readAnnouncementFromFilesystem(userDir, repoName, ownerPubkey);
if (announcementFromRepo) {
// Check if repo has this domain in clone URLs
const cloneUrls = announcementFromRepo.tags
.filter(t => t[0] === 'clone')
.flatMap(t => t.slice(1))
.filter(url => url && typeof url === 'string');
const hasDomain = cloneUrls.some(url => url.includes(gitDomain));
// Check privacy
const isPrivate = announcementFromRepo.tags.some(t =>
(t[0] === 'private' && t[1] === 'true') ||
(t[0] === 't' && t[1] === 'private')
);
// Check if user can view this repo
let canView = false;
if (!isPrivate) {
canView = true; // Public repos are viewable by anyone
} else if (userPubkey) {
// Private repos require authentication
try {
canView = await maintainerService.canView(userPubkey, ownerPubkey, repoName);
} catch (err) {
logger.debug({ error: err, pubkey: ownerPubkey, repo: repoName }, 'Failed to check repo access for filesystem announcement');
canView = false;
}
}
// Only include repos the user can view
if (canView) {
repos.push({
event: announcementFromRepo,
npub: userDir,
repoName,
isRegistered: hasDomain
});
processedRepos.add(repoKey);
logger.debug({ npub: userDir, repo: repoName }, 'Added repo from filesystem to list');
}
}
} catch (err) {
logger.debug({ error: err, npub: userDir, repo: repoName }, 'Failed to process repo from filesystem');
}
}
} catch (err) {
logger.debug({ error: err, userDir }, 'Failed to read user directory');
}
}
} catch (err) {
logger.warn({ error: err }, 'Failed to scan filesystem for repos');
}
}
// Only return registered repos (repos with this domain in clone URLs)

164
src/routes/api/repos/local/+server.ts

@ -6,7 +6,7 @@ @@ -6,7 +6,7 @@
import { json } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { readdir, stat } from 'fs/promises';
import { join } from 'path';
import { join, resolve } from 'path';
import { existsSync } from 'fs';
import { NostrClient } from '$lib/services/nostr/nostr-client.js';
import { MaintainerService } from '$lib/services/nostr/maintainer-service.js';
@ -24,6 +24,94 @@ import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/util @@ -24,6 +24,94 @@ import { fetchRepoAnnouncementsWithCache, findRepoAnnouncement } from '$lib/util
const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS);
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
/**
 * Load the most recent repository announcement from the repo's committed
 * nostr/repo-events.jsonl file. Server-side only; returns null when nothing
 * usable can be found.
 */
async function readAnnouncementFromFilesystem(npub: string, repoName: string): Promise<NostrEvent | null> {
  // Bail out immediately if we are somehow running outside Node.
  if (typeof process === 'undefined' || typeof process.env === 'undefined') {
    return null;
  }

  try {
    const repoPath = join(repoRoot, npub, `${repoName}.git`);
    if (!existsSync(repoPath)) {
      return null;
    }

    // simple-git is imported lazily so this module stays client-safe.
    const { simpleGit } = await import('simple-git');
    const git = simpleGit(repoPath);

    // Commits touching the events file, oldest first — the tail of the list
    // is the newest commit.
    const logOutput = await git.raw(['log', '--all', '--format=%H', '--reverse', '--', 'nostr/repo-events.jsonl']).catch(() => '');
    const hashes = logOutput.trim().split('\n').filter(Boolean);
    if (hashes.length === 0) {
      return null;
    }
    const newestCommit = hashes[hashes.length - 1];

    // Read the blob directly from git (bare repos have no checked-out tree).
    const fileContent = await git.show([`${newestCommit}:nostr/repo-events.jsonl`]).catch(() => null);
    if (!fileContent) {
      return null;
    }

    // Scan the JSONL entries and keep the announcement with the highest
    // timestamp. Unparseable lines are silently skipped.
    let announcementEvent: NostrEvent | null = null;
    let latestTimestamp = 0;
    try {
      for (const line of fileContent.trim().split('\n').filter(Boolean)) {
        let entry;
        try {
          entry = JSON.parse(line);
        } catch {
          continue;
        }
        if (entry.type !== 'announcement' || !entry.event || !entry.timestamp) continue;
        if (entry.timestamp > latestTimestamp) {
          latestTimestamp = entry.timestamp;
          announcementEvent = entry.event;
        }
      }
    } catch (parseError) {
      logger.debug({ error: parseError, npub, repoName }, 'Failed to parse repo-events.jsonl');
      return null;
    }

    if (!announcementEvent) {
      return null;
    }

    // The announcement's d-tag must match the repo name (case-insensitively).
    const dTag = announcementEvent.tags.find(t => t[0] === 'd')?.[1];
    if (!dTag || dTag.toLowerCase() !== repoName.toLowerCase()) {
      logger.debug({ npub, repoName, dTag }, 'Announcement d-tag does not match repo name (case-insensitive)');
      return null;
    }

    // Run the shared structural validation before trusting the event.
    const { validateAnnouncementEvent } = await import('$lib/services/nostr/repo-verification.js');
    const validation = validateAnnouncementEvent(announcementEvent, repoName);
    if (!validation.valid) {
      logger.debug({ error: validation.error, npub, repoName }, 'Announcement validation failed');
      return null;
    }

    return announcementEvent;
  } catch (error) {
    logger.debug({ error, npub, repoName }, 'Error reading announcement from filesystem');
    return null;
  }
}
// Cache for local repo list (5 minute TTL)
interface CacheEntry {
repos: LocalRepoItem[];
@ -54,9 +142,11 @@ interface LocalRepoItem { @@ -54,9 +142,11 @@ interface LocalRepoItem {
isRegistered: boolean; // Has this domain in clone URLs
}
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
// Resolve GIT_REPO_ROOT to absolute path (handles both relative and absolute paths)
const repoRootEnv = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
const repoRoot = resolve(repoRootEnv);
/**
* Scan filesystem for local repositories
@ -197,9 +287,55 @@ async function enrichLocalRepos( @@ -197,9 +287,55 @@ async function enrichLocalRepos(
});
}
} else {
// No announcement found - only show if user is owner (for security)
// For now, skip repos without announcements
// In the future, we could allow owners to see their own repos
// No announcement found in relays - try reading from filesystem
// This is important for private forks that weren't published to relays
try {
const { fileManager } = await import('$lib/services/service-registry.js');
// Read announcement from repo-events.jsonl file
const announcementFromRepo = await readAnnouncementFromFilesystem(repo.npub, repo.repoName);
if (announcementFromRepo) {
// Check if registered (has domain in clone URLs)
const cloneUrls = announcementFromRepo.tags
.filter(t => t[0] === 'clone')
.flatMap(t => t.slice(1))
.filter(url => url && typeof url === 'string');
const hasDomain = cloneUrls.some(url => url.includes(gitDomain));
// Check privacy
const isPrivate = announcementFromRepo.tags.some(t =>
(t[0] === 'private' && t[1] === 'true') ||
(t[0] === 't' && t[1] === 'private')
);
// Check if user can view
let canView = false;
if (!isPrivate) {
canView = true;
} else if (userPubkey) {
try {
canView = await maintainerService.canView(userPubkey, pubkey, repo.repoName);
} catch (err) {
logger.warn({ error: err, pubkey, repo: repo.repoName }, 'Failed to check repo access for filesystem announcement');
canView = false;
}
}
// Only include repos user can view
if (canView) {
enriched.push({
...repo,
announcement: announcementFromRepo,
isRegistered: hasDomain
});
logger.debug({ npub: repo.npub, repo: repo.repoName }, 'Found announcement in filesystem for local repo');
}
}
} catch (err) {
logger.debug({ error: err, npub: repo.npub, repo: repo.repoName }, 'Failed to read announcement from filesystem');
// Continue - repo won't be included if no announcement found
}
}
} catch {
// Skip invalid repos
@ -234,10 +370,24 @@ export const GET: RequestHandler = async (event) => { @@ -234,10 +370,24 @@ export const GET: RequestHandler = async (event) => {
}
// Scan filesystem
const localRepos = await scanLocalRepos();
let localRepos: LocalRepoItem[] = [];
try {
localRepos = await scanLocalRepos();
} catch (scanError) {
logger.error({ error: scanError }, 'Failed to scan local repos, returning empty list');
// Return empty list instead of failing
return json([]);
}
// Enrich with announcements and filter by privacy
const enriched = await enrichLocalRepos(localRepos, userPubkey, gitDomain);
let enriched: LocalRepoItem[] = [];
try {
enriched = await enrichLocalRepos(localRepos, userPubkey, gitDomain);
} catch (enrichError) {
logger.error({ error: enrichError }, 'Failed to enrich local repos, returning scanned repos without announcements');
// Return repos without announcements rather than failing completely
enriched = localRepos;
}
// Filter out registered repos (they're in the main list)
const unregistered = enriched.filter(r => !r.isRegistered);

39
src/routes/repos/+page.svelte

@ -217,7 +217,7 @@ @@ -217,7 +217,7 @@
loadingMyRepos = true;
try {
// Fetch all repos where user is current owner
// Fetch all repos where user is current owner from Nostr
const ownerRepos = await nostrClient.fetchEvents([
{
kinds: [KIND.REPO_ANNOUNCEMENT],
@ -227,8 +227,9 @@ @@ -227,8 +227,9 @@
]);
const repos: Array<{ event: NostrEvent; npub: string; repoName: string; transferred?: boolean; currentOwner?: string }> = [];
const processedRepos = new Set<string>(); // Track processed repos to avoid duplicates
// Add repos where user is current owner
// Add repos where user is current owner from Nostr
for (const event of ownerRepos) {
const dTag = event.tags.find(t => t[0] === 'd')?.[1];
if (!dTag) continue;
@ -241,10 +242,44 @@ @@ -241,10 +242,44 @@
repoName: dTag,
transferred: false
});
processedRepos.add(`${npub}/${dTag}`);
} catch (err) {
console.warn('Failed to encode npub for repo:', err);
}
}
// Also check filesystem for private repos that weren't published to Nostr
try {
const gitDomain = $page.data.gitDomain || 'localhost:6543';
const localReposResponse = await fetch(`/api/repos/local?domain=${encodeURIComponent(gitDomain)}`, {
headers: userPubkeyHex ? {
'X-User-Pubkey': userPubkeyHex
} : {}
});
if (localReposResponse.ok) {
const localReposData = await localReposResponse.json();
const userNpub = nip19.npubEncode(userPubkeyHex);
// Filter for repos owned by the user
for (const localRepo of localReposData) {
if (localRepo.npub === userNpub && localRepo.announcement) {
const repoKey = `${localRepo.npub}/${localRepo.repoName}`;
if (!processedRepos.has(repoKey)) {
repos.push({
event: localRepo.announcement,
npub: localRepo.npub,
repoName: localRepo.repoName,
transferred: false
});
processedRepos.add(repoKey);
}
}
}
}
} catch (err) {
console.warn('Failed to load local repos for my repos:', err);
}
// Fetch repos that were transferred FROM this user (where they were original owner)
// Search for transfer events where this user is the 'from' pubkey

27
src/routes/repos/[npub]/[repo]/+page.svelte

@ -1387,7 +1387,30 @@ @@ -1387,7 +1387,30 @@
{/if}
<main class="repo-view">
{#if state.clone.isCloned === false && (canUseApiFallback || state.clone.apiFallbackAvailable === null)}
{#if state.repoNotFound}
<div class="repo-not-found">
<div class="not-found-content">
<img src="/icons/alert-circle.svg" alt="Not Found" class="not-found-icon" />
<h2>Repository Not Found</h2>
<p>The repository announcement for <strong>{state.repo}</strong> could not be found.</p>
<p>This could mean:</p>
<ul>
<li>The repository has not been announced on Nostr relays</li>
<li>The repository name is incorrect</li>
<li>The repository was deleted or removed</li>
<li>The Nostr relays are temporarily unavailable</li>
</ul>
<div class="not-found-actions">
<button onclick={() => goto('/repos')} class="button-primary">
Browse Repositories
</button>
<button onclick={() => goto('/search')} class="button-secondary">
Search Repositories
</button>
</div>
</div>
</div>
{:else if state.clone.isCloned === false && (canUseApiFallback || state.clone.apiFallbackAvailable === null)}
<div class="read-only-banner">
<div class="banner-content">
<img src="/icons/alert-circle.svg" alt="Info" class="banner-icon" />
@ -1412,7 +1435,7 @@ @@ -1412,7 +1435,7 @@
{/if}
<!-- Tabs - only show if we have repo data (header/clone section would be visible) -->
{#if repoOwnerPubkeyDerived}
{#if !state.repoNotFound && repoOwnerPubkeyDerived && repoAnnouncement}
<div class="repo-layout">
<!-- Files Tab -->
{#if state.ui.activeTab === 'files'}

49
src/routes/repos/[npub]/[repo]/+page.ts

@ -69,13 +69,45 @@ export const load: PageLoad = async ({ params, url, parent }) => { @@ -69,13 +69,45 @@ export const load: PageLoad = async ({ params, url, parent }) => {
});
if (matchingEvents.length === 0) {
return {
title: `${repo} - Repository Not Found`,
description: 'Repository announcement not found'
};
// Not found in Nostr relays - try reading from filesystem (server-side only)
// This is important for private forks that weren't published to relays
// Only attempt on server-side using SvelteKit's SSR flag
if (import.meta.env.SSR) {
try {
// Dynamic import to prevent client-side bundling
const fsModule = await import('./read-announcement-from-fs.js');
const announcementFromRepo = await fsModule.readAnnouncementFromFilesystem(npub, repo, repoOwnerPubkey);
if (announcementFromRepo) {
announcement = announcementFromRepo;
}
} catch (err) {
// If filesystem read fails, log on server-side but continue
// This is expected on client-side, so we silently continue
console.debug('Failed to read announcement from filesystem:', err);
}
}
if (!announcement) {
return {
title: `${repo} - Repository Not Found`,
description: 'Repository announcement not found',
announcement: null, // Explicitly set to null so component knows it's missing
repoNotFound: true // Flag to indicate repo not found
};
}
} else {
announcement = matchingEvents[0];
}
announcement = matchingEvents[0];
}
// Ensure announcement exists before proceeding
if (!announcement) {
return {
title: `${repo} - Repository Not Found`,
description: 'Repository announcement not found',
announcement: null,
repoNotFound: true
};
}
// Check privacy - for private repos, we'll let the API endpoints handle access control
@ -116,7 +148,10 @@ export const load: PageLoad = async ({ params, url, parent }) => { @@ -116,7 +148,10 @@ export const load: PageLoad = async ({ params, url, parent }) => {
console.error('Error loading repository metadata:', error);
return {
title: `${repo} - Repository`,
description: 'Repository'
description: 'Repository',
announcement: null, // Explicitly set to null on error
repoNotFound: true, // Flag to indicate error
error: error instanceof Error ? error.message : 'Unknown error'
};
}
};

4
src/routes/repos/[npub]/[repo]/components/TabLayout.svelte

@ -72,7 +72,6 @@ @@ -72,7 +72,6 @@
.tab-layout-wrapper {
display: flex;
flex-direction: column;
height: 100%;
width: 100%;
max-width: 100%;
min-width: 0;
@ -102,12 +101,12 @@ @@ -102,12 +101,12 @@
.tab-layout {
display: flex;
flex: 1;
min-height: 0;
min-width: 0;
width: 100%;
max-width: 100%;
gap: 1rem;
box-sizing: border-box;
min-height: 600px; /* Ensure minimum height so content isn't cut off */
}
.left-pane {
@ -116,6 +115,7 @@ @@ -116,6 +115,7 @@
max-width: 400px;
border-right: 1px solid var(--border-color);
overflow-y: auto;
overflow-x: hidden;
padding: 1rem;
box-sizing: border-box;
background: var(--bg-primary);

11
src/routes/repos/[npub]/[repo]/hooks/use-repo-effects.ts

@ -20,10 +20,21 @@ export function usePageDataEffect(state: RepoState, getPageData: () => any): () @@ -20,10 +20,21 @@ export function usePageDataEffect(state: RepoState, getPageData: () => any): ()
const data = getPageData();
if (data && state.isMounted) {
state.pageData = data || {};
// Set repoNotFound flag if announcement is missing or repoNotFound is explicitly set
if (data.repoNotFound === true || (data.announcement === null || data.announcement === undefined)) {
state.repoNotFound = true;
state.loading.main = false;
} else if (data.announcement) {
// Clear repoNotFound if we have a valid announcement
state.repoNotFound = false;
}
}
} catch (err) {
if (state.isMounted) {
console.warn('Failed to update pageData:', err);
// On error, mark as not found to prevent blank page
state.repoNotFound = true;
state.loading.main = false;
}
}
};

109
src/routes/repos/[npub]/[repo]/read-announcement-from-fs.ts

@ -0,0 +1,109 @@ @@ -0,0 +1,109 @@
/**
* Server-only utility to read repository announcements from filesystem
* This file should only be imported server-side via dynamic import
*
* Note: This file uses Node.js-only modules and must not be statically imported on the client
*/
import { join, resolve } from 'path';
import { existsSync } from 'fs';
import { simpleGit } from 'simple-git';
import logger from '$lib/services/logger.js';
import type { NostrEvent } from '$lib/types/nostr.js';
// Resolve GIT_REPO_ROOT to absolute path (handles both relative and absolute paths)
const repoRootEnv = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
const repoRoot = resolve(repoRootEnv);
/**
 * Read announcement from filesystem (nostr/repo-events.jsonl).
 *
 * Looks up the most recent commit (across all refs) that touched the
 * committed events file, reads it directly out of git (bare repos have no
 * worktree), and returns the newest announcement entry whose author matches
 * `expectedPubkey` and whose d-tag matches the repo name.
 *
 * Returns null if not found or on any error.
 * Server-side only - do not call on client.
 *
 * @param npub           Owner npub (directory name under the repo root)
 * @param repoName       Repository name (without the `.git` suffix)
 * @param expectedPubkey Hex pubkey the announcement must be authored by
 */
export async function readAnnouncementFromFilesystem(npub: string, repoName: string, expectedPubkey: string): Promise<NostrEvent | null> {
  // Guard against client-side execution
  if (typeof process === 'undefined' || typeof process.env === 'undefined') {
    return null;
  }
  try {
    const repoPath = join(repoRoot, npub, `${repoName}.git`);
    if (!existsSync(repoPath)) {
      return null;
    }
    const git = simpleGit(repoPath);
    // Ask git directly for the single most recent commit touching the events
    // file (`-1` with the default newest-first ordering) instead of listing
    // the whole history with `--reverse` and taking the last element — same
    // result, O(1) output instead of O(history).
    const logOutput = await git.raw(['log', '--all', '-1', '--format=%H', '--', 'nostr/repo-events.jsonl']).catch(() => '');
    const mostRecentCommit = logOutput.trim().split('\n').filter(Boolean)[0];
    if (!mostRecentCommit) {
      return null;
    }
    // Read the file content from git
    const fileContent = await git.show([`${mostRecentCommit}:nostr/repo-events.jsonl`]).catch(() => null);
    if (!fileContent) {
      return null;
    }
    // Parse repo-events.jsonl to find the most recent announcement authored
    // by the expected pubkey; malformed lines are skipped.
    let announcementEvent: NostrEvent | null = null;
    let latestTimestamp = 0;
    try {
      const lines = fileContent.trim().split('\n').filter(Boolean);
      for (const line of lines) {
        try {
          const entry = JSON.parse(line);
          if (entry.type === 'announcement' && entry.event && entry.timestamp) {
            // Verify the announcement is for the expected pubkey
            if (entry.event.pubkey === expectedPubkey) {
              if (entry.timestamp > latestTimestamp) {
                latestTimestamp = entry.timestamp;
                announcementEvent = entry.event;
              }
            }
          }
        } catch {
          continue;
        }
      }
    } catch (parseError) {
      logger.debug({ error: parseError, npub, repoName }, 'Failed to parse repo-events.jsonl');
      return null;
    }
    if (!announcementEvent) {
      return null;
    }
    // Validate the announcement (case-insensitive repo name matching)
    const { validateAnnouncementEvent } = await import('$lib/services/nostr/repo-verification.js');
    const dTag = announcementEvent.tags.find(t => t[0] === 'd')?.[1];
    // Check if d-tag matches repo name (case-insensitive)
    if (!dTag || dTag.toLowerCase() !== repoName.toLowerCase()) {
      logger.debug({ npub, repoName, dTag }, 'Announcement d-tag does not match repo name (case-insensitive)');
      return null;
    }
    const validation = validateAnnouncementEvent(announcementEvent, repoName);
    if (!validation.valid) {
      logger.debug({ error: validation.error, npub, repoName }, 'Announcement validation failed');
      return null;
    }
    return announcementEvent;
  } catch (error) {
    logger.debug({ error, npub, repoName }, 'Error reading announcement from filesystem');
    return null;
  }
}

67
src/routes/repos/[npub]/[repo]/services/repo-operations.ts

@ -350,9 +350,38 @@ export async function forkRepository( @@ -350,9 +350,38 @@ export async function forkRepository(
const truncatedNpub = state.npub.length > 16 ? `${state.npub.slice(0, 12)}...` : state.npub;
console.log(`[Fork UI] Starting ${localOnly ? 'local-only ' : ''}fork of ${truncatedNpub}/${state.repo}...`);
// Create a proof event to verify write access if needed
let proofEvent: any = null;
try {
const { createProofEvent } = await import('$lib/services/nostr/relay-write-proof.js');
const { signEventWithNIP07 } = await import('$lib/services/nostr/nip07-signer.js');
const { DEFAULT_NOSTR_RELAYS } = await import('$lib/config.js');
const { NostrClient } = await import('$lib/services/nostr/nostr-client.js');
if (state.user.pubkeyHex) {
const proofEventTemplate = createProofEvent(
state.user.pubkeyHex,
`gitrepublic-fork-proof-${Date.now()}`
);
proofEvent = await signEventWithNIP07(proofEventTemplate);
// Publish the event to relays so server can verify it
const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS);
await nostrClient.publishEvent(proofEvent, DEFAULT_NOSTR_RELAYS);
// Wait a moment for the event to propagate
await new Promise(resolve => setTimeout(resolve, 500));
}
} catch (proofErr) {
console.warn('[Fork] Failed to create proof event, continuing without it:', proofErr);
// Continue without proof event - server will check cache or return helpful error
}
const data = await apiPost<{
success?: boolean;
message?: string;
redirect?: string;
fork?: {
npub: string;
repo: string;
@ -366,23 +395,31 @@ export async function forkRepository( @@ -366,23 +395,31 @@ export async function forkRepository(
eventName?: string;
}>(`/api/repos/${state.npub}/${state.repo}/forks`, {
userPubkey: state.user.pubkey,
localOnly
localOnly,
proofEvent
});
if (data.success !== false && data.fork) {
const message = data.message || (data.fork.localOnly
? 'Local-only fork created successfully! This fork is private and only exists on this server.'
: `Repository forked successfully! Published to ${data.fork.publishedTo?.announcement || 0} relay(s).`);
console.log(`[Fork UI] ✓ ${message}`);
// Security: Truncate npub in logs
const truncatedForkNpub = data.fork.npub.length > 16 ? `${data.fork.npub.slice(0, 12)}...` : data.fork.npub;
console.log(`[Fork UI] - Fork location: /repos/${truncatedForkNpub}/${data.fork.repo}`);
console.log(`[Fork UI] - Local-only: ${data.fork.localOnly || false}`);
console.log(`[Fork UI] - Announcement ID: ${data.fork.announcementId}`);
console.log(`[Fork UI] - Ownership Transfer ID: ${data.fork.ownershipTransferId}`);
alert(`${message}\n\nRedirecting to your fork...`);
goto(`/repos/${data.fork.npub}/${data.fork.repo}`);
if (data.success !== false) {
if (data.redirect) {
// Redirect to signup page to publish the fork announcement
console.log(`[Fork UI] ✓ Fork repository created! Redirecting to signup page to publish announcement...`);
goto(data.redirect);
} else if (data.fork) {
// Legacy: Fork was fully created and published (shouldn't happen with new flow)
const message = data.message || (data.fork.localOnly
? 'Local-only fork created successfully! This fork is private and only exists on this server.'
: `Repository forked successfully! Published to ${data.fork.publishedTo?.announcement || 0} relay(s).`);
console.log(`[Fork UI] ✓ ${message}`);
// Security: Truncate npub in logs
const truncatedForkNpub = data.fork.npub.length > 16 ? `${data.fork.npub.slice(0, 12)}...` : data.fork.npub;
console.log(`[Fork UI] - Fork location: /repos/${truncatedForkNpub}/${data.fork.repo}`);
console.log(`[Fork UI] - Local-only: ${data.fork.localOnly || false}`);
console.log(`[Fork UI] - Announcement ID: ${data.fork.announcementId}`);
console.log(`[Fork UI] - Ownership Transfer ID: ${data.fork.ownershipTransferId}`);
alert(`${message}\n\nRedirecting to your fork...`);
goto(`/repos/${data.fork.npub}/${data.fork.repo}`);
}
} else {
const errorMessage = data.error || 'Failed to fork repository';
const errorDetails = data.details ? `\n\nDetails: ${data.details}` : '';

192
src/routes/signup/+page.svelte

@ -10,6 +10,7 @@ @@ -10,6 +10,7 @@
import { nip19 } from 'nostr-tools';
import { userStore } from '../../lib/stores/user-store.js';
import { hasUnlimitedAccess, isLoggedIn } from '../../lib/utils/user-access.js';
import { sanitizeRepoNameForFilesystem } from '../../lib/utils/input-validation.js';
let nip07Available = $state(false);
let loading = $state(false);
@ -67,6 +68,15 @@ @@ -67,6 +68,15 @@
const repoParam = urlParams.get('repo');
const repoTagParam = urlParams.get('repoTag');
const npubParam = urlParams.get('npub') || originalOwnerParam;
const forkParam = urlParams.get('fork');
const forkOriginalRepoParam = urlParams.get('forkOriginalRepo');
const forkNameParam = urlParams.get('forkName');
const forkDescriptionParam = urlParams.get('forkDescription');
const localOnlyParam = urlParams.get('localOnly');
const cloneUrlsParam = urlParams.get('cloneUrls');
const visibilityParam = urlParams.get('visibility');
const projectRelaysParam = urlParams.get('projectRelays');
const earliestCommitParam = urlParams.get('earliestCommit');
// Handle transfer flow (step 4)
if (transferParam === 'true' && originalOwnerParam && repoParam && repoTagParam) {
@ -321,12 +331,54 @@ @@ -321,12 +331,54 @@
// Pre-fill repo name
repoName = repoParam;
// Try to fetch existing announcement to pre-fill other fields
try {
const decoded = nip19.decode(npubParam);
if (decoded.type === 'npub') {
const pubkey = decoded.data as string;
const events = await nostrClient.fetchEvents([
// Handle fork parameters (from fork creation flow)
if (forkParam === 'true' && forkOriginalRepoParam) {
isFork = true;
forkOriginalRepo = forkOriginalRepoParam;
// Pre-fill fork name and description from URL params
if (forkNameParam) {
repoName = forkNameParam;
}
if (forkDescriptionParam) {
description = forkDescriptionParam;
}
// Pre-fill clone URLs from URL params
if (cloneUrlsParam) {
cloneUrls = cloneUrlsParam.split(',').filter(url => url.trim()).length > 0
? cloneUrlsParam.split(',').filter(url => url.trim())
: [''];
}
// Pre-fill visibility
if (visibilityParam && ['public', 'unlisted', 'restricted', 'private'].includes(visibilityParam)) {
visibility = visibilityParam as typeof visibility;
}
// Pre-fill project relays
if (projectRelaysParam) {
projectRelays = projectRelaysParam.split(',').filter(r => r.trim()).length > 0
? projectRelaysParam.split(',').filter(r => r.trim())
: [''];
}
// Pre-fill earliest commit
if (earliestCommitParam) {
earliestCommit = earliestCommitParam;
}
// If localOnly is set, ensure visibility is private
if (localOnlyParam === 'true') {
visibility = 'private';
}
} else {
// Try to fetch existing announcement to pre-fill other fields (not a fork)
try {
const decoded = nip19.decode(npubParam);
if (decoded.type === 'npub') {
const pubkey = decoded.data as string;
const events = await nostrClient.fetchEvents([
{
kinds: [KIND.REPO_ANNOUNCEMENT],
authors: [pubkey],
@ -549,18 +601,19 @@ @@ -549,18 +601,19 @@
cloneUrls = [''];
}
}
}
} catch (err) {
console.warn('Failed to pre-fill form from query params:', err);
// Still set basic info
const gitDomain = $page.data.gitDomain || 'localhost:6543';
const isLocalhost = gitDomain.startsWith('localhost') || gitDomain.startsWith('127.0.0.1');
// Only add clone URL if not localhost
if (!isLocalhost) {
cloneUrls = [`https://${gitDomain}/${npubParam}/${repoParam}.git`];
} else {
cloneUrls = [''];
}
} catch (err) {
console.warn('Failed to pre-fill form from query params:', err);
// Still set basic info
const gitDomain = $page.data.gitDomain || 'localhost:6543';
const isLocalhost = gitDomain.startsWith('localhost') || gitDomain.startsWith('127.0.0.1');
// Only add clone URL if not localhost
if (!isLocalhost) {
cloneUrls = [`https://${gitDomain}/${npubParam}/${repoParam}.git`];
} else {
cloneUrls = [''];
}
}
}
}
@ -1631,14 +1684,15 @@ @@ -1631,14 +1684,15 @@
const pubkey = await getPublicKeyWithNIP07();
const npub = nip19.npubEncode(pubkey);
// Normalize repo name to d-tag format
const dTag = repoName
.toLowerCase()
.trim()
.replace(/[^\w\s-]/g, '')
.replace(/\s+/g, '-')
.replace(/-+/g, '-')
.replace(/^-+|-+$/g, '');
// Normalize repo name to d-tag format (filesystem-safe)
// The d-tag is used as the repository identifier and must be filesystem-safe
const dTag = sanitizeRepoNameForFilesystem(repoName);
if (!dTag || dTag.length === 0) {
error = 'Invalid repository name. Please use only alphanumeric characters, hyphens, and dots.';
loading = false;
return;
}
// Get git domain from layout data
const gitDomain = $page.data.gitDomain || 'localhost:6543';
@ -2199,6 +2253,65 @@ @@ -2199,6 +2253,65 @@
console.log('Using relays for publishing:', userRelays);
// For private visibility, skip publishing to relays - just save to repo
// For forks, the repo already exists, so we need to trigger provisioning to save the announcement
if (visibility === 'private') {
console.log('Private visibility detected - skipping relay publishing. Triggering provisioning to save announcement to repository.');
// For forks, the repository already exists, so we need to trigger provisioning
// to save the announcement file to the repo
// Do this asynchronously (fire-and-forget) to avoid blocking the redirect
if (isFork) {
const userNpub = nip19.npubEncode(pubkey);
console.log(`Triggering provisioning for fork: ${userNpub}/${dTag}`);
// Call the clone endpoint to trigger provisioning (it will save the announcement to the existing repo)
// Pass the announcement event directly since it's private and not on relays
// Do this asynchronously without awaiting to avoid blocking redirect
fetch(`/api/repos/${userNpub}/${dTag}/clone`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
announcementEvent: signedEvent // Pass the signed announcement event directly
})
})
.then(cloneResponse => {
if (cloneResponse.ok) {
console.log('Fork provisioning completed successfully - announcement saved to repository');
} else {
return cloneResponse.json().catch(() => ({ error: 'Unknown error' })).then(errorData => {
console.warn('Fork provisioning had issues (repo may still work):', errorData);
});
}
})
.catch(provisionErr => {
console.warn('Failed to trigger fork provisioning (repo may still work):', provisionErr);
// Continue anyway - the repo exists and can be accessed
});
}
success = true;
// Redirect to the newly created repository page
const userNpub = nip19.npubEncode(pubkey);
// Check if this is a transfer completion (from query params)
const urlParams = $page.url.searchParams;
const isTransfer = urlParams.get('transfer') === 'true';
setTimeout(() => {
// Invalidate all caches and redirect
if (isTransfer) {
// After transfer, redirect to repos page to see updated state
goto('/repos', { invalidateAll: true, replaceState: false });
} else {
goto(`/repos/${userNpub}/${dTag}`, { invalidateAll: true, replaceState: false });
}
}, 1000);
return;
}
// Publish announcement file event first (so it's available when server provisions)
console.log('Publishing announcement file event...');
await publishWithRetry(nostrClient, signedAnnouncementFileEvent, userRelays, 2).catch(err => {
@ -3047,3 +3160,30 @@ @@ -3047,3 +3160,30 @@
</main>
</div>
<style>
/* Ensure the signup page can scroll to the bottom */
:global(body) {
overflow-y: auto !important;
height: auto !important;
min-height: 100vh;
}
:global(html) {
overflow-y: auto !important;
height: auto !important;
}
.container {
min-height: auto;
height: auto;
overflow: visible;
padding-bottom: 3rem; /* Extra padding at bottom to ensure content is accessible */
}
.container main {
min-height: auto;
height: auto;
overflow: visible;
padding-bottom: 2rem;
}
</style>

2
vite.config.ts

@ -54,6 +54,8 @@ export default defineConfig({ @@ -54,6 +54,8 @@ export default defineConfig({
noExternal: [],
external: [
'simple-git',
'@kwsites/file-exists',
'@kwsites/promise-deferred',
'child_process',
'fs',
'fs/promises',

Loading…
Cancel
Save