Compare commits

..

7 Commits

Author SHA1 Message Date
Silberengel 52de94d089 bug-fix 2 weeks ago
Silberengel 34ae76bffe bug-fixes 2 weeks ago
Silberengel e8e14ede79 bug-fixes 2 weeks ago
Silberengel ada4c25047 make relay timeouts more efficient 2 weeks ago
Silberengel e12b4263d7 more-muted replyt-to 2 weeks ago
Silberengel 70873862a0 bug-fixes 2 weeks ago
Silberengel 6f38dc9e20 administer the repos 2 weeks ago
  1. 1
      docker-compose.yml
  2. 7
      nostr/commit-signatures.jsonl
  3. 94
      src/app.html
  4. 39
      src/lib/components/CommentRenderer.svelte
  5. 90
      src/lib/components/NavBar.svelte
  6. 35
      src/lib/services/git/file-manager.ts
  7. 76
      src/lib/services/git/file-manager/commit-operations.ts
  8. 272
      src/lib/services/git/repo-manager.ts
  9. 221
      src/lib/services/nostr/nostr-client.ts
  10. 2
      src/lib/services/nostr/relay-write-proof.ts
  11. 106
      src/lib/utils/admin-check.ts
  12. 15
      src/lib/utils/nostr-links.ts
  13. 423
      src/routes/admin/repos/+page.svelte
  14. 47
      src/routes/api/admin/check/+server.ts
  15. 146
      src/routes/api/admin/repos/+server.ts
  16. 20
      src/routes/api/repos/[npub]/[repo]/clone/+server.ts
  17. 26
      src/routes/api/repos/[npub]/[repo]/delete/+server.ts
  18. 7
      src/routes/api/repos/[npub]/[repo]/readme/+server.ts
  19. 14
      src/routes/api/repos/list/+server.ts
  20. 18
      src/routes/repos/[npub]/[repo]/+page.svelte
  21. 15
      src/routes/repos/[npub]/[repo]/components/DocsTab.svelte
  22. 47
      src/routes/repos/[npub]/[repo]/services/commit-operations.ts
  23. 115
      src/routes/repos/[npub]/[repo]/services/repo-operations.ts
  24. 8
      src/routes/repos/[npub]/[repo]/utils/api-client.ts
  25. 19
      src/routes/repos/[npub]/[repo]/utils/safe-wrappers.ts
  26. 401
      src/routes/users/[npub]/+page.svelte

1
docker-compose.yml

@ -26,6 +26,7 @@ services:
- GIT_DOMAIN=${GIT_DOMAIN:-gitrepublic.imwald.eu} # Set to your domain for production (without https://) - GIT_DOMAIN=${GIT_DOMAIN:-gitrepublic.imwald.eu} # Set to your domain for production (without https://)
- NOSTR_RELAYS=${NOSTR_RELAYS:-wss://theforest.nostr1.com} - NOSTR_RELAYS=${NOSTR_RELAYS:-wss://theforest.nostr1.com}
- NOSTRGIT_SECRET_KEY=${NOSTRGIT_SECRET_KEY:-} - NOSTRGIT_SECRET_KEY=${NOSTRGIT_SECRET_KEY:-}
- ADMIN_NPUB=${ADMIN_NPUB:-npub12umrfdjgvdxt45g0y3ghwcyfagssjrv5qlm3t6pu2aa5vydwdmwq8q0z04}
- PORT=6543 - PORT=6543
volumes: volumes:
# Persist git repositories # Persist git repositories

7
nostr/commit-signatures.jsonl

@ -121,3 +121,10 @@
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772270859,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes and fallback relay"]],"content":"Signed commit: bug-fixes and fallback relay","id":"1d85d0c5e1451c90bca5d59e08043f29adeaad4db4ac5495c8e9a4247775780f","sig":"a1960b76c78db9f64dad20378d26f500ffc09f1f6d137314db548470202712222a1d391f682146ba281fd23355c574fcbb260310db61b3458bba3dec0c724a18"} {"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772270859,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes and fallback relay"]],"content":"Signed commit: bug-fixes and fallback relay","id":"1d85d0c5e1451c90bca5d59e08043f29adeaad4db4ac5495c8e9a4247775780f","sig":"a1960b76c78db9f64dad20378d26f500ffc09f1f6d137314db548470202712222a1d391f682146ba281fd23355c574fcbb260310db61b3458bba3dec0c724a18"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772271656,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"f4a5e0d3e2aa7d0d99803f26008ab68e40551e36362bb6d04acf639c5b78d959","sig":"59da9e59a6fb5648f4c889e0045b571e0d2d66a555100d60dec373455309a640bea89e4bb3a42a0e502aa4d2091e4b698203721e79b346ff30e6b2bcdc5f48b3"} {"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772271656,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"f4a5e0d3e2aa7d0d99803f26008ab68e40551e36362bb6d04acf639c5b78d959","sig":"59da9e59a6fb5648f4c889e0045b571e0d2d66a555100d60dec373455309a640bea89e4bb3a42a0e502aa4d2091e4b698203721e79b346ff30e6b2bcdc5f48b3"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772274086,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"32794e047f06902ad610f918834efb113f41eace26a53a3f0fad083b9d8323dc","sig":"3859f0de3de0f8a742b6fbe7709c5a5625f4d5612a936fd81f38a7e1231ee810b50a69c1ed5d23c8a6670b4cbc9ea3d4bd39d6fa9e6207802f45995689b924a9"} {"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772274086,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"32794e047f06902ad610f918834efb113f41eace26a53a3f0fad083b9d8323dc","sig":"3859f0de3de0f8a742b6fbe7709c5a5625f4d5612a936fd81f38a7e1231ee810b50a69c1ed5d23c8a6670b4cbc9ea3d4bd39d6fa9e6207802f45995689b924a9"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772293551,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","remove polling"]],"content":"Signed commit: remove polling","id":"40f01e84f96661bb7fea13aa63c7da428118061b0a1470a11890d4f9cd6d685b","sig":"dbb6947defac6c7f92a3cf6f72352a94ffe2c4b33e65f8410518a40406c93f1f5a3e13e81f2f04f676d826e6cf03ec802328f5228300f80a8114fa3fd26eaeff"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772296288,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","administer the repos"]],"content":"Signed commit: administer the repos","id":"8825fb9bd01e099c1369f0c9ea1429dedd0a0116d103b4a640752c0a830fbc61","sig":"676f0817f817204ad910a70540399f71743a54453ae209535dcb30356d042b049138d9cfdeec08c4b7da03bb6bb51c71477bbf8d2f58bd4b602b9f69af4b3405"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772298906,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"6aa4dcd1b3d8a933710a6eb43321aa4faaba56598c735a634069c882c83b4f03","sig":"80ce253e890e8e84c8138e004bc2aaea402379d9aa67f62793ac7a4b344de6a7223f46fc733b240215a983a3a9b574ea8d0858a184f06df58ee66212ba58ee53"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772299137,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","more-muted replyt-to"]],"content":"Signed commit: more-muted replyt-to","id":"fc0a91b526083b640d8116592fcac064fcf3cec9625b48dbd41c3877b2fe5444","sig":"998273d70d827ffbb939b4c149ff88e11c9f3aae3c5ddee78d860710f7fbff42c5ceed9433367b530bdc2869f9d382eb449537f813cf745c49f1a87a36926502"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772299733,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","make relay timeouts more efficient\nsuppress diff on initial commit"]],"content":"Signed commit: make relay timeouts more efficient\nsuppress diff on initial commit","id":"5fbc2dfb13acab011df5a394a022267e69bbe908e696c4389e0c07ba83d58a0d","sig":"daf46d563c413e2481be2cbd2b00d3015cf601e19fe0a191ffbb18c2c07508b17e34ebda5c903a1391914f991cecd7a7a4e809fcba45e1f14ebab674117eb53c"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772300935,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"3618c494d594408165ebf461e290676817ab6cc8b0b076ccc02b35a487ae8da1","sig":"d9106ce1318e703df1c366835be69c0cab12fba3a9be0fed944b17e55fd3b44f3fc9d45b32366d1d237452f099ab3b07f8ad6199660972ce571ec23ae264e873"}
{"kind":1640,"pubkey":"573634b648634cbad10f2451776089ea21090d9407f715e83c577b4611ae6edc","created_at":1772302842,"tags":[["author","Silberengel","silberengel7@protonmail.com"],["message","bug-fixes"]],"content":"Signed commit: bug-fixes","id":"5c4b680a04363718d8de6aa05b824d30417221a9095be57bb9a7c2cf01c5af59","sig":"51ffa554e83a6a3c4ca97cffc7eca67e770ca822e43e9e78692bafcd63401c4df84e1fe030592e63982b509d3cfa8bfbd57c6b4257661b0f43adedef335c7575"}

94
src/app.html

@ -39,6 +39,100 @@
} }
// Other unhandled rejections will still be logged by the browser // Other unhandled rejections will still be logged by the browser
}); });
// Suppress WebSocket connection errors for nostr relays
// These errors occur when relays are down or unreachable, which is expected behavior
// SimplePool from nostr-tools will retry connections, causing repeated console errors
(function() {
// List of known nostr relay URLs to suppress errors for
const nostrRelayPatterns = [
'wss://orly-relay.imwald.eu',
'wss://nostr.sovbit.host',
'wss://nostr21.com',
'wss://theforest.nostr1.com',
'wss://nostr.land',
'wss://relay.damus.io',
'wss://thecitadel.nostr1.com',
'wss://freelay.sovbit.host',
'wss://bevos.nostr1.com',
'wss://relay.primal.net',
'wss://nostr.mom',
'wss://relay.snort.social',
'wss://aggr.nostr.land'
];
// Helper function to check if an error message is a nostr relay connection error
function isNostrRelayConnectionError(message) {
if (typeof message !== 'string') return false;
return nostrRelayPatterns.some(pattern =>
message.includes(pattern) && (
message.includes('Verbindung') || // German: "connection"
message.includes('connection') ||
message.includes('kann keine Verbindung') || // German: "cannot establish connection"
message.includes('cannot establish') ||
message.includes('WebSocket') ||
message.includes('wss://') ||
message.includes('aufbauen') // German: "establish"
)
);
}
// Store original console methods
const originalConsoleError = console.error;
const originalConsoleWarn = console.warn;
// Override console.error to filter out WebSocket connection errors for nostr relays
console.error = function(...args) {
const message = args.join(' ');
// Suppress nostr relay connection errors (they're expected when relays are down)
if (isNostrRelayConnectionError(message)) {
// Optionally log at debug level instead of error level
if (typeof console.debug === 'function') {
console.debug('[Nostr Relay] Connection failed (expected):', ...args);
}
return; // Don't log the error
}
// For all other errors, use the original console.error
originalConsoleError.apply(console, args);
};
// Also override console.warn for consistency
console.warn = function(...args) {
const message = args.join(' ');
// Suppress nostr relay connection warnings too
if (isNostrRelayConnectionError(message)) {
if (typeof console.debug === 'function') {
console.debug('[Nostr Relay] Connection warning (expected):', ...args);
}
return;
}
originalConsoleWarn.apply(console, args);
};
// Handle window.onerror for global error catching
const originalOnError = window.onerror;
window.onerror = function(message, source, lineno, colno, error) {
const errorMessage = String(message || '');
// Suppress nostr relay connection errors
if (isNostrRelayConnectionError(errorMessage)) {
// Return true to prevent default error handling
return true;
}
// For other errors, call original handler if it exists
if (originalOnError) {
return originalOnError.call(this, message, source, lineno, colno, error);
}
return false;
};
})();
</script> </script>
%sveltekit.head% %sveltekit.head%
</head> </head>

39
src/lib/components/CommentRenderer.svelte

@ -15,7 +15,7 @@
import EventCopyButton from '$lib/components/EventCopyButton.svelte'; import EventCopyButton from '$lib/components/EventCopyButton.svelte';
import { import {
processContentWithNostrLinks, processContentWithNostrLinks,
getReferencedEventFromDiscussion, getReferencedEventWithTagType,
formatDiscussionTime, formatDiscussionTime,
type ProcessedContentPart type ProcessedContentPart
} from '$lib/utils/nostr-links.js'; } from '$lib/utils/nostr-links.js';
@ -42,10 +42,17 @@
nested = false nested = false
}: Props = $props(); }: Props = $props();
const referencedEvent = $derived(commentEvent const referencedEventWithTag = $derived(commentEvent
? getReferencedEventFromDiscussion(commentEvent, eventCache) ? getReferencedEventWithTagType(commentEvent, eventCache)
: undefined); : undefined);
const referencedEvent = $derived(referencedEventWithTag?.event);
const referencedTagType = $derived(referencedEventWithTag?.tagType);
const referencedLabel = $derived(
referencedTagType === 'q' ? 'Quoting:' : 'Reply-To:'
);
const contentParts = $derived(processContentWithNostrLinks(comment.content, eventCache, profileCache)); const contentParts = $derived(processContentWithNostrLinks(comment.content, eventCache, profileCache));
</script> </script>
@ -68,7 +75,8 @@
{#if referencedEvent} {#if referencedEvent}
<div class="referenced-event"> <div class="referenced-event">
<div class="referenced-event-header"> <div class="referenced-event-header">
<UserBadge pubkey={referencedEvent.pubkey} disableLink={true} /> <span class="referenced-event-label">{referencedLabel}</span>
<UserBadge pubkey={referencedEvent.pubkey} disableLink={true} inline={true} />
<span class="referenced-event-time">{formatDiscussionTime(referencedEvent.created_at)}</span> <span class="referenced-event-time">{formatDiscussionTime(referencedEvent.created_at)}</span>
</div> </div>
<div class="referenced-event-content">{referencedEvent.content || '(No content)'}</div> <div class="referenced-event-content">{referencedEvent.content || '(No content)'}</div>
@ -169,26 +177,35 @@
margin-bottom: 0.75rem; margin-bottom: 0.75rem;
padding: 0.5rem; padding: 0.5rem;
background: var(--bg-secondary, var(--bg-primary)); background: var(--bg-secondary, var(--bg-primary));
color: var(--text-primary); color: var(--text-muted, var(--text-secondary));
border-radius: 4px; border-radius: 4px;
border-left: 2px solid var(--border-color); border-left: 2px solid var(--border-light, var(--border-color));
opacity: 0.8;
} }
.referenced-event-header { .referenced-event-header {
display: flex; display: flex;
align-items: center; align-items: center;
gap: 0.5rem; gap: 0.375rem;
margin-bottom: 0.25rem; margin-bottom: 0.25rem;
font-size: 0.875rem; font-size: 0.75rem;
color: var(--text-muted, var(--text-secondary));
}
.referenced-event-label {
font-weight: 500;
color: var(--text-muted, var(--text-secondary));
} }
.referenced-event-time { .referenced-event-time {
color: var(--text-secondary); color: var(--text-muted, var(--text-secondary));
font-size: 0.7rem;
} }
.referenced-event-content { .referenced-event-content {
font-size: 0.9rem; font-size: 0.8rem;
color: var(--text-primary); color: var(--text-muted, var(--text-secondary));
line-height: 1.4;
} }
.nostr-link-event { .nostr-link-event {

90
src/lib/components/NavBar.svelte

@ -11,9 +11,11 @@
import { determineUserLevel, decodePubkey } from '../services/nostr/user-level-service.js'; import { determineUserLevel, decodePubkey } from '../services/nostr/user-level-service.js';
let userPubkey = $state<string | null>(null); let userPubkey = $state<string | null>(null);
let userPubkeyHex = $state<string | null>(null);
let mobileMenuOpen = $state(false); let mobileMenuOpen = $state(false);
let nip07Available = $state(false); // Track NIP-07 availability (client-side only) let nip07Available = $state(false); // Track NIP-07 availability (client-side only)
let isClient = $state(false); // Track if we're on the client let isClient = $state(false); // Track if we're on the client
let isUserAdmin = $state(false);
// Component mount tracking to prevent state updates after destruction // Component mount tracking to prevent state updates after destruction
let isMounted = $state(true); let isMounted = $state(true);
@ -35,13 +37,22 @@
if (isMounted) { if (isMounted) {
userStore.reset(); userStore.reset();
userPubkey = null; userPubkey = null;
userPubkeyHex = null;
isUserAdmin = false;
} }
} else if (isMounted) { } else if (isMounted) {
userPubkey = currentUser.userPubkey; userPubkey = currentUser.userPubkey;
userPubkeyHex = currentUser.userPubkeyHex;
// Check admin status asynchronously
if (currentUser.userPubkeyHex) {
checkAdminStatus(currentUser.userPubkeyHex);
}
updateActivity(); updateActivity();
} }
} else if (isMounted) { } else if (isMounted) {
userPubkey = null; userPubkey = null;
userPubkeyHex = null;
isUserAdmin = false;
} }
} catch (err) { } catch (err) {
// Ignore errors during destruction // Ignore errors during destruction
@ -66,10 +77,17 @@
if (currentState && currentState.userPubkey && currentState.userPubkeyHex && isMounted) { if (currentState && currentState.userPubkey && currentState.userPubkeyHex && isMounted) {
// User is logged in - restore state (already synced by $effect, but ensure it's set) // User is logged in - restore state (already synced by $effect, but ensure it's set)
userPubkey = currentState.userPubkey; userPubkey = currentState.userPubkey;
userPubkeyHex = currentState.userPubkeyHex;
// Check admin status asynchronously
if (currentState.userPubkeyHex) {
checkAdminStatus(currentState.userPubkeyHex);
}
// Update activity to extend session // Update activity to extend session
updateActivity(); updateActivity();
} else if (isMounted) { } else if (isMounted) {
// User not logged in - check auth // User not logged in - check auth
userPubkeyHex = null;
isUserAdmin = false;
checkAuth(); checkAuth();
} }
} catch (err) { } catch (err) {
@ -172,19 +190,76 @@
if (!currentState || !currentState.userPubkey) { if (!currentState || !currentState.userPubkey) {
if (isMounted) { if (isMounted) {
userPubkey = null; userPubkey = null;
userPubkeyHex = null;
isUserAdmin = false;
} }
return; return;
} }
if (isNIP07Available() && isMounted) { if (isNIP07Available() && isMounted) {
userPubkey = await getPublicKeyWithNIP07(); userPubkey = await getPublicKeyWithNIP07();
// Convert to hex if needed
if (userPubkey) {
if (/^[0-9a-f]{64}$/i.test(userPubkey)) {
userPubkeyHex = userPubkey.toLowerCase();
} else {
try {
const decoded = nip19.decode(userPubkey);
if (decoded.type === 'npub') {
userPubkeyHex = decoded.data as string;
}
} catch {
userPubkeyHex = null;
}
}
if (userPubkeyHex && isMounted) {
checkAdminStatus(userPubkeyHex);
}
}
} else if (isMounted) { } else if (isMounted) {
userPubkey = null; userPubkey = null;
userPubkeyHex = null;
isUserAdmin = false;
} }
} catch (err) { } catch (err) {
if (isMounted) { if (isMounted) {
console.log('NIP-07 not available or user not connected'); console.log('NIP-07 not available or user not connected');
userPubkey = null; userPubkey = null;
userPubkeyHex = null;
isUserAdmin = false;
}
}
}
async function checkAdminStatus(pubkeyHex: string) {
if (!isMounted || typeof window === 'undefined' || !pubkeyHex) {
if (isMounted) {
isUserAdmin = false;
}
return;
}
try {
console.log('[NavBar] Checking admin status for:', pubkeyHex.substring(0, 16) + '...');
const response = await fetch('/api/admin/check', {
headers: {
'X-User-Pubkey': pubkeyHex
}
});
if (response.ok && isMounted) {
const data = await response.json();
console.log('[NavBar] Admin check result:', data);
isUserAdmin = data.isAdmin === true;
console.log('[NavBar] isUserAdmin set to:', isUserAdmin);
} else if (isMounted) {
console.warn('[NavBar] Admin check failed:', response.status, response.statusText);
isUserAdmin = false;
}
} catch (err) {
if (isMounted) {
console.warn('[NavBar] Failed to check admin status:', err);
isUserAdmin = false;
} }
} }
} }
@ -256,6 +331,16 @@
levelResult.error || null levelResult.error || null
); );
// Update local state
if (isMounted) {
userPubkey = levelResult.userPubkey;
userPubkeyHex = levelResult.userPubkeyHex;
// Check admin status after login
if (levelResult.userPubkeyHex) {
checkAdminStatus(levelResult.userPubkeyHex);
}
}
// Update activity tracking on successful login // Update activity tracking on successful login
if (isMounted) { if (isMounted) {
updateActivity(); updateActivity();
@ -309,6 +394,8 @@
if (typeof window === 'undefined' || !isMounted) return; if (typeof window === 'undefined' || !isMounted) return;
if (isMounted) { if (isMounted) {
userPubkey = null; userPubkey = null;
userPubkeyHex = null;
isUserAdmin = false;
// Reset user store // Reset user store
userStore.reset(); userStore.reset();
// Clear activity tracking // Clear activity tracking
@ -348,6 +435,9 @@
<a href="/signup" class:active={isActive('/signup')} onclick={() => closeMobileMenu()}>Register</a> <a href="/signup" class:active={isActive('/signup')} onclick={() => closeMobileMenu()}>Register</a>
<a href="/docs" class:active={isActive('/docs')} onclick={() => closeMobileMenu()}>Docs</a> <a href="/docs" class:active={isActive('/docs')} onclick={() => closeMobileMenu()}>Docs</a>
<a href="/api-docs" class:active={isActive('/api-docs')} onclick={() => closeMobileMenu()}>API Docs</a> <a href="/api-docs" class:active={isActive('/api-docs')} onclick={() => closeMobileMenu()}>API Docs</a>
{#if isUserAdmin}
<a href="/admin/repos" class:active={isActive('/admin/repos')} onclick={() => closeMobileMenu()}>Admin</a>
{/if}
</div> </div>
</nav> </nav>
<div class="auth-section"> <div class="auth-section">

35
src/lib/services/git/file-manager.ts

@ -445,10 +445,37 @@ export class FileManager {
logger.info({ npub, repoName, branchName }, '[FileManager.createBranch] Step 2: Creating empty commit'); logger.info({ npub, repoName, branchName }, '[FileManager.createBranch] Step 2: Creating empty commit');
// Create an empty commit pointing to the empty tree with author information // Create an empty commit pointing to the empty tree with author information
// Use --author flag to specify author identity (required when git config is not set) // git commit-tree doesn't support --author flag, so we use environment variables
const authorString = `${authorName} <${authorEmail}>`; const { spawn } = await import('child_process');
const commitHash = await git.raw(['commit-tree', '-m', `Initial commit on ${branchName}`, '--author', authorString, emptyTreeHash]); const commit = await new Promise<string>((resolve, reject) => {
const commit = commitHash.trim(); const env = {
...process.env,
GIT_AUTHOR_NAME: authorName,
GIT_AUTHOR_EMAIL: authorEmail,
GIT_COMMITTER_NAME: authorName,
GIT_COMMITTER_EMAIL: authorEmail
};
const proc = spawn('git', ['commit-tree', '-m', `Initial commit on ${branchName}`, emptyTreeHash], {
cwd: repoPath,
env
});
let output = '';
proc.stdout.on('data', (data) => { output += data.toString(); });
proc.stderr.on('data', (data) => {
const error = data.toString();
if (error.trim()) {
logger.warn({ npub, repoName, branchName, error }, '[FileManager.createBranch] commit-tree stderr');
}
});
proc.on('close', (code) => {
if (code === 0) {
resolve(output.trim());
} else {
reject(new Error(`commit-tree failed with code ${code}: ${output || 'no output'}`));
}
});
proc.on('error', reject);
});
logger.info({ npub, repoName, branchName, commit }, '[FileManager.createBranch] Step 2 complete: empty commit created'); logger.info({ npub, repoName, branchName, commit }, '[FileManager.createBranch] Step 2 complete: empty commit created');
logger.info({ npub, repoName, branchName, commit }, '[FileManager.createBranch] Step 3: Creating branch ref pointing to empty commit'); logger.info({ npub, repoName, branchName, commit }, '[FileManager.createBranch] Step 3: Creating branch ref pointing to empty commit');

76
src/lib/services/git/file-manager/commit-operations.ts

@ -85,17 +85,83 @@ export async function getCommitHistory(options: CommitHistoryOptions): Promise<C
// First try with the specified branch // First try with the specified branch
logOptions.from = branch; logOptions.from = branch;
log = await git.log(logOptions); log = await git.log(logOptions);
// If log.all is empty but we know there are commits, try --all as fallback
if (!log.all || log.all.length === 0) {
logger.debug({ npub, repoName, branch }, 'git.log() returned empty results, trying --all fallback');
delete logOptions.from;
log = await git.log(logOptions);
}
} catch (branchErr) { } catch (branchErr) {
// If branch doesn't exist or is ambiguous, try --all // If branch doesn't exist or is ambiguous, try --all
const errorMsg = branchErr instanceof Error ? branchErr.message : String(branchErr); const errorMsg = branchErr instanceof Error ? branchErr.message : String(branchErr);
if (errorMsg.includes('ambiguous') || errorMsg.includes('unknown') || errorMsg.includes('does not exist')) { logger.debug({ npub, repoName, branch, error: errorMsg }, 'git.log() failed, trying --all fallback');
logger.debug({ npub, repoName, branch, error: errorMsg }, 'Branch does not exist or is ambiguous, trying --all'); try {
delete logOptions.from; delete logOptions.from;
log = await git.log(logOptions); log = await git.log(logOptions);
} else { } catch (allErr) {
// Re-throw if it's a different error // If --all also fails, try using raw git command as last resort
throw branchErr; logger.debug({ npub, repoName, branch, error: allErr }, 'git.log() with --all also failed, trying raw git command');
try {
const rawLog = await git.raw(['log', '--all', `--max-count=${limit}`, '--format=%H|%s|%an|%ae|%ai', ...(path ? ['--', path] : [])]);
if (rawLog && rawLog.trim()) {
// Parse raw log output
const lines = rawLog.trim().split('\n').filter(l => l.trim());
const commits = lines.map(line => {
const [hash, ...rest] = line.split('|');
const message = rest.slice(0, -3).join('|'); // Message might contain |
const authorName = rest[rest.length - 3];
const authorEmail = rest[rest.length - 2];
const date = rest[rest.length - 1];
return {
hash: hash || '',
message: message || '',
author: `${authorName || 'Unknown'} <${authorEmail || ''}>`,
date: date || new Date().toISOString(),
files: [] // Can't get files from raw log easily
};
}).filter(c => c.hash);
logger.operation('Commit history retrieved via raw git', { npub, repoName, count: commits.length });
return commits;
}
} catch (rawErr) {
logger.error({ error: rawErr, npub, repoName, branch }, 'All methods failed to get commit history');
throw branchErr; // Throw original error
}
}
}
// Ensure log.all exists and has data
if (!log || !log.all || log.all.length === 0) {
logger.warn({ npub, repoName, branch, logResult: log }, 'git.log() returned empty results despite commits existing');
// Try one more time with raw command
try {
const rawLog = await git.raw(['log', '--all', `--max-count=${limit}`, '--format=%H|%s|%an|%ae|%ai', ...(path ? ['--', path] : [])]);
if (rawLog && rawLog.trim()) {
const lines = rawLog.trim().split('\n').filter(l => l.trim());
const commits = lines.map(line => {
const [hash, ...rest] = line.split('|');
const message = rest.slice(0, -3).join('|');
const authorName = rest[rest.length - 3];
const authorEmail = rest[rest.length - 2];
const date = rest[rest.length - 1];
return {
hash: hash || '',
message: message || '',
author: `${authorName || 'Unknown'} <${authorEmail || ''}>`,
date: date || new Date().toISOString(),
files: []
};
}).filter(c => c.hash);
logger.operation('Commit history retrieved via raw git (fallback)', { npub, repoName, count: commits.length });
return commits;
}
} catch (rawErr) {
logger.error({ error: rawErr, npub, repoName, branch }, 'Raw git command also failed');
} }
return [];
} }
const commits = log.all.map(commit => ({ const commits = log.all.map(commit => ({

272
src/lib/services/git/repo-manager.ts

@ -10,7 +10,7 @@
*/ */
import { existsSync, mkdirSync, accessSync, constants } from 'fs'; import { existsSync, mkdirSync, accessSync, constants } from 'fs';
import { join } from 'path'; import { join, resolve } from 'path';
import { spawn } from 'child_process'; import { spawn } from 'child_process';
import type { NostrEvent } from '../../types/nostr.js'; import type { NostrEvent } from '../../types/nostr.js';
import { GIT_DOMAIN } from '../../config.js'; import { GIT_DOMAIN } from '../../config.js';
@ -213,6 +213,11 @@ export class RepoManager {
announcementEvent: NostrEvent, announcementEvent: NostrEvent,
preferredDefaultBranch?: string preferredDefaultBranch?: string
): Promise<void> { ): Promise<void> {
// Declare variables outside try block so they're accessible in finally
const { FileManager } = await import('./file-manager.js');
const fileManager = new FileManager(this.repoRoot);
let workDir: string | undefined;
try { try {
// Get default branch from preferred branch, git config, environment, or use 'master' // Get default branch from preferred branch, git config, environment, or use 'master'
// Check preferred branch first (from user settings), then git's init.defaultBranch config // Check preferred branch first (from user settings), then git's init.defaultBranch config
@ -291,22 +296,56 @@ You can use this read-me file to explain the purpose of this repo to everyone wh
Your commits will all be signed by your Nostr keys and saved to the event files in the ./nostr folder. Your commits will all be signed by your Nostr keys and saved to the event files in the ./nostr folder.
`; `;
// Use FileManager to create the initial branch and files // Create both README.md and announcement in a single initial commit
const { FileManager } = await import('./file-manager.js'); // We'll use a worktree to write both files and commit them together
const fileManager = new FileManager(this.repoRoot); // If no branches exist, we'll create an orphan branch directly in the worktree
logger.info({ npub, repoName, defaultBranch }, 'Creating worktree for initial commit');
const { writeFile: writeFileFs, mkdir: mkdirFs } = await import('fs/promises');
const { join } = await import('path');
const { spawn } = await import('child_process');
// If no branches exist, create an orphan branch
// We already checked for existing branches above, so if existingBranches is empty, create one
if (existingBranches.length === 0) { if (existingBranches.length === 0) {
// Create orphan branch first (pass undefined for fromBranch to create orphan) // No branches exist - create worktree with orphan branch
await fileManager.createBranch(npub, repoName, defaultBranch, undefined); // We need to create the worktree manually with --orphan flag
} logger.info({ npub, repoName, defaultBranch }, 'No branches exist, creating orphan branch in worktree');
// Create a temporary worktree directory
// Use absolute path to ensure git worktree can find it
const worktreeBase = resolve(this.repoRoot, 'worktrees', npub, repoName);
await mkdirFs(worktreeBase, { recursive: true });
workDir = resolve(worktreeBase, `worktree-${Date.now()}`);
// Create worktree with orphan branch
// Note: --orphan requires -b flag to specify the branch name
// git worktree add requires an absolute path
await new Promise<void>((resolvePromise, reject) => {
const proc = spawn('git', ['worktree', 'add', '--orphan', '-b', defaultBranch, workDir!], {
cwd: repoPath,
stdio: ['ignore', 'pipe', 'pipe']
});
// Create both README.md and announcement in the initial commit let stdout = '';
// We'll use a worktree to write both files and commit them together let stderr = '';
const workDir = await fileManager.getWorktree(repoPath, defaultBranch, npub, repoName); proc.stdout?.on('data', (data: Buffer) => { stdout += data.toString(); });
const { writeFile: writeFileFs } = await import('fs/promises'); proc.stderr?.on('data', (data: Buffer) => { stderr += data.toString(); });
const { join } = await import('path');
proc.on('close', (code: number | null) => {
if (code === 0) {
resolvePromise();
} else {
reject(new Error(`git worktree add --orphan failed with code ${code}: ${stderr || stdout}`));
}
});
proc.on('error', reject);
});
logger.info({ npub, repoName, defaultBranch, workDir }, 'Orphan branch worktree created successfully');
} else {
// Branch exists - use normal worktree
workDir = await fileManager.getWorktree(repoPath, defaultBranch, npub, repoName);
logger.info({ npub, repoName, defaultBranch, workDir }, 'Worktree created successfully');
}
// Write README.md // Write README.md
const readmePath = join(workDir, 'README.md'); const readmePath = join(workDir, 'README.md');
@ -335,19 +374,35 @@ Your commits will all be signed by your Nostr keys and saved to the event files
logger.warn({ repoPath, npub, repoName, error: configError }, 'Failed to set git config, commit may fail'); logger.warn({ repoPath, npub, repoName, error: configError }, 'Failed to set git config, commit may fail');
} }
// Commit files together // Commit files together with "Initial commit to GitRepublic" message
await workGit.commit('Initial commit', filesToAdd, { // This will be the first and only commit on the branch
await workGit.commit('Initial commit to GitRepublic', filesToAdd, {
'--author': `${authorName} <${authorEmail}>` '--author': `${authorName} <${authorEmail}>`
}); });
// Clean up worktree
await fileManager.removeWorktree(repoPath, workDir);
logger.info({ npub, repoName, branch: defaultBranch }, 'Created initial branch and README.md'); logger.info({ npub, repoName, branch: defaultBranch }, 'Created initial branch and README.md');
} catch (err) { } catch (err) {
// Log but don't fail - initial README creation is nice-to-have // This is a critical error - we need the initial branch and commit for the repo to be usable
const sanitizedErr = sanitizeError(err); const sanitizedErr = sanitizeError(err);
logger.warn({ error: sanitizedErr, repoPath, npub, repoName }, 'Failed to create initial branch and README, continuing anyway'); logger.error({
error: sanitizedErr,
repoPath,
npub,
repoName,
errorMessage: err instanceof Error ? err.message : String(err),
errorStack: err instanceof Error ? err.stack : undefined
}, 'CRITICAL: Failed to create initial branch and README - repository will be empty');
// Re-throw so caller can handle it appropriately
throw err;
} finally {
// Clean up worktree (always, even on error)
if (workDir) {
try {
await fileManager.removeWorktree(repoPath, workDir);
} catch (cleanupErr) {
logger.warn({ error: cleanupErr, workDir, repoPath }, 'Failed to clean up worktree (non-critical)');
}
}
} }
} }
@ -513,34 +568,70 @@ Your commits will all be signed by your Nostr keys and saved to the event files
): Promise<{ success: boolean; needsAnnouncement?: boolean; announcement?: NostrEvent; error?: string; cloneUrls?: string[]; remoteUrls?: string[] }> { ): Promise<{ success: boolean; needsAnnouncement?: boolean; announcement?: NostrEvent; error?: string; cloneUrls?: string[]; remoteUrls?: string[] }> {
const repoPath = join(this.repoRoot, npub, `${repoName}.git`); const repoPath = join(this.repoRoot, npub, `${repoName}.git`);
// If repo already exists, check if it has an announcement // If repo already exists, check if it has commits (not just the directory)
if (existsSync(repoPath)) { if (existsSync(repoPath)) {
const hasAnnouncement = await this.announcementManager.hasAnnouncementInRepoFile(repoPath); try {
if (hasAnnouncement) { const git = simpleGit(repoPath);
return { success: true }; // Check if repo has any commits
} const commitCountStr = await git.raw(['rev-list', '--count', '--all']).catch(() => '0');
const commitCount = parseInt(commitCountStr.trim(), 10);
const hasCommits = !isNaN(commitCount) && commitCount > 0;
if (hasCommits) {
// Repo has commits, check if it has an announcement
const hasAnnouncement = await this.announcementManager.hasAnnouncementInRepoFile(repoPath);
if (hasAnnouncement) {
return { success: true };
}
// Repo exists but no announcement - use provided announcement or try to fetch from relays // Repo has commits but no announcement - use provided announcement or try to fetch from relays
let announcementToUse: NostrEvent | null | undefined = announcementEvent; let announcementToUse: NostrEvent | null | undefined = announcementEvent;
if (!announcementToUse) { if (!announcementToUse) {
const { requireNpubHex: requireNpubHexUtil } = await import('../../utils/npub-utils.js'); const { requireNpubHex: requireNpubHexUtil } = await import('../../utils/npub-utils.js');
const repoOwnerPubkey = requireNpubHexUtil(npub); const repoOwnerPubkey = requireNpubHexUtil(npub);
announcementToUse = await this.announcementManager.fetchAnnouncementFromRelays(repoOwnerPubkey, repoName); announcementToUse = await this.announcementManager.fetchAnnouncementFromRelays(repoOwnerPubkey, repoName);
} }
if (announcementToUse) { if (announcementToUse) {
// Save announcement to repo asynchronously (non-blocking) // Save announcement to repo asynchronously (non-blocking)
// We have the announcement from relays, so this is just for offline papertrail this.announcementManager.ensureAnnouncementInRepo(repoPath, announcementToUse)
this.announcementManager.ensureAnnouncementInRepo(repoPath, announcementToUse) .catch((err) => {
.catch((err) => { logger.warn({ error: err, repoPath, eventId: announcementToUse?.id },
logger.warn({ error: err, repoPath, eventId: announcementToUse?.id }, 'Failed to save announcement to repo (non-blocking, announcement available from relays)');
'Failed to save announcement to repo (non-blocking, announcement available from relays)'); });
}); return { success: true, announcement: announcementToUse };
return { success: true, announcement: announcementToUse }; }
// Repo has commits but no announcement found - needs announcement
return { success: false, needsAnnouncement: true };
} else {
// Repo exists but is empty - remove it so we can clone fresh
logger.info({ npub, repoName }, 'Repository exists but is empty, removing to clone fresh');
try {
const { rmSync } = await import('fs');
rmSync(repoPath, { recursive: true, force: true });
logger.info({ npub, repoName }, 'Removed empty repository directory');
} catch (rmErr) {
logger.warn({ error: rmErr, npub, repoName }, 'Failed to remove empty repository, will try to fetch into it');
// Continue - might be able to fetch into existing empty repo
}
// Fall through to fetch from remotes below
}
} catch (err) {
// Error checking commits - assume empty and try to fetch
logger.warn({ error: err, npub, repoName }, 'Error checking if repo has commits, will try to fetch from remotes');
// Try to remove and clone fresh
try {
const { rmSync } = await import('fs');
rmSync(repoPath, { recursive: true, force: true });
logger.info({ npub, repoName }, 'Removed repository directory after error checking commits');
} catch (rmErr) {
logger.warn({ error: rmErr, npub, repoName }, 'Failed to remove repository after error');
}
// Fall through to fetch from remotes below
} }
// Repo exists but no announcement found - needs announcement // Repo exists but is empty - continue to fetch from remotes below
return { success: false, needsAnnouncement: true };
} }
// If no announcement provided, try to fetch from relays // If no announcement provided, try to fetch from relays
@ -778,13 +869,92 @@ Your commits will all be signed by your Nostr keys and saved to the event files
throw new Error('Repository clone completed but repository path does not exist'); throw new Error('Repository clone completed but repository path does not exist');
} }
// Ensure announcement is saved to nostr/repo-events.jsonl (non-blocking - repo is usable without it) // After cloning, ensure default branch, README, and announcement are committed
// Fire and forget - we have the announcement from relays, so this is just for offline papertrail try {
this.announcementManager.ensureAnnouncementInRepo(repoPath, announcementEvent) const repoGit = simpleGit(repoPath);
.catch((verifyError) => {
// Announcement file creation is optional - log but don't fail // Check if repo has any commits
logger.warn({ error: verifyError, npub, repoName }, 'Failed to ensure announcement in repo, but repository is usable'); let hasCommits = false;
}); try {
const commitCountStr = await repoGit.raw(['rev-list', '--count', '--all']).catch(() => '0');
const commitCount = parseInt(commitCountStr.trim(), 10);
hasCommits = !isNaN(commitCount) && commitCount > 0;
} catch {
hasCommits = false;
}
// Get default branch preference
let defaultBranch = preferredDefaultBranch || process.env.DEFAULT_BRANCH || 'master';
// Check existing branches
let existingBranches: string[] = [];
try {
const branches = await repoGit.branch(['-a']);
existingBranches = branches.all
.map(b => b.replace(/^remotes\/origin\//, '').replace(/^remotes\//, '').replace(/^refs\/heads\//, ''))
.filter(b => !b.includes('HEAD'));
existingBranches = [...new Set(existingBranches)];
// If we have a preferred branch and it exists, use it
if (preferredDefaultBranch && existingBranches.includes(preferredDefaultBranch)) {
defaultBranch = preferredDefaultBranch;
} else if (existingBranches.length > 0) {
// Prefer existing branches that match common defaults
const preferredBranches = preferredDefaultBranch
? [preferredDefaultBranch, defaultBranch, 'main', 'master', 'dev']
: [defaultBranch, 'main', 'master', 'dev'];
for (const preferred of preferredBranches) {
if (existingBranches.includes(preferred)) {
defaultBranch = preferred;
break;
}
}
// If no match, use the first existing branch
if (!existingBranches.includes(defaultBranch)) {
defaultBranch = existingBranches[0];
}
}
} catch {
// No branches exist yet
}
// If repo has no commits, create initial branch and commit README + announcement
if (!hasCommits) {
logger.info({ npub, repoName, defaultBranch }, 'Repository has no commits, creating initial branch and commit');
try {
await this.createInitialBranchAndReadme(repoPath, npub, repoName, announcementEvent, preferredDefaultBranch);
logger.info({ npub, repoName, defaultBranch }, 'Successfully created initial branch and commit');
} catch (createError) {
logger.error({
error: createError,
npub,
repoName,
defaultBranch,
errorMessage: createError instanceof Error ? createError.message : String(createError),
errorStack: createError instanceof Error ? createError.stack : undefined
}, 'Failed to create initial branch and commit - this is critical');
// Re-throw so the outer catch can handle it
throw createError;
}
} else {
// Repo has commits - ensure default branch exists and README/announcement are committed
logger.info({ npub, repoName, defaultBranch, hasCommits }, 'Repository has commits, ensuring default branch and files');
// Ensure announcement is committed (blocking - we want it in the repo)
// This will use worktrees to checkout the default branch and commit
await this.announcementManager.ensureAnnouncementInRepo(repoPath, announcementEvent, undefined, preferredDefaultBranch);
// Ensure README exists and is committed (also uses worktrees)
await this.ensureReadmeExists(repoPath, npub, repoName, announcementEvent, preferredDefaultBranch);
}
} catch (postCloneError) {
// Log but don't fail - repo is cloned and usable
logger.warn({
error: postCloneError,
npub,
repoName
}, 'Failed to set up default branch/README/announcement after clone, but repository is usable');
}
logger.info({ npub, repoName }, 'Successfully fetched repository on-demand'); logger.info({ npub, repoName }, 'Successfully fetched repository on-demand');
return { success: true, announcement: announcementEvent }; return { success: true, announcement: announcementEvent };

221
src/lib/services/nostr/nostr-client.ts

@ -188,7 +188,7 @@ export class NostrClient {
} }
} }
async fetchEvents(filters: NostrFilter[]): Promise<NostrEvent[]> { async fetchEvents(filters: NostrFilter[], isWriteVerification: boolean = false): Promise<NostrEvent[]> {
// Strategy: Check persistent cache first, return immediately if available // Strategy: Check persistent cache first, return immediately if available
// Then fetch from relays in background and merge results // Then fetch from relays in background and merge results
@ -206,7 +206,8 @@ export class NostrClient {
logger.debug({ filters, cachedCount: memoryCached.length }, 'Returning cached events from memory'); logger.debug({ filters, cachedCount: memoryCached.length }, 'Returning cached events from memory');
// Return cached events immediately, but also fetch from relays in background to update cache // Return cached events immediately, but also fetch from relays in background to update cache
this.fetchAndMergeFromRelays(filters, memoryCached).catch(err => { // Background fetches are always normal (not write verification)
this.fetchAndMergeFromRelays(filters, memoryCached, false).catch(err => {
logger.debug({ error: err, filters }, 'Background fetch failed, using cached events'); logger.debug({ error: err, filters }, 'Background fetch failed, using cached events');
}); });
@ -219,7 +220,8 @@ export class NostrClient {
logger.debug({ filters, cachedCount: cachedEvents.length }, 'Returning cached events from IndexedDB'); logger.debug({ filters, cachedCount: cachedEvents.length }, 'Returning cached events from IndexedDB');
// Return cached events immediately, but also fetch from relays in background to update cache // Return cached events immediately, but also fetch from relays in background to update cache
this.fetchAndMergeFromRelays(filters, cachedEvents).catch(err => { // Background fetches are always normal (not write verification)
this.fetchAndMergeFromRelays(filters, cachedEvents, false).catch(err => {
logger.debug({ error: err, filters }, 'Background fetch failed, using cached events'); logger.debug({ error: err, filters }, 'Background fetch failed, using cached events');
}); });
@ -234,7 +236,7 @@ export class NostrClient {
} }
// 3. No cache available (or search query), fetch from relays // 3. No cache available (or search query), fetch from relays
return this.fetchAndMergeFromRelays(filters, []); return this.fetchAndMergeFromRelays(filters, [], isWriteVerification);
} }
/** /**
@ -312,8 +314,11 @@ export class NostrClient {
* Fetch events from relays and merge with existing events * Fetch events from relays and merge with existing events
* Never deletes valid events, only appends/integrates new ones * Never deletes valid events, only appends/integrates new ones
* Automatically falls back to fallback relays if primary relays fail * Automatically falls back to fallback relays if primary relays fail
* @param filters - Filters to query
* @param existingEvents - Existing events to merge with
* @param isWriteVerification - If true, uses full timeout (8s). If false, uses dynamic timeout (2s after first response)
*/ */
private async fetchAndMergeFromRelays(filters: NostrFilter[], existingEvents: NostrEvent[]): Promise<NostrEvent[]> { private async fetchAndMergeFromRelays(filters: NostrFilter[], existingEvents: NostrEvent[], isWriteVerification: boolean = false): Promise<NostrEvent[]> {
const events: NostrEvent[] = []; const events: NostrEvent[] = [];
// Sanitize all filters before sending to relays // Sanitize all filters before sending to relays
@ -322,53 +327,177 @@ export class NostrClient {
// Use nostr-tools SimplePool to fetch from all relays in parallel // Use nostr-tools SimplePool to fetch from all relays in parallel
// SimplePool handles connection management, retries, and error handling automatically // SimplePool handles connection management, retries, and error handling automatically
try { try {
// querySync takes a single filter, so we query each filter and combine results // For write verification, use full timeout. For normal fetches, use dynamic timeout
// Wrap each query individually to catch errors from individual relays if (isWriteVerification) {
const queryPromises = sanitizedFilters.map(filter => // Write verification: use full 8 second timeout
this.pool.querySync(this.relays, filter, { maxWait: 8000 }) const queryPromises = sanitizedFilters.map(filter =>
.catch(err => { this.pool.querySync(this.relays, filter, { maxWait: 8000 })
// Log individual relay errors but don't fail the entire request .catch(err => {
logger.debug({ error: err, filter, relays: this.relays }, 'Primary relay query failed, trying fallback'); logger.debug({ error: err, filter, relays: this.relays }, 'Primary relay query failed, trying fallback');
return []; // Return empty array for failed queries return []; // Return empty array for failed queries
}) })
); );
const results = await Promise.allSettled(queryPromises); const results = await Promise.allSettled(queryPromises);
let hasResults = false; let hasResults = false;
for (const result of results) { for (const result of results) {
if (result.status === 'fulfilled' && result.value.length > 0) { if (result.status === 'fulfilled' && result.value.length > 0) {
events.push(...result.value); events.push(...result.value);
hasResults = true; hasResults = true;
} else if (result.status === 'rejected') { } else if (result.status === 'rejected') {
// Log rejected promises (shouldn't happen since we catch above, but just in case) logger.debug({ error: result.reason }, 'Query promise rejected');
logger.debug({ error: result.reason }, 'Query promise rejected'); }
} }
}
// If no results from primary relays and we have fallback relays, try them // If no results from primary relays and we have fallback relays, try them
if (!hasResults && events.length === 0 && FALLBACK_NOSTR_RELAYS.length > 0) { if (!hasResults && events.length === 0 && FALLBACK_NOSTR_RELAYS.length > 0) {
logger.debug({ primaryRelays: this.relays, fallbackRelays: FALLBACK_NOSTR_RELAYS }, 'No results from primary relays, trying fallback relays'); logger.debug({ primaryRelays: this.relays, fallbackRelays: FALLBACK_NOSTR_RELAYS }, 'No results from primary relays, trying fallback relays');
try { try {
const fallbackPromises = sanitizedFilters.map(filter => const fallbackPromises = sanitizedFilters.map(filter =>
this.pool.querySync(FALLBACK_NOSTR_RELAYS, filter, { maxWait: 8000 }) this.pool.querySync(FALLBACK_NOSTR_RELAYS, filter, { maxWait: 8000 })
.catch(err => { .catch(err => {
logger.debug({ error: err, filter }, 'Fallback relay query failed'); logger.debug({ error: err, filter }, 'Fallback relay query failed');
return []; return [];
}) })
); );
const fallbackResults = await Promise.allSettled(fallbackPromises); const fallbackResults = await Promise.allSettled(fallbackPromises);
for (const result of fallbackResults) { for (const result of fallbackResults) {
if (result.status === 'fulfilled') { if (result.status === 'fulfilled') {
events.push(...result.value); events.push(...result.value);
}
}
if (events.length > 0) {
logger.info({ fallbackRelays: FALLBACK_NOSTR_RELAYS, eventCount: events.length }, 'Successfully fetched events from fallback relays');
} }
} catch (fallbackErr) {
logger.debug({ error: fallbackErr }, 'Fallback relay query failed completely');
} }
}
} else {
// Normal fetches: dynamic timeout - 2 seconds after first relay responds
let firstResponseTime: number | null = null;
const DYNAMIC_TIMEOUT_MS = 2000; // 2 seconds after first response
// Create queries for all filters
const baseQueryPromises = sanitizedFilters.map(filter =>
this.pool.querySync(this.relays, filter, { maxWait: 8000 })
.catch(err => {
logger.debug({ error: err, filter, relays: this.relays }, 'Primary relay query failed');
return []; // Return empty array for failed queries
})
);
if (events.length > 0) { // Wrap each query to track first response and apply dynamic timeout
logger.info({ fallbackRelays: FALLBACK_NOSTR_RELAYS, eventCount: events.length }, 'Successfully fetched events from fallback relays'); const queryPromises = baseQueryPromises.map((queryPromise, index) => {
return Promise.race([
queryPromise.then((results) => {
// Track when first response arrives (across all queries)
const now = Date.now();
if (firstResponseTime === null) {
firstResponseTime = now;
logger.debug({ filterIndex: index, firstResponseTime: now }, 'First relay responded, starting 2s timeout for other relays');
}
return results;
}),
// Dynamic timeout: if first response has arrived, timeout after 2 seconds from that point
new Promise<NostrEvent[]>((resolve) => {
const checkTimeout = () => {
if (firstResponseTime !== null) {
const elapsed = Date.now() - firstResponseTime;
if (elapsed >= DYNAMIC_TIMEOUT_MS) {
// Timeout reached - return empty array (query from faster relay already got results)
resolve([]);
} else {
// Check again after remaining time
setTimeout(checkTimeout, DYNAMIC_TIMEOUT_MS - elapsed);
}
} else {
// First response hasn't arrived yet, check again in 100ms
setTimeout(checkTimeout, 100);
}
};
checkTimeout();
// Maximum timeout of 8 seconds to prevent hanging forever
setTimeout(() => resolve([]), 8000);
})
]);
});
const results = await Promise.allSettled(queryPromises);
let hasResults = false;
for (const result of results) {
if (result.status === 'fulfilled' && result.value.length > 0) {
events.push(...result.value);
hasResults = true;
} else if (result.status === 'rejected') {
logger.debug({ error: result.reason }, 'Query promise rejected');
}
}
// If no results from primary relays and we have fallback relays, try them (with dynamic timeout too)
if (!hasResults && events.length === 0 && FALLBACK_NOSTR_RELAYS.length > 0) {
logger.debug({ primaryRelays: this.relays, fallbackRelays: FALLBACK_NOSTR_RELAYS }, 'No results from primary relays, trying fallback relays');
try {
// Reset first response time for fallback relays
firstResponseTime = null;
// Create queries for all filters on fallback relays
const fallbackBaseQueryPromises = sanitizedFilters.map(filter =>
this.pool.querySync(FALLBACK_NOSTR_RELAYS, filter, { maxWait: 8000 })
.catch(err => {
logger.debug({ error: err, filter }, 'Fallback relay query failed');
return [];
})
);
// Wrap each query to track first response and apply dynamic timeout
const fallbackQueryPromises = fallbackBaseQueryPromises.map((queryPromise, index) => {
return Promise.race([
queryPromise.then((results) => {
const now = Date.now();
if (firstResponseTime === null) {
firstResponseTime = now;
logger.debug({ filterIndex: index, firstResponseTime: now }, 'First fallback relay responded, starting 2s timeout');
}
return results;
}),
new Promise<NostrEvent[]>((resolve) => {
const checkTimeout = () => {
if (firstResponseTime !== null) {
const elapsed = Date.now() - firstResponseTime;
if (elapsed >= DYNAMIC_TIMEOUT_MS) {
resolve([]);
} else {
setTimeout(checkTimeout, DYNAMIC_TIMEOUT_MS - elapsed);
}
} else {
setTimeout(checkTimeout, 100);
}
};
checkTimeout();
setTimeout(() => resolve([]), 8000);
})
]);
});
const fallbackResults = await Promise.allSettled(fallbackQueryPromises);
for (const result of fallbackResults) {
if (result.status === 'fulfilled') {
events.push(...result.value);
}
}
if (events.length > 0) {
logger.info({ fallbackRelays: FALLBACK_NOSTR_RELAYS, eventCount: events.length }, 'Successfully fetched events from fallback relays');
}
} catch (fallbackErr) {
logger.debug({ error: fallbackErr }, 'Fallback relay query failed completely');
} }
} catch (fallbackErr) {
logger.debug({ error: fallbackErr }, 'Fallback relay query failed completely');
} }
} }
} catch (err) { } catch (err) {

2
src/lib/services/nostr/relay-write-proof.ts

@ -121,7 +121,7 @@ export async function verifyRelayWriteProof(
authors: [userPubkey], authors: [userPubkey],
limit: 1 limit: 1
} }
]); ], true); // Pass true for isWriteVerification to use full timeout
if (events.length > 0) { if (events.length > 0) {
break; // Found the event, no need to retry break; // Found the event, no need to retry

106
src/lib/utils/admin-check.ts

@ -0,0 +1,106 @@
/**
* Utility for checking admin access
* Admin is determined by ADMIN_NPUB environment variable
*/
import { nip19 } from 'nostr-tools';
/**
 * Resolve the admin npub.
 * Reads the ADMIN_NPUB environment variable; when it is absent or blank,
 * falls back to the default npub baked into docker-compose.yml.
 * Returns null only when no `process` global exists (non-Node runtime).
 */
function getAdminNpub(): string | null {
  if (typeof process === 'undefined') return null;
  const configured = process.env?.ADMIN_NPUB?.trim();
  if (configured) return configured;
  // Not configured (or whitespace-only) - use the docker-compose.yml default.
  const fallback = 'npub12umrfdjgvdxt45g0y3ghwcyfagssjrv5qlm3t6pu2aa5vydwdmwq8q0z04';
  console.log('[admin-check] ADMIN_NPUB not set, using default:', fallback);
  return fallback;
}
/**
 * Decode the configured admin npub into a hex pubkey.
 * Returns null when no admin npub is available, when it is not an npub,
 * or when decoding fails.
 */
function getAdminPubkeyHex(): string | null {
  const adminNpub = getAdminNpub();
  const canLog = typeof process !== 'undefined';
  if (!adminNpub) {
    if (canLog) console.log('[admin-check] No ADMIN_NPUB environment variable set');
    return null;
  }
  let hex: string | null = null;
  try {
    const decoded = nip19.decode(adminNpub);
    hex = decoded.type === 'npub' ? (decoded.data as string) : null;
  } catch (err) {
    // Invalid npub format - report and give up.
    if (canLog) console.warn('[admin-check] Failed to decode admin npub:', err);
    return null;
  }
  if (hex !== null && canLog) {
    console.log('[admin-check] Admin npub decoded to hex:', hex.substring(0, 16) + '...');
  }
  return hex;
}
/**
 * Determine whether the given user is the configured admin.
 * Accepts the pubkey either as a 64-character hex string or as an npub.
 * @param userPubkey - The user's pubkey in hex format or npub format (or null)
 * @returns true only when the pubkey matches the configured admin pubkey
 */
export function isAdmin(userPubkey: string | null): boolean {
  if (!userPubkey) return false;

  const adminPubkeyHex = getAdminPubkeyHex();
  if (!adminPubkeyHex) return false;

  // Normalize the caller-supplied pubkey to lowercase hex.
  let userPubkeyHex: string | null = null;
  if (/^[0-9a-f]{64}$/i.test(userPubkey)) {
    userPubkeyHex = userPubkey.toLowerCase();
  } else {
    // Not hex - attempt to decode as an npub.
    try {
      const decoded = nip19.decode(userPubkey);
      if (decoded.type === 'npub') {
        userPubkeyHex = (decoded.data as string).toLowerCase();
      }
    } catch {
      // Neither hex nor a valid npub.
      return false;
    }
  }
  if (!userPubkeyHex) return false;

  const match = userPubkeyHex === adminPubkeyHex.toLowerCase();
  if (typeof process !== 'undefined') {
    console.log('[admin-check] Checking admin status:');
    console.log('[admin-check] User pubkey hex:', userPubkeyHex);
    console.log('[admin-check] Admin pubkey hex:', adminPubkeyHex.toLowerCase());
    console.log('[admin-check] Match:', match);
  }
  return match;
}
/**
 * Check if admin is configured.
 * NOTE(review): getAdminNpub() falls back to a hard-coded default npub when
 * ADMIN_NPUB is unset, so under a Node runtime this effectively always
 * returns true; it only returns false where `process` is undefined.
 */
export function isAdminConfigured(): boolean {
  return getAdminNpub() !== null;
}

15
src/lib/utils/nostr-links.ts

@ -381,11 +381,18 @@ export function getReferencedEventFromDiscussion(
event: NostrEvent, event: NostrEvent,
eventCache: Map<string, NostrEvent> eventCache: Map<string, NostrEvent>
): NostrEvent | undefined { ): NostrEvent | undefined {
return getReferencedEventWithTagType(event, eventCache)?.event;
}
export function getReferencedEventWithTagType(
event: NostrEvent,
eventCache: Map<string, NostrEvent>
): { event: NostrEvent; tagType: 'e' | 'a' | 'q' } | undefined {
// Check e-tag // Check e-tag
const eTag = event.tags.find(t => t[0] === 'e' && t[1])?.[1]; const eTag = event.tags.find(t => t[0] === 'e' && t[1])?.[1];
if (eTag) { if (eTag) {
const referenced = eventCache.get(eTag); const referenced = eventCache.get(eTag);
if (referenced) return referenced; if (referenced) return { event: referenced, tagType: 'e' };
} }
// Check a-tag // Check a-tag
@ -396,18 +403,20 @@ export function getReferencedEventFromDiscussion(
const kind = parseInt(parts[0]); const kind = parseInt(parts[0]);
const pubkey = parts[1]; const pubkey = parts[1];
const dTag = parts[2]; const dTag = parts[2];
return Array.from(eventCache.values()).find(e => const referenced = Array.from(eventCache.values()).find(e =>
e.kind === kind && e.kind === kind &&
e.pubkey === pubkey && e.pubkey === pubkey &&
e.tags.find(t => t[0] === 'd' && t[1] === dTag) e.tags.find(t => t[0] === 'd' && t[1] === dTag)
); );
if (referenced) return { event: referenced, tagType: 'a' };
} }
} }
// Check q-tag // Check q-tag
const qTag = event.tags.find(t => t[0] === 'q' && t[1])?.[1]; const qTag = event.tags.find(t => t[0] === 'q' && t[1])?.[1];
if (qTag) { if (qTag) {
return eventCache.get(qTag); const referenced = eventCache.get(qTag);
if (referenced) return { event: referenced, tagType: 'q' };
} }
return undefined; return undefined;

423
src/routes/admin/repos/+page.svelte

@ -0,0 +1,423 @@
<script lang="ts">
import { onMount } from 'svelte';
import { goto } from '$app/navigation';
import { userStore } from '$lib/stores/user-store.js';
interface AdminRepo {
npub: string;
repoName: string;
fullPath: string;
size: number;
lastModified: number;
createdAt: number;
}
let repos = $state<AdminRepo[]>([]);
let loading = $state(true);
let error = $state<string | null>(null);
let deleting = $state<Set<string>>(new Set());
let totalSize = $state(0);
// Check if user has admin access via API
let accessChecked = $state(false);
let hasAccess = $state(false);
// Verify admin access for the current user by calling the server-side
// /api/admin/check endpoint (admin status is decided server-side via the
// X-User-Pubkey header). Side effects: sets the `hasAccess` and
// `accessChecked` state vars, and redirects non-admins to /repos.
async function checkAdminAccess() {
  // SSR guard: this check only runs in the browser.
  if (typeof window === 'undefined') return;
  const user = $userStore;
  if (!user || !user.userPubkeyHex) {
    hasAccess = false;
    accessChecked = true;
    // Redirect to repos page (not splash) if not logged in.
    // The 100ms delay lets the state update render before navigating.
    setTimeout(() => {
      goto('/repos');
    }, 100);
    return;
  }
  try {
    const response = await fetch('/api/admin/check', {
      headers: {
        'X-User-Pubkey': user.userPubkeyHex
      }
    });
    if (response.ok) {
      const data = await response.json();
      // Strict comparison: anything other than `true` denies access.
      hasAccess = data.isAdmin === true;
      console.log('[Admin] Admin check result:', data.isAdmin, 'for user:', user.userPubkeyHex.substring(0, 16) + '...');
    } else {
      hasAccess = false;
      console.warn('[Admin] Admin check failed:', response.status);
    }
  } catch (err) {
    // Network/parse failure - fail closed.
    console.warn('[Admin] Failed to check admin status:', err);
    hasAccess = false;
  } finally {
    accessChecked = true;
    // Redirect to repos page (not splash) if user doesn't have admin access.
    // Only for logged-in users - logged-out users were already redirected above.
    if (!hasAccess && user && user.userPubkeyHex) {
      setTimeout(() => {
        goto('/repos');
      }, 100);
    }
  }
}
// Check admin access and load repos on mount
onMount(() => {
checkAdminAccess().then(() => {
// Only load repos if user has access
if (hasAccess) {
loadRepos();
}
});
});
/**
 * Fetch the full repository list from the admin API and update local state
 * (`repos`, `totalSize`). Requires a logged-in user; the pubkey is sent via
 * the X-User-Pubkey header. Errors land in the `error` state var.
 */
async function loadRepos() {
  loading = true;
  error = null;
  try {
    const pubkey = $userStore?.userPubkeyHex;
    if (!pubkey) {
      throw new Error('Not authenticated');
    }
    const response = await fetch('/api/admin/repos', {
      headers: { 'X-User-Pubkey': pubkey }
    });
    if (!response.ok) {
      // Prefer the server-provided error message; fall back to status text.
      const data = await response.json().catch(() => ({ error: response.statusText }));
      throw new Error(data.error || `Failed to load repositories: ${response.statusText}`);
    }
    const data = await response.json();
    repos = data.repos || [];
    totalSize = data.totalSize || 0;
  } catch (e) {
    error = e instanceof Error ? e.message : 'Failed to load repositories';
    console.error('[Admin] Failed to load repos:', e);
  } finally {
    loading = false;
  }
}
/**
 * Delete a repository after user confirmation, then update local state.
 *
 * Bug fix: `deleting` is a plain Set held in `$state`; Svelte 5 does not
 * proxy Set mutations (.add/.delete), so the in-flight/disabled state never
 * re-rendered. We now reassign a fresh Set on every change so reactivity
 * triggers.
 *
 * @param npub - Repository owner's npub
 * @param repoName - Repository name
 */
async function deleteRepo(npub: string, repoName: string) {
  const repoKey = `${npub}/${repoName}`;
  if (!confirm(`Are you sure you want to delete ${repoName}? This action cannot be undone.`)) {
    return;
  }
  // Reassign (not mutate) so the $state Set change is observed by the UI.
  deleting = new Set([...deleting, repoKey]);
  error = null;
  try {
    const user = $userStore;
    if (!user?.userPubkeyHex) {
      throw new Error('Not authenticated');
    }
    const response = await fetch(`/api/repos/${npub}/${repoName}/delete`, {
      method: 'DELETE',
      headers: {
        'X-User-Pubkey': user.userPubkeyHex
      }
    });
    if (!response.ok) {
      const data = await response.json().catch(() => ({ error: response.statusText }));
      throw new Error(data.error || `Failed to delete repository: ${response.statusText}`);
    }
    // Remove from list
    repos = repos.filter(r => !(r.npub === npub && r.repoName === repoName));
    totalSize = repos.reduce((sum, repo) => sum + repo.size, 0);
  } catch (e) {
    error = e instanceof Error ? e.message : 'Failed to delete repository';
    console.error('[Admin] Failed to delete repo:', e);
    alert(error);
  } finally {
    // Reassign again so the delete button re-enables reactively.
    const next = new Set(deleting);
    next.delete(repoKey);
    deleting = next;
  }
}
/**
 * Format a byte count as a human-readable string (e.g. "1.5 KB").
 *
 * Fixes: the unit index is now clamped to the sizes array, so values
 * >= 1 TB no longer render as "... undefined" (the old array stopped at GB),
 * and sub-1-byte / negative / non-finite inputs no longer produce
 * "NaN undefined" - they render as "0 B" or plain bytes instead.
 *
 * @param bytes - Size in bytes
 * @returns Value rounded to 2 decimal places with a unit suffix
 */
function formatBytes(bytes: number): string {
  if (!Number.isFinite(bytes) || bytes <= 0) return '0 B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
  // Clamp: huge values use the largest unit instead of indexing past the
  // array; fractional values below 1 byte stay in the 'B' bucket.
  const i = Math.min(Math.max(0, Math.floor(Math.log(bytes) / Math.log(k))), sizes.length - 1);
  return Math.round(bytes / Math.pow(k, i) * 100) / 100 + ' ' + sizes[i];
}
function formatDate(timestamp: number): string {
return new Date(timestamp).toLocaleString();
}
</script>
<svelte:head>
<title>Admin - Repositories</title>
</svelte:head>
<div class="admin-page">
<div class="admin-header">
<h1>Repository Administration</h1>
<button onclick={loadRepos} disabled={loading} class="refresh-button">
{loading ? 'Loading...' : 'Refresh'}
</button>
</div>
{#if !accessChecked}
<div class="loading">Checking access...</div>
{:else if !hasAccess}
<div class="error-message">Access denied. Admin privileges required.</div>
{:else}
{#if error}
<div class="error-message">
{error}
</div>
{/if}
{#if loading}
<div class="loading">Loading repositories...</div>
{:else}
<div class="stats">
<div class="stat">
<span class="stat-label">Total Repositories:</span>
<span class="stat-value">{repos.length}</span>
</div>
<div class="stat">
<span class="stat-label">Total Size:</span>
<span class="stat-value">{formatBytes(totalSize)}</span>
</div>
</div>
<div class="repos-table-container">
<table class="repos-table">
<thead>
<tr>
<th>Owner (npub)</th>
<th>Repository Name</th>
<th>Size</th>
<th>Last Modified</th>
<th>Created</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{#each repos as repo (repo.npub + repo.repoName)}
<tr>
<td class="npub-cell">
<code>{repo.npub.substring(0, 20)}...</code>
</td>
<td class="repo-name-cell">
<a href="/repos/{repo.npub}/{repo.repoName}" target="_blank">
{repo.repoName}
</a>
</td>
<td>{formatBytes(repo.size)}</td>
<td>{formatDate(repo.lastModified)}</td>
<td>{formatDate(repo.createdAt)}</td>
<td class="actions-cell">
<button
onclick={() => deleteRepo(repo.npub, repo.repoName)}
disabled={deleting.has(`${repo.npub}/${repo.repoName}`)}
class="delete-button"
title="Delete repository"
>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<path d="M3 6h18M19 6v14a2 2 0 0 1-2 2H7a2 2 0 0 1-2-2V6m3 0V4a2 2 0 0 1 2-2h4a2 2 0 0 1 2 2v2"/>
<line x1="10" y1="11" x2="10" y2="17"/>
<line x1="14" y1="11" x2="14" y2="17"/>
</svg>
</button>
</td>
</tr>
{:else}
<tr>
<td colspan="6" class="empty-message">No repositories found</td>
</tr>
{/each}
</tbody>
</table>
</div>
{/if}
{/if}
</div>
<style>
.admin-page {
max-width: 1400px;
margin: 0 auto;
padding: 2rem;
}
.admin-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 2rem;
}
.admin-header h1 {
margin: 0;
font-size: 2rem;
}
.refresh-button {
padding: 0.5rem 1rem;
background: var(--button-primary);
color: var(--accent-text, #ffffff);
border: none;
border-radius: 4px;
cursor: pointer;
font-size: 0.9rem;
font-family: 'IBM Plex Serif', serif;
transition: background 0.2s ease;
}
.refresh-button:hover:not(:disabled) {
background: var(--button-primary-hover);
}
.refresh-button:disabled {
opacity: 0.6;
cursor: not-allowed;
}
.error-message {
background: var(--error-bg);
color: var(--error-text);
padding: 1rem;
border-radius: 4px;
margin-bottom: 1rem;
border: 1px solid var(--error-text);
}
.loading {
text-align: center;
padding: 2rem;
color: var(--text-muted);
}
.stats {
display: flex;
gap: 2rem;
margin-bottom: 2rem;
padding: 1rem;
background: var(--bg-secondary);
border: 1px solid var(--border-color);
border-radius: 4px;
}
.stat {
display: flex;
flex-direction: column;
gap: 0.25rem;
}
.stat-label {
font-size: 0.875rem;
color: var(--text-muted);
}
.stat-value {
font-size: 1.25rem;
font-weight: bold;
color: var(--text-primary);
}
.repos-table-container {
overflow-x: auto;
background: var(--card-bg);
border: 1px solid var(--border-color);
border-radius: 4px;
box-shadow: 0 2px 4px var(--shadow-color-light);
}
.repos-table {
width: 100%;
border-collapse: collapse;
}
.repos-table thead {
background: var(--bg-secondary);
}
.repos-table th {
padding: 1rem;
text-align: left;
font-weight: 600;
border-bottom: 2px solid var(--border-color);
color: var(--text-primary);
}
.repos-table td {
padding: 1rem;
border-bottom: 1px solid var(--border-light);
color: var(--text-primary);
}
.repos-table tbody tr:hover {
background: var(--bg-tertiary);
}
.npub-cell code {
font-size: 0.875rem;
color: var(--text-secondary);
background: var(--bg-secondary);
padding: 0.25rem 0.5rem;
border-radius: 3px;
border: 1px solid var(--border-color);
}
.repo-name-cell a {
color: var(--link-color);
text-decoration: none;
font-weight: 500;
}
.repo-name-cell a:hover {
color: var(--link-hover);
text-decoration: underline;
}
.actions-cell {
text-align: center;
}
.delete-button {
background: transparent;
border: none;
color: var(--error-text);
cursor: pointer;
padding: 0.5rem;
border-radius: 4px;
display: inline-flex;
align-items: center;
justify-content: center;
transition: background 0.2s;
}
.delete-button:hover:not(:disabled) {
background: var(--error-bg);
}
.delete-button:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.empty-message {
text-align: center;
padding: 2rem;
color: var(--text-muted);
}
</style>

47
src/routes/api/admin/check/+server.ts

@ -0,0 +1,47 @@
/**
* API endpoint to check if current user is admin
*/
import { json } from '@sveltejs/kit';
import type { RequestHandler } from '@sveltejs/kit';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { isAdmin } from '$lib/utils/admin-check.js';
import { nip19 } from 'nostr-tools';
import logger from '$lib/services/logger.js';
export const GET: RequestHandler = async (event) => {
	// Report whether the requesting user holds admin privileges. The pubkey is
	// taken from the request context when present; otherwise it is resolved
	// from the X-User-Pubkey header, accepted either as 64-char hex or as an
	// npub (decoded via nip19). Unresolvable callers get { isAdmin: false }.
	const resolveHeaderPubkey = (): string | null => {
		const userPubkey = event.request.headers.get('X-User-Pubkey') ||
			event.request.headers.get('x-user-pubkey');
		if (!userPubkey) return null;
		// Already hex?
		if (/^[0-9a-f]{64}$/i.test(userPubkey)) {
			return userPubkey.toLowerCase();
		}
		// Try to decode as npub
		try {
			const decoded = nip19.decode(userPubkey);
			if (decoded.type === 'npub') {
				return decoded.data as string;
			}
		} catch (err) {
			logger.debug({ error: err, userPubkey }, 'Failed to decode user pubkey as npub');
		}
		return null;
	};

	const requestContext = extractRequestContext(event);
	const userPubkeyHex = requestContext.userPubkeyHex || resolveHeaderPubkey();
	if (!userPubkeyHex) {
		return json({ isAdmin: false });
	}
	const adminStatus = isAdmin(userPubkeyHex);
	logger.debug({ userPubkeyHex: userPubkeyHex.substring(0, 16) + '...', isAdmin: adminStatus }, 'Admin check result');
	return json({ isAdmin: adminStatus });
};

146
src/routes/api/admin/repos/+server.ts

@ -0,0 +1,146 @@
/**
* Admin API endpoint for listing all repositories
* Only accessible to users with unlimited access
*/
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from '@sveltejs/kit';
import { readdir, stat } from 'fs/promises';
import { join, resolve } from 'path';
import { existsSync } from 'fs';
import { extractRequestContext } from '$lib/utils/api-context.js';
import { isAdmin } from '$lib/utils/admin-check.js';
import { handleApiError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import logger from '$lib/services/logger.js';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT
: '/repos';
// One filesystem repository as returned by the admin listing endpoint.
interface AdminRepoItem {
	npub: string;         // owner directory name (bech32 npub, taken from the dir on disk)
	repoName: string;     // repository name without the trailing ".git"
	fullPath: string;     // path to the bare repo directory under the repo root
	size: number;         // approximate on-disk size in bytes (recursive file-size sum)
	lastModified: number; // mtime of the repo directory, epoch milliseconds
	createdAt: number;    // birthtime of the repo directory (ctime fallback), epoch milliseconds
}
/**
 * Scan filesystem for all repositories (admin view).
 *
 * Walks every `npub*` directory under the repo root, collecting each `*.git`
 * entry with its approximate on-disk size and timestamps. Results are sorted
 * most-recently-modified first. Failures on individual users/repos/files are
 * logged and skipped so one bad entry cannot abort the whole scan; only a
 * failure to read the root itself is re-thrown.
 */
async function scanAllRepos(): Promise<AdminRepoItem[]> {
	const repos: AdminRepoItem[] = [];
	if (!existsSync(repoRoot)) {
		return repos;
	}
	// Recursively sum file sizes under a directory. Hoisted out of the repo
	// loop (the original re-created this closure per repository). Node's
	// readdir never yields '.' or '..', so directories recurse unconditionally.
	const calculateSize = async (dir: string): Promise<number> => {
		let total = 0;
		const entries = await readdir(dir, { withFileTypes: true });
		for (const entry of entries) {
			const fullPath = join(dir, entry.name);
			if (entry.isDirectory()) {
				total += await calculateSize(fullPath);
			} else {
				try {
					const fileStats = await stat(fullPath);
					total += fileStats.size;
				} catch {
					// Ignore errors for individual files
				}
			}
		}
		return total;
	};
	try {
		// Read all user directories
		const userDirs = await readdir(repoRoot);
		for (const userDir of userDirs) {
			// Skip anything that doesn't look like an npub directory
			if (!userDir.startsWith('npub') || userDir.length < 60) continue;
			const userPath = join(repoRoot, userDir);
			try {
				const stats = await stat(userPath);
				if (!stats.isDirectory()) continue;
				// Read repos for this user
				const repoFiles = await readdir(userPath);
				for (const repoFile of repoFiles) {
					if (!repoFile.endsWith('.git')) continue;
					const repoName = repoFile.replace(/\.git$/, '');
					const repoPath = join(userPath, repoFile);
					try {
						const repoStats = await stat(repoPath);
						if (!repoStats.isDirectory()) continue;
						// Approximate size; fall back to 0 if the walk fails
						let size = 0;
						try {
							size = await calculateSize(repoPath);
						} catch {
							size = 0;
						}
						repos.push({
							npub: userDir,
							repoName,
							fullPath: repoPath,
							size,
							lastModified: repoStats.mtime.getTime(),
							// birthtime can be epoch 0 on some filesystems; fall back to ctime
							createdAt: repoStats.birthtime.getTime() || repoStats.ctime.getTime()
						});
					} catch (err) {
						logger.warn({ error: err, repoPath }, 'Failed to stat repo');
					}
				}
			} catch (err) {
				logger.warn({ error: err, userPath }, 'Failed to read user directory');
			}
		}
	} catch (err) {
		logger.error({ error: err }, 'Failed to scan repos');
		throw err;
	}
	// Sort by last modified (most recent first)
	repos.sort((a, b) => b.lastModified - a.lastModified);
	return repos;
}
export const GET: RequestHandler = async (event) => {
try {
const requestContext = extractRequestContext(event);
const userPubkeyHex = requestContext.userPubkeyHex;
if (!userPubkeyHex) {
return handleAuthorizationError('Authentication required');
}
// Check if user is admin
if (!isAdmin(userPubkeyHex)) {
return handleAuthorizationError('Admin access required');
}
const repos = await scanAllRepos();
return json({
repos,
total: repos.length,
totalSize: repos.reduce((sum, repo) => sum + repo.size, 0)
});
} catch (err) {
return handleApiError(err, { operation: 'listAdminRepos' }, 'Failed to list repositories');
}
};

20
src/routes/api/repos/[npub]/[repo]/clone/+server.ts

@ -303,10 +303,22 @@ export const POST: RequestHandler = async (event) => {
}, 'Repository announcement clone URLs'); }, 'Repository announcement clone URLs');
// Attempt to clone the repository // Attempt to clone the repository
logger.debug({ npub, repo, preferredDefaultBranch, hasAnnouncement: !!announcementEvent }, 'Calling fetchRepoOnDemand');
const result = await repoManager.fetchRepoOnDemand(npub, repo, announcementEvent, preferredDefaultBranch); const result = await repoManager.fetchRepoOnDemand(npub, repo, announcementEvent, preferredDefaultBranch);
logger.debug({
npub,
repo,
success: result.success,
error: result.error,
needsAnnouncement: result.needsAnnouncement,
cloneUrls: result.cloneUrls?.length || 0,
remoteUrls: result.remoteUrls?.length || 0
}, 'fetchRepoOnDemand result');
if (!result.success) { if (!result.success) {
if (result.needsAnnouncement) { if (result.needsAnnouncement) {
logger.error({ npub, repo }, 'Clone failed: Repository announcement is required');
throw handleValidationError( throw handleValidationError(
'Repository announcement is required. Please provide an announcement event or create one.', 'Repository announcement is required. Please provide an announcement event or create one.',
{ operation: 'cloneRepo', npub, repo } { operation: 'cloneRepo', npub, repo }
@ -323,16 +335,18 @@ export const POST: RequestHandler = async (event) => {
} else if (result.remoteUrls && result.remoteUrls.length === 0) { } else if (result.remoteUrls && result.remoteUrls.length === 0) {
errorMessage += ' No accessible remote clone URLs found.'; errorMessage += ' No accessible remote clone URLs found.';
} else if (result.cloneUrls && result.cloneUrls.length > 0) { } else if (result.cloneUrls && result.cloneUrls.length > 0) {
errorMessage += ` Attempted to clone from: ${result.cloneUrls.join(', ')}`; errorMessage += ` Attempted to clone from: ${result.cloneUrls.slice(0, 3).join(', ')}${result.cloneUrls.length > 3 ? '...' : ''}`;
} }
logger.error({ logger.error({
npub, npub,
repo, repo,
error: result.error, error: result.error,
errorMessage,
cloneUrls: result.cloneUrls, cloneUrls: result.cloneUrls,
remoteUrls: result.remoteUrls remoteUrls: result.remoteUrls,
}, 'Failed to clone repository'); needsAnnouncement: result.needsAnnouncement
}, 'Failed to clone repository - fetchRepoOnDemand returned success: false');
throw handleApiError( throw handleApiError(
new Error(result.error || 'Failed to clone repository from remote URLs'), new Error(result.error || 'Failed to clone repository from remote URLs'),

26
src/routes/api/repos/[npub]/[repo]/delete/+server.ts

@ -12,38 +12,14 @@ import { createRepoGetHandler } from '$lib/utils/api-handlers.js';
import type { RepoRequestContext } from '$lib/utils/api-context.js'; import type { RepoRequestContext } from '$lib/utils/api-context.js';
import { handleApiError, handleAuthorizationError } from '$lib/utils/error-handler.js'; import { handleApiError, handleAuthorizationError } from '$lib/utils/error-handler.js';
import { auditLogger } from '$lib/services/security/audit-logger.js'; import { auditLogger } from '$lib/services/security/audit-logger.js';
import { nip19 } from 'nostr-tools';
import logger from '$lib/services/logger.js'; import logger from '$lib/services/logger.js';
import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js'; import { repoCache, RepoCache } from '$lib/services/git/repo-cache.js';
import { isAdmin } from '$lib/utils/admin-check.js';
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? process.env.GIT_REPO_ROOT ? process.env.GIT_REPO_ROOT
: '/repos'; : '/repos';
// Admin pubkeys (can be set via environment variable)
const ADMIN_PUBKEYS = (typeof process !== 'undefined' && process.env?.ADMIN_PUBKEYS
? process.env.ADMIN_PUBKEYS.split(',').map(p => p.trim()).filter(p => p.length > 0)
: []) as string[];
/**
* Check if user is admin
*/
function isAdmin(userPubkeyHex: string | null): boolean {
if (!userPubkeyHex) return false;
return ADMIN_PUBKEYS.some(adminPubkey => {
// Support both hex and npub formats
try {
const decoded = nip19.decode(adminPubkey);
if (decoded.type === 'npub') {
return decoded.data === userPubkeyHex;
}
} catch {
// Not an npub, compare as hex
}
return adminPubkey.toLowerCase() === userPubkeyHex.toLowerCase();
});
}
/** /**
* Check if user is repo owner * Check if user is repo owner
*/ */

7
src/routes/api/repos/[npub]/[repo]/readme/+server.ts

@ -120,11 +120,16 @@ export const GET: RequestHandler = createRepoGetHandler(
return json({ found: false }); return json({ found: false });
} }
// Determine content type
const isMarkdown = readmePath?.toLowerCase().endsWith('.md') || readmePath?.toLowerCase().endsWith('.markdown');
const isAsciiDoc = readmePath?.toLowerCase().endsWith('.adoc') || readmePath?.toLowerCase().endsWith('.asciidoc');
return json({ return json({
found: true, found: true,
content: readmeContent, content: readmeContent,
path: readmePath, path: readmePath,
isMarkdown: readmePath?.toLowerCase().endsWith('.md') || readmePath?.toLowerCase().endsWith('.markdown') type: isMarkdown ? 'markdown' : (isAsciiDoc ? 'asciidoc' : 'text'),
isMarkdown: isMarkdown // Keep for backward compatibility
}); });
}, },
{ operation: 'getReadme', requireRepoExists: false, requireRepoAccess: false } // README should be publicly accessible { operation: 'getReadme', requireRepoExists: false, requireRepoAccess: false } // README should be publicly accessible

14
src/routes/api/repos/list/+server.ts

@ -15,10 +15,16 @@ import { extractRequestContext } from '$lib/utils/api-context.js';
import logger from '$lib/services/logger.js'; import logger from '$lib/services/logger.js';
import type { NostrEvent } from '$lib/types/nostr.js'; import type { NostrEvent } from '$lib/types/nostr.js';
import type { RequestEvent } from '@sveltejs/kit'; import type { RequestEvent } from '@sveltejs/kit';
import { existsSync } from 'fs';
import { join, resolve } from 'path';
const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS); const nostrClient = new NostrClient(DEFAULT_NOSTR_RELAYS);
const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS); const maintainerService = new MaintainerService(DEFAULT_NOSTR_RELAYS);
const repoRoot = typeof process !== 'undefined' && process.env?.GIT_REPO_ROOT
? resolve(process.env.GIT_REPO_ROOT)
: resolve('/repos');
interface RepoListItem { interface RepoListItem {
event: NostrEvent; event: NostrEvent;
npub: string; npub: string;
@ -90,6 +96,14 @@ export const GET: RequestHandler = async (event) => {
npub = nip19.npubEncode(event.pubkey); npub = nip19.npubEncode(event.pubkey);
} }
// Only include repos that actually exist locally on the server
// This ensures deleted repos don't show up in the list
const repoPath = join(repoRoot, npub, `${dTag}.git`);
if (!existsSync(repoPath)) {
logger.debug({ npub, repoName: dTag, repoPath }, 'Skipping repo - does not exist locally');
continue;
}
repos.push({ repos.push({
event, event,
npub, npub,

18
src/routes/repos/[npub]/[repo]/+page.svelte

@ -1410,24 +1410,6 @@
</div> </div>
</div> </div>
{/if} {/if}
{#if state.error}
<div class="state.error">
<div class="state.error-message">
<strong>Error:</strong> {state.error}
</div>
{#if state.error.includes('not cloned locally') && hasUnlimitedAccess($userStore.userLevel)}
<div class="state.error-actions">
<button
class="clone-button-inline"
onclick={cloneRepository}
disabled={state.clone.cloning || state.clone.checking}
>
{state.clone.cloning ? 'Cloning...' : (state.clone.checking ? 'Checking...' : 'Clone to Server')}
</button>
</div>
{/if}
</div>
{/if}
<!-- Tabs - only show if we have repo data (header/clone section would be visible) --> <!-- Tabs - only show if we have repo data (header/clone section would be visible) -->
{#if repoOwnerPubkeyDerived} {#if repoOwnerPubkeyDerived}

15
src/routes/repos/[npub]/[repo]/components/DocsTab.svelte

@ -88,20 +88,29 @@
// ALWAYS load README FIRST and display immediately if available // ALWAYS load README FIRST and display immediately if available
// README is standard documentation and should always be shown // README is standard documentation and should always be shown
try { try {
const readmeResponse = await fetch(`/api/repos/${npub}/${repo}/readme?ref=${currentBranch || 'HEAD'}`); const readmeUrl = `/api/repos/${npub}/${repo}/readme?ref=${currentBranch || 'HEAD'}`;
logger.debug({ npub, repo, branch: currentBranch, url: readmeUrl }, 'Fetching README');
const readmeResponse = await fetch(readmeUrl);
if (readmeResponse.ok) { if (readmeResponse.ok) {
const readmeData = await readmeResponse.json(); const readmeData = await readmeResponse.json();
logger.debug({ npub, repo, readmeData }, 'README API response');
if (readmeData.content) { if (readmeData.content) {
documentationContent = readmeData.content; documentationContent = readmeData.content;
documentationKind = readmeData.type || 'markdown'; documentationKind = readmeData.type || 'markdown';
selectedDoc = 'README.md'; selectedDoc = 'README.md';
hasReadme = true; hasReadme = true;
loading = false; // Stop showing loading once README is loaded loading = false; // Stop showing loading once README is loaded
logger.debug({ npub, repo }, 'README loaded and displayed'); logger.debug({ npub, repo, contentLength: readmeData.content.length }, 'README loaded and displayed');
} else if (readmeData.found === false) {
logger.debug({ npub, repo, branch: currentBranch }, 'README not found in repository');
} else {
logger.warn({ npub, repo, readmeData }, 'README API returned unexpected format');
} }
} else {
logger.debug({ npub, repo, status: readmeResponse.status, statusText: readmeResponse.statusText }, 'README API request failed');
} }
} catch (readmeErr) { } catch (readmeErr) {
logger.debug({ error: readmeErr, npub, repo }, 'No README found'); logger.debug({ error: readmeErr, npub, repo, branch: currentBranch }, 'Error fetching README');
} }
// Now check for docs folder in the background (but don't replace README) // Now check for docs folder in the background (but don't replace README)

47
src/routes/repos/[npub]/[repo]/services/commit-operations.ts

@ -17,6 +17,15 @@ export async function loadCommitHistory(
state: RepoState, state: RepoState,
callbacks: CommitOperationsCallbacks callbacks: CommitOperationsCallbacks
): Promise<void> { ): Promise<void> {
// Skip if repo is not cloned and no API fallback available
if (state.clone.isCloned === false && !state.clone.apiFallbackAvailable) {
state.loading.commits = false;
state.error = null;
state.git.commits = [];
console.log('[loadCommitHistory] Skipping - repo not cloned and no API fallback available');
return;
}
state.loading.commits = true; state.loading.commits = true;
state.error = null; state.error = null;
try { try {
@ -74,7 +83,21 @@ export async function loadCommitHistory(
} }
} catch (err) { } catch (err) {
console.error('[loadCommitHistory] Error loading commits:', err); console.error('[loadCommitHistory] Error loading commits:', err);
state.error = err instanceof Error ? err.message : 'Failed to load commit history'; const errorMessage = err instanceof Error ? err.message : 'Failed to load commit history';
// Handle 404 gracefully - repo not cloned
if (errorMessage.includes('404') || errorMessage.includes('not found') || errorMessage.includes('Repository not found')) {
// If repo is not cloned, this is expected - don't set error
if (state.clone.isCloned === false) {
state.error = null;
state.git.commits = [];
console.log('[loadCommitHistory] Repo not cloned - commits unavailable');
} else {
state.error = errorMessage;
}
} else {
state.error = errorMessage;
}
} finally { } finally {
state.loading.commits = false; state.loading.commits = false;
} }
@ -135,9 +158,25 @@ export async function viewDiff(
// Normalize commit hash (handle both 'hash' and 'sha' properties) // Normalize commit hash (handle both 'hash' and 'sha' properties)
const getCommitHash = (c: any) => c.hash || c.sha || ''; const getCommitHash = (c: any) => c.hash || c.sha || '';
const commitIndex = state.git.commits.findIndex(c => getCommitHash(c) === commitHash); const commitIndex = state.git.commits.findIndex(c => getCommitHash(c) === commitHash);
const parentHash = commitIndex >= 0
? (state.git.commits[commitIndex + 1] ? getCommitHash(state.git.commits[commitIndex + 1]) : `${commitHash}^`) // Determine parent hash: if this is the last commit (initial commit), use empty tree
: `${commitHash}^`; // Otherwise, use the next commit in the list or the parent commit
let parentHash: string;
if (commitIndex >= 0) {
// Check if this is the last commit (initial commit with no parent)
if (commitIndex === state.git.commits.length - 1) {
// This is the initial commit - use empty tree hash
// Git's empty tree hash: 4b825dc642cb6eb9a060e54bf8d69288fbee4904
parentHash = '4b825dc642cb6eb9a060e54bf8d69288fbee4904';
} else {
// Use the next commit (which is the parent in reverse chronological order)
parentHash = getCommitHash(state.git.commits[commitIndex + 1]);
}
} else {
// Commit not found in list, try to use parent (but this might fail for initial commit)
// We'll let the API handle the error
parentHash = `${commitHash}^`;
}
const diffData = await apiRequest<Array<{ const diffData = await apiRequest<Array<{
file: string; file: string;

115
src/routes/repos/[npub]/[repo]/services/repo-operations.ts

@ -215,30 +215,115 @@ export async function cloneRepository(
requestBody.defaultBranch = defaultBranch; requestBody.defaultBranch = defaultBranch;
} }
const data = await apiPost<{ alreadyExists?: boolean }>(`/api/repos/${state.npub}/${state.repo}/clone`, requestBody); logger.debug({
npub: state.npub,
repo: state.repo,
hasProofEvent: !!proofEvent,
defaultBranch
}, '[Clone] Sending clone request to server');
const cloneUrl = `/api/repos/${state.npub}/${state.repo}/clone`;
logger.debug({ url: cloneUrl }, '[Clone] POST request URL');
const data = await apiPost<{ alreadyExists?: boolean; success?: boolean; message?: string }>(cloneUrl, requestBody);
logger.debug({ data }, '[Clone] Clone request successful');
// Check if the response indicates success
if (data.success === false) {
const errorMsg = data.message || 'Failed to clone repository';
logger.error({ npub: state.npub, repo: state.repo, data }, '[Clone] Clone endpoint returned success: false');
alert(`Error: ${errorMsg}`);
throw new Error(errorMsg);
}
if (data.alreadyExists) { if (data.alreadyExists) {
alert('Repository already exists locally.'); alert('Repository already exists locally.');
// Force refresh clone status // Force refresh clone status
await callbacks.checkCloneStatus(true); await callbacks.checkCloneStatus(true);
} else { } else {
alert('Repository cloned successfully! The repository is now available on this server.'); alert('Repository clone initiated. Waiting for clone to complete...');
// Force refresh clone status
await callbacks.checkCloneStatus(true); // Start polling for clone status with a delay
// Reset API fallback status since repo is now cloned // Clone operation may take time, so we poll every 2 seconds
state.clone.apiFallbackAvailable = false; const pollInterval = 2000; // 2 seconds
// Reload data to use the cloned repo instead of API const maxAttempts = 30; // Maximum 60 seconds (30 * 2s)
await Promise.all([ let attempts = 0;
callbacks.loadBranches(), let pollTimer: ReturnType<typeof setInterval> | null = null;
callbacks.loadFiles(state.files.currentPath),
callbacks.loadReadme(), const pollCloneStatus = async () => {
callbacks.loadTags(), attempts++;
callbacks.loadCommitHistory() logger.debug({ attempts, maxAttempts }, '[Clone] Polling clone status');
]);
// Check clone status
await callbacks.checkCloneStatus(true);
// If repo is now cloned, stop polling and reload data
if (state.clone.isCloned === true) {
if (pollTimer) {
clearInterval(pollTimer);
pollTimer = null;
}
logger.info({ attempts }, '[Clone] Repository confirmed cloned, reloading data');
// Reset API fallback status since repo is now cloned
state.clone.apiFallbackAvailable = false;
// Reload data to use the cloned repo instead of API
await Promise.all([
callbacks.loadBranches(),
callbacks.loadFiles(state.files.currentPath),
callbacks.loadReadme(),
callbacks.loadTags(),
callbacks.loadCommitHistory()
]);
logger.info('[Clone] Repository data reloaded successfully');
} else if (attempts >= maxAttempts) {
// Stop polling after max attempts
if (pollTimer) {
clearInterval(pollTimer);
pollTimer = null;
}
logger.warn({ attempts }, '[Clone] Polling timeout - clone may still be in progress');
alert('Clone operation is taking longer than expected. The repository may still be cloning in the background. Please refresh the page in a moment.');
}
};
// Start polling after initial delay of 2 seconds
setTimeout(() => {
pollCloneStatus(); // First check immediately after delay
pollTimer = setInterval(pollCloneStatus, pollInterval);
}, pollInterval);
} }
} catch (err) { } catch (err) {
const errorMessage = err instanceof Error ? err.message : 'Failed to clone repository'; const errorMessage = err instanceof Error ? err.message : 'Failed to clone repository';
alert(`Error: ${errorMessage}`); logger.error({
error: err,
npub: state.npub,
repo: state.repo,
errorMessage,
errorStack: err instanceof Error ? err.stack : undefined
}, '[Clone] Clone request failed');
// Extract more detailed error message if available
let userFriendlyMessage = errorMessage;
if (err instanceof Error) {
// Check if it's a network error
if (errorMessage.includes('fetch') || errorMessage.includes('network') || errorMessage.includes('Failed to fetch')) {
userFriendlyMessage = 'Network error: Unable to connect to the server. Please check your connection and try again.';
} else if (errorMessage.includes('403') || errorMessage.includes('Forbidden')) {
userFriendlyMessage = 'Access denied: You do not have permission to clone this repository. Please verify you have unlimited access.';
} else if (errorMessage.includes('404') || errorMessage.includes('Not found')) {
userFriendlyMessage = 'Repository not found: The repository announcement may not exist or the repository name may be incorrect.';
} else if (errorMessage.includes('timeout') || errorMessage.includes('timed out')) {
userFriendlyMessage = 'Clone operation timed out. The repository may be too large or the remote server may be slow. Please try again.';
}
}
alert(`Error cloning repository: ${userFriendlyMessage}`);
console.error('Error cloning repository:', err); console.error('Error cloning repository:', err);
} finally { } finally {
state.clone.cloning = false; state.clone.cloning = false;

8
src/routes/repos/[npub]/[repo]/utils/api-client.ts

@ -62,7 +62,13 @@ export async function apiRequest<T>(
// Ignore parsing errors // Ignore parsing errors
} }
} }
logger.error({ url, status: response.status, error: errorMessage }, '[API] Request failed');
// 404s are expected when repo isn't cloned - log as debug, not error
if (response.status === 404) {
logger.debug({ url, status: response.status, error: errorMessage }, '[API] Request failed (404 - expected for uncloned repos)');
} else {
logger.error({ url, status: response.status, error: errorMessage }, '[API] Request failed');
}
throw new Error(errorMessage); throw new Error(errorMessage);
} }

19
src/routes/repos/[npub]/[repo]/utils/safe-wrappers.ts

@ -5,15 +5,30 @@
/** /**
* Safely execute an async function, returning a resolved promise if window is undefined * Safely execute an async function, returning a resolved promise if window is undefined
*
* This function is designed to:
* 1. Prevent SSR errors by checking for window availability
* 2. Catch and log errors without crashing the app
* 3. Return resolved promises even on error to prevent unhandled rejections
*
* Note: Errors are logged but not re-thrown to prevent unhandled promise rejections
* in event handlers. The wrapped functions should handle their own errors (e.g., show alerts).
*/ */
export function safeAsync<T>( export function safeAsync<T>(
fn: () => Promise<T> fn: () => Promise<T>
): Promise<T | void> { ): Promise<T | void> {
if (typeof window === 'undefined') return Promise.resolve(); if (typeof window === 'undefined') return Promise.resolve();
try { try {
return fn(); return fn().catch((err) => {
// Log async errors but don't re-throw to prevent unhandled rejections
// The wrapped functions should handle their own errors (e.g., show alerts)
console.error('Error in safe async function:', err);
// Return resolved promise to prevent unhandled rejection
return Promise.resolve();
});
} catch (err) { } catch (err) {
console.warn('Error in safe async function:', err); // Synchronous errors - log and return resolved promise to prevent crashes
console.warn('Synchronous error in safe async function:', err);
return Promise.resolve(); return Promise.resolve();
} }
} }

401
src/routes/users/[npub]/+page.svelte

@ -13,8 +13,9 @@
import { userStore } from '$lib/stores/user-store.js'; import { userStore } from '$lib/stores/user-store.js';
import { fetchUserProfile, extractProfileData } from '$lib/utils/user-profile.js'; import { fetchUserProfile, extractProfileData } from '$lib/utils/user-profile.js';
import { combineRelays } from '$lib/config.js'; import { combineRelays } from '$lib/config.js';
import { KIND, isEphemeralKind, isReplaceableKind } from '$lib/types/nostr.js'; import { KIND, isEphemeralKind, isReplaceableKind, isAddressableKind } from '$lib/types/nostr.js';
import { hasUnlimitedAccess } from '$lib/utils/user-access.js'; import { hasUnlimitedAccess } from '$lib/utils/user-access.js';
import { eventCache } from '$lib/services/nostr/event-cache.js';
const npub = ($page.params as { npub?: string }).npub || ''; const npub = ($page.params as { npub?: string }).npub || '';
@ -60,6 +61,48 @@
return quotedEvents.find(e => e.id === eventId); return quotedEvents.find(e => e.id === eventId);
}; };
// Helper to get author name from pubkey
function getAuthorName(pubkey: string): string {
// Try to find profile event in nostrLinkEvents cache
// Check both by profile key and by iterating values
const profileByKey = nostrLinkEvents.get(`profile:${pubkey}`);
let profileEvent = profileByKey || Array.from(nostrLinkEvents.values()).find(
e => e.kind === 0 && e.pubkey === pubkey
);
// If not found in nostrLinkEvents, try global eventCache
if (!profileEvent) {
try {
const cachedProfile = eventCache.getProfile(pubkey);
if (cachedProfile) {
profileEvent = cachedProfile;
}
} catch {
// eventCache not available, continue
}
}
if (profileEvent) {
try {
const profile = JSON.parse(profileEvent.content);
const name = profile.display_name || profile.name;
if (name && name.trim()) return name.trim();
} catch {
// Try tags if JSON parse fails
const nameTag = profileEvent.tags.find(t => t[0] === 'name' || t[0] === 'display_name')?.[1];
if (nameTag && nameTag.trim()) return nameTag.trim();
}
}
// Fallback to shortened pubkey
try {
const npub = nip19.npubEncode(pubkey);
return npub.slice(0, 12) + '...';
} catch {
return pubkey.slice(0, 8) + '...';
}
}
// Referenced events cache for activity (a-tags and e-tags) - use array for better reactivity // Referenced events cache for activity (a-tags and e-tags) - use array for better reactivity
let referencedEvents = $state<NostrEvent[]>([]); let referencedEvents = $state<NostrEvent[]>([]);
@ -163,6 +206,7 @@
} }
// Fetch events // Fetch events
const loadedEvents: NostrEvent[] = [];
if (eventIds.length > 0) { if (eventIds.length > 0) {
try { try {
const events = await Promise.race([ const events = await Promise.race([
@ -172,6 +216,7 @@
for (const event of events) { for (const event of events) {
nostrLinkEvents.set(event.id, event); nostrLinkEvents.set(event.id, event);
loadedEvents.push(event);
} }
} catch { } catch {
// Ignore fetch errors // Ignore fetch errors
@ -194,6 +239,7 @@
if (events.length > 0) { if (events.length > 0) {
nostrLinkEvents.set(events[0].id, events[0]); nostrLinkEvents.set(events[0].id, events[0]);
loadedEvents.push(events[0]);
} }
} catch { } catch {
// Ignore fetch errors // Ignore fetch errors
@ -201,6 +247,38 @@
} }
} }
} }
// Load profiles for authors of loaded events
if (loadedEvents.length > 0) {
const authorPubkeys = new Set<string>();
for (const event of loadedEvents) {
if (event.pubkey) {
authorPubkeys.add(event.pubkey);
}
}
// Fetch profiles for all authors
if (authorPubkeys.size > 0) {
try {
const profiles = await Promise.race([
nostrClient.fetchEvents([{ kinds: [0], authors: Array.from(authorPubkeys), limit: authorPubkeys.size }]),
new Promise<NostrEvent[]>((resolve) => setTimeout(() => resolve([]), 10000))
]);
// Store profiles in cache (use a special key format: `profile:${pubkey}`)
for (const profile of profiles) {
// Store with a key that includes the pubkey so we can find it
nostrLinkEvents.set(`profile:${profile.pubkey}`, profile);
// Also store by ID if it has one
if (profile.id) {
nostrLinkEvents.set(profile.id, profile);
}
}
} catch {
// Ignore profile fetch errors
}
}
}
} }
// Get event from nostr: link // Get event from nostr: link
@ -555,6 +633,19 @@ i *
return true; return true;
} }
// Exclude addressable events (30000-39999) that are not repo-related
// Only allow known repo-related addressable events: REPO_ANNOUNCEMENT, REPO_STATE, BRANCH_PROTECTION
if (isAddressableKind(event.kind)) {
const allowedAddressableKinds: number[] = [
KIND.REPO_ANNOUNCEMENT,
KIND.REPO_STATE,
KIND.BRANCH_PROTECTION
];
if (!allowedAddressableKinds.includes(event.kind)) {
return true;
}
}
// Exclude specific regular kinds that are not repo-related: // Exclude specific regular kinds that are not repo-related:
// Kind 1: Keep this one in, just for the user's convenience // Kind 1: Keep this one in, just for the user's convenience
@ -866,41 +957,99 @@ i *
return date.toLocaleDateString(); return date.toLocaleDateString();
} }
// Helper function to check if an event is repo-related
function isRepoRelatedEvent(event: NostrEvent): boolean {
const repoRelatedKinds: number[] = [
KIND.REPO_ANNOUNCEMENT,
KIND.REPO_STATE,
KIND.PATCH,
KIND.PULL_REQUEST,
KIND.PULL_REQUEST_UPDATE,
KIND.ISSUE,
KIND.STATUS_OPEN,
KIND.STATUS_APPLIED,
KIND.STATUS_CLOSED,
KIND.STATUS_DRAFT,
KIND.COMMIT_SIGNATURE,
KIND.OWNERSHIP_TRANSFER,
KIND.RELEASE,
KIND.COMMENT,
KIND.THREAD,
KIND.BRANCH_PROTECTION,
KIND.HIGHLIGHT
];
return repoRelatedKinds.includes(event.kind);
}
// Helper function to check if an event references a repo via a-tag
function eventReferencesRepo(event: NostrEvent, repoATags: Set<string>): boolean {
const aTags = event.tags.filter(t => t[0] === 'a' && t[1]);
for (const aTag of aTags) {
if (aTag[1] && repoATags.has(aTag[1])) {
return true;
}
}
return false;
}
// Helper function to check if an event has p-tag or q-tag referencing the user
function eventReferencesUser(event: NostrEvent, userPubkey: string): boolean {
// Check p-tags
const pTags = event.tags.filter(t => t[0] === 'p' && t[1]);
for (const pTag of pTags) {
if (pTag[1] && pTag[1].toLowerCase() === userPubkey.toLowerCase()) {
return true;
}
}
// Check q-tags
const qTags = event.tags.filter(t => t[0] === 'q' && t[1]);
for (const qTag of qTags) {
if (qTag[1] && qTag[1].toLowerCase() === userPubkey.toLowerCase()) {
return true;
}
}
return false;
}
async function loadActivity() { async function loadActivity() {
if (!profileOwnerPubkeyHex || loadingActivity || activityLoaded) return; if (!profileOwnerPubkeyHex || loadingActivity || activityLoaded) return;
const userPubkey = profileOwnerPubkeyHex; // Store in local variable for type safety const userPubkey = profileOwnerPubkeyHex; // Store in local variable for type safety
loadingActivity = true; loadingActivity = true;
try { try {
// Step 1: Fetch repo announcements in parallel (reduced limit) // Step 1: Fetch all repo announcements where user is owner or maintainer
const [repoAnnouncements, allAnnouncements] = await Promise.all([ const [repoAnnouncements, allAnnouncements] = await Promise.all([
nostrClient.fetchEvents([ nostrClient.fetchEvents([
{ {
kinds: [KIND.REPO_ANNOUNCEMENT], kinds: [KIND.REPO_ANNOUNCEMENT],
authors: [userPubkey], authors: [userPubkey],
limit: 50 // Reduced from 100 limit: 100
} }
]), ]),
nostrClient.fetchEvents([ nostrClient.fetchEvents([
{ {
kinds: [KIND.REPO_ANNOUNCEMENT], kinds: [KIND.REPO_ANNOUNCEMENT],
'#p': [userPubkey], '#p': [userPubkey],
limit: 50 // Reduced from 100 limit: 100
} }
]) ])
]); ]);
// Step 2: Extract a-tags from repo announcements // Step 2: Extract a-tags from repos where user is owner or maintainer
const aTags = new Set<string>(); const repoATags = new Set<string>();
// Add a-tags from repos owned by user
for (const announcement of repoAnnouncements) { for (const announcement of repoAnnouncements) {
const dTag = announcement.tags.find(t => t[0] === 'd')?.[1]; const dTag = announcement.tags.find(t => t[0] === 'd')?.[1];
if (dTag) { if (dTag) {
const aTag = `${KIND.REPO_ANNOUNCEMENT}:${announcement.pubkey}:${dTag}`; const aTag = `${KIND.REPO_ANNOUNCEMENT}:${announcement.pubkey}:${dTag}`;
aTags.add(aTag); repoATags.add(aTag);
} }
} }
// Step 3: Check for repos where user is a maintainer // Add a-tags from repos where user is a maintainer
for (const announcement of allAnnouncements) { for (const announcement of allAnnouncements) {
const maintainersTag = announcement.tags.find(t => t[0] === 'maintainers'); const maintainersTag = announcement.tags.find(t => t[0] === 'maintainers');
if (maintainersTag) { if (maintainersTag) {
@ -920,43 +1069,95 @@ i *
const dTag = announcement.tags.find(t => t[0] === 'd')?.[1]; const dTag = announcement.tags.find(t => t[0] === 'd')?.[1];
if (dTag) { if (dTag) {
const aTag = `${KIND.REPO_ANNOUNCEMENT}:${announcement.pubkey}:${dTag}`; const aTag = `${KIND.REPO_ANNOUNCEMENT}:${announcement.pubkey}:${dTag}`;
aTags.add(aTag); repoATags.add(aTag);
} }
} }
} }
} }
// Step 4: Fetch events that reference the user or their repos (reduced limits) // If user has no repos, return empty activity
if (repoATags.size === 0) {
activityEvents = [];
activityLoaded = true;
loadingActivity = false;
return;
}
// Step 3: Define repo-related event kinds
const repoRelatedKinds = [
KIND.REPO_ANNOUNCEMENT,
KIND.REPO_STATE,
KIND.PATCH,
KIND.PULL_REQUEST,
KIND.PULL_REQUEST_UPDATE,
KIND.ISSUE,
KIND.STATUS_OPEN,
KIND.STATUS_APPLIED,
KIND.STATUS_CLOSED,
KIND.STATUS_DRAFT,
KIND.COMMIT_SIGNATURE,
KIND.OWNERSHIP_TRANSFER,
KIND.RELEASE,
KIND.COMMENT,
KIND.THREAD,
KIND.BRANCH_PROTECTION,
KIND.HIGHLIGHT
];
// Step 4: Fetch events that:
// - Have p-tags or q-tags referencing the user
// - AND reference repos where user is owner/maintainer (via a-tags)
// - AND are repo-related kinds
const filters: any[] = []; const filters: any[] = [];
// Events with user in p-tag // Events with user in p-tag AND repo a-tags AND repo-related kinds
filters.push({ filters.push({
'#p': [userPubkey], '#p': [userPubkey],
limit: 100 // Reduced from 200 '#a': Array.from(repoATags),
kinds: repoRelatedKinds,
limit: 200
}); });
// Events with user in q-tag // Events with user in q-tag AND repo a-tags AND repo-related kinds
filters.push({ filters.push({
'#q': [userPubkey], '#q': [userPubkey],
limit: 100 // Reduced from 200 '#a': Array.from(repoATags),
kinds: repoRelatedKinds,
limit: 200
}); });
// Events with repo a-tags
if (aTags.size > 0) {
filters.push({
'#a': Array.from(aTags),
limit: 100 // Reduced from 200
});
}
const allActivityEvents = await Promise.race([ const allActivityEvents = await Promise.race([
nostrClient.fetchEvents(filters), nostrClient.fetchEvents(filters),
new Promise<NostrEvent[]>((resolve) => setTimeout(() => resolve([]), 15000)) // 15s timeout new Promise<NostrEvent[]>((resolve) => setTimeout(() => resolve([]), 15000)) // 15s timeout
]); ]);
// Step 5: Deduplicate, filter, and sort by created_at (newest first) // Step 5: Additional filtering to ensure events:
// - Reference the user via p-tag or q-tag
// - Reference a repo the user owns/maintains via a-tag
// - Are repo-related kinds
const eventMap = new Map<string, NostrEvent>(); const eventMap = new Map<string, NostrEvent>();
for (const event of allActivityEvents) { for (const event of allActivityEvents) {
// Skip user's own events
if (event.pubkey === userPubkey) {
continue;
}
// Must be repo-related
if (!isRepoRelatedEvent(event)) {
continue;
}
// Must reference the user via p-tag or q-tag
if (!eventReferencesUser(event, userPubkey)) {
continue;
}
// Must reference a repo the user owns/maintains
if (!eventReferencesRepo(event, repoATags)) {
continue;
}
// Apply standard exclusions
if (shouldExcludeEvent(event, userPubkey, true)) { if (shouldExcludeEvent(event, userPubkey, true)) {
continue; continue;
} }
@ -967,7 +1168,7 @@ i *
} }
} }
// Sort by created_at descending and limit to 50 (reduced from 200) // Sort by created_at descending and limit to 50
activityEvents = Array.from(eventMap.values()) activityEvents = Array.from(eventMap.values())
.sort((a, b) => b.created_at - a.created_at) .sort((a, b) => b.created_at - a.created_at)
.slice(0, 50); .slice(0, 50);
@ -1925,7 +2126,8 @@ i *
{#if quotedEvent} {#if quotedEvent}
<div class="quoted-event"> <div class="quoted-event">
<div class="quoted-event-header"> <div class="quoted-event-header">
<UserBadge pubkey={quotedEvent.pubkey} disableLink={true} /> <span class="quoted-event-label">Quoting:</span>
<UserBadge pubkey={quotedEvent.pubkey} disableLink={true} inline={true} />
<span class="quoted-event-time">{formatMessageTime(quotedEvent.created_at)}</span> <span class="quoted-event-time">{formatMessageTime(quotedEvent.created_at)}</span>
</div> </div>
<div class="quoted-event-content">{quotedEvent.content || '(No content)'}</div> <div class="quoted-event-content">{quotedEvent.content || '(No content)'}</div>
@ -1984,7 +2186,7 @@ i *
{:else if part.type === 'event' && part.event} {:else if part.type === 'event' && part.event}
<div class="nostr-link-event"> <div class="nostr-link-event">
<div class="nostr-link-event-header"> <div class="nostr-link-event-header">
<UserBadge pubkey={part.event.pubkey} disableLink={true} /> <span class="nostr-link-event-author">{getAuthorName(part.event.pubkey)}</span>
<span class="nostr-link-event-time">{formatMessageTime(part.event.created_at)}</span> <span class="nostr-link-event-time">{formatMessageTime(part.event.created_at)}</span>
</div> </div>
<div class="nostr-link-event-content">{part.event.content || getEventContext(part.event)}</div> <div class="nostr-link-event-content">{part.event.content || getEventContext(part.event)}</div>
@ -2039,13 +2241,22 @@ i *
<div class="zap-amount">{zapData.amount}</div> <div class="zap-amount">{zapData.amount}</div>
<div class="zap-details"> <div class="zap-details">
{#if zapData.senderPubkey} {#if zapData.senderPubkey}
<span>From <UserBadge pubkey={zapData.senderPubkey} disableLink={true} /></span> <span class="zap-detail-item">
<span class="zap-detail-label">From</span>
<span class="zap-detail-value">{getAuthorName(zapData.senderPubkey)}</span>
</span>
{/if} {/if}
{#if zapData.recipientPubkey && zapData.recipientPubkey !== profileOwnerPubkeyHex} {#if zapData.recipientPubkey && zapData.recipientPubkey !== profileOwnerPubkeyHex}
<span>To <UserBadge pubkey={zapData.recipientPubkey} disableLink={true} /></span> <span class="zap-detail-item">
<span class="zap-detail-label">To</span>
<span class="zap-detail-value">{getAuthorName(zapData.recipientPubkey)}</span>
</span>
{/if} {/if}
{#if zapData.eventId} {#if zapData.eventId}
<span>on event {zapData.eventId.slice(0, 8)}...</span> <span class="zap-detail-item">
<span class="zap-detail-label">on event</span>
<span class="zap-detail-value">{zapData.eventId.slice(0, 8)}...</span>
</span>
{/if} {/if}
{#if zapData.comment} {#if zapData.comment}
<div class="zap-comment">{zapData.comment}</div> <div class="zap-comment">{zapData.comment}</div>
@ -2078,7 +2289,7 @@ i *
{:else if part.type === 'event' && part.event} {:else if part.type === 'event' && part.event}
<div class="nostr-link-event"> <div class="nostr-link-event">
<div class="nostr-link-event-header"> <div class="nostr-link-event-header">
<UserBadge pubkey={part.event.pubkey} disableLink={true} /> <span class="nostr-link-event-author">{getAuthorName(part.event.pubkey)}</span>
<span class="nostr-link-event-time">{formatMessageTime(part.event.created_at)}</span> <span class="nostr-link-event-time">{formatMessageTime(part.event.created_at)}</span>
</div> </div>
<div class="nostr-link-event-content">{part.event.content || getEventContext(part.event)}</div> <div class="nostr-link-event-content">{part.event.content || getEventContext(part.event)}</div>
@ -2153,7 +2364,7 @@ i *
{:else if part.type === 'event' && part.event} {:else if part.type === 'event' && part.event}
<div class="nostr-link-event"> <div class="nostr-link-event">
<div class="nostr-link-event-header"> <div class="nostr-link-event-header">
<UserBadge pubkey={part.event.pubkey} disableLink={true} /> <span class="nostr-link-event-author">{getAuthorName(part.event.pubkey)}</span>
<span class="nostr-link-event-time">{formatMessageTime(part.event.created_at)}</span> <span class="nostr-link-event-time">{formatMessageTime(part.event.created_at)}</span>
</div> </div>
<div class="nostr-link-event-content">{part.event.content || getEventContext(part.event)}</div> <div class="nostr-link-event-content">{part.event.content || getEventContext(part.event)}</div>
@ -3038,17 +3249,33 @@ i *
.message-icon { .message-icon {
width: 1.25rem; width: 1.25rem;
height: 1.25rem; height: 1.25rem;
filter: var(--icon-filter, none); filter: var(--icon-filter, brightness(0) saturate(100%) invert(1));
opacity: 0.8;
}
:global([data-theme="light"]) .message-icon {
filter: brightness(0) saturate(100%);
opacity: 0.7;
}
:global([data-theme="dark"]) .message-icon,
:global([data-theme="black"]) .message-icon {
filter: brightness(0) saturate(100%) invert(1);
opacity: 0.8;
}
.message-action-button:hover .message-icon {
opacity: 1;
} }
.quoted-event { .quoted-event {
margin-bottom: 1rem; margin-bottom: 0.75rem;
padding: 0.75rem; padding: 0.5rem;
background: var(--bg-secondary); background: var(--bg-secondary, var(--bg-primary));
border: 1px solid var(--border-color); color: var(--text-muted, var(--text-secondary));
border-left: 3px solid var(--accent, #007bff); border-radius: 4px;
border-radius: 0.375rem; border-left: 2px solid var(--border-light, var(--border-color));
font-size: 0.875rem; opacity: 0.8;
} }
:global([data-theme="light"]) .quoted-event { :global([data-theme="light"]) .quoted-event {
@ -3069,50 +3296,35 @@ i *
.quoted-event-header { .quoted-event-header {
display: flex; display: flex;
align-items: center; align-items: center;
gap: 0.5rem; gap: 0.375rem;
margin-bottom: 0.5rem; margin-bottom: 0.25rem;
flex-wrap: wrap; font-size: 0.75rem;
color: var(--text-muted, var(--text-secondary));
}
.quoted-event-label {
font-weight: 500;
color: var(--text-muted, var(--text-secondary));
} }
.quoted-event-time { .quoted-event-time {
font-size: 0.75rem; color: var(--text-muted, var(--text-secondary));
color: var(--text-muted); font-size: 0.7rem;
margin-left: auto; margin-left: auto;
} }
.quoted-event-content { .quoted-event-content {
color: var(--text-secondary); font-size: 0.8rem;
color: var(--text-muted, var(--text-secondary));
line-height: 1.4;
white-space: pre-wrap; white-space: pre-wrap;
word-wrap: break-word; word-wrap: break-word;
overflow-wrap: break-word; overflow-wrap: break-word;
line-height: 1.5;
max-height: 8rem; max-height: 8rem;
overflow: hidden; overflow: hidden;
position: relative; position: relative;
} }
.quoted-event-content::after {
content: '';
position: absolute;
bottom: 0;
left: 0;
right: 0;
height: 2rem;
background: linear-gradient(to bottom, transparent, var(--bg-secondary));
pointer-events: none;
}
:global([data-theme="light"]) .quoted-event-content::after {
background: linear-gradient(to bottom, transparent, #f5f5f5);
}
:global([data-theme="dark"]) .quoted-event-content::after {
background: linear-gradient(to bottom, transparent, rgba(255, 255, 255, 0.05));
}
:global([data-theme="black"]) .quoted-event-content::after {
background: linear-gradient(to bottom, transparent, rgba(255, 255, 255, 0.03));
}
.quoted-event-loading { .quoted-event-loading {
opacity: 0.6; opacity: 0.6;
@ -3331,27 +3543,39 @@ i *
.zap-details { .zap-details {
display: flex; display: flex;
flex-direction: column; flex-wrap: wrap;
gap: 0.25rem; gap: 0.5rem;
font-size: 0.875rem; font-size: 0.7rem;
color: var(--text-primary); color: var(--text-muted, var(--text-secondary));
line-height: 1.5; line-height: 1.4;
margin-top: 0.25rem;
} }
.zap-details span { .zap-detail-item {
display: flex; display: inline-flex;
align-items: center; align-items: center;
gap: 0.5rem; gap: 0.25rem;
}
.zap-detail-label {
color: var(--text-muted, var(--text-secondary));
font-weight: 500;
}
.zap-detail-value {
color: var(--text-muted, var(--text-secondary));
} }
.zap-comment { .zap-comment {
margin-top: 0.5rem; margin-top: 0.375rem;
padding: 0.75rem; padding: 0.375rem 0.5rem;
background: var(--bg-secondary); background: var(--bg-secondary, var(--bg-primary));
border-radius: 0.5rem; border-radius: 4px;
border-left: 2px solid var(--accent); border-left: 2px solid var(--border-light, var(--border-color));
font-style: italic; font-style: italic;
color: var(--text-secondary); font-size: 0.75rem;
color: var(--text-muted, var(--text-secondary));
opacity: 0.8;
} }
.zap-receipt { .zap-receipt {
@ -3509,11 +3733,20 @@ i *
align-items: center; align-items: center;
gap: 0.5rem; gap: 0.5rem;
margin-bottom: 0.5rem; margin-bottom: 0.5rem;
line-height: 1;
font-size: 0.75rem;
color: var(--text-muted, var(--text-secondary));
}
.nostr-link-event-author {
font-weight: 500;
color: var(--text-muted, var(--text-secondary));
} }
.nostr-link-event-time { .nostr-link-event-time {
font-size: 0.75rem; font-size: 0.7rem;
color: var(--text-muted); color: var(--text-muted, var(--text-secondary));
margin-left: auto;
} }
.nostr-link-event-content { .nostr-link-event-content {

Loading…
Cancel
Save