Browse Source

fix discussion thread and fetch gitea files

main
Silberengel 4 weeks ago
parent
commit
19bc39a57e
  1. 353
      src/lib/services/git/api-repo-fetcher.ts
  2. 134
      src/lib/utils/api-repo-helper.ts
  3. 261
      src/routes/api/gitea-proxy/[...path]/+server.ts
  4. 12
      src/routes/api/repos/[npub]/[repo]/file/+server.ts
  5. 51
      src/routes/repos/[npub]/[repo]/+page.svelte

353
src/lib/services/git/api-repo-fetcher.ts

@ -9,6 +9,34 @@
import logger from '../logger.js'; import logger from '../logger.js';
/**
 * Check if we're running on the server (Node.js) or client (browser)
 */
function isServerSide(): boolean {
  return typeof process !== 'undefined' && process.versions?.node !== undefined;
}

/**
 * Get the base URL for API requests.
 * On the server, call the upstream API directly; in the browser, route through
 * the /api/gitea-proxy endpoint to avoid CORS.
 *
 * @param apiPath - API path relative to the base URL (leading slash optional)
 * @param baseUrl - upstream API base URL (e.g. https://gitea.example.com/api/v1)
 * @param searchParams - query parameters to forward
 */
function getApiBaseUrl(apiPath: string, baseUrl: string, searchParams: URLSearchParams): string {
  if (isServerSide()) {
    // Server-side: call the upstream API directly
    const cleanBaseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
    const cleanApiPath = apiPath.startsWith('/') ? apiPath : `/${apiPath}`;
    const queryString = searchParams.toString();
    return `${cleanBaseUrl}${cleanApiPath}${queryString ? `?${queryString}` : ''}`;
  }
  // Client-side: use proxy to avoid CORS.
  // Copy the URLSearchParams directly instead of spreading Object.fromEntries:
  // that preserves repeated keys, and setting baseUrl afterwards guarantees a
  // stray 'baseUrl' entry in searchParams cannot clobber the real one.
  const proxyParams = new URLSearchParams(searchParams);
  proxyParams.set('baseUrl', baseUrl);
  return `/api/gitea-proxy/${apiPath}?${proxyParams.toString()}`;
}
export interface ApiRepoInfo { export interface ApiRepoInfo {
name: string; name: string;
description?: string; description?: string;
@ -63,7 +91,7 @@ export function isGraspUrl(url: string): boolean {
/** /**
* Parse git URL to extract platform, owner, and repo * Parse git URL to extract platform, owner, and repo
*/ */
function parseGitUrl(url: string): { platform: GitPlatform; owner: string; repo: string; baseUrl: string } | null { export function parseGitUrl(url: string): { platform: GitPlatform; owner: string; repo: string; baseUrl: string } | null {
// Handle GRASP URLs - they use Gitea-compatible API but with npub as owner // Handle GRASP URLs - they use Gitea-compatible API but with npub as owner
if (isGraspUrl(url)) { if (isGraspUrl(url)) {
const graspMatch = url.match(/(https?:\/\/[^/]+)\/(npub1[a-z0-9]+)\/([^/]+?)(?:\.git)?\/?$/i); const graspMatch = url.match(/(https?:\/\/[^/]+)\/(npub1[a-z0-9]+)\/([^/]+?)(?:\.git)?\/?$/i);
@ -251,32 +279,139 @@ async function fetchFromGitHub(owner: string, repo: string): Promise<Partial<Api
/** /**
* Fetch repository metadata from GitLab API * Fetch repository metadata from GitLab API
* Note: This is a simplified version. For full implementation, see aitherboard's git-repo-fetcher.ts
*/ */
async function fetchFromGitLab(owner: string, repo: string, baseUrl: string): Promise<Partial<ApiRepoInfo> | null> { async function fetchFromGitLab(owner: string, repo: string, baseUrl: string): Promise<Partial<ApiRepoInfo> | null> {
try { try {
const projectPath = encodeURIComponent(`${owner}/${repo}`); const projectPath = encodeURIComponent(`${owner}/${repo}`);
const repoResponse = await fetch(`${baseUrl}/projects/${projectPath}`);
// Use proxy endpoint on client-side, direct API on server-side
const repoUrl = getApiBaseUrl(
`projects/${projectPath}`,
baseUrl,
new URLSearchParams()
);
const repoResponse = await fetch(repoUrl);
if (!repoResponse.ok) { if (!repoResponse.ok) {
if (repoResponse.status === 404) { if (repoResponse.status === 404) {
return null; return null;
} }
logger.warn({ status: repoResponse.status, owner, repo }, 'GitLab API error');
return null; return null;
} }
const repoData = await repoResponse.json(); const repoData = await repoResponse.json();
const defaultBranch = repoData.default_branch || 'master'; const defaultBranch = repoData.default_branch || 'master';
// For now, return basic info. Full implementation would fetch branches, commits, files // Fetch branches and commits in parallel
const [branchesResponse, commitsResponse] = await Promise.all([
fetch(getApiBaseUrl(
`projects/${projectPath}/repository/branches`,
baseUrl,
new URLSearchParams()
)).catch(() => null),
fetch(getApiBaseUrl(
`projects/${projectPath}/repository/commits`,
baseUrl,
new URLSearchParams({ per_page: '10' })
)).catch(() => null)
]);
let branchesData: any[] = [];
let commitsData: any[] = [];
if (branchesResponse && branchesResponse.ok) {
branchesData = await branchesResponse.json();
if (!Array.isArray(branchesData)) {
logger.warn({ owner, repo }, 'GitLab branches response is not an array');
branchesData = [];
}
}
if (commitsResponse && commitsResponse.ok) {
commitsData = await commitsResponse.json();
if (!Array.isArray(commitsData)) {
logger.warn({ owner, repo }, 'GitLab commits response is not an array');
commitsData = [];
}
}
const branches: ApiBranch[] = branchesData.map((b: any) => ({
name: b.name,
commit: {
sha: b.commit.id,
message: b.commit.message.split('\n')[0],
author: b.commit.author_name,
date: b.commit.committed_date
}
}));
const commits: ApiCommit[] = commitsData.map((c: any) => ({
sha: c.id,
message: c.message.split('\n')[0],
author: c.author_name,
date: c.committed_date
}));
// Fetch file tree (simplified - GitLab tree API is more complex)
let files: ApiFile[] = [];
try {
const treeResponse = await fetch(getApiBaseUrl(
`projects/${projectPath}/repository/tree`,
baseUrl,
new URLSearchParams({ recursive: 'true', per_page: '100' })
)).catch(() => null);
if (treeResponse && treeResponse.ok) {
const treeData = await treeResponse.json();
if (Array.isArray(treeData)) {
files = treeData.map((item: any) => ({
name: item.name,
path: item.path,
type: item.type === 'tree' ? 'dir' : 'file',
size: item.size
}));
}
}
} catch (error) {
logger.warn({ error, owner, repo }, 'Failed to fetch GitLab file tree');
}
// Try to fetch README
let readme: { path: string; content: string; format: 'markdown' | 'asciidoc' } | undefined;
const readmeFiles = ['README.adoc', 'README.md', 'README.rst', 'README.txt'];
for (const readmeFile of readmeFiles) {
try {
const readmeUrl = getApiBaseUrl(
`projects/${projectPath}/repository/files/${encodeURIComponent(readmeFile)}/raw`,
baseUrl,
new URLSearchParams({ ref: defaultBranch })
);
const fileData = await fetch(readmeUrl).then(r => {
if (!r.ok) {
throw new Error('Not found');
}
return r.text();
});
readme = {
path: readmeFile,
content: fileData,
format: readmeFile.toLowerCase().endsWith('.adoc') ? 'asciidoc' : 'markdown'
};
break; // Found a README, stop searching
} catch (error) {
continue; // Try next file
}
}
return { return {
name: repoData.name, name: repoData.name,
description: repoData.description, description: repoData.description,
url: repoData.web_url, url: repoData.web_url,
defaultBranch, defaultBranch,
branches: [], branches,
commits: [], commits,
files: [], files,
readme,
platform: 'gitlab' platform: 'gitlab'
}; };
} catch (error) { } catch (error) {
@ -290,28 +425,216 @@ async function fetchFromGitLab(owner: string, repo: string, baseUrl: string): Pr
*/ */
async function fetchFromGitea(owner: string, repo: string, baseUrl: string): Promise<Partial<ApiRepoInfo> | null> { async function fetchFromGitea(owner: string, repo: string, baseUrl: string): Promise<Partial<ApiRepoInfo> | null> {
try { try {
// URL-encode owner and repo to handle special characters
const encodedOwner = encodeURIComponent(owner); const encodedOwner = encodeURIComponent(owner);
const encodedRepo = encodeURIComponent(repo); const encodedRepo = encodeURIComponent(repo);
const repoResponse = await fetch(`${baseUrl}/repos/${encodedOwner}/${encodedRepo}`);
// Use proxy endpoint on client-side, direct API on server-side
const repoUrl = getApiBaseUrl(
`repos/${encodedOwner}/${encodedRepo}`,
baseUrl,
new URLSearchParams()
);
const repoResponse = await fetch(repoUrl);
if (!repoResponse.ok) { if (!repoResponse.ok) {
if (repoResponse.status === 404) { if (repoResponse.status === 404) {
return null; return null;
} }
logger.warn({ status: repoResponse.status, owner, repo }, 'Gitea API error');
return null; return null;
} }
const repoData = await repoResponse.json(); const repoData = await repoResponse.json();
const defaultBranch = repoData.default_branch || 'master'; const defaultBranch = repoData.default_branch || 'master';
const [branchesResponse, commitsResponse] = await Promise.all([
fetch(getApiBaseUrl(
`repos/${encodedOwner}/${encodedRepo}/branches`,
baseUrl,
new URLSearchParams()
)).catch(() => null),
fetch(getApiBaseUrl(
`repos/${encodedOwner}/${encodedRepo}/commits`,
baseUrl,
new URLSearchParams({ limit: '10' })
)).catch(() => null)
]);
let branchesData: any[] = [];
let commitsData: any[] = [];
if (branchesResponse && branchesResponse.ok) {
branchesData = await branchesResponse.json();
if (!Array.isArray(branchesData)) {
logger.warn({ owner, repo }, 'Gitea branches response is not an array');
branchesData = [];
}
} else {
logger.warn({ status: branchesResponse?.status, owner, repo }, 'Gitea API error for branches');
}
if (commitsResponse && commitsResponse.ok) {
commitsData = await commitsResponse.json();
if (!Array.isArray(commitsData)) {
logger.warn({ owner, repo }, 'Gitea commits response is not an array');
commitsData = [];
}
} else {
logger.warn({ status: commitsResponse?.status, owner, repo }, 'Gitea API error for commits');
}
const branches: ApiBranch[] = branchesData.map((b: any) => {
const commitObj = b.commit || {};
return { return {
name: repoData.name, name: b.name || '',
commit: {
sha: commitObj.id || b.commit?.sha || '',
message: commitObj.message ? commitObj.message.split('\n')[0] : 'No commit message',
author: commitObj.author?.name || commitObj.author_name || 'Unknown',
date: commitObj.timestamp || commitObj.created || new Date().toISOString()
}
};
});
const commits: ApiCommit[] = commitsData.map((c: any) => {
const commitObj = c.commit || {};
return {
sha: c.sha || c.id || '',
message: commitObj.message ? commitObj.message.split('\n')[0] : 'No commit message',
author: commitObj.author?.name || commitObj.author_name || 'Unknown',
date: commitObj.timestamp || commitObj.created || new Date().toISOString()
};
});
// Fetch file tree - Gitea uses /git/trees API endpoint
let files: ApiFile[] = [];
const encodedBranch = encodeURIComponent(defaultBranch);
try {
// Try the git/trees endpoint first (more complete)
const treeResponse = await fetch(getApiBaseUrl(
`repos/${encodedOwner}/${encodedRepo}/git/trees/${encodedBranch}`,
baseUrl,
new URLSearchParams({ recursive: '1' })
)).catch(() => null);
if (treeResponse && treeResponse.ok) {
const treeData = await treeResponse.json();
if (treeData.tree && Array.isArray(treeData.tree)) {
files = treeData.tree
.filter((item: any) => item.type === 'blob' || item.type === 'tree')
.map((item: any) => ({
name: item.path.split('/').pop() || item.path,
path: item.path,
type: item.type === 'tree' ? 'dir' : 'file',
size: item.size
}));
}
} else {
// Fallback to contents endpoint (only root directory)
const contentsResponse = await fetch(getApiBaseUrl(
`repos/${encodedOwner}/${encodedRepo}/contents`,
baseUrl,
new URLSearchParams({ ref: encodedBranch })
)).catch(() => null);
if (contentsResponse && contentsResponse.ok) {
const contentsData = await contentsResponse.json();
if (Array.isArray(contentsData)) {
files = contentsData.map((item: any) => ({
name: item.name,
path: item.path || item.name,
type: item.type === 'dir' ? 'dir' : 'file',
size: item.size
}));
}
}
}
} catch (error) {
logger.warn({ error, owner, repo }, 'Failed to fetch Gitea file tree');
}
// Try to fetch README (prioritize .adoc over .md)
// First try root directory (most common case)
let readme: { path: string; content: string; format: 'markdown' | 'asciidoc' } | undefined;
const readmeFiles = ['README.adoc', 'README.md', 'README.rst', 'README.txt'];
for (const readmeFile of readmeFiles) {
try {
const encodedReadmeFile = encodeURIComponent(readmeFile);
const fileResponse = await fetch(getApiBaseUrl(
`repos/${encodedOwner}/${encodedRepo}/contents/${encodedReadmeFile}`,
baseUrl,
new URLSearchParams({ ref: defaultBranch })
));
if (!fileResponse.ok) throw new Error('Not found');
const fileData = await fileResponse.json();
if (fileData.content) {
// Gitea returns base64 encoded content
const content = atob(fileData.content.replace(/\s/g, ''));
readme = {
path: readmeFile,
content,
format: readmeFile.toLowerCase().endsWith('.adoc') ? 'asciidoc' : 'markdown'
};
break; // Found a README, stop searching
}
} catch (error) {
// Try next file
continue;
}
}
// If not found in root, search the file tree (case-insensitive)
if (!readme && files.length > 0) {
const readmePatterns = [/^readme\.adoc$/i, /^readme\.md$/i, /^readme\.rst$/i, /^readme\.txt$/i, /^readme$/i];
let readmePath: string | null = null;
for (const file of files) {
if (file.type === 'file') {
const fileName = file.name;
for (const pattern of readmePatterns) {
if (pattern.test(fileName)) {
readmePath = file.path;
break;
}
}
if (readmePath) break;
}
}
// If found in tree, fetch it
if (readmePath) {
try {
// URL-encode the file path segments
const encodedReadmePath = readmePath.split('/').map(segment => encodeURIComponent(segment)).join('/');
const fileResponse = await fetch(getApiBaseUrl(
`repos/${encodedOwner}/${encodedRepo}/contents/${encodedReadmePath}`,
baseUrl,
new URLSearchParams({ ref: encodedBranch })
));
if (!fileResponse.ok) throw new Error('Not found');
const fileData = await fileResponse.json();
if (fileData.content) {
// Gitea returns base64 encoded content
const content = atob(fileData.content.replace(/\s/g, ''));
const format = readmePath.toLowerCase().endsWith('.adoc') ? 'asciidoc' : 'markdown';
readme = {
path: readmePath,
content,
format
};
}
} catch (error) {
logger.warn({ error, readmePath, owner, repo }, 'Failed to fetch README from tree path');
}
}
}
return {
name: repoData.name || repoData.full_name?.split('/').pop() || repo,
description: repoData.description, description: repoData.description,
url: repoData.html_url || repoData.clone_url, url: repoData.html_url || repoData.clone_url || `${baseUrl.replace('/api/v1', '')}/${owner}/${repo}`,
defaultBranch, defaultBranch: repoData.default_branch || defaultBranch,
branches: [], branches,
commits: [], commits,
files: [], files,
readme,
platform: 'gitea' platform: 'gitea'
}; };
} catch (error) { } catch (error) {

134
src/lib/utils/api-repo-helper.ts

@ -3,10 +3,38 @@
* Used by endpoints to fetch repo metadata without cloning * Used by endpoints to fetch repo metadata without cloning
*/ */
import { fetchRepoMetadata, extractGitUrls } from '../services/git/api-repo-fetcher.js'; import { fetchRepoMetadata, extractGitUrls, parseGitUrl } from '../services/git/api-repo-fetcher.js';
import type { NostrEvent } from '../types/nostr.js'; import type { NostrEvent } from '../types/nostr.js';
import logger from '../services/logger.js'; import logger from '../services/logger.js';
/**
 * Check if we're running on the server (Node.js) or client (browser)
 */
function isServerSide(): boolean {
  return typeof process !== 'undefined' && process.versions?.node !== undefined;
}

/**
 * Get the base URL for API requests.
 * On the server, call the upstream API directly; in the browser, route through
 * the /api/gitea-proxy endpoint to avoid CORS.
 *
 * @param apiPath - API path relative to the base URL (leading slash optional)
 * @param baseUrl - upstream API base URL (e.g. https://gitea.example.com/api/v1)
 * @param searchParams - query parameters to forward
 */
function getApiBaseUrl(apiPath: string, baseUrl: string, searchParams: URLSearchParams): string {
  if (isServerSide()) {
    // Server-side: call the upstream API directly
    const cleanBaseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
    const cleanApiPath = apiPath.startsWith('/') ? apiPath : `/${apiPath}`;
    const queryString = searchParams.toString();
    return `${cleanBaseUrl}${cleanApiPath}${queryString ? `?${queryString}` : ''}`;
  }
  // Client-side: use proxy to avoid CORS.
  // Copy the URLSearchParams directly instead of spreading Object.fromEntries:
  // that preserves repeated keys, and setting baseUrl afterwards guarantees a
  // stray 'baseUrl' entry in searchParams cannot clobber the real one.
  const proxyParams = new URLSearchParams(searchParams);
  proxyParams.set('baseUrl', baseUrl);
  return `/api/gitea-proxy/${apiPath}?${proxyParams.toString()}`;
}
/** /**
* Try to fetch repository metadata via API from clone URLs * Try to fetch repository metadata via API from clone URLs
* Returns null if API fetching fails or no clone URLs available * Returns null if API fetching fails or no clone URLs available
@ -54,3 +82,107 @@ export async function tryApiFetch(
return null; return null;
} }
} }
/**
 * Try to fetch a single file via API from clone URLs.
 * Returns null if API fetching fails or no clone URLs are available.
 *
 * @param announcementEvent - Nostr repo announcement event carrying clone URLs
 * @param npub - repo owner npub (used for logging only)
 * @param repoName - repo name (used for logging only)
 * @param filePath - path of the file inside the repository
 * @param ref - branch/tag/commit to read from (defaults to 'main')
 */
export async function tryApiFetchFile(
  announcementEvent: NostrEvent,
  npub: string,
  repoName: string,
  filePath: string,
  ref: string = 'main'
): Promise<{ content: string; encoding: string } | null> {
  try {
    const cloneUrls = extractGitUrls(announcementEvent);
    if (cloneUrls.length === 0) {
      logger.debug({ npub, repoName, filePath }, 'No clone URLs found for API file fetch');
      return null;
    }
    // Try each clone URL until one works
    for (const url of cloneUrls) {
      try {
        const parsed = parseGitUrl(url);
        if (!parsed) {
          continue;
        }
        const { platform, owner, repo, baseUrl } = parsed;
        const encodedOwner = encodeURIComponent(owner);
        const encodedRepo = encodeURIComponent(repo);
        // URL-encode the file path segment by segment, keeping '/' separators
        const encodedFilePath = filePath.split('/').map(segment => encodeURIComponent(segment)).join('/');
        let fileUrl: string;
        if (platform === 'gitea') {
          // Gitea: /api/v1/repos/{owner}/{repo}/contents/{path}?ref={ref}
          // Pass the raw ref: URLSearchParams encodes values itself, so
          // pre-encoding would double-encode (e.g. 'feature/x' -> 'feature%252Fx').
          fileUrl = getApiBaseUrl(
            `repos/${encodedOwner}/${encodedRepo}/contents/${encodedFilePath}`,
            baseUrl,
            new URLSearchParams({ ref })
          );
        } else if (platform === 'gitlab') {
          // GitLab: /api/v4/projects/{owner}%2F{repo}/repository/files/{path}/raw?ref={ref}
          const projectPath = encodeURIComponent(`${owner}/${repo}`);
          fileUrl = getApiBaseUrl(
            `projects/${projectPath}/repository/files/${encodedFilePath}/raw`,
            baseUrl,
            new URLSearchParams({ ref })
          );
        } else if (platform === 'github') {
          // GitHub: /repos/{owner}/{repo}/contents/{path}?ref={ref}
          // Manual string here, so the ref must be encoded explicitly.
          fileUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${encodedFilePath}?ref=${encodeURIComponent(ref)}`;
        } else {
          // Unsupported platform
          continue;
        }
        const response = await fetch(fileUrl);
        if (!response.ok) {
          if (response.status === 404) {
            // File not found, try next URL
            continue;
          }
          logger.debug({ status: response.status, url, filePath }, 'API file fetch failed');
          continue;
        }
        // A response body can only be consumed once, so branch on platform
        // BEFORE reading it: GitLab's /raw endpoint returns plain text, and
        // calling response.json() on it would throw and skip this URL.
        if (platform === 'gitlab') {
          const content = await response.text();
          return {
            content,
            encoding: 'text'
          };
        }
        // Gitea and GitHub return JSON with base64-encoded content
        const fileData = await response.json();
        if (fileData.content) {
          // NOTE(review): atob yields a binary string; non-ASCII UTF-8 file
          // content may need TextDecoder handling — confirm with real repos.
          const content = atob(fileData.content.replace(/\s/g, ''));
          return {
            content,
            // 'base64' describes the transport encoding; the returned content
            // is already decoded (label kept as-is for existing consumers).
            encoding: 'base64'
          };
        }
      } catch (err) {
        logger.debug({ error: err, url, filePath }, 'API file fetch failed for URL, trying next');
        continue;
      }
    }
    return null;
  } catch (err) {
    logger.warn({ error: err, npub, repoName, filePath }, 'Error attempting API file fetch');
    return null;
  }
}

261
src/routes/api/gitea-proxy/[...path]/+server.ts

@ -0,0 +1,261 @@
/**
* Proxy endpoint for Git hosting API requests (Gitea, GitLab, etc.) to avoid CORS issues
* Usage: /api/gitea-proxy/{apiPath}?baseUrl={baseUrl}
* Examples:
* - Gitea: /api/gitea-proxy/repos/owner/repo/contents/README.md?baseUrl=https://gitea.example.com/api/v1&ref=master
* - GitLab: /api/gitea-proxy/projects/owner%2Frepo/repository/files/path/to/file/raw?baseUrl=https://gitlab.com/api/v4&ref=master
*/
import type { RequestHandler } from './$types';
// Shared CORS headers: the proxy is intentionally open to any origin and
// read-only (GET plus preflight OPTIONS).
const CORS_HEADERS = {
  'Access-Control-Allow-Origin': '*',
  'Access-Control-Allow-Methods': 'GET, OPTIONS',
  'Access-Control-Allow-Headers': 'Content-Type'
} as const;

/** Build a JSON error Response carrying the shared CORS headers. */
function createErrorResponse(message: string, status: number): Response {
  const payload = JSON.stringify({ error: message });
  const headers = {
    'Content-Type': 'application/json',
    ...CORS_HEADERS
  };
  return new Response(payload, { status, headers });
}
/**
 * Re-encode a GitLab file path: decode any pre-encoded segments, then encode
 * only segments containing special characters, keeping '/' as a literal
 * separator (the GitLab files API accepts raw slashes in the file path).
 */
function encodeGitLabFilePath(segments: string[]): string {
  return segments
    .map(segment => {
      let decoded: string;
      try {
        decoded = decodeURIComponent(segment);
      } catch {
        decoded = segment;
      }
      // Encode only when the segment holds characters outside the safe set
      return /[^a-zA-Z0-9._/-]/.test(decoded) ? encodeURIComponent(decoded) : decoded;
    })
    .join('/');
}

/**
 * Rebuild a GitLab API path from an already-encoded project path
 * ("owner%2Frepo") and the remaining path parts. For
 * .../repository/files/{file_path}/raw requests the file path is re-encoded
 * segment by segment; any other request is reassembled unchanged.
 */
function rebuildGitLabPath(encodedProjectPath: string, remainingParts: string[]): string {
  const filesIndex = remainingParts.indexOf('files');
  if (filesIndex !== -1 && filesIndex < remainingParts.length - 1) {
    // File request: projects/{project}/repository/files/{file_path}/{raw}
    const filePath = encodeGitLabFilePath(
      remainingParts.slice(filesIndex + 1, remainingParts.length - 1)
    );
    const lastPart = remainingParts[remainingParts.length - 1]; // usually 'raw'
    return `projects/${encodedProjectPath}/repository/files/${filePath}/${lastPart}`;
  }
  // Not a file request: reconstruct with the encoded project path only
  return `projects/${encodedProjectPath}${remainingParts.length > 0 ? '/' + remainingParts.join('/') : ''}`;
}

/**
 * Build the upstream URL for a proxied request.
 * GitLab project paths must stay encoded as owner%2Frepo, which is why the
 * result is assembled as a plain string — the URL constructor would normalize
 * %2F back to '/'. The 'baseUrl' query parameter is proxy-internal and is
 * stripped before forwarding.
 */
function buildTargetUrl(baseUrl: string, apiPath: string, searchParams: URLSearchParams): string {
  // Ensure baseUrl doesn't have a trailing slash
  const cleanBaseUrl = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl;
  // Handle GitLab API paths: the project path (owner/repo) MUST be encoded
  // as owner%2Frepo, and file paths are re-encoded per segment.
  let processedPath = apiPath;
  if (apiPath.startsWith('projects/')) {
    const parts = apiPath.split('/');
    if (parts.length >= 2 && (parts[1].includes('%2F') || parts[1].includes('%2f'))) {
      // Project path already encoded in parts[1] (e.g. "owner%2Frepo")
      processedPath = rebuildGitLabPath(parts[1], parts.slice(2));
    } else if (parts.length >= 3) {
      // Project path split across parts[1]/parts[2]: encode it as owner%2Frepo
      processedPath = rebuildGitLabPath(encodeURIComponent(`${parts[1]}/${parts[2]}`), parts.slice(3));
    }
  }
  // Ensure processedPath starts with a slash
  const cleanApiPath = processedPath.startsWith('/') ? processedPath : `/${processedPath}`;
  // Forward every query parameter except the proxy-internal 'baseUrl'
  const queryParts: string[] = [];
  for (const [key, value] of searchParams.entries()) {
    if (key !== 'baseUrl') {
      queryParts.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`);
    }
  }
  const queryString = queryParts.length > 0 ? `?${queryParts.join('&')}` : '';
  return `${cleanBaseUrl}${cleanApiPath}${queryString}`;
}
/**
 * Proxy GET requests to the upstream Git hosting API.
 * Supports a special 'raw-file' path that fetches an arbitrary URL as text
 * (?url=...); otherwise forwards the API path to the host given in
 * ?baseUrl=..., relaying body and status with CORS headers attached.
 */
export const GET: RequestHandler = async ({ params, url }) => {
  try {
    // params.path is a rest parameter; normalize to a single string
    const apiPath = Array.isArray(params.path) ? params.path.join('/') : params.path;
    // Special endpoint: raw-file (for direct file fetching)
    if (apiPath === 'raw-file') {
      const targetUrl = url.searchParams.get('url');
      if (!targetUrl) {
        return createErrorResponse('Missing url query parameter for raw-file', 400);
      }
      const response = await fetch(targetUrl, {
        method: 'GET',
        headers: {
          'Accept': 'text/plain, text/html, */*',
          'User-Agent': 'GitRepublic/1.0'
        }
      });
      if (!response.ok) {
        return createErrorResponse(`Failed to fetch file: ${response.status}`, response.status);
      }
      const contentType = response.headers.get('content-type') || 'text/plain';
      const body = await response.text();
      return new Response(body, {
        status: response.status,
        statusText: response.statusText,
        headers: {
          'Content-Type': contentType,
          ...CORS_HEADERS
        }
      });
    }
    // Standard Gitea/GitLab API proxy handling
    const baseUrl = url.searchParams.get('baseUrl');
    if (!baseUrl) {
      return createErrorResponse('Missing baseUrl query parameter', 400);
    }
    if (!apiPath) {
      return createErrorResponse('Missing API path', 400);
    }
    const targetUrl = buildTargetUrl(baseUrl, apiPath, url.searchParams);
    // Pass the URL string directly so %2F project-path encoding is preserved
    const response = await fetch(targetUrl, {
      method: 'GET',
      headers: {
        'Accept': 'application/json',
        'User-Agent': 'GitRepublic/1.0'
      }
    });
    const contentType = response.headers.get('content-type') || 'application/json';
    const body = await response.text();
    // Log error responses for debugging
    if (!response.ok) {
      const lowerPath = apiPath.toLowerCase();
      // 404s while probing README variants (README.adoc, README.md, ...) are
      // expected, so skip logging those
      const isReadmeRequest = lowerPath.includes('contents') && lowerPath.includes('readme');
      if (response.status === 404 && isReadmeRequest) {
        // Silently skip - expected for README attempts
      } else if (response.status === 404) {
        // Log other 404s with context
        console.warn('[Gitea Proxy] 404 Not Found:', {
          apiPath,
          targetUrl,
          baseUrl,
          body: body.substring(0, 200)
        });
      } else {
        // Log non-404 errors
        console.error('[Gitea Proxy] Error response:', response.status, response.statusText);
        console.error('[Gitea Proxy] Request URL:', targetUrl);
        console.error('[Gitea Proxy] Response body:', body.substring(0, 500));
      }
    }
    return new Response(body, {
      status: response.status,
      statusText: response.statusText,
      headers: {
        'Content-Type': contentType,
        ...CORS_HEADERS
      }
    });
  } catch (error) {
    const message = error instanceof Error ? error.message : 'Unknown error';
    console.error('Git hosting proxy error:', message);
    return createErrorResponse(message, 500);
  }
};
/**
 * Handle CORS preflight requests: 204 No Content with the shared CORS headers
 * so browsers may issue cross-origin GETs against this proxy.
 */
export const OPTIONS: RequestHandler = async () => {
  return new Response(null, { status: 204, headers: CORS_HEADERS });
};

12
src/routes/api/repos/[npub]/[repo]/file/+server.ts

@ -61,9 +61,15 @@ export const GET: RequestHandler = async ({ params, url, request }: { params: {
if (events.length > 0) { if (events.length > 0) {
// Try API-based fetching first (no cloning) // Try API-based fetching first (no cloning)
// For file endpoint, we can't easily fetch individual files via API without cloning const { tryApiFetchFile } = await import('$lib/utils/api-repo-helper.js');
// So we return 404 with helpful message const fileContent = await tryApiFetchFile(events[0], npub, repo, filePath, ref);
return error(404, 'Repository is not cloned locally. To view files, privileged users can clone this repository using the "Clone to Server" button.');
if (fileContent) {
return json(fileContent);
}
// API fetch failed - repo is not cloned and API fetch didn't work
return error(404, 'Repository is not cloned locally and could not fetch file via API. Privileged users can clone this repository using the "Clone to Server" button.');
} else { } else {
return error(404, 'Repository announcement not found in Nostr'); return error(404, 'Repository announcement not found in Nostr');
} }

51
src/routes/repos/[npub]/[repo]/+page.svelte

@ -539,6 +539,20 @@
} }
} }
/**
 * Count every reply in a discussion tree, descending into nested replies.
 * Returns 0 for a missing or empty list.
 */
function countAllReplies(comments: Array<{ replies?: Array<any> }> | undefined): number {
  if (!comments || comments.length === 0) {
    return 0;
  }
  // Each comment counts as 1 plus whatever its own reply subtree contains
  return comments.reduce(
    (total, comment) => total + 1 + countAllReplies(comment.replies),
    0
  );
}
async function checkCloneStatus() { async function checkCloneStatus() {
if (checkingCloneStatus || isRepoCloned !== null) return; if (checkingCloneStatus || isRepoCloned !== null) return;
@ -1265,8 +1279,12 @@
return; return;
} }
// Update currentBranch to first available branch if 'main' doesn't exist // Update currentBranch to first available branch if 'main' doesn't exist
if (branches.length > 0 && !branches.includes(currentBranch)) { if (branches.length > 0) {
currentBranch = branches[0]; // Branches can be an array of objects with .name property or array of strings
const branchNames = branches.map((b: any) => typeof b === 'string' ? b : b.name);
if (!branchNames.includes(currentBranch)) {
currentBranch = branchNames[0];
}
} }
await loadFiles(); await loadFiles();
await checkAuth(); await checkAuth();
@ -1529,8 +1547,12 @@
}); });
if (response.ok) { if (response.ok) {
branches = await response.json(); branches = await response.json();
if (branches.length > 0 && !branches.includes(currentBranch)) { if (branches.length > 0) {
currentBranch = branches[0]; // Branches can be an array of objects with .name property or array of strings
const branchNames = branches.map((b: any) => typeof b === 'string' ? b : b.name);
if (!branchNames.includes(currentBranch)) {
currentBranch = branchNames[0];
}
} }
} else if (response.status === 404) { } else if (response.status === 404) {
// Repository not provisioned yet - set error message and flag // Repository not provisioned yet - set error message and flag
@ -1585,7 +1607,13 @@
loading = true; loading = true;
error = null; error = null;
try { try {
const url = `/api/repos/${npub}/${repo}/file?path=${encodeURIComponent(filePath)}&ref=${currentBranch}`; // Ensure currentBranch is a string (branch name), not an object
const branchName = typeof currentBranch === 'string'
? currentBranch
: (typeof currentBranch === 'object' && currentBranch !== null && 'name' in currentBranch
? (currentBranch as { name: string }).name
: 'main');
const url = `/api/repos/${npub}/${repo}/file?path=${encodeURIComponent(filePath)}&ref=${branchName}`;
const response = await fetch(url); const response = await fetch(url);
if (!response.ok) { if (!response.ok) {
@ -2877,7 +2905,8 @@
{#if discussion.type === 'thread'} {#if discussion.type === 'thread'}
<span class="discussion-type">Thread</span> <span class="discussion-type">Thread</span>
{#if hasComments} {#if hasComments}
<span class="comment-count">{discussion.comments!.length} {discussion.comments!.length === 1 ? 'reply' : 'replies'}</span> {@const totalReplies = countAllReplies(discussion.comments)}
<span class="comment-count">{totalReplies} {totalReplies === 1 ? 'reply' : 'replies'}</span>
{/if} {/if}
{:else} {:else}
<span class="discussion-type">Comments</span> <span class="discussion-type">Comments</span>
@ -2904,8 +2933,9 @@
</div> </div>
{/if} {/if}
{#if discussion.type === 'thread' && isExpanded && hasComments} {#if discussion.type === 'thread' && isExpanded && hasComments}
{@const totalReplies = countAllReplies(discussion.comments)}
<div class="comments-section"> <div class="comments-section">
<h4>Replies ({discussion.comments!.length})</h4> <h4>Replies ({totalReplies})</h4>
{#each discussion.comments! as comment} {#each discussion.comments! as comment}
<div class="comment-item"> <div class="comment-item">
<div class="comment-meta"> <div class="comment-meta">
@ -2988,8 +3018,9 @@
{/each} {/each}
</div> </div>
{:else if discussion.type === 'comments' && hasComments} {:else if discussion.type === 'comments' && hasComments}
{@const totalReplies = countAllReplies(discussion.comments)}
<div class="comments-section"> <div class="comments-section">
<h4>Comments ({discussion.comments!.length})</h4> <h4>Comments ({totalReplies})</h4>
{#each discussion.comments! as comment} {#each discussion.comments! as comment}
<div class="comment-item"> <div class="comment-item">
<div class="comment-meta"> <div class="comment-meta">
@ -4768,8 +4799,10 @@
flex: 1; flex: 1;
display: flex; display: flex;
flex-direction: column; flex-direction: column;
overflow: hidden; overflow-y: auto;
overflow-x: hidden;
background: var(--card-bg); background: var(--card-bg);
min-height: 0; /* Allows flex child to shrink below content size */
} }
.discussions-header { .discussions-header {

Loading…
Cancel
Save