You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 
 

758 lines
22 KiB

package server
import (
"context"
"encoding/json"
"fmt"
"net/http"
"path/filepath"
"strings"
"time"
gonostr "github.com/nbd-wtf/go-nostr"
"gitcitadel-online/internal/cache"
"gitcitadel-online/internal/generator"
"gitcitadel-online/internal/logger"
"gitcitadel-online/internal/nostr"
)
// setupRoutes registers every HTTP route on the given mux.
func (s *Server) setupRoutes(mux *http.ServeMux) {
	routes := map[string]http.HandlerFunc{
		// Static assets and cached media
		"/static/":      s.handleStatic,
		"/cache/media/": s.handleMediaCache,
		"/favicon.ico":  s.handleFavicon,
		// Main pages
		"/":         s.handleLanding,
		"/wiki/":    s.handleWiki,
		"/blog":     s.handleBlog,
		"/articles": s.handleArticles,
		"/ebooks":   s.handleEBooks,
		"/contact":  s.handleContact,
		"/feed":     s.handleFeed,
		"/events":   s.handleEvents,
		// Health and metrics
		"/health":  s.handleHealth,
		"/metrics": s.handleMetrics,
		// SEO
		"/sitemap.xml": s.handleSitemap,
		"/robots.txt":  s.handleRobots,
		// API endpoints
		"/api/contact": s.handleContactAPI,
	}
	// Registration order does not matter to ServeMux, so map iteration is fine.
	for pattern, handler := range routes {
		mux.HandleFunc(pattern, handler)
	}
}
// handleLanding serves the cached landing page. Any other path that fell
// through to the "/" route (i.e. matched no more-specific pattern) is a 404.
func (s *Server) handleLanding(w http.ResponseWriter, r *http.Request) {
	if r.URL.Path != "/" {
		s.handle404(w, r)
		return
	}
	if page, ok := s.cache.Get("/"); ok {
		s.servePage(w, r, page)
		return
	}
	// Cache not yet populated at startup.
	http.Error(w, "Page not ready", http.StatusServiceUnavailable)
}
// handleWiki handles wiki article pages and the wiki index.
//
// Lookup order:
//  1. ?e=<event-id>          — direct event-ID fetch (fastest)
//  2. ?ref=kind:pubkey:dtag  — addressable-event reference; also accepted as
//     the split form ?k=&a=&d= (client-side JS converts the URL fragment,
//     since browsers never send fragments to the server)
//  3. /wiki or /wiki/        — cached wiki index page
//  4. /wiki/{dTag}           — cached individual wiki page
func (s *Server) handleWiki(w http.ResponseWriter, r *http.Request) {
	path := r.URL.Path

	// Fast path: direct event-ID lookup.
	if eventID := r.URL.Query().Get("e"); eventID != "" {
		ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
		defer cancel()
		filter := gonostr.Filter{IDs: []string{eventID}}
		if event, err := s.nostrClient.FetchEvent(ctx, filter); err == nil && event != nil {
			if s.tryRenderWikiEvent(w, event, event.Kind) {
				return
			}
		}
	}

	// Fallback: addressable reference "kind:pubkey:dtag" (e.g.
	// "30818:dd664d5e...:nkbip-04") from ?ref= or the split ?k=&a=&d= form.
	ref := r.URL.Query().Get("ref")
	if ref == "" {
		ref = r.URL.Query().Get("k") + ":" + r.URL.Query().Get("a") + ":" + r.URL.Query().Get("d")
		if strings.HasPrefix(ref, ":") {
			// Split parameters were absent; treat as no reference.
			ref = ""
		}
	}
	if ref != "" {
		if parts := strings.Split(ref, ":"); len(parts) == 3 {
			var kind int
			if _, err := fmt.Sscanf(parts[0], "%d", &kind); err == nil {
				ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
				defer cancel()
				filter := gonostr.Filter{
					Kinds:   []int{kind},
					Authors: []string{parts[1]},
					Tags: map[string][]string{
						"d": {parts[2]},
					},
					Limit: 1,
				}
				if events, err := s.nostrClient.FetchEvents(ctx, filter); err == nil && len(events) > 0 {
					if s.tryRenderWikiEvent(w, events[0], kind) {
						return
					}
				}
			}
		}
	}

	// Wiki index page (/wiki or /wiki/).
	if path == "/wiki" || path == "/wiki/" {
		page, exists := s.cache.Get("/wiki")
		if !exists {
			http.Error(w, "Page not ready", http.StatusServiceUnavailable)
			return
		}
		s.servePage(w, r, page)
		return
	}

	// Individual cached wiki pages (/wiki/{dTag}).
	page, exists := s.cache.Get(path)
	if !exists {
		s.handle404(w, r)
		return
	}
	s.servePage(w, r, page)
}

// tryRenderWikiEvent parses event as a wiki event of the given kind and, on
// success, writes the generated wiki page to w and returns true. On any parse
// or generation error it writes nothing and returns false, so the caller can
// fall through to the next lookup strategy. (This replaces two verbatim
// copies of the same logic in the event-ID and ref branches.)
func (s *Server) tryRenderWikiEvent(w http.ResponseWriter, event *gonostr.Event, kind int) bool {
	wikiEvent, err := nostr.ParseWikiEvent(event, kind)
	if err != nil {
		return false
	}
	// Wiki pages for navigation are intentionally empty for now; they could
	// be fetched and threaded through here later.
	html, err := s.htmlGenerator.GenerateWikiPage(wikiEvent, []generator.WikiPageInfo{}, []generator.FeedItemInfo{})
	if err != nil {
		return false
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.Write([]byte(html))
	return true
}
// handleCachedPage returns a handler that serves the pre-rendered page cached
// under path, or 503 while the cache is still being populated.
func (s *Server) handleCachedPage(path string) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if page, ok := s.cache.Get(path); ok {
			s.servePage(w, r, page)
			return
		}
		http.Error(w, "Page not ready", http.StatusServiceUnavailable)
	}
}
// handleBlog serves the cached blog page.
func (s *Server) handleBlog(w http.ResponseWriter, r *http.Request) {
	s.handleCachedPage("/blog").ServeHTTP(w, r)
}
// handleArticles serves the cached articles page.
func (s *Server) handleArticles(w http.ResponseWriter, r *http.Request) {
	s.handleCachedPage("/articles").ServeHTTP(w, r)
}
// handleEBooks serves the cached e-books listing page.
func (s *Server) handleEBooks(w http.ResponseWriter, r *http.Request) {
	s.handleCachedPage("/ebooks").ServeHTTP(w, r)
}
// handleFeed serves the cached feed page.
func (s *Server) handleFeed(w http.ResponseWriter, r *http.Request) {
	s.handleCachedPage("/feed").ServeHTTP(w, r)
}
// handleEvents handles /events?d=... — it lists every event sharing a d-tag
// across the wiki, blog, and longform kinds as a card grid.
func (s *Server) handleEvents(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodGet {
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}
	dTag := r.URL.Query().Get("d")
	if dTag == "" {
		s.handle404(w, r)
		return
	}

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// Query each kind separately so one failing kind doesn't abort the page.
	kinds := []int{nostr.KindWiki, nostr.KindBlog, nostr.KindLongform}
	var allEvents []*gonostr.Event
	for _, kind := range kinds {
		filter := gonostr.Filter{
			Kinds: []int{kind},
			Tags: map[string][]string{
				"d": {dTag},
			},
			Limit: 100, // up to 100 events per kind
		}
		events, err := s.nostrClient.FetchEvents(ctx, filter)
		if err != nil {
			logger.WithFields(map[string]interface{}{
				"kind":  kind,
				"dtag":  dTag,
				"error": err,
			}).Warn("Failed to fetch events for d-tag")
			continue
		}
		allEvents = append(allEvents, events...)
	}

	if len(allEvents) == 0 {
		// Nothing found: render the styled 404 page, plain text on failure.
		html, err := s.htmlGenerator.GenerateErrorPage(http.StatusNotFound, []generator.FeedItemInfo{})
		if err != nil {
			http.Error(w, "Not found", http.StatusNotFound)
			return
		}
		w.Header().Set("Content-Type", "text/html; charset=utf-8")
		w.WriteHeader(http.StatusNotFound)
		w.Write([]byte(html))
		return
	}

	eventCards := make([]generator.EventCardInfo, 0, len(allEvents))
	for _, event := range allEvents {
		eventCards = append(eventCards, s.eventCard(event, dTag))
	}

	html, err := s.htmlGenerator.GenerateEventsPage(dTag, eventCards, []generator.FeedItemInfo{})
	if err != nil {
		logger.WithFields(map[string]interface{}{
			"dtag":  dTag,
			"error": err,
		}).Error("Failed to generate events page")
		http.Error(w, "Internal server error", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.Write([]byte(html))
}

// eventCard converts a raw nostr event into the card shown on the /events
// page, extracting title/summary/image tags and building a kind-specific URL.
func (s *Server) eventCard(event *gonostr.Event, dTag string) generator.EventCardInfo {
	// Title defaults to the d-tag when the event carries no title tag.
	title, summary, image := dTag, "", ""
	for _, tag := range event.Tags {
		// A key/value tag needs at least two elements. (The previous check,
		// len(tag) > 0 && len(tag) > 1, was a redundant double condition.)
		if len(tag) < 2 {
			continue
		}
		switch tag[0] {
		case "title":
			title = tag[1]
		case "summary":
			summary = tag[1]
		case "image":
			image = tag[1]
		}
	}

	// Build the link according to how each kind's page addresses events.
	var url string
	switch event.Kind {
	case nostr.KindBlog:
		// Blog uses hash format with the full kind:pubkey:dtag identifier.
		url = fmt.Sprintf("/blog#%d:%s:%s", event.Kind, event.PubKey, dTag)
	case nostr.KindLongform:
		// Articles use the same hash format.
		url = fmt.Sprintf("/articles#%d:%s:%s", event.Kind, event.PubKey, dTag)
	case nostr.KindWiki:
		// Wiki supports event-ID lookup (fast path) plus a ref fallback.
		url = fmt.Sprintf("/wiki?e=%s&ref=%d:%s:%s", event.ID, event.Kind, event.PubKey, dTag)
	default:
		url = fmt.Sprintf("/events?d=%s", dTag)
	}

	createdTime := time.Unix(int64(event.CreatedAt), 0)
	return generator.EventCardInfo{
		EventID:   event.ID,
		Title:     title,
		DTag:      dTag,
		Author:    event.PubKey,
		Summary:   summary,
		Image:     image,
		Kind:      event.Kind,
		URL:       url,
		CreatedAt: int64(event.CreatedAt),
		Time:      createdTime.Format("Jan 2, 2006"),
		TimeISO:   createdTime.Format(time.RFC3339),
	}
}
// handleContact handles the contact form: GET renders the form, POST submits
// it as a NIP-34 issue via the issue service. Other methods get 405.
func (s *Server) handleContact(w http.ResponseWriter, r *http.Request) {
	if r.Method == http.MethodGet {
		s.serveContactForm(w)
		return
	}
	if r.Method != http.MethodPost {
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}

	if err := r.ParseForm(); err != nil {
		s.renderContactError(w, "Failed to parse form data", nil)
		return
	}
	subject := strings.TrimSpace(r.FormValue("subject"))
	content := strings.TrimSpace(r.FormValue("content"))
	labelsStr := strings.TrimSpace(r.FormValue("labels"))
	// Echoed back into the form so the user does not lose their input.
	formData := map[string]string{
		"subject": subject,
		"content": content,
		"labels":  labelsStr,
	}

	if subject == "" || content == "" {
		s.renderContactError(w, "Subject and message are required", formData)
		return
	}

	// Labels are a comma-separated list; blanks are dropped.
	var labels []string
	for _, label := range strings.Split(labelsStr, ",") {
		if label = strings.TrimSpace(label); label != "" {
			labels = append(labels, label)
		}
	}

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	repoAnnouncement, err := s.issueService.FetchRepoAnnouncement(ctx, s.repoAnnouncement)
	if err != nil {
		logger.Errorf("Failed to fetch repo announcement: %v", err)
		s.renderContactError(w, "Failed to connect to repository. Please try again later.", formData)
		return
	}

	issueReq := &nostr.IssueRequest{
		Subject: subject,
		Content: content,
		Labels:  labels,
	}
	// Publish anonymously — an empty key makes the server generate a random one.
	eventID, err := s.issueService.PublishIssue(ctx, repoAnnouncement, issueReq, "")
	if err != nil {
		logger.Errorf("Failed to publish issue: %v", err)
		s.renderContactError(w, "Failed to submit your message. Please try again later.", formData)
		return
	}

	// Success — render the confirmation page with the published event ID.
	html, err := s.htmlGenerator.GenerateContactPage(true, "", eventID, nil, repoAnnouncement, []generator.FeedItemInfo{}, nil)
	if err != nil {
		http.Error(w, "Failed to generate success page", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.Write([]byte(html))
}

// serveContactForm renders the empty contact form (GET). Repo announcement
// and profile fetches are best-effort: failures are logged and the form is
// rendered without them.
func (s *Server) serveContactForm(w http.ResponseWriter) {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	var repoAnnouncement *nostr.RepoAnnouncement
	var err error
	if s.repoAnnouncement != "" {
		repoAnnouncement, err = s.issueService.FetchRepoAnnouncement(ctx, s.repoAnnouncement)
		if err != nil {
			logger.Warnf("Failed to fetch repo announcement for contact page: %v", err)
			// Continue without repo announcement - form will show error
		}
	}

	// Fetch the site profile for the npub shown on the contact page.
	var profile *nostr.Profile
	npub := "npub1s3ht77dq4zqnya8vjun5jp3p44pr794ru36d0ltxu65chljw8xjqd975wz"
	if s.nostrClient != nil {
		profile, err = s.nostrClient.FetchProfile(ctx, npub)
		if err != nil {
			logger.Warnf("Failed to fetch profile for contact page: %v", err)
			// Continue without profile - not critical
		}
	}

	// Feed items are only shown on the landing page, hence the empty slice.
	html, err := s.htmlGenerator.GenerateContactPage(false, "", "", nil, repoAnnouncement, []generator.FeedItemInfo{}, profile)
	if err != nil {
		http.Error(w, "Failed to generate contact page", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.Write([]byte(html))
}

// renderContactError re-renders the contact form with an error banner and
// the user's previous input (formData may be nil). This replaces four
// verbatim copies of the same render-and-write sequence.
func (s *Server) renderContactError(w http.ResponseWriter, message string, formData map[string]string) {
	// The generation error is deliberately ignored, matching the original
	// behavior: on failure an empty page is written rather than a 500.
	html, _ := s.htmlGenerator.GenerateContactPage(false, message, "", formData, nil, []generator.FeedItemInfo{}, nil)
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.Write([]byte(html))
}
// handleContactAPI accepts a browser-signed NIP-34 issue event (kind 1621)
// as JSON, verifies its signature, and publishes it to the contact relays
// plus any caller-supplied outbox relays. Responds with JSON.
func (s *Server) handleContactAPI(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodPost {
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}

	var req struct {
		Event            *gonostr.Event `json:"event"`
		AdditionalRelays []string       `json:"additionalRelays,omitempty"`
	}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		http.Error(w, fmt.Sprintf("Invalid JSON: %v", err), http.StatusBadRequest)
		return
	}
	if req.Event == nil {
		http.Error(w, "Event is required", http.StatusBadRequest)
		return
	}
	// NIP-34 issues are kind 1621.
	if req.Event.Kind != 1621 {
		http.Error(w, fmt.Sprintf("Invalid event kind: expected 1621, got %d", req.Event.Kind), http.StatusBadRequest)
		return
	}

	// The event is signed in the browser; verify before relaying it.
	valid, err := req.Event.CheckSignature()
	if err != nil {
		http.Error(w, fmt.Sprintf("Failed to check signature: %v", err), http.StatusBadRequest)
		return
	}
	if !valid {
		http.Error(w, "Invalid event signature", http.StatusBadRequest)
		return
	}

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// Deduplicate the contact relays and the user's outbox relays
	// (from their kind 10002 relay list).
	allRelays := make(map[string]bool)
	for _, relay := range s.nostrClient.GetContactRelays() {
		allRelays[relay] = true
	}
	for _, relay := range req.AdditionalRelays {
		if relay != "" {
			allRelays[relay] = true
		}
	}

	// Publish to every relay; success on any single relay counts.
	var lastErr error
	var published bool
	for relayURL := range allRelays {
		relay, err := s.nostrClient.ConnectToRelay(ctx, relayURL)
		if err != nil {
			logger.WithFields(map[string]interface{}{
				"relay": relayURL,
				"error": err,
			}).Warn("Failed to connect to contact relay")
			lastErr = err
			continue
		}
		err = relay.Publish(ctx, *req.Event)
		// Note: SimplePool manages connections, but we close here for explicit cleanup
		relay.Close()
		if err != nil {
			logger.WithFields(map[string]interface{}{
				"relay": relayURL,
				"error": err,
			}).Warn("Failed to publish to contact relay")
			lastErr = err
			continue
		}
		published = true
		logger.WithFields(map[string]interface{}{
			"relay":    relayURL,
			"event_id": req.Event.ID,
		}).Info("Published contact event to relay")
	}

	w.Header().Set("Content-Type", "application/json")
	if !published {
		logger.Errorf("Failed to publish contact event to any relay: %v", lastErr)
		w.WriteHeader(http.StatusInternalServerError)
		// Encode with encoding/json so arbitrary relay error text cannot
		// break the JSON syntax (the previous fmt.Fprintf %s did no escaping).
		msg := "Failed to publish to any relay"
		if lastErr != nil { // guards the empty-relay-set case (nil lastErr)
			msg += ": " + lastErr.Error()
		}
		json.NewEncoder(w).Encode(map[string]string{"error": msg})
		return
	}
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success":  true,
		"event_id": req.Event.ID,
	})
}
// handleStatic serves files from the local ./static directory under the
// /static/ URL prefix.
func (s *Server) handleStatic(w http.ResponseWriter, r *http.Request) {
	fileServer := http.FileServer(http.Dir("./static"))
	http.StripPrefix("/static/", fileServer).ServeHTTP(w, r)
}
// handleFavicon serves the site's SVG icon as the favicon.
func (s *Server) handleFavicon(w http.ResponseWriter, r *http.Request) {
	const iconPath = "./static/GitCitadel_Icon_Black.svg"
	http.ServeFile(w, r, iconPath)
}
// handleMediaCache serves cached media files from the media cache directory.
func (s *Server) handleMediaCache(w http.ResponseWriter, r *http.Request) {
	if s.mediaCache == nil {
		http.Error(w, "Media cache not available", http.StatusServiceUnavailable)
		return
	}
	// TrimPrefix cannot panic on short paths, unlike slicing by length.
	filename := strings.TrimPrefix(r.URL.Path, "/cache/media/")
	if filename == "" || filename == r.URL.Path {
		http.Error(w, "Invalid path", http.StatusBadRequest)
		return
	}
	// Defense in depth against path traversal: anchoring the name at "/"
	// before Clean resolves any "../" segments inside the name, so the
	// joined path can never escape the cache directory. (ServeMux also
	// cleans request paths, but we do not rely on that alone.)
	cachePath := filepath.Join(s.mediaCache.GetCacheDir(), filepath.Clean("/"+filename))
	http.ServeFile(w, r, cachePath)
}
// handleHealth reports readiness: the page cache must be populated and at
// least one relay must be reachable within the timeout.
func (s *Server) handleHealth(w http.ResponseWriter, r *http.Request) {
	// An empty cache means startup page generation has not finished yet.
	if s.cache.Size() == 0 {
		w.WriteHeader(http.StatusServiceUnavailable)
		w.Write([]byte("Not ready - cache empty"))
		return
	}

	// Bound the relay connectivity probe so health checks stay fast.
	checkCtx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()
	if err := s.nostrClient.HealthCheck(checkCtx, 5*time.Second); err != nil {
		logger.WithField("error", err).Warn("Health check: relay connectivity check failed")
		w.WriteHeader(http.StatusServiceUnavailable)
		w.Write([]byte("Not ready - relay connectivity check failed"))
		return
	}

	w.WriteHeader(http.StatusOK)
	w.Write([]byte("OK"))
}
// handleMetrics emits plain-text metrics: cache size and feed item count.
func (s *Server) handleMetrics(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "text/plain")
	var body strings.Builder
	fmt.Fprintf(&body, "cache_size %d\n", s.cache.Size())
	fmt.Fprintf(&body, "feed_items %d\n", len(s.feedCache.Get()))
	w.Write([]byte(body.String()))
}
// handleSitemap renders sitemap.xml from the set of cached pages.
func (s *Server) handleSitemap(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "application/xml")

	paths := s.cache.GetAllPaths()
	if len(paths) == 0 {
		// No pages cached yet: emit a valid but empty urlset.
		w.Write([]byte(`<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
</urlset>`))
		return
	}

	// Build one sitemap entry per cached page.
	sitemapURLs := make([]generator.SitemapURL, 0, len(paths))
	for _, p := range paths {
		page, ok := s.cache.Get(p)
		if !ok {
			// Page evicted between GetAllPaths and Get; skip it.
			continue
		}
		priority, changeFreq := sitemapMeta(p)
		sitemapURLs = append(sitemapURLs, generator.SitemapURL{
			Path:       p,
			LastMod:    page.LastUpdated,
			ChangeFreq: changeFreq,
			Priority:   priority,
		})
	}

	sitemapXML := generator.GenerateSitemap(sitemapURLs, s.siteURL)
	w.Write([]byte(sitemapXML))
}

// sitemapMeta maps a page path to its sitemap priority and change frequency:
// the landing page ranks highest, section indexes next, wiki articles after.
func sitemapMeta(path string) (float64, string) {
	switch {
	case path == "/":
		return 1.0, "hourly"
	case path == "/wiki" || path == "/blog" || path == "/articles" || path == "/ebooks":
		return 0.8, "daily"
	case strings.HasPrefix(path, "/wiki/"):
		return 0.7, "weekly"
	default:
		return 0.5, "daily"
	}
}
// handleRobots serves robots.txt, allowing all crawlers and advertising the
// sitemap location.
func (s *Server) handleRobots(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Content-Type", "text/plain")
	// The Sitemap directive must be an absolute URL per the sitemaps.org
	// robots.txt extension; the previous relative "/sitemap.xml" is ignored
	// by crawlers. s.siteURL is the same base used for sitemap generation.
	fmt.Fprintf(w, "User-agent: *\nAllow: /\nSitemap: %s/sitemap.xml\n", strings.TrimSuffix(s.siteURL, "/"))
}
// handle404 renders the styled 404 page, falling back to plain text if page
// generation fails.
func (s *Server) handle404(w http.ResponseWriter, _ *http.Request) {
	html, err := s.htmlGenerator.GenerateErrorPage(404, []generator.FeedItemInfo{})
	if err != nil {
		w.WriteHeader(http.StatusNotFound)
		w.Write([]byte("404 - Page Not Found"))
		return
	}
	// Headers must be set before WriteHeader: the previous code called
	// WriteHeader(404) first and then set Content-Type, which net/http
	// silently drops, so the HTML page was served without a content type.
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.WriteHeader(http.StatusNotFound)
	w.Write([]byte(html))
}
// servePage writes a cached page with caching headers, honoring conditional
// requests (If-None-Match) and pre-compressed gzip bodies.
func (s *Server) servePage(w http.ResponseWriter, r *http.Request, page *cache.CachedPage) {
	headers := w.Header()
	headers.Set("Content-Type", "text/html; charset=utf-8")
	headers.Set("ETag", page.ETag)
	headers.Set("Cache-Control", "public, max-age=3600")
	headers.Set("Last-Modified", page.LastUpdated.Format(http.TimeFormat))

	// Conditional GET: a matching ETag means the client's copy is current.
	if r.Header.Get("If-None-Match") == page.ETag {
		w.WriteHeader(http.StatusNotModified)
		return
	}

	// Prefer the pre-compressed body when the client accepts gzip.
	if len(page.Compressed) > 0 && strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
		headers.Set("Content-Encoding", "gzip")
		headers.Set("Vary", "Accept-Encoding")
		w.Write(page.Compressed)
		return
	}

	// Fall back to the uncompressed content.
	w.Write([]byte(page.Content))
}
// middleware wraps next, adding security headers to every response and
// logging each request at debug level.
func (s *Server) middleware(next http.Handler) http.Handler {
	// Static security headers applied to every response. All scripts and
	// styles are served locally, hence the 'self'-centric CSP.
	securityHeaders := map[string]string{
		"X-Content-Type-Options":  "nosniff",
		"X-Frame-Options":         "DENY",
		"X-XSS-Protection":        "1; mode=block",
		"Referrer-Policy":         "strict-origin-when-cross-origin",
		"Content-Security-Policy": "default-src 'self'; script-src 'self' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:;",
	}
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		for name, value := range securityHeaders {
			w.Header().Set(name, value)
		}
		start := time.Now()
		next.ServeHTTP(w, r)
		// Debug level keeps per-request noise out of production logs.
		logger.WithFields(map[string]interface{}{
			"method":   r.Method,
			"path":     r.URL.Path,
			"duration": time.Since(start),
		}).Debug("HTTP request")
	})
}