package cache

import (
	"context"
	"html/template"
	"time"

	"gitcitadel-online/internal/generator"
	"gitcitadel-online/internal/logger"
	"gitcitadel-online/internal/nostr"
)

// Rewarmer handles cache rewarming
type Rewarmer struct {
	cache         *Cache
	feedCache     *FeedCache
	wikiService   *nostr.WikiService
	feedService   *nostr.FeedService
	ebooksService *nostr.EBooksService
	htmlGenerator *generator.HTMLGenerator
	wikiIndex     string
	blogIndex     string
	feedRelay     string
	maxFeedEvents int
	interval      time.Duration
	feedInterval  time.Duration
}

// NewRewarmer creates a new cache rewarming service
func NewRewarmer(
	cache *Cache,
	feedCache *FeedCache,
	wikiService *nostr.WikiService,
	feedService *nostr.FeedService,
	ebooksService *nostr.EBooksService,
	htmlGenerator *generator.HTMLGenerator,
	wikiIndex, blogIndex, feedRelay string,
	maxFeedEvents int,
	interval, feedInterval time.Duration,
) *Rewarmer {
	return &Rewarmer{
		cache:         cache,
		feedCache:     feedCache,
		wikiService:   wikiService,
		feedService:   feedService,
		ebooksService: ebooksService,
		htmlGenerator: htmlGenerator,
		wikiIndex:     wikiIndex,
		blogIndex:     blogIndex,
		feedRelay:     feedRelay,
		maxFeedEvents: maxFeedEvents,
		interval:      interval,
		feedInterval:  feedInterval,
	}
}

// Start starts the rewarming goroutines
func (r *Rewarmer) Start(ctx context.Context) {
	// Initial population
	go r.rewarmPages(ctx)
	go r.rewarmFeed(ctx)

	// Periodic rewarming
	go r.periodicRewarmPages(ctx)
	go r.periodicRewarmFeed(ctx)
}
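
// Example wiring (illustrative sketch only - the constructors for Cache, FeedCache,
// the nostr services, and the HTML generator live elsewhere in this codebase, and the
// variable and config field names below are hypothetical):
//
//	rewarmer := cache.NewRewarmer(
//		pageCache, feedCache,
//		wikiService, feedService, ebooksService,
//		htmlGenerator,
//		cfg.WikiIndex, cfg.BlogIndex, cfg.FeedRelay,
//		cfg.MaxFeedEvents,
//		15*time.Minute, 5*time.Minute,
//	)
//	rewarmer.Start(ctx)
//
// Start returns immediately; the initial rewarm runs once in the background and the
// periodic goroutines keep running until ctx is cancelled.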

// rewarmPages rewarms the page cache
func (r *Rewarmer) rewarmPages(ctx context.Context) {
	logger.Info("Starting page cache rewarming...")

	// Initialize wikiPages as empty - will be populated if wiki fetch succeeds
	wikiPages := make([]generator.WikiPageInfo, 0)

	// Fetch wiki index (non-blocking - landing page can still be generated)
	// If theforest fails, leave pages as-is (don't remove existing events)
	wikiIndex, err := r.wikiService.FetchWikiIndex(ctx, r.wikiIndex)
	if err != nil {
		logger.Warnf("Error fetching wiki index from theforest: %v - keeping existing pages", err)
		// Don't update cache - leave existing pages as-is
		// Continue to generate landing page even if wiki fetch fails
	} else {
		// Fetch wiki events
		// If theforest fails, leave pages as-is (don't remove existing events)
		wikiEvents, err := r.wikiService.FetchWikiEvents(ctx, wikiIndex)
		if err != nil {
			logger.Warnf("Error fetching wiki events from theforest: %v - keeping existing pages", err)
			// Don't update cache - leave existing pages as-is
		} else {
			// Build wiki page info for navigation
			wikiPages = make([]generator.WikiPageInfo, 0, len(wikiEvents))
			for _, event := range wikiEvents {
				wikiPages = append(wikiPages, generator.WikiPageInfo{
					DTag:  event.DTag,
					Title: event.Title,
				})
			}

			// Generate and cache wiki index page
			wikiIndexHTML, err := r.htmlGenerator.GenerateWikiIndexPage(wikiIndex, wikiPages, []generator.FeedItemInfo{})
			if err != nil {
				logger.Errorf("Error generating wiki index page: %v", err)
			} else {
				if err := r.cache.Set("/wiki", wikiIndexHTML); err != nil {
					logger.Errorf("Error caching wiki index page: %v", err)
				} else {
					logger.WithField("pages", len(wikiPages)).Info("Wiki index page cached successfully")
				}
			}

			// Generate and cache wiki pages
			for _, event := range wikiEvents {
				html, err := r.htmlGenerator.GenerateWikiPage(event, wikiPages, []generator.FeedItemInfo{})
				if err != nil {
					logger.WithField("dtag", event.DTag).Errorf("Error generating wiki page: %v", err)
					continue
				}
				if err := r.cache.Set("/wiki/"+event.DTag, html); err != nil {
					logger.WithField("dtag", event.DTag).Errorf("Error caching wiki page: %v", err)
				}
			}
		}
	}

	// Fetch blog index if configured (needed for landing page)
	// If theforest fails, leave pages as-is (don't remove existing events)
	var newestBlogItem *generator.BlogItemInfo
	if r.blogIndex != "" {
		blogIndex, err := r.wikiService.FetchWikiIndex(ctx, r.blogIndex)
		if err != nil {
			logger.Warnf("Error fetching blog index from theforest: %v - keeping existing pages", err)
			// Don't update cache - leave existing pages as-is
		} else {
			// Fetch blog events using the generic FetchIndexEvents function
			// If theforest fails, leave pages as-is (don't remove existing events)
			blogKind := r.wikiService.GetBlogKind()
			blogEventList, err := r.wikiService.FetchIndexEvents(ctx, blogIndex, blogKind)
			if err != nil {
				logger.Warnf("Error fetching blog events from theforest: %v - keeping existing pages", err)
				// Don't update cache - leave existing pages as-is
			} else {
				logger.WithFields(map[string]interface{}{
					"events": len(blogEventList),
					"kind":   blogKind,
				}).Debug("Fetched blog events")
				blogItems := make([]generator.BlogItemInfo, 0, len(blogEventList))
				for _, event := range blogEventList {
					// Parse the blog event
					blog, err := nostr.ParseBlogEvent(event, blogKind)
					if err != nil {
						logger.WithField("event_id", event.ID).Warnf("Error parsing blog event: %v", err)
						continue
					}

					html, err := r.htmlGenerator.ProcessAsciiDoc(blog.Content)
					if err != nil {
						logger.WithField("dtag", blog.DTag).Warnf("Error processing blog content: %v", err)
						html = blog.Content // Fallback to raw content
					}
					blogItems = append(blogItems, generator.BlogItemInfo{
						DTag:      blog.DTag,
						Title:     blog.Title,
						Summary:   blog.Summary,
						Content:   template.HTML(html),
						Author:    event.PubKey,
						Image:     blog.Image,
						CreatedAt: int64(event.CreatedAt),
					})
				}
				logger.WithField("items", len(blogItems)).Debug("Generated blog items")

				// Get newest blog item for landing page
				if len(blogItems) > 0 {
					newestBlogItem = &blogItems[0]
				}

				// Generate blog page without feed items (feed only on landing page)
				blogHTML, err := r.htmlGenerator.GenerateBlogPage(blogIndex, blogItems, []generator.FeedItemInfo{})
				if err != nil {
					logger.Errorf("Error generating blog page: %v", err)
				} else {
					if err := r.cache.Set("/blog", blogHTML); err != nil {
						logger.Errorf("Error caching blog page: %v", err)
					} else {
						logger.WithField("items", len(blogItems)).Info("Blog page cached successfully")
					}
				}
			}
		}
	}

	// Fetch and cache articles page (longform articles) - needed for landing page
	// If theforest fails, leave pages as-is (don't remove existing events)
	var allArticleItems []generator.ArticleItemInfo
	var newestArticleItem *generator.ArticleItemInfo
	longformKind := r.wikiService.GetLongformKind()
	if longformKind > 0 {
		articleEvents, err := r.wikiService.FetchLongformArticles(ctx, "wss://theforest.nostr1.com", longformKind, 50)
		if err != nil {
			logger.Warnf("Error fetching longform articles from theforest: %v - keeping existing pages", err)
			// Don't update cache - leave existing pages as-is
		} else {
			articleItems := make([]generator.ArticleItemInfo, 0, len(articleEvents))
			for _, event := range articleEvents {
				// Parse the longform article
				article, err := nostr.ParseLongformEvent(event, longformKind)
				if err != nil {
					logger.WithField("event_id", event.ID).Warnf("Error parsing longform article: %v", err)
					continue
				}

				// Process content using gc-parser (handles Markdown, AsciiDoc, etc.)
				result, err := r.htmlGenerator.ProcessAsciiDoc(article.Content)
				var html string
				if err != nil {
					logger.WithField("dtag", article.DTag).Warnf("Error processing content: %v", err)
					html = article.Content // Fallback to raw content
				} else {
					html = result
				}
				articleItems = append(articleItems, generator.ArticleItemInfo{
					DTag:      article.DTag,
					Title:     article.Title,
					Summary:   article.Summary,
					Content:   template.HTML(html),
					Author:    event.PubKey,
					Image:     article.Image,
					CreatedAt: int64(event.CreatedAt),
				})
			}
			logger.WithField("items", len(articleItems)).Debug("Generated article items")

			// Store all article items for landing page
			allArticleItems = articleItems

			// Get newest article item for landing page
			if len(articleItems) > 0 {
				newestArticleItem = &articleItems[0]
			}

			// Generate articles page
			articlesHTML, err := r.htmlGenerator.GenerateArticlesPage(articleItems, []generator.FeedItemInfo{})
			if err != nil {
				logger.Errorf("Error generating articles page: %v", err)
			} else {
				if err := r.cache.Set("/articles", articlesHTML); err != nil {
					logger.Errorf("Error caching articles page: %v", err)
				} else {
					logger.WithField("items", len(articleItems)).Info("Articles page cached successfully")
				}
			}
		}
	}

	// Fetch and cache e-books page (needed for landing page)
	// If theforest fails, leave pages as-is (don't remove existing events)
	var allEBooks []generator.EBookInfo
	if r.ebooksService != nil {
		ebooks, err := r.ebooksService.FetchTopLevelIndexEvents(ctx)
		if err != nil {
			logger.Warnf("Error fetching e-books from theforest: %v - keeping existing pages", err)
			// Don't update cache - leave existing pages as-is
		} else {
			// Convert to generator.EBookInfo
			generatorEBooks := make([]generator.EBookInfo, 0, len(ebooks))
			for _, ebook := range ebooks {
				generatorEBooks = append(generatorEBooks, generator.EBookInfo{
					EventID:   ebook.EventID,
					Title:     ebook.Title,
					DTag:      ebook.DTag,
					Author:    ebook.Author,
					Summary:   ebook.Summary,
					Image:     ebook.Image,
					Type:      ebook.Type,
					CreatedAt: ebook.CreatedAt,
					Naddr:     ebook.Naddr,
				})
			}

			// Store all e-books for landing page
			allEBooks = generatorEBooks

			ebooksHTML, err := r.htmlGenerator.GenerateEBooksPage(generatorEBooks, []generator.FeedItemInfo{})
			if err != nil {
				logger.Errorf("Error generating e-books page: %v", err)
			} else {
				if err := r.cache.Set("/ebooks", ebooksHTML); err != nil {
					logger.Errorf("Error caching e-books page: %v", err)
				} else {
					logger.WithField("ebooks", len(generatorEBooks)).Info("E-books page cached successfully")
				}
			}
		}
	}

	// Always generate landing page AFTER blog, articles, and e-books are fetched and cached
	// Now we have all the data needed for the landing page
	landingHTML, err := r.htmlGenerator.GenerateLandingPage(wikiPages, newestBlogItem, newestArticleItem, allArticleItems, allEBooks)
	if err != nil {
		logger.Errorf("Error generating landing page: %v", err)
	} else {
		if err := r.cache.Set("/", landingHTML); err != nil {
			logger.Errorf("Error caching landing page: %v", err)
		} else {
			logger.WithField("pages", len(wikiPages)).Info("Landing page cached successfully")
		}
	}

	// Generate and cache Feed page (using feed items from cache)
	feedItems := r.convertFeedItemsToInfo(r.feedCache.Get())
	feedHTML, err := r.htmlGenerator.GenerateFeedPage(feedItems)
	if err != nil {
		logger.Errorf("Error generating feed page: %v", err)
	} else {
		if err := r.cache.Set("/feed", feedHTML); err != nil {
			logger.Errorf("Error caching feed page: %v", err)
		} else {
			logger.WithField("items", len(feedItems)).Info("Feed page cached successfully")
		}
	}

	logger.Info("Page cache rewarming completed")
}

// rewarmFeed rewarms the feed cache
func (r *Rewarmer) rewarmFeed(ctx context.Context) {
	logger.WithFields(map[string]interface{}{
		"relay":      r.feedRelay,
		"max_events": r.maxFeedEvents,
	}).Info("Starting feed cache rewarming")

	nostrItems, err := r.feedService.FetchFeedItems(ctx, r.feedRelay, r.maxFeedEvents)
	if err != nil {
		logger.WithField("relay", r.feedRelay).Warnf("Error fetching feed: %v", err)
		// Don't clear the cache on error - keep old items
		return
	}

	if len(nostrItems) == 0 {
		logger.WithField("relay", r.feedRelay).Warn("No feed items fetched")
		// Don't clear the cache - keep old items
		return
	}

	// Convert nostr.FeedItem to cache.FeedItem
	items := make([]FeedItem, 0, len(nostrItems))
	for _, item := range nostrItems {
		items = append(items, FeedItem{
			EventID: item.EventID,
			Author:  item.Author,
			Content: item.Content,
			Time:    item.Time,
			Link:    item.Link,
			Title:   item.Title,
			Summary: item.Summary,
			Image:   item.Image,
		})
	}

	r.feedCache.Set(items)
	logger.WithFields(map[string]interface{}{
		"items": len(items),
		"relay": r.feedRelay,
	}).Info("Feed cache rewarmed successfully")
}

// periodicRewarmPages periodically rewarms pages
func (r *Rewarmer) periodicRewarmPages(ctx context.Context) {
	ticker := time.NewTicker(r.interval)
	defer ticker.Stop()

	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			r.rewarmPages(ctx)
		}
	}
}

// convertFeedItemsToInfo converts cache.FeedItem to generator.FeedItemInfo
func (r *Rewarmer) convertFeedItemsToInfo(items []FeedItem) []generator.FeedItemInfo {
	feedItems := make([]generator.FeedItemInfo, 0, len(items))
	for _, item := range items {
		feedItems = append(feedItems, generator.FeedItemInfo{
			EventID: item.EventID,
			Author:  item.Author,
			Content: item.Content,
			Time:    item.Time.Format("2006-01-02 15:04:05"),
			TimeISO: item.Time.Format(time.RFC3339),
			Link:    item.Link,
		})
	}
	return feedItems
}

// periodicRewarmFeed periodically rewarms feed
func (r *Rewarmer) periodicRewarmFeed(ctx context.Context) {
	ticker := time.NewTicker(r.feedInterval)
	defer ticker.Stop()

	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			r.rewarmFeed(ctx)
		}
	}
}