You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 
 

340 lines
11 KiB

package nostr
import (
"context"
"fmt"
"gitcitadel-online/internal/logger"
"github.com/nbd-wtf/go-nostr"
)
// logFilter logs the exact filter being used for debugging.
// scope is a short human-readable description of where the filter is used
// (e.g. "wiki index (kind 30818)"); it is emitted under the "context" field.
//
// NOTE: the parameter was previously named "context", which shadowed the
// imported context package inside this function; renamed to avoid confusion.
func logFilter(filter nostr.Filter, scope string) {
	logger.WithFields(map[string]interface{}{
		"context": scope,
		"kinds":   filter.Kinds,
		"authors": filter.Authors,
		"ids":     filter.IDs,
		"tags":    filter.Tags,
		"limit":   filter.Limit,
	}).Debug("Nostr filter")
}
// WikiService handles wiki-specific operations (index, wiki, blog, and
// longform article fetching) on top of the shared relay Client.
// All configured kind values come from application configuration via
// NewWikiService; the zero value is not usable.
type WikiService struct {
	client             *Client // Underlying Nostr client used for all relay fetches
	articleKinds       []int   // Allowed article kinds (from config); gates FetchIndexEvents
	wikiKind           int     // Primary wiki kind constant (first from wiki_kinds config)
	blogKind           int     // Primary blog kind constant (first from blog_kinds config)
	longformKind       int     // Longform article kind (from config)
	additionalFallback string  // Additional fallback relay URL (from config)
	indexKind          int     // Index event kind (from config)
}
// NewWikiService creates a new wiki service wired to the given client and the
// kind/relay configuration values. All arguments are stored as-is; no
// validation is performed here.
func NewWikiService(client *Client, articleKinds []int, wikiKind int, additionalFallback string, indexKind int, blogKind int, longformKind int) *WikiService {
	svc := WikiService{
		client:             client,
		articleKinds:       articleKinds,
		wikiKind:           wikiKind,
		blogKind:           blogKind,
		longformKind:       longformKind,
		additionalFallback: additionalFallback,
		indexKind:          indexKind,
	}
	return &svc
}
// FetchWikiIndex fetches a wiki index event identified by an naddr string,
// querying only the configured primary relay, and parses it into an
// IndexEvent. It returns a wrapped error if the naddr cannot be parsed, the
// primary relay is not configured, or the event cannot be fetched or parsed.
func (ws *WikiService) FetchWikiIndex(ctx context.Context, naddrStr string) (*IndexEvent, error) {
	addr, err := ParseNaddr(naddrStr)
	if err != nil {
		return nil, fmt.Errorf("failed to parse naddr: %w", err)
	}

	// Build the filter that selects exactly the referenced index event.
	indexFilter := addr.ToFilter()
	logFilter(indexFilter, fmt.Sprintf("wiki index (kind %d)", ws.indexKind))

	// The index is only ever fetched from the primary relay (theforest).
	relay := ws.client.GetPrimaryRelay()
	if relay == "" {
		return nil, fmt.Errorf("primary relay not configured")
	}

	evt, err := ws.client.FetchEventFromRelays(ctx, indexFilter, []string{relay})
	if err != nil {
		return nil, fmt.Errorf("failed to fetch index event: %w", err)
	}

	parsed, err := ParseIndexEvent(evt, ws.indexKind)
	if err != nil {
		return nil, fmt.Errorf("failed to parse index event: %w", err)
	}
	return parsed, nil
}
// FetchWikiEvents fetches all wiki events referenced in an index
// Uses ProcessEventsWithCache for the initial fetch, then filters by index items
func (ws *WikiService) FetchWikiEvents(ctx context.Context, index *IndexEvent) ([]*WikiEvent, error) {
// Build a map of expected items (kind:pubkey:dtag) for fast lookup
expectedItems := make(map[string]IndexItem)
for _, item := range index.Items {
if item.Kind == ws.wikiKind {
key := fmt.Sprintf("%d:%s:%s", item.Kind, item.Pubkey, item.DTag)
expectedItems[key] = item
}
}
if len(expectedItems) == 0 {
return []*WikiEvent{}, nil
}
// Use ProcessEventsWithCache to fetch events of this kind
// Use a high display limit (1000) to ensure we get all events referenced in the index
// This means we'll fetch 2000 events, which should be enough for most cases
displayLimit := 1000
primaryRelay := ws.client.GetPrimaryRelay()
if primaryRelay == "" {
return nil, fmt.Errorf("primary relay not configured")
}
logger.WithFields(map[string]interface{}{
"kind": ws.wikiKind,
"items": len(expectedItems),
"index_event_id": index.Event.ID,
}).Debug("Fetching wiki events using ProcessEventsWithCache with index")
// Use standard process with index event ID: fetch index, query only referenced events, merge cache, deduplicate, filter deletions, sort, limit, fetch profiles
result, err := ws.client.ProcessEventsWithCache(ctx, ws.wikiKind, displayLimit, make(map[string]*nostr.Event), primaryRelay, index.Event.ID, ws.indexKind)
if err != nil {
logger.WithField("error", err).Warn("Failed to fetch wiki events using ProcessEventsWithCache")
return nil, err
}
allEvents := result.Events
logger.WithFields(map[string]interface{}{
"fetched": len(allEvents),
"expected": len(expectedItems),
}).Debug("Fetched wiki events using ProcessEventsWithCache with index")
// Build event map by kind:pubkey:dtag for matching
eventMap := make(map[string]*nostr.Event)
for _, event := range allEvents {
// Extract d-tag from event
var dTag string
for _, tag := range event.Tags {
if len(tag) > 0 && tag[0] == "d" && len(tag) > 1 {
dTag = tag[1]
break
}
}
if dTag == "" {
continue // Skip events without d-tag
}
key := fmt.Sprintf("%d:%s:%s", event.Kind, event.PubKey, dTag)
// Keep the newest version if we have multiple
existing, exists := eventMap[key]
if !exists || event.CreatedAt > existing.CreatedAt {
eventMap[key] = event
}
}
// Convert matched events to wiki events, preserving order from index.Items
var wikiEvents []*WikiEvent
for _, item := range index.Items {
if item.Kind != ws.wikiKind {
continue
}
key := fmt.Sprintf("%d:%s:%s", item.Kind, item.Pubkey, item.DTag)
event, exists := eventMap[key]
if !exists {
continue
}
wiki, err := ParseWikiEvent(event, ws.wikiKind)
if err != nil {
logger.WithFields(map[string]interface{}{
"key": key,
}).Warnf("Error parsing wiki event: %v", err)
continue
}
wikiEvents = append(wikiEvents, wiki)
}
logger.WithFields(map[string]interface{}{
"matched": len(wikiEvents),
"expected": len(expectedItems),
}).Debug("Matched wiki events")
if len(wikiEvents) == 0 && len(index.Items) > 0 {
logger.WithField("items", len(index.Items)).Warn("No wiki events matched from fetched events")
}
return wikiEvents, nil
}
// GetBlogKind returns the blog kind configured in this service.
// (The Get prefix is non-idiomatic Go but is kept for caller compatibility.)
func (ws *WikiService) GetBlogKind() int {
	return ws.blogKind
}
// GetLongformKind returns the longform kind configured in this service.
// (The Get prefix is non-idiomatic Go but is kept for caller compatibility.)
func (ws *WikiService) GetLongformKind() int {
	return ws.longformKind
}
// FetchLongformArticles fetches the newest longform articles (kind 30023)
// from a specific relay using the standard ProcessEventsWithCache process:
// fetch 2x limit, merge cache, deduplicate, filter deletions, sort, limit,
// fetch profiles. Only the sorted events are returned.
func (ws *WikiService) FetchLongformArticles(ctx context.Context, relayURL string, longformKind int, limit int) ([]*nostr.Event, error) {
	emptyCache := make(map[string]*nostr.Event)
	res, err := ws.client.ProcessEventsWithCache(ctx, longformKind, limit, emptyCache, relayURL, "", 0)
	if err != nil {
		return nil, fmt.Errorf("failed to process longform articles: %w", err)
	}
	// Profiles are available in res.Profiles but are intentionally not
	// returned here; callers fetch profiles separately when needed.
	return res.Events, nil
}
// FetchIndexEvents fetches all events of a specific kind referenced in an
// index. Only kinds listed in the service's configured article kinds are
// supported. The fetch goes through ProcessEventsWithCache (restricted to
// the events the index references); results are then matched back to the
// index items by kind:pubkey:dtag, preserving the index order.
func (ws *WikiService) FetchIndexEvents(ctx context.Context, index *IndexEvent, targetKind int) ([]*nostr.Event, error) {
	// Reject kinds outside the configured allow-list.
	kindAllowed := false
	for _, k := range ws.articleKinds {
		if k == targetKind {
			kindAllowed = true
			break
		}
	}
	if !kindAllowed {
		return nil, fmt.Errorf("unsupported event kind: %d (only %v are supported)", targetKind, ws.articleKinds)
	}

	// Index items of the target kind, keyed by kind:pubkey:dtag.
	wanted := make(map[string]IndexItem)
	for _, item := range index.Items {
		if item.Kind == targetKind {
			wanted[fmt.Sprintf("%d:%s:%s", item.Kind, item.Pubkey, item.DTag)] = item
		}
	}
	if len(wanted) == 0 {
		return []*nostr.Event{}, nil
	}

	// High display limit (1000) so every event referenced by the index is
	// covered; ProcessEventsWithCache fetches 2x this internally.
	const displayLimit = 1000
	relay := ws.client.GetPrimaryRelay()
	if relay == "" {
		return nil, fmt.Errorf("primary relay not configured")
	}

	logger.WithFields(map[string]interface{}{
		"kind":           targetKind,
		"items":          len(wanted),
		"index_event_id": index.Event.ID,
	}).Debug("Fetching events using ProcessEventsWithCache with index")

	// Standard process with index event ID: fetch index, query only
	// referenced events, merge cache, deduplicate, filter deletions, sort,
	// limit, fetch profiles.
	result, err := ws.client.ProcessEventsWithCache(ctx, targetKind, displayLimit, make(map[string]*nostr.Event), relay, index.Event.ID, ws.indexKind)
	if err != nil {
		logger.WithField("error", err).Warn("Failed to fetch events using ProcessEventsWithCache")
		return nil, err
	}
	fetched := result.Events

	logger.WithFields(map[string]interface{}{
		"fetched":  len(fetched),
		"expected": len(wanted),
	}).Debug("Fetched events using ProcessEventsWithCache with index")

	// Keep only the newest version of each addressable event, keyed by
	// kind:pubkey:dtag. Events lacking a "d" tag are skipped.
	newest := make(map[string]*nostr.Event)
	for _, evt := range fetched {
		var dTag string
		for _, tag := range evt.Tags {
			if len(tag) > 1 && tag[0] == "d" {
				dTag = tag[1]
				break
			}
		}
		if dTag == "" {
			continue
		}
		key := fmt.Sprintf("%d:%s:%s", evt.Kind, evt.PubKey, dTag)
		if prev, ok := newest[key]; !ok || evt.CreatedAt > prev.CreatedAt {
			newest[key] = evt
		}
	}

	// Emit matches in the order the index lists them.
	matched := make([]*nostr.Event, 0, len(newest))
	for _, item := range index.Items {
		if item.Kind != targetKind {
			continue
		}
		if evt, ok := newest[fmt.Sprintf("%d:%s:%s", item.Kind, item.Pubkey, item.DTag)]; ok {
			matched = append(matched, evt)
		}
	}

	logger.WithFields(map[string]interface{}{
		"matched":  len(matched),
		"expected": len(wanted),
		"kind":     targetKind,
	}).Debug("Matched index events")
	if len(matched) == 0 && len(index.Items) > 0 {
		logger.WithFields(map[string]interface{}{
			"kind":  targetKind,
			"items": len(index.Items),
		}).Warn("No events matched from fetched events")
	}
	return matched, nil
}
// FetchWikiEventByDTag fetches a single wiki event by author pubkey and d
// tag, querying only the configured primary relay, and parses it into a
// WikiEvent.
func (ws *WikiService) FetchWikiEventByDTag(ctx context.Context, pubkey, dTag string) (*WikiEvent, error) {
	f := nostr.Filter{
		Kinds:   []int{ws.wikiKind},
		Authors: []string{pubkey},
		Tags:    map[string][]string{"d": {dTag}},
		Limit:   1,
	}
	logFilter(f, fmt.Sprintf("wiki by d-tag %s", dTag))

	// Fetch from theforest only (primary relay).
	relay := ws.client.GetPrimaryRelay()
	if relay == "" {
		return nil, fmt.Errorf("primary relay not configured")
	}

	evt, err := ws.client.FetchEventFromRelays(ctx, f, []string{relay})
	if err != nil {
		return nil, fmt.Errorf("failed to fetch wiki event: %w", err)
	}
	return ParseWikiEvent(evt, ws.wikiKind)
}