16 changed files with 1240 additions and 436 deletions
@@ -0,0 +1,255 @@
import { parseDocumentHeader, parseHeaderLine } from "./asciidocParser";

/**
 * Validation result for AsciiDoc documents
 */
export interface AsciiDocValidationResult {
  valid: boolean;
  errors: string[];
  warnings: string[];
}

/**
 * Header information extracted from document
 */
interface HeaderInfo {
  lineNumber: number;
  level: number;
  title: string;
  originalLine: string;
}

/**
 * Validate AsciiDoc document structure
 *
 * Checks:
 * - Document header (single =) exists and has title
 * - At least one additional header exists
 * - Headers have proper text (not empty)
 * - Leaf headers (last in their branch) have content beneath them
 * - Interim headers (with child headers) don't need content
 * - Headers form intact branches (no skipped levels - this would create orphaned events)
 *
 * @param content - The AsciiDoc content to validate
 * @returns Validation result with errors and warnings
 */
export function validateAsciiDocDocument(content: string): AsciiDocValidationResult {
  const errors: string[] = [];
  const warnings: string[] = [];

  // Early return for empty document
  if (!content || content.trim().length === 0) {
    errors.push("Document is empty");
    return { valid: false, errors, warnings };
  }

  // Validate document header
  const documentHeader = validateDocumentHeader(content, errors);
  if (!documentHeader) {
    return { valid: false, errors, warnings };
  }

  // Parse section headers
  const lines = content.split("\n");
  const headerLines = parseSectionHeaders(lines);

  // Validate section headers exist
  if (!validateSectionHeadersExist(headerLines, errors)) {
    return { valid: false, errors, warnings };
  }

  // Validate header text
  validateHeaderText(headerLines, errors);

  // Validate header hierarchy (no skipped levels)
  validateHeaderHierarchy(headerLines, errors);

  // Validate leaf headers have content
  validateLeafHeaderContent(headerLines, lines, errors);

  return {
    valid: errors.length === 0,
    errors,
    warnings,
  };
}
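
// Usage sketch (illustrative, not part of the diff): a minimal document that
// should pass the checks above, assuming parseHeaderLine() reports "==" sections
// as level 1, i.e. one step below the "=" document header, and that attribute
// lines (":author: ...") don't count as content.
//
//   const doc = [
//     "= My Book",
//     "",
//     "== Chapter One",
//     "Some prose, so the leaf header has content beneath it.",
//   ].join("\n");
//
//   const result = validateAsciiDocDocument(doc);
//   // Expected: result.valid === true, result.errors.length === 0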

/**
 * Validate document header exists and has a title
 */
function validateDocumentHeader(
  content: string,
  errors: string[]
): { title: string; remaining: string } | null {
  const documentHeader = parseDocumentHeader(content);
  if (!documentHeader) {
    errors.push("Document must start with a document header (single = followed by title)");
    return null;
  }

  if (!documentHeader.title || documentHeader.title.trim().length === 0) {
    errors.push("Document header must have a title");
    return null;
  }

  return documentHeader;
}

/**
 * Parse all section headers from document lines (excluding document header)
 */
function parseSectionHeaders(lines: string[]): HeaderInfo[] {
  const headerLines: HeaderInfo[] = [];

  // Start from line 2 (index 1) to skip document header
  for (let i = 1; i < lines.length; i++) {
    const line = lines[i];
    const headerInfo = parseHeaderLine(line);
    if (headerInfo) {
      headerLines.push({
        lineNumber: i + 1, // 1-indexed for user display
        level: headerInfo.level,
        title: headerInfo.title,
        originalLine: line.trim(),
      });
    }
  }

  return headerLines;
}

/**
 * Validate that at least one section header exists
 */
function validateSectionHeadersExist(headerLines: HeaderInfo[], errors: string[]): boolean {
  if (headerLines.length === 0) {
    errors.push("Document must have at least one section header (==, ===, etc.) after the document title");
    return false;
  }
  return true;
}

/**
 * Validate that all headers have text
 */
function validateHeaderText(headerLines: HeaderInfo[], errors: string[]): void {
  for (const header of headerLines) {
    if (!header.title || header.title.trim().length === 0) {
      errors.push(`Header on line ${header.lineNumber} has no title text: "${header.originalLine}"`);
    }
  }
}

/**
 * Validate header hierarchy - no skipped levels
 */
function validateHeaderHierarchy(headerLines: HeaderInfo[], errors: string[]): void {
  let previousLevel = 0;

  for (const header of headerLines) {
    // Only check for skipped levels when going deeper (not when going back up)
    if (header.level > previousLevel + 1) {
      errors.push(
        `Header on line ${header.lineNumber} ("${header.title}") skips a level (from level ${previousLevel} to ${header.level}). ` +
          `This would create orphaned chapter-events. Headers must form intact branches.`
      );
    }
    previousLevel = header.level;
  }
}

/**
 * Identify leaf headers (headers that are last in their branch).
 * A header is a leaf if there are no child headers (deeper level) after it
 * before the next header at the same or higher level.
 */
function identifyLeafHeaders(headerLines: HeaderInfo[]): Set<number> {
  const leafHeaders = new Set<number>();

  for (let i = 0; i < headerLines.length; i++) {
    const currentHeader = headerLines[i];
    const hasChildren = hasChildHeaders(headerLines, i);

    if (!hasChildren) {
      leafHeaders.add(currentHeader.lineNumber);
    }
  }

  return leafHeaders;
}

/**
 * Check if a header has child headers after it
 */
function hasChildHeaders(headerLines: HeaderInfo[], currentIndex: number): boolean {
  const currentHeader = headerLines[currentIndex];

  // Look ahead to find the next header at same or higher level
  for (let j = currentIndex + 1; j < headerLines.length; j++) {
    const nextHeader = headerLines[j];

    if (nextHeader.level <= currentHeader.level) {
      // Found a header at same or higher level - end of this branch
      // If we haven't found any children, this is a leaf
      return false;
    }

    if (nextHeader.level > currentHeader.level) {
      // Found a child header - this header is not a leaf
      return true;
    }
  }

  // No more headers - this is a leaf
  return false;
}

/**
 * Validate that leaf headers have content beneath them
 */
function validateLeafHeaderContent(
  headerLines: HeaderInfo[],
  lines: string[],
  errors: string[]
): void {
  const leafHeaders = identifyLeafHeaders(headerLines);

  for (const header of headerLines) {
    if (leafHeaders.has(header.lineNumber)) {
      if (!hasContentAfterHeader(header, lines)) {
        errors.push(
          `Leaf header on line ${header.lineNumber} ("${header.title}") must have content beneath it`
        );
      }
    }
  }
}

/**
 * Check if there is content after a header (before the next header at same or higher level)
 */
function hasContentAfterHeader(header: HeaderInfo, lines: string[]): boolean {
  const headerIndex = header.lineNumber - 1; // Convert to 0-indexed

  // Look for content between this header and the next header at same or higher level
  for (let i = headerIndex + 1; i < lines.length; i++) {
    const headerInfo = parseHeaderLine(lines[i]);

    if (headerInfo) {
      // Found a header - if it's at same or higher level, we've reached the end of this branch
      if (headerInfo.level <= header.level) {
        break;
      }
      // If it's deeper, continue (shouldn't happen for a leaf, but handle gracefully)
      continue;
    }

    // Check for non-empty content (not attribute lines)
    const line = lines[i].trim();
    if (line.length > 0 && !line.startsWith(":")) {
      return true;
    }
  }

  return false;
}
@@ -0,0 +1,294 @@
import { TFile, TFolder, App, Notice } from "obsidian";
import { EventKind, EventMetadata, ScriptoriumSettings } from "../types";
import { readMetadata, writeMetadata, createDefaultMetadata, validateMetadata, mergeWithHeaderTitle } from "../metadataManager";
import { buildEvents } from "../eventManager";
import { saveEvents, loadEvents, eventsFileExists } from "../eventStorage";
import { publishEventsWithRetry } from "../nostr/relayClient";
import { getWriteRelays } from "../relayManager";
import { parseAsciiDocStructure, isAsciiDocDocument } from "../asciidocParser";
import { validateAsciiDocDocument } from "../asciidocValidator";
import { verifyEventSecurity } from "../utils/security";
import { showErrorNotice } from "../utils/errorHandling";
import { log, logError } from "../utils/console";
import { determineEventKind, getFolderNameForKind } from "../utils/eventKind";
import { StructurePreviewModal } from "../ui/structurePreviewModal";
import { MetadataReminderModal } from "../ui/metadataReminderModal";
import { MetadataModal } from "../ui/metadataModal";
import { isAsciiDocFile } from "../utils/fileExtensions";

/**
 * Get the current active file
 */
export async function getCurrentFile(app: App): Promise<TFile | null> {
  const activeFile = app.workspace.getActiveFile();
  if (!activeFile) {
    new Notice("No active file");
    return null;
  }
  return activeFile;
}

/**
 * Ensure the Nostr notes folder structure exists
 */
export async function ensureNostrNotesFolder(
  app: App,
  kind: EventKind
): Promise<string> {
  const baseFolder = "Nostr notes";
  const kindFolder = getFolderNameForKind(kind);
  const fullPath = `${baseFolder}/${kindFolder}`;

  // Check if base folder exists
  const baseFolderObj = app.vault.getAbstractFileByPath(baseFolder);
  if (!baseFolderObj || !(baseFolderObj instanceof TFolder)) {
    await app.vault.createFolder(baseFolder);
  }

  // Check if kind folder exists
  const kindFolderObj = app.vault.getAbstractFileByPath(fullPath);
  if (!kindFolderObj || !(kindFolderObj instanceof TFolder)) {
    await app.vault.createFolder(fullPath);
  }

  return fullPath;
}
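
// Usage sketch (illustrative): for kind 30041 this resolves to
// "Nostr notes/kind-30041-chapters" via getFolderNameForKind(), creating the
// base and kind folders only when they don't already exist.
//
//   const folderPath = await ensureNostrNotesFolder(app, 30041);
//   // folderPath === "Nostr notes/kind-30041-chapters"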

/**
 * Handle creating Nostr events from current file
 */
export async function handleCreateEvents(
  app: App,
  file: TFile,
  settings: ScriptoriumSettings
): Promise<void> {
  if (!settings.privateKey) {
    new Notice("Please set your private key in settings");
    return;
  }

  try {
    const content = await app.vault.read(file);
    let metadata = await readMetadata(file, app);

    // Determine event kind from file extension or metadata
    const eventKind = determineEventKind(
      file,
      content,
      settings.defaultEventKind,
      metadata?.kind
    );

    // Ensure folder structure exists before creating events
    await ensureNostrNotesFolder(app, eventKind);

    // Create default metadata if none exists and write it with placeholders
    if (!metadata) {
      metadata = createDefaultMetadata(eventKind);
      await writeMetadata(file, metadata, app);
      // Re-read to get the formatted version with placeholders
      metadata = await readMetadata(file, app) || metadata;
    }

    // Merge with header title for 30040
    if (eventKind === 30040 && isAsciiDocDocument(content)) {
      const headerTitle = content.split("\n")[0]?.replace(/^=+\s*/, "").trim() || "";
      metadata = mergeWithHeaderTitle(metadata, headerTitle);
    }

    // Show reminder modal before proceeding
    new MetadataReminderModal(app, eventKind, async () => {
      // Re-read metadata after user confirms (they may have updated it)
      const updatedContent = await app.vault.read(file);
      let updatedMetadata: EventMetadata = await readMetadata(file, app) || metadata || createDefaultMetadata(eventKind);

      // Ensure we have valid metadata
      if (!updatedMetadata) {
        updatedMetadata = createDefaultMetadata(eventKind);
      }

      // Merge with header title for 30040
      if (eventKind === 30040 && isAsciiDocDocument(updatedContent)) {
        const headerTitle = updatedContent.split("\n")[0]?.replace(/^=+\s*/, "").trim() || "";
        updatedMetadata = mergeWithHeaderTitle(updatedMetadata, headerTitle);
      }

      // Validate metadata
      const validation = validateMetadata(updatedMetadata, eventKind);
      if (!validation.valid) {
        new Notice(`Metadata validation failed: ${validation.errors.join(", ")}`);
        return;
      }

      // Validate AsciiDoc structure if this is a structured AsciiDoc document
      if (isAsciiDocFile(file) && eventKind === 30040 && isAsciiDocDocument(updatedContent)) {
        const asciiDocValidation = validateAsciiDocDocument(updatedContent);
        if (!asciiDocValidation.valid) {
          const errorMsg = `AsciiDoc validation failed:\n${asciiDocValidation.errors.join("\n")}`;
          if (asciiDocValidation.warnings.length > 0) {
            new Notice(`${errorMsg}\n\nWarnings:\n${asciiDocValidation.warnings.join("\n")}`);
          } else {
            new Notice(errorMsg);
          }
          return;
        }
        if (asciiDocValidation.warnings.length > 0) {
          log(`AsciiDoc validation warnings: ${asciiDocValidation.warnings.join("; ")}`);
        }
      }

      // Build events
      if (!settings.privateKey) {
        new Notice("Please set your private key in settings");
        return;
      }
      const result = await buildEvents(file, updatedContent, updatedMetadata, settings.privateKey, app);

      if (result.errors.length > 0) {
        new Notice(`Errors: ${result.errors.join(", ")}`);
        return;
      }

      // Security check: verify events don't contain private keys
      for (const event of result.events) {
        if (!verifyEventSecurity(event)) {
          new Notice("Security error: Event contains private key. Aborting.");
          logError("Event security check failed - event may contain private key");
          return;
        }
      }

      // Show preview for structured documents
      if (result.structure.length > 0) {
        new StructurePreviewModal(app, result.structure, async () => {
          await saveEvents(file, result.events, app);
          new Notice(`Created ${result.events.length} event(s) and saved to ${file.basename}_events.jsonl`);
        }).open();
      } else {
        await saveEvents(file, result.events, app);
        new Notice(`Created ${result.events.length} event(s) and saved to ${file.basename}_events.jsonl`);
      }
    }).open();
  } catch (error: any) {
    showErrorNotice("Error creating events", error);
    logError("Error creating events", error);
  }
}
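
// Wiring sketch (assumption - the plugin's main entry point is not part of this
// diff excerpt): handlers like handleCreateEvents are typically registered from
// the plugin's onload() via Obsidian's addCommand API. The command id and name
// below are placeholders.
//
//   this.addCommand({
//     id: "scriptorium-create-events",
//     name: "Create Nostr events from current file",
//     callback: async () => {
//       const file = await getCurrentFile(this.app);
//       if (file) {
//         await handleCreateEvents(this.app, file, this.settings);
//       }
//     },
//   });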

/**
 * Handle previewing document structure
 */
export async function handlePreviewStructure(
  app: App,
  file: TFile
): Promise<void> {
  try {
    const content = await app.vault.read(file);
    if (!isAsciiDocDocument(content)) {
      new Notice("This file is not an AsciiDoc document with structure");
      return;
    }

    let metadata = await readMetadata(file, app);
    if (!metadata || metadata.kind !== 30040) {
      metadata = createDefaultMetadata(30040);
    }

    const headerTitle = content.split("\n")[0]?.replace(/^=+\s*/, "").trim() || "";
    metadata = mergeWithHeaderTitle(metadata, headerTitle);

    const structure = parseAsciiDocStructure(content, metadata as any);
    new StructurePreviewModal(app, structure, () => {}).open();
  } catch (error: any) {
    showErrorNotice("Error previewing structure", error);
    logError("Error previewing structure", error);
  }
}

/**
 * Handle publishing events to relays
 */
export async function handlePublishEvents(
  app: App,
  file: TFile,
  settings: ScriptoriumSettings
): Promise<void> {
  if (!settings.privateKey) {
    new Notice("Please set your private key in settings");
    return;
  }

  const exists = await eventsFileExists(file, app);
  if (!exists) {
    new Notice("No events file found. Please create events first.");
    return;
  }

  try {
    const events = await loadEvents(file, app);
    if (events.length === 0) {
      new Notice("No events to publish");
      return;
    }

    const writeRelays = getWriteRelays(settings.relayList);
    if (writeRelays.length === 0) {
      new Notice("No write relays configured. Please fetch relay list in settings.");
      return;
    }

    // Relays are already normalized and deduplicated by getWriteRelays
    new Notice(`Publishing ${events.length} event(s) to ${writeRelays.length} relay(s)...`);

    const results = await publishEventsWithRetry(writeRelays, events, settings.privateKey);

    // Count successes
    let successCount = 0;
    let failureCount = 0;
    results.forEach((relayResults) => {
      relayResults.forEach((result) => {
        if (result.success) {
          successCount++;
        } else {
          failureCount++;
        }
      });
    });

    if (failureCount === 0) {
      new Notice(`Successfully published all ${successCount} event(s)`);
    } else {
      new Notice(`Published ${successCount} event(s), ${failureCount} failed`);
    }
  } catch (error: any) {
    showErrorNotice("Error publishing events", error);
    logError("Error publishing events", error);
  }
}

/**
 * Handle editing metadata
 */
export async function handleEditMetadata(
  app: App,
  file: TFile,
  defaultEventKind: EventKind
): Promise<void> {
  try {
    let metadata = await readMetadata(file, app);
    if (!metadata) {
      // Determine kind from file extension
      const content = await app.vault.read(file);
      const eventKind = determineEventKind(file, content, defaultEventKind);
      metadata = createDefaultMetadata(eventKind);
    }

    new MetadataModal(app, metadata, async (updatedMetadata) => {
      await writeMetadata(file, updatedMetadata, app);
      new Notice("Metadata saved");
    }).open();
  } catch (error: any) {
    showErrorNotice("Error editing metadata", error);
    logError("Error editing metadata", error);
  }
}
@@ -0,0 +1,319 @@
import { Kind30040Metadata, Kind30041Metadata, EventMetadata } from "../types";

/**
 * NKBIP-08 tag names
 */
export const NKBIP08_TAGS = {
  COLLECTION: "C",
  TITLE: "T",
  CHAPTER: "c",
  SECTION: "s",
  VERSION: "v",
} as const;

/**
 * Normalize tag values according to NKBIP-08 (NIP-54 rules):
 * - Remove quotes (single and double)
 * - Convert any non-letter non-number character to a hyphen
 * - Convert all letters to lowercase
 * - Numbers are preserved (not converted to hyphens)
 * - Collapse multiple hyphens to single hyphen
 * - Trim leading/trailing hyphens
 *
 * IMPORTANT: This handles hierarchical paths with colons (e.g., "part-1:question-2:article-3")
 * by converting colons to hyphens, resulting in "part-1-question-2-article-3" as per NKBIP-08 spec.
 */
export function normalizeNKBIP08TagValue(text: string): string {
  if (!text) {
    return "";
  }

  // Remove quotes (single and double)
  let normalized = text.trim().replace(/^["']|["']$/g, "");

  // Normalize: lowercase, convert non-letter non-number to hyphen
  // Per NKBIP-08: "Section identifiers cannot contain colons in tag values.
  // Hierarchical paths with colons MUST be normalized: colons → hyphens"
  let result = "";
  for (let i = 0; i < normalized.length; i++) {
    const char = normalized[i];
    if ((char >= "a" && char <= "z") || (char >= "A" && char <= "Z") || (char >= "0" && char <= "9")) {
      result += char.toLowerCase();
    } else {
      // Non-alphanumeric (including colons) becomes hyphen (but don't add consecutive hyphens)
      if (result && result[result.length - 1] !== "-") {
        result += "-";
      }
    }
  }

  // Collapse multiple hyphens
  result = result.replace(/-+/g, "-");

  // Trim leading/trailing hyphens
  result = result.replace(/^-+|-+$/g, "");

  return result;
}
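
// Examples (derived from the rules above):
//
//   normalizeNKBIP08TagValue("Part 1: Question 2: Article 3");
//   // => "part-1-question-2-article-3"   (colons and spaces become hyphens)
//
//   normalizeNKBIP08TagValue("\"The King's Speech\"");
//   // => "the-king-s-speech"             (surrounding quotes stripped, apostrophe hyphenated)
//
//   normalizeNKBIP08TagValue("  --Draft--  ");
//   // => "draft"                         (leading/trailing hyphens trimmed)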

/**
 * NKBIP-08 tag metadata for 30040 (publication index)
 */
export interface NKBIP08_30040Tags {
  collection_id?: string; // C tag (optional - for compendiums, digests, libraries)
  version_tag?: string; // v tag
}

/**
 * NKBIP-08 tag metadata for 30041 (publication content) when nested under 30040
 */
export interface NKBIP08_30041Tags {
  collection_id?: string; // C tag (inherited from root 30040)
  title_id?: string; // T tag (derived from root 30040 title)
  chapter_id?: string; // c tag (from chapter title)
  section_id?: string; // s tag (from section title, only if not a chapter)
  version_tag?: string; // v tag (inherited from parent 30040)
}

/**
 * Extract NKBIP-08 tags from a 30040 metadata object
 */
export function extractNKBIP08TagsFrom30040(
  metadata: Kind30040Metadata
): NKBIP08_30040Tags {
  return {
    collection_id: metadata.collection_id,
    version_tag: metadata.version_tag,
  };
}

/**
 * Build NKBIP-08 tags for a nested 30041 event under a 30040 parent
 *
 * Per NKBIP-08 spec:
 * - C tag (collection_id): Inherited from root 30040 (optional - for compendiums, digests, libraries)
 * - T tag (title_id): Normalized book title (from root 30040, not just immediate parent)
 * - c tag (chapter_id): Normalized chapter identifier
 *   - If 30041 is directly under root (isChapter=true): from 30041's own title
 *   - If 30041 is under a chapter (isChapter=false): from parent 30040's title
 * - s tag (section_id): Normalized section identifier (from 30041 node's title)
 *   - Only added if isChapter=false (i.e., this is a section, not a chapter)
 * - v tag (version_tag): Inherited from parent 30040
 *
 * @param parentMetadata - The parent 30040 metadata (source of inheritance)
 * @param rootMetadata - The root 30040 metadata (source of collection_id)
 * @param bookTitle - The book title (root 30040 title) for T tag
 * @param chapterTitle - The chapter title (parent 30040 title if nested, or 30041 title if direct child)
 * @param sectionTitle - The section title (30041 node title)
 * @param isChapter - Whether this 30041 is directly under root (making it a chapter, not a section)
 * @returns NKBIP-08 tags for the 30041 event
 */
export function buildNKBIP08TagsFor30041(
  parentMetadata: Kind30040Metadata,
  rootMetadata: Kind30040Metadata,
  bookTitle: string,
  chapterTitle: string,
  sectionTitle: string,
  isChapter: boolean = false
): NKBIP08_30041Tags {
  return {
    // C tag: Inherited from root 30040 (optional - for compendiums, digests, libraries)
    collection_id: rootMetadata.collection_id ? normalizeNKBIP08TagValue(rootMetadata.collection_id) : undefined,
    // Inherit from parent 30040
    version_tag: parentMetadata.version_tag ? normalizeNKBIP08TagValue(parentMetadata.version_tag) : undefined,
    // T tag: Normalized book title (from root 30040)
    title_id: bookTitle ? normalizeNKBIP08TagValue(bookTitle) : undefined,
    // c tag: If this is a chapter (direct child of root), use its own title; otherwise use parent's title
    chapter_id: chapterTitle ? normalizeNKBIP08TagValue(chapterTitle) : undefined,
    // s tag: Only add if this is NOT a chapter (i.e., it's a section under a chapter)
    section_id: isChapter ? undefined : (sectionTitle ? normalizeNKBIP08TagValue(sectionTitle) : undefined),
  };
}
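
// Example (hypothetical metadata objects; chapterMeta and bookMeta stand in for
// Kind30040Metadata values): a section "Early Years" inside chapter "Part One"
// of the book "My Biography", where the chapter's 30040 carries version_tag "1"
// and the root 30040 carries collection_id "My Library".
//
//   const tags = buildNKBIP08TagsFor30041(
//     { ...chapterMeta, version_tag: "1" },          // parent 30040 (the chapter)
//     { ...bookMeta, collection_id: "My Library" },  // root 30040 (the book)
//     "My Biography",   // bookTitle
//     "Part One",       // chapterTitle
//     "Early Years",    // sectionTitle
//     false             // isChapter: this 30041 is a section under a chapter
//   );
//   // tags => {
//   //   collection_id: "my-library",
//   //   version_tag: "1",
//   //   title_id: "my-biography",
//   //   chapter_id: "part-one",
//   //   section_id: "early-years",
//   // }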

/**
 * Merge NKBIP-08 tags for nested 30040 events
 * Child 30040 inherits from parent 30040 if parent values exist, otherwise uses own values
 * All values are normalized per NKBIP-08 spec
 *
 * @param parentMetadata - Parent 30040 metadata (optional)
 * @param childMetadata - Child 30040 metadata
 * @returns Merged NKBIP-08 tags (normalized)
 */
export function mergeNKBIP08TagsFor30040(
  parentMetadata: Kind30040Metadata | undefined,
  childMetadata: Kind30040Metadata,
  rootMetadata?: Kind30040Metadata
): NKBIP08_30040Tags {
  // Collection ID is inherited from root (if present), not from parent
  const collectionId = rootMetadata?.collection_id || childMetadata.collection_id;
  const versionTag = parentMetadata?.version_tag || childMetadata.version_tag;

  return {
    collection_id: collectionId ? normalizeNKBIP08TagValue(collectionId) : undefined,
    version_tag: versionTag ? normalizeNKBIP08TagValue(versionTag) : undefined,
  };
}

/**
 * Apply NKBIP-08 tags to a 30041 metadata object
 * Used when building nested 30041 events under 30040
 *
 * @param metadata - The 30041 metadata to update
 * @param nkbip08Tags - The NKBIP-08 tags to apply
 * @returns Updated metadata with NKBIP-08 tags
 */
export function applyNKBIP08TagsTo30041(
  metadata: Kind30041Metadata,
  nkbip08Tags: NKBIP08_30041Tags
): Kind30041Metadata {
  return {
    ...metadata,
    collection_id: nkbip08Tags.collection_id,
    title_id: nkbip08Tags.title_id,
    chapter_id: nkbip08Tags.chapter_id,
    section_id: nkbip08Tags.section_id,
    version_tag: nkbip08Tags.version_tag,
  };
}

/**
 * Apply NKBIP-08 tags to a 30040 metadata object
 * Used when building nested 30040 events
 *
 * @param metadata - The 30040 metadata to update
 * @param nkbip08Tags - The NKBIP-08 tags to apply
 * @returns Updated metadata with NKBIP-08 tags
 */
export function applyNKBIP08TagsTo30040(
  metadata: Kind30040Metadata,
  nkbip08Tags: NKBIP08_30040Tags
): Kind30040Metadata {
  return {
    ...metadata,
    collection_id: nkbip08Tags.collection_id,
    version_tag: nkbip08Tags.version_tag,
  };
}

/**
 * Add NKBIP-08 tags to a tags array for a 30040 event
 *
 * Per NKBIP-08 spec:
 * - C tag (collection_id): Optional, normalized (for compendiums, digests, libraries - inherited from root)
 * - T tag (title_id): MANDATORY for book/title events, also added to chapters (inherited from book)
 * - c tag (chapter_id): Optional, for chapter index events, normalized
 * - v tag (version_tag): Optional, normalized
 *
 * @param tags - The tags array to add to
 * @param metadata - The 30040 metadata containing NKBIP-08 tag values
 * @param isBook - Whether this is a book/title event (requires T tag)
 * @param isChapter - Whether this is a chapter event (requires c tag and inherits T tag)
 * @param bookTitle - The book title for T tag (used for chapters to inherit from book)
 */
export function addNKBIP08TagsTo30040(
  tags: string[][],
  metadata: Kind30040Metadata,
  isBook: boolean = false,
  isChapter: boolean = false,
  bookTitle?: string,
  rootMetadata?: Kind30040Metadata
): void {
  // C tag (collection) - optional, inherited from root if present
  if (rootMetadata?.collection_id) {
    const normalized = normalizeNKBIP08TagValue(rootMetadata.collection_id);
    if (normalized) {
      tags.push([NKBIP08_TAGS.COLLECTION, normalized]);
    }
  } else if (metadata.collection_id) {
    // Fallback: use own collection_id if root not provided (for root itself)
    const normalized = normalizeNKBIP08TagValue(metadata.collection_id);
    if (normalized) {
      tags.push([NKBIP08_TAGS.COLLECTION, normalized]);
    }
  }

  // T tag (title) - MANDATORY for book/title events per NKBIP-08 spec
  // Also added to chapters (inherited from book)
  if (isBook && metadata.title) {
    const normalized = normalizeNKBIP08TagValue(metadata.title);
    if (normalized) {
      tags.push([NKBIP08_TAGS.TITLE, normalized]);
    }
  } else if (isChapter && bookTitle) {
    // Chapter inherits T tag from book
    const normalized = normalizeNKBIP08TagValue(bookTitle);
    if (normalized) {
      tags.push([NKBIP08_TAGS.TITLE, normalized]);
    }
  }

  // c tag (chapter) - for chapter index events
  if (isChapter && metadata.title) {
    const normalized = normalizeNKBIP08TagValue(metadata.title);
    if (normalized) {
      tags.push([NKBIP08_TAGS.CHAPTER, normalized]);
    }
  }

  // v tag (version) - optional, add if present
  if (metadata.version_tag) {
    const normalized = normalizeNKBIP08TagValue(metadata.version_tag);
    if (normalized) {
      tags.push([NKBIP08_TAGS.VERSION, normalized]);
    }
  }
}
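
// Example (hypothetical metadata objects, as above): tagging a chapter index
// event whose own title is "Part One", nested under the book "My Biography".
//
//   const tags: string[][] = [];
//   addNKBIP08TagsTo30040(
//     tags,
//     { ...chapterMeta, title: "Part One", version_tag: "1" },
//     false,            // isBook
//     true,             // isChapter
//     "My Biography",   // bookTitle (inherited into the T tag)
//     { ...bookMeta, collection_id: "My Library" }
//   );
//   // tags => [["C", "my-library"], ["T", "my-biography"], ["c", "part-one"], ["v", "1"]]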

/**
 * Add NKBIP-08 tags to a tags array for a 30041 event
 * Only adds tags if they are present (for nested 30041 under 30040)
 * Stand-alone 30041 events don't have NKBIP-08 tags
 *
 * Per NKBIP-08 spec:
 * - C tag (collection_id): Optional, normalized (inherited from root 30040)
 * - T tag (title_id): MANDATORY for nested 30041, normalized
 * - c tag (chapter_id): Optional, normalized
 * - s tag (section_id): Optional, normalized
 * - v tag (version_tag): Optional, normalized
 *
 * @param tags - The tags array to add to
 * @param metadata - The 30041 metadata containing NKBIP-08 tag values
 */
export function addNKBIP08TagsTo30041(
  tags: string[][],
  metadata: Kind30041Metadata
): void {
  // Only add NKBIP-08 tags if they exist (indicating this is a nested 30041)
  // All tag values are already normalized when stored in metadata
  if (metadata.collection_id) {
    tags.push([NKBIP08_TAGS.COLLECTION, metadata.collection_id]);
  }
  if (metadata.title_id) {
    tags.push([NKBIP08_TAGS.TITLE, metadata.title_id]);
  }
  if (metadata.chapter_id) {
    tags.push([NKBIP08_TAGS.CHAPTER, metadata.chapter_id]);
  }
  if (metadata.section_id) {
    tags.push([NKBIP08_TAGS.SECTION, metadata.section_id]);
  }
  if (metadata.version_tag) {
    tags.push([NKBIP08_TAGS.VERSION, metadata.version_tag]);
  }
}

/**
 * Check if a 30041 event has NKBIP-08 tags (indicating it's nested under 30040)
 *
 * @param metadata - The 30041 metadata to check
 * @returns True if the event has NKBIP-08 tags
 */
export function hasNKBIP08Tags(metadata: Kind30041Metadata): boolean {
  return !!(
    metadata.collection_id ||
    metadata.title_id ||
    metadata.chapter_id ||
    metadata.section_id ||
    metadata.version_tag
  );
}
@@ -0,0 +1,27 @@
/**
 * Centralized console logging utilities
 * All logging goes through stderr for Obsidian plugin debugging
 */

/**
 * Log a message to stderr (Obsidian console)
 */
export function log(message: string): void {
  console.error(`[Scriptorium] ${message}`);
  if (typeof process !== "undefined" && process.stderr) {
    process.stderr.write(`[Scriptorium] ${message}\n`);
  }
}

/**
 * Log an error with optional stack trace
 */
export function logError(message: string, error?: any): void {
  const errorMsg = error?.message || String(error || "");
  const stackTrace = error?.stack || "";

  log(`Error: ${message} - ${errorMsg}`);
  if (stackTrace) {
    log(`Stack trace: ${stackTrace}`);
  }
}
@@ -0,0 +1,22 @@
import { Notice } from "obsidian";
import { sanitizeString } from "./security";

/**
 * Sanitize error message for display (removes private keys)
 */
export function sanitizeErrorMessage(error: any): string {
  if (!error) return "Unknown error";

  const message = error?.message || String(error);
  return sanitizeString(message)
    .replace(/nsec1[a-z0-9]{58,}/gi, "[REDACTED]")
    .replace(/[0-9a-f]{64}/gi, "[REDACTED]");
}
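
// Example (illustrative): bech32 nsec strings and 64-char hex values are
// redacted before the message is shown. Note the hex pattern also redacts any
// 64-hex identifier (event ids, pubkeys), which errs on the safe side.
//
//   sanitizeErrorMessage(new Error("failed to sign with nsec1" + "x".repeat(58)));
//   // => roughly "failed to sign with [REDACTED]"
//   //    (exact output also depends on what sanitizeString removes)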

/**
 * Show a notice with sanitized error message
 */
export function showErrorNotice(message: string, error?: any): void {
  // Keep the caller's context message and append the sanitized error details, if any
  const safeMessage = error ? `${message}: ${sanitizeErrorMessage(error)}` : message;
  new Notice(safeMessage);
}
@@ -0,0 +1,50 @@
import { TFile } from "obsidian";
import { EventKind } from "../types";
import { isAsciiDocFile, isMarkdownFile } from "./fileExtensions";
import { isAsciiDocDocument } from "../asciidocParser";

/**
 * Determine event kind from file extension and content
 */
export function determineEventKind(
  file: TFile,
  content: string,
  defaultKind: EventKind,
  metadataKind?: EventKind
): EventKind {
  if (isAsciiDocFile(file)) {
    if (isAsciiDocDocument(content)) {
      return 30040;
    }
    return 30818;
  }

  if (isMarkdownFile(file)) {
    return metadataKind || defaultKind;
  }

  return defaultKind;
}
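
// Examples (illustrative; kind numbers per the folder mapping below, and assuming
// isAsciiDocDocument() detects a "=" document header with sections):
//   - book.adoc with a document header and sections  => 30040 (publication index)
//   - notes.adoc without document structure          => 30818 (AsciiDoc wiki)
//   - post.md whose metadata declares kind 30023     => 30023
//   - post.md with no metadata kind                  => defaultKind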

/**
 * Get folder name for an event kind
 */
export function getFolderNameForKind(kind: EventKind): string {
  const folderMap: Record<EventKind, string> = {
    1: "kind-1-notes",
    11: "kind-11-threads",
    30023: "kind-30023-articles",
    30040: "kind-30040-publications",
    30041: "kind-30041-chapters",
    30817: "kind-30817-wiki-md",
    30818: "kind-30818-wiki-adoc",
  };
  return folderMap[kind];
}

/**
 * Check if event kind requires a title
 */
export function requiresTitle(kind: EventKind): boolean {
  return kind !== 1;
}
@@ -0,0 +1,25 @@
import { TFile } from "obsidian";

/**
 * Check if file is a Markdown file
 */
export function isMarkdownFile(file: TFile): boolean {
  return file.extension === "md" || file.extension === "markdown";
}

/**
 * Check if file is an AsciiDoc file
 */
export function isAsciiDocFile(file: TFile): boolean {
  return file.extension === "adoc" || file.extension === "asciidoc";
}

/**
 * Get file extension type
 */
export function getFileType(file: TFile): "markdown" | "asciidoc" | "unknown" {
  if (isMarkdownFile(file)) return "markdown";
  if (isAsciiDocFile(file)) return "asciidoc";
  return "unknown";
}