@colbymchenry/cmem 0.2.36 → 0.5.1

@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/hooks/sync.ts","../../src/parser/index.ts","../../src/utils/config.ts","../../src/db/index.ts","../../src/db/sessions.ts","../../src/db/vectors.ts","../../src/embeddings/index.ts","../../src/utils/format.ts","../../src/db/lessons.ts"],"sourcesContent":["/**\n * Session Sync Hook\n *\n * Called on Stop and PreCompact events to sync the current session.\n * After syncing, spawns a background process to synthesize lessons.\n */\n\nimport { statSync, existsSync, readFileSync, readdirSync } from 'fs';\nimport { join, dirname, basename } from 'path';\nimport { spawn } from 'child_process';\nimport { fileURLToPath } from 'url';\nimport { parseSessionFile, generateSummary, ParsedMessage, extractSessionMetadata, isAutomatedByContent } from '../parser/index.js';\nimport {\n createSession,\n getSessionBySourceFile,\n updateSession,\n needsReembedding,\n updateEmbeddingState,\n} from '../db/sessions.js';\nimport { storeEmbedding } from '../db/vectors.js';\nimport { getEmbedding, createEmbeddingText, initializeEmbeddings, isReady } from '../embeddings/index.js';\nimport { generateTitle } from '../utils/format.js';\nimport { backupSessionFile, CLAUDE_PROJECTS_DIR } from '../utils/config.js';\nimport { queueForSynthesis, clearSessionInjections } from '../db/lessons.js';\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\n\ninterface HookInput {\n session_id: string;\n cwd: string;\n transcript_path?: string;\n}\n\ninterface SessionIndexEntry {\n sessionId: string;\n projectPath: string;\n}\n\n/**\n * Read stdin as JSON\n */\nasync function readStdin(): Promise<string> {\n return new Promise((resolve) => {\n let data = '';\n process.stdin.setEncoding('utf8');\n process.stdin.on('readable', () => {\n let chunk;\n while ((chunk = process.stdin.read()) !== null) {\n data += chunk;\n }\n });\n process.stdin.on('end', () => {\n resolve(data);\n });\n // If no stdin data after timeout, resolve with empty\n setTimeout(() => resolve(data), 100);\n });\n}\n\n/**\n * Spawn a detached background process to run synthesis\n * This allows the hook to exit quickly while synthesis runs in background\n */\nfunction spawnBackgroundSynthesis(): void {\n try {\n // Path to synthesize.js is in the same directory as this hook\n const synthesizePath = join(__dirname, 'synthesize.js');\n\n if (!existsSync(synthesizePath)) {\n // Synthesize script not found, skip\n return;\n }\n\n // Spawn detached process that processes 1 session\n // Using 'node' directly since we're in a Node.js environment\n const child = spawn('node', [synthesizePath, '1'], {\n detached: true,\n stdio: 'ignore', // Don't inherit stdio\n env: { ...process.env },\n });\n\n // Allow parent process to exit independently\n child.unref();\n } catch {\n // Silently fail - synthesis can be run manually\n }\n}\n\n/**\n * Find session file from Claude's storage\n */\nfunction findSessionFile(sessionId: string, cwd: string): string | null {\n // Try to find in projects directory based on cwd hash\n if (existsSync(CLAUDE_PROJECTS_DIR)) {\n const projectDirs = readdirSync(CLAUDE_PROJECTS_DIR);\n for (const projectDir of projectDirs) {\n const projectPath = join(CLAUDE_PROJECTS_DIR, projectDir);\n const sessionFile = join(projectPath, `${sessionId}.jsonl`);\n if (existsSync(sessionFile)) {\n return sessionFile;\n }\n }\n }\n\n return null;\n}\n\n/**\n * Get project path from sessions-index.json\n */\nfunction getProjectPathFromIndex(filePath: string, sessionId: string): string | null {\n const 
projectDir = dirname(filePath);\n const indexPath = join(projectDir, 'sessions-index.json');\n\n if (!existsSync(indexPath)) return null;\n\n try {\n const content = readFileSync(indexPath, 'utf-8');\n const index = JSON.parse(content) as { entries: SessionIndexEntry[] };\n const entry = index.entries.find(e => e.sessionId === sessionId);\n return entry?.projectPath || null;\n } catch {\n return null;\n }\n}\n\n/**\n * Load messages from all subagent sessions for a parent session\n */\nfunction loadSubagentMessages(projectDirPath: string, parentSessionId: string): ParsedMessage[] {\n const subagentsDir = join(projectDirPath, parentSessionId, 'subagents');\n if (!existsSync(subagentsDir)) return [];\n\n const messages: ParsedMessage[] = [];\n\n try {\n const agentFiles = readdirSync(subagentsDir).filter(f => f.endsWith('.jsonl'));\n for (const agentFile of agentFiles) {\n const agentPath = join(subagentsDir, agentFile);\n const agentMessages = parseSessionFile(agentPath);\n messages.push(...agentMessages);\n }\n } catch {\n // Ignore errors reading subagent files\n }\n\n return messages;\n}\n\n/**\n * Process a session file - sync to database and backup\n */\nasync function processSessionFile(\n filePath: string,\n embeddingsReady: boolean,\n embedThreshold: number = 500\n): Promise<{ sessionId: string; isNew: boolean } | null> {\n try {\n // Parse the session file\n const messages = parseSessionFile(filePath);\n\n if (messages.length === 0) {\n return null;\n }\n\n // Get file stats\n const stats = statSync(filePath);\n const fileMtime = stats.mtime.toISOString();\n\n // Check if we already have this session\n const existingSession = getSessionBySourceFile(filePath);\n\n // Get session ID from filename\n const sessionIdFromFile = basename(filePath, '.jsonl');\n\n // Load agent messages for this session (for embeddings)\n const agentMessages = loadSubagentMessages(dirname(filePath), sessionIdFromFile);\n const allMessages = [...messages, ...agentMessages];\n\n // Calculate content length for embedding decisions (including agent messages)\n const contentLength = allMessages.reduce((sum, m) => sum + m.content.length, 0);\n\n // Generate title from first user message\n const firstUserMsg = messages.find(m => m.role === 'user');\n const title = firstUserMsg ? 
generateTitle(firstUserMsg.content) : 'Untitled Session';\n\n // Generate summary\n const summary = generateSummary(messages);\n\n // Build raw data for storage\n const rawData = JSON.stringify({ filePath, messages, mtime: fileMtime });\n\n // Get project path from sessions-index.json\n const projectPath = getProjectPathFromIndex(filePath, sessionIdFromFile);\n\n // Extract metadata (isSidechain, agentId, isMeta) from JSONL\n const metadata = extractSessionMetadata(filePath);\n // Mark as automated if: JSONL metadata says so, OR title matches automated patterns\n const isAutomated = metadata.isSidechain || metadata.isMeta || isAutomatedByContent(title);\n\n let sessionId: string;\n let isNew = false;\n\n if (existingSession) {\n // Update existing session\n sessionId = existingSession.id;\n\n // Update if content has changed\n if (existingSession.messageCount !== messages.length) {\n updateSession(sessionId, {\n title,\n summary,\n rawData,\n messages,\n isSidechain: metadata.isSidechain,\n isAutomated,\n projectPath: projectPath || undefined,\n });\n }\n } else {\n // Create new session\n isNew = true;\n sessionId = createSession({\n title,\n summary,\n projectPath,\n sourceFile: filePath,\n rawData,\n messages,\n isSidechain: metadata.isSidechain,\n isAutomated,\n });\n }\n\n // Handle embedding (include agent messages for better search)\n if (embeddingsReady) {\n const shouldEmbed = needsReembedding(sessionId, contentLength, embedThreshold);\n\n if (shouldEmbed) {\n try {\n // Use all messages including agents for richer embeddings\n const embeddingText = createEmbeddingText(title, summary, allMessages);\n const embedding = await getEmbedding(embeddingText);\n storeEmbedding(sessionId, embedding);\n updateEmbeddingState(sessionId, contentLength, fileMtime);\n } catch {\n // Embedding failed, continue without it\n }\n }\n }\n\n // Backup the JSONL file to cmem's storage (conversations never lost)\n backupSessionFile(filePath);\n\n return { sessionId, isNew };\n } catch {\n return null;\n }\n}\n\nasync function main() {\n try {\n const stdinData = await readStdin();\n const input: HookInput = stdinData ? 
JSON.parse(stdinData) : {};\n\n // Find session file\n let sessionFile: string | null = null;\n\n if (input.transcript_path && existsSync(input.transcript_path)) {\n sessionFile = input.transcript_path;\n } else if (input.session_id && input.cwd) {\n sessionFile = findSessionFile(input.session_id, input.cwd);\n }\n\n if (!sessionFile) {\n process.exit(0);\n }\n\n // Initialize embeddings if possible\n let embeddingsReady = false;\n if (isReady()) {\n embeddingsReady = true;\n } else {\n try {\n await initializeEmbeddings();\n embeddingsReady = isReady();\n } catch {\n // Continue without embeddings\n }\n }\n\n // Process the session file\n const result = await processSessionFile(sessionFile, embeddingsReady);\n\n if (result) {\n // Get project path for synthesis queue\n const projectPath = getProjectPathFromIndex(sessionFile, result.sessionId) || input.cwd || '';\n\n // Queue for synthesis (will be processed later)\n try {\n queueForSynthesis(result.sessionId, projectPath);\n // Spawn background process to synthesize lessons\n spawnBackgroundSynthesis();\n } catch {\n // Synthesis queue may not exist yet, that's okay\n }\n }\n\n // Clear lesson injection cache for this session\n // On Stop: session is ending, cache doesn't matter\n // On PreCompact: context will be lost, lessons need to be re-injected\n if (input.session_id) {\n try {\n clearSessionInjections(input.session_id);\n } catch {\n // Ignore errors clearing cache\n }\n }\n\n process.exit(0);\n } catch {\n process.exit(1);\n }\n}\n\nmain();\n","import { readFileSync, readdirSync, existsSync, statSync } from 'fs';\nimport { join, basename, dirname } from 'path';\nimport { CLAUDE_PROJECTS_DIR, CLAUDE_SESSIONS_DIR } from '../utils/config.js';\n\nexport interface ParsedMessage {\n role: 'user' | 'assistant';\n content: string;\n timestamp: string;\n}\n\nexport interface SessionMetadata {\n isSidechain: boolean;\n isMeta: boolean;\n}\n\n// Patterns that indicate automated/system sessions based on title\nconst AUTOMATED_TITLE_PATTERNS = [\n /^<[a-z-]+>/i, // XML-like tags: <project-instructions>, <command-message>, etc.\n /^<[A-Z_]+>/, // Uppercase tags: <SYSTEM>, <TOOL_USE>, etc.\n /^\\[system\\]/i, // [system] prefix\n /^\\/[a-z]+$/i, // Slash commands: /init, /help, etc.\n];\n\n/**\n * Check if a title/first message indicates an automated session\n */\nexport function isAutomatedByContent(title: string): boolean {\n for (const pattern of AUTOMATED_TITLE_PATTERNS) {\n if (pattern.test(title.trim())) {\n return true;\n }\n }\n return false;\n}\n\nexport interface ParsedSession {\n filePath: string;\n projectPath: string | null;\n messages: ParsedMessage[];\n rawData: string;\n modifiedAt: Date;\n agentMessages?: ParsedMessage[]; // Messages from subagents linked to this session\n}\n\ninterface SessionIndexEntry {\n sessionId: string;\n fullPath: string;\n fileMtime: number;\n firstPrompt: string;\n messageCount: number;\n created: string;\n modified: string;\n gitBranch?: string;\n projectPath: string;\n isSidechain: boolean;\n}\n\ninterface SessionsIndex {\n version: number;\n entries: SessionIndexEntry[];\n}\n\n/**\n * Extract session metadata from JSONL file (isSidechain, agentId, isMeta, etc.)\n * Only uses reliable JSONL metadata fields, not content-based heuristics\n */\nexport function extractSessionMetadata(filepath: string): SessionMetadata {\n const content = readFileSync(filepath, 'utf-8');\n\n const metadata: SessionMetadata = {\n isSidechain: false,\n isMeta: false,\n };\n\n // Find the first user message to check 
metadata\n for (const line of content.split('\\n')) {\n if (!line.trim()) continue;\n\n try {\n const parsed = JSON.parse(line);\n\n // Look for the first user message\n if (parsed.type === 'user' && parsed.message) {\n // Check isSidechain field (present in all Claude Code JSONL entries)\n // This is true for subagent/automated sessions\n if (parsed.isSidechain === true) {\n metadata.isSidechain = true;\n }\n\n // Check if this has an agentId (subagent session)\n if (parsed.agentId) {\n metadata.isSidechain = true;\n }\n\n // Check isMeta field - true for local command sessions\n if (parsed.isMeta === true) {\n metadata.isMeta = true;\n }\n\n break; // Only need first user message\n }\n } catch {\n // Skip malformed lines\n }\n }\n\n return metadata;\n}\n\n/**\n * Parse a Claude Code session JSONL file\n */\nexport function parseSessionFile(filepath: string): ParsedMessage[] {\n const content = readFileSync(filepath, 'utf-8');\n const messages: ParsedMessage[] = [];\n\n for (const line of content.split('\\n')) {\n if (!line.trim()) continue;\n\n try {\n const parsed = JSON.parse(line);\n\n // Handle different Claude Code JSONL formats\n if ((parsed.type === 'user' || parsed.type === 'assistant') && parsed.message) {\n // Claude Code format: type is 'user'/'assistant', message contains the actual content\n const msg = parsed.message;\n if (msg.role && msg.content) {\n const content = Array.isArray(msg.content)\n ? msg.content\n .filter((c: { type: string }) => c.type === 'text')\n .map((c: { text: string }) => c.text)\n .join('\\n')\n : typeof msg.content === 'string'\n ? msg.content\n : JSON.stringify(msg.content);\n\n if (content) {\n messages.push({\n role: msg.role as 'user' | 'assistant',\n content,\n timestamp: parsed.timestamp || new Date().toISOString(),\n });\n }\n }\n } else if (parsed.type === 'message' && parsed.role && parsed.content) {\n // Standard message format\n messages.push({\n role: parsed.role as 'user' | 'assistant',\n content: typeof parsed.content === 'string'\n ? parsed.content\n : JSON.stringify(parsed.content),\n timestamp: parsed.timestamp || new Date().toISOString(),\n });\n } else if (parsed.role && parsed.content && !parsed.type) {\n // Simplified format without type field\n messages.push({\n role: parsed.role as 'user' | 'assistant',\n content: typeof parsed.content === 'string'\n ? 
parsed.content\n : JSON.stringify(parsed.content),\n timestamp: parsed.timestamp || new Date().toISOString(),\n });\n }\n // Skip tool_use, tool_result, and other non-message entries\n } catch {\n // Skip malformed lines\n }\n }\n\n return messages;\n}\n\n/**\n * Find all Claude Code session files (excluding subagent sessions)\n */\nexport function findSessionFiles(): ParsedSession[] {\n const sessions: ParsedSession[] = [];\n\n // Search in ~/.claude/projects/<hash>/\n if (existsSync(CLAUDE_PROJECTS_DIR)) {\n const projectDirs = readdirSync(CLAUDE_PROJECTS_DIR);\n for (const projectDir of projectDirs) {\n const projectDirPath = join(CLAUDE_PROJECTS_DIR, projectDir);\n const stat = statSync(projectDirPath);\n if (!stat.isDirectory()) continue;\n\n // Try to read sessions-index.json for metadata\n const indexPath = join(projectDirPath, 'sessions-index.json');\n const sessionIndex = loadSessionIndex(indexPath);\n\n // Build a map of session IDs to their metadata\n const indexedSessions = new Map<string, SessionIndexEntry>();\n if (sessionIndex) {\n for (const entry of sessionIndex.entries) {\n indexedSessions.set(entry.sessionId, entry);\n }\n }\n\n // Only load .jsonl files directly in the project directory (not in subfolders)\n const entries = readdirSync(projectDirPath, { withFileTypes: true });\n for (const entry of entries) {\n if (!entry.isFile() || !entry.name.endsWith('.jsonl')) continue;\n\n const filePath = join(projectDirPath, entry.name);\n const sessionId = entry.name.replace('.jsonl', '');\n\n try {\n const session = loadSession(filePath);\n if (session) {\n // Use metadata from sessions-index.json if available\n const indexEntry = indexedSessions.get(sessionId);\n if (indexEntry) {\n session.projectPath = indexEntry.projectPath;\n }\n\n // Load any subagent messages for this session\n const agentMessages = loadSubagentMessages(projectDirPath, sessionId);\n if (agentMessages.length > 0) {\n session.agentMessages = agentMessages;\n }\n\n sessions.push(session);\n }\n } catch {\n // Skip unreadable files\n }\n }\n }\n }\n\n // Search in ~/.claude/sessions/ (no subagents here)\n if (existsSync(CLAUDE_SESSIONS_DIR)) {\n const sessionFiles = readdirSync(CLAUDE_SESSIONS_DIR).filter(f => f.endsWith('.jsonl'));\n for (const sessionFile of sessionFiles) {\n const filePath = join(CLAUDE_SESSIONS_DIR, sessionFile);\n try {\n const session = loadSession(filePath);\n if (session) {\n sessions.push(session);\n }\n } catch {\n // Skip unreadable files\n }\n }\n }\n\n // Sort by modification time (newest first)\n sessions.sort((a, b) => b.modifiedAt.getTime() - a.modifiedAt.getTime());\n\n return sessions;\n}\n\n/**\n * Load sessions-index.json if it exists\n */\nfunction loadSessionIndex(indexPath: string): SessionsIndex | null {\n if (!existsSync(indexPath)) return null;\n\n try {\n const content = readFileSync(indexPath, 'utf-8');\n return JSON.parse(content) as SessionsIndex;\n } catch {\n return null;\n }\n}\n\n/**\n * Load messages from all subagent sessions for a parent session\n */\nfunction loadSubagentMessages(projectDirPath: string, parentSessionId: string): ParsedMessage[] {\n const subagentsDir = join(projectDirPath, parentSessionId, 'subagents');\n if (!existsSync(subagentsDir)) return [];\n\n const messages: ParsedMessage[] = [];\n\n try {\n const agentFiles = readdirSync(subagentsDir).filter(f => f.endsWith('.jsonl'));\n for (const agentFile of agentFiles) {\n const agentPath = join(subagentsDir, agentFile);\n const agentMessages = parseSessionFile(agentPath);\n 
messages.push(...agentMessages);\n }\n } catch {\n // Ignore errors reading subagent files\n }\n\n return messages;\n}\n\n/**\n * Load a session from a file path\n */\nfunction loadSession(filePath: string): ParsedSession | null {\n const rawData = readFileSync(filePath, 'utf-8');\n const messages = parseSessionFile(filePath);\n\n // Skip empty sessions\n if (messages.length === 0) {\n return null;\n }\n\n const stats = statSync(filePath);\n\n return {\n filePath,\n projectPath: null,\n messages,\n rawData,\n modifiedAt: stats.mtime,\n };\n}\n\n\n/**\n * Get the most recent session file\n */\nexport function getMostRecentSession(): ParsedSession | null {\n const sessions = findSessionFiles();\n return sessions.length > 0 ? sessions[0] : null;\n}\n\n/**\n * Generate a summary from session messages\n */\nexport function generateSummary(messages: ParsedMessage[]): string {\n // Take first user message as the main topic\n const firstUserMessage = messages.find(m => m.role === 'user');\n if (!firstUserMessage) {\n return 'No user messages found';\n }\n\n // Truncate to reasonable summary length\n const summary = firstUserMessage.content.slice(0, 300);\n return summary.length < firstUserMessage.content.length ? summary + '...' : summary;\n}\n","import { homedir } from 'os';\nimport { join, basename, dirname } from 'path';\nimport { mkdirSync, existsSync, copyFileSync } from 'fs';\n\n// Data storage paths\nexport const CMEM_DIR = join(homedir(), '.cmem');\nexport const DB_PATH = join(CMEM_DIR, 'sessions.db');\nexport const MODELS_DIR = join(CMEM_DIR, 'models');\nexport const BACKUPS_DIR = join(CMEM_DIR, 'backups');\n\n// Claude Code paths\nexport const CLAUDE_DIR = join(homedir(), '.claude');\nexport const CLAUDE_PROJECTS_DIR = join(CLAUDE_DIR, 'projects');\nexport const CLAUDE_SESSIONS_DIR = join(CLAUDE_DIR, 'sessions');\n\n// Embedding model settings (transformers.js)\nexport const EMBEDDING_MODEL = 'nomic-ai/nomic-embed-text-v1.5';\nexport const EMBEDDING_DIMENSIONS = 768;\n\n// Text limits\nexport const MAX_EMBEDDING_CHARS = 8000;\nexport const MAX_MESSAGE_PREVIEW_CHARS = 500;\nexport const MAX_MESSAGES_FOR_CONTEXT = 20;\n\n// Database maintenance thresholds\nexport const DB_SIZE_ALERT_THRESHOLD = 5 * 1024 * 1024 * 1024; // 5GB\nexport const DEFAULT_PURGE_DAYS = 30;\n\n// Ensure cmem directory exists\nexport function ensureCmemDir(): void {\n if (!existsSync(CMEM_DIR)) {\n mkdirSync(CMEM_DIR, { recursive: true });\n }\n}\n\n// Ensure models directory exists\nexport function ensureModelsDir(): void {\n ensureCmemDir();\n if (!existsSync(MODELS_DIR)) {\n mkdirSync(MODELS_DIR, { recursive: true });\n }\n}\n\n// Ensure backups directory exists\nexport function ensureBackupsDir(): void {\n ensureCmemDir();\n if (!existsSync(BACKUPS_DIR)) {\n mkdirSync(BACKUPS_DIR, { recursive: true });\n }\n}\n\n/**\n * Get the backup path for a Claude session file\n * Mirrors the Claude directory structure: ~/.cmem/backups/<project-dir>/<session>.jsonl\n */\nexport function getBackupPath(sourceFile: string): string {\n const projectDir = basename(dirname(sourceFile));\n const sessionFile = basename(sourceFile);\n return join(BACKUPS_DIR, projectDir, sessionFile);\n}\n\n/**\n * Backup a session JSONL file\n * Creates the project directory in backups if needed\n */\nexport function backupSessionFile(sourceFile: string): boolean {\n try {\n if (!existsSync(sourceFile)) return false;\n\n ensureBackupsDir();\n const backupPath = getBackupPath(sourceFile);\n const backupDir = dirname(backupPath);\n\n if 
(!existsSync(backupDir)) {\n mkdirSync(backupDir, { recursive: true });\n }\n\n copyFileSync(sourceFile, backupPath);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Check if a backup exists for a session\n */\nexport function hasBackup(sourceFile: string): boolean {\n const backupPath = getBackupPath(sourceFile);\n return existsSync(backupPath);\n}\n\n/**\n * Restore a session from backup to Claude's directory\n * Returns true if restored successfully\n */\nexport function restoreFromBackup(sourceFile: string): boolean {\n try {\n const backupPath = getBackupPath(sourceFile);\n if (!existsSync(backupPath)) return false;\n\n const sourceDir = dirname(sourceFile);\n if (!existsSync(sourceDir)) {\n mkdirSync(sourceDir, { recursive: true });\n }\n\n copyFileSync(backupPath, sourceFile);\n return true;\n } catch {\n return false;\n }\n}\n","import Database from 'better-sqlite3';\nimport * as sqliteVec from 'sqlite-vec';\nimport { existsSync } from 'fs';\nimport { DB_PATH, ensureCmemDir, EMBEDDING_DIMENSIONS } from '../utils/config.js';\nimport { extractSessionMetadata } from '../parser/index.js';\n\nlet db: Database.Database | null = null;\n\n/**\n * Get or create the database connection\n */\nexport function getDatabase(): Database.Database {\n if (db) return db;\n\n ensureCmemDir();\n\n db = new Database(DB_PATH);\n db.pragma('journal_mode = WAL');\n\n // Load sqlite-vec extension\n sqliteVec.load(db);\n\n // Initialize schema\n initSchema(db);\n\n return db;\n}\n\n/**\n * Initialize database schema\n */\nfunction initSchema(database: Database.Database): void {\n // Migrations tracking table\n database.exec(`\n CREATE TABLE IF NOT EXISTS migrations (\n name TEXT PRIMARY KEY,\n applied_at TEXT NOT NULL\n );\n `);\n\n // Sessions table\n database.exec(`\n CREATE TABLE IF NOT EXISTS sessions (\n id TEXT PRIMARY KEY,\n title TEXT NOT NULL,\n summary TEXT,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n message_count INTEGER DEFAULT 0,\n project_path TEXT,\n source_file TEXT,\n raw_data TEXT NOT NULL\n );\n `);\n\n // Add source_file column if it doesn't exist (migration)\n try {\n database.exec(`ALTER TABLE sessions ADD COLUMN source_file TEXT`);\n } catch {\n // Column already exists\n }\n\n // Add is_sidechain column if it doesn't exist (migration)\n try {\n database.exec(`ALTER TABLE sessions ADD COLUMN is_sidechain INTEGER DEFAULT 0`);\n } catch {\n // Column already exists\n }\n\n // Add is_automated column if it doesn't exist (migration)\n try {\n database.exec(`ALTER TABLE sessions ADD COLUMN is_automated INTEGER DEFAULT 0`);\n } catch {\n // Column already exists\n }\n\n // Add custom_title column if it doesn't exist (migration)\n // Used for user-renamed sessions\n try {\n database.exec(`ALTER TABLE sessions ADD COLUMN custom_title TEXT`);\n } catch {\n // Column already exists\n }\n\n // Messages table\n database.exec(`\n CREATE TABLE IF NOT EXISTS messages (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id TEXT NOT NULL,\n role TEXT NOT NULL CHECK (role IN ('user', 'assistant')),\n content TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n FOREIGN KEY (session_id) REFERENCES sessions(id) ON DELETE CASCADE\n );\n `);\n\n // Embedding state tracking for incremental updates\n database.exec(`\n CREATE TABLE IF NOT EXISTS embedding_state (\n session_id TEXT PRIMARY KEY,\n content_length INTEGER NOT NULL,\n file_mtime TEXT,\n last_embedded_at TEXT NOT NULL,\n FOREIGN KEY (session_id) REFERENCES sessions(id) ON DELETE CASCADE\n );\n `);\n\n // Vector embeddings table (768 
dimensions for nomic-embed-text)\n database.exec(`\n CREATE VIRTUAL TABLE IF NOT EXISTS session_embeddings USING vec0(\n session_id TEXT PRIMARY KEY,\n embedding FLOAT[${EMBEDDING_DIMENSIONS}]\n );\n `);\n\n // Favorites table for starred sessions\n database.exec(`\n CREATE TABLE IF NOT EXISTS favorites (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL CHECK (type IN ('session', 'folder')),\n value TEXT NOT NULL,\n created_at TEXT NOT NULL,\n UNIQUE(type, value)\n );\n `);\n\n // Project order table for manual sorting\n database.exec(`\n CREATE TABLE IF NOT EXISTS project_order (\n path TEXT PRIMARY KEY,\n sort_order INTEGER NOT NULL,\n updated_at TEXT NOT NULL\n );\n `);\n\n // Indexes for sessions\n database.exec(`\n CREATE INDEX IF NOT EXISTS idx_messages_session ON messages(session_id);\n `);\n database.exec(`\n CREATE INDEX IF NOT EXISTS idx_sessions_updated ON sessions(updated_at DESC);\n `);\n database.exec(`\n CREATE INDEX IF NOT EXISTS idx_sessions_source ON sessions(source_file);\n `);\n database.exec(`\n CREATE INDEX IF NOT EXISTS idx_favorites_type ON favorites(type);\n `);\n\n // ==================== LESSONS TABLES ====================\n\n // Lessons table\n database.exec(`\n CREATE TABLE IF NOT EXISTS lessons (\n id TEXT PRIMARY KEY,\n project_path TEXT NOT NULL,\n category TEXT NOT NULL CHECK (category IN (\n 'architecture_decision', 'anti_pattern', 'bug_pattern',\n 'project_convention', 'dependency_knowledge', 'domain_knowledge',\n 'workflow', 'other'\n )),\n title TEXT NOT NULL,\n trigger_context TEXT NOT NULL,\n insight TEXT NOT NULL,\n reasoning TEXT,\n confidence REAL DEFAULT 0.5,\n times_applied INTEGER DEFAULT 0,\n times_validated INTEGER DEFAULT 0,\n times_rejected INTEGER DEFAULT 0,\n source_session_id TEXT,\n source_type TEXT DEFAULT 'synthesized',\n archived INTEGER DEFAULT 0,\n created_at TEXT DEFAULT (datetime('now')),\n updated_at TEXT DEFAULT (datetime('now')),\n last_applied_at TEXT\n );\n `);\n\n // Lesson embeddings\n database.exec(`\n CREATE VIRTUAL TABLE IF NOT EXISTS lesson_embeddings USING vec0(\n lesson_id TEXT PRIMARY KEY,\n embedding FLOAT[${EMBEDDING_DIMENSIONS}]\n );\n `);\n\n // Lesson feedback\n database.exec(`\n CREATE TABLE IF NOT EXISTS lesson_feedback (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n lesson_id TEXT NOT NULL,\n session_id TEXT,\n feedback_type TEXT NOT NULL CHECK (feedback_type IN (\n 'validated', 'rejected', 'modified'\n )),\n comment TEXT,\n created_at TEXT DEFAULT (datetime('now')),\n FOREIGN KEY (lesson_id) REFERENCES lessons(id) ON DELETE CASCADE\n );\n `);\n\n // Synthesis queue\n database.exec(`\n CREATE TABLE IF NOT EXISTS synthesis_queue (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id TEXT NOT NULL UNIQUE,\n project_path TEXT NOT NULL,\n queued_at TEXT DEFAULT (datetime('now')),\n status TEXT DEFAULT 'pending',\n processed_at TEXT,\n lessons_created INTEGER DEFAULT 0,\n error TEXT\n );\n `);\n\n // Session injection cache - tracks which lessons have been injected per Claude session\n // Prevents re-injecting the same lessons in a conversation\n database.exec(`\n CREATE TABLE IF NOT EXISTS session_injections (\n session_id TEXT NOT NULL,\n lesson_id TEXT NOT NULL,\n injected_at TEXT DEFAULT (datetime('now')),\n PRIMARY KEY (session_id, lesson_id)\n );\n `);\n\n // Indexes for lessons\n database.exec(`\n CREATE INDEX IF NOT EXISTS idx_lessons_project ON lessons(project_path);\n `);\n database.exec(`\n CREATE INDEX IF NOT EXISTS idx_lessons_category ON lessons(category);\n `);\n database.exec(`\n CREATE 
INDEX IF NOT EXISTS idx_lessons_confidence ON lessons(confidence DESC);\n `);\n database.exec(`\n CREATE INDEX IF NOT EXISTS idx_lessons_archived ON lessons(archived);\n `);\n database.exec(`\n CREATE INDEX IF NOT EXISTS idx_synthesis_queue_status ON synthesis_queue(status);\n `);\n database.exec(`\n CREATE INDEX IF NOT EXISTS idx_session_injections_session ON session_injections(session_id);\n `);\n\n // Run data migrations\n runMigrations(database);\n}\n\n/**\n * Run data migrations (separate from schema migrations)\n */\nfunction runMigrations(database: Database.Database): void {\n // Migration: Populate is_sidechain and is_automated from JSONL metadata\n const migrationName = 'populate_session_metadata_v1';\n\n const existing = database.prepare(\n 'SELECT 1 FROM migrations WHERE name = ?'\n ).get(migrationName);\n\n if (existing) return; // Already applied\n\n // Get all sessions with source files\n const sessions = database.prepare(`\n SELECT id, source_file FROM sessions WHERE source_file IS NOT NULL\n `).all() as { id: string; source_file: string }[];\n\n const updateStmt = database.prepare(`\n UPDATE sessions SET is_sidechain = ?, is_automated = ? WHERE id = ?\n `);\n\n const transaction = database.transaction(() => {\n for (const session of sessions) {\n if (!existsSync(session.source_file)) continue;\n\n try {\n const metadata = extractSessionMetadata(session.source_file);\n const isAutomated = metadata.isSidechain || metadata.isMeta;\n updateStmt.run(\n metadata.isSidechain ? 1 : 0,\n isAutomated ? 1 : 0,\n session.id\n );\n } catch {\n // Skip files that can't be parsed\n }\n }\n\n // Mark migration as complete\n database.prepare(\n 'INSERT INTO migrations (name, applied_at) VALUES (?, ?)'\n ).run(migrationName, new Date().toISOString());\n });\n\n transaction();\n}\n\n/**\n * Close the database connection\n */\nexport function closeDatabase(): void {\n if (db) {\n db.close();\n db = null;\n }\n}\n","import { getDatabase } from './index.js';\nimport { randomUUID } from 'crypto';\n\nexport interface Session {\n id: string;\n title: string;\n customTitle: string | null;\n summary: string | null;\n createdAt: string;\n updatedAt: string;\n messageCount: number;\n projectPath: string | null;\n sourceFile: string | null;\n rawData: string;\n isSidechain: boolean;\n isAutomated: boolean;\n}\n\nexport interface Message {\n id: number;\n sessionId: string;\n role: 'user' | 'assistant';\n content: string;\n timestamp: string;\n}\n\nexport interface SessionInput {\n title: string;\n summary?: string;\n projectPath?: string;\n sourceFile?: string;\n rawData: string;\n messages: Omit<Message, 'id' | 'sessionId'>[];\n isSidechain?: boolean;\n isAutomated?: boolean;\n}\n\nexport interface EmbeddingState {\n sessionId: string;\n contentLength: number;\n fileMtime: string | null;\n lastEmbeddedAt: string;\n}\n\n/**\n * Create a new session\n */\nexport function createSession(input: SessionInput): string {\n const db = getDatabase();\n const id = randomUUID();\n const now = new Date().toISOString();\n\n const insertSession = db.prepare(`\n INSERT INTO sessions (id, title, summary, created_at, updated_at, message_count, project_path, source_file, raw_data, is_sidechain, is_automated)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n const insertMessage = db.prepare(`\n INSERT INTO messages (session_id, role, content, timestamp)\n VALUES (?, ?, ?, ?)\n `);\n\n const transaction = db.transaction(() => {\n insertSession.run(\n id,\n input.title,\n input.summary || null,\n now,\n now,\n 
input.messages.length,\n input.projectPath || null,\n input.sourceFile || null,\n input.rawData,\n input.isSidechain ? 1 : 0,\n input.isAutomated ? 1 : 0\n );\n\n for (const msg of input.messages) {\n insertMessage.run(id, msg.role, msg.content, msg.timestamp);\n }\n });\n\n transaction();\n return id;\n}\n\n/**\n * Update an existing session with new data\n */\nexport function updateSession(id: string, input: Partial<SessionInput>): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n const updates: string[] = ['updated_at = ?'];\n const values: unknown[] = [now];\n\n if (input.title !== undefined) {\n updates.push('title = ?');\n values.push(input.title);\n }\n if (input.summary !== undefined) {\n updates.push('summary = ?');\n values.push(input.summary);\n }\n if (input.rawData !== undefined) {\n updates.push('raw_data = ?');\n values.push(input.rawData);\n }\n if (input.messages !== undefined) {\n updates.push('message_count = ?');\n values.push(input.messages.length);\n }\n if (input.isSidechain !== undefined) {\n updates.push('is_sidechain = ?');\n values.push(input.isSidechain ? 1 : 0);\n }\n if (input.isAutomated !== undefined) {\n updates.push('is_automated = ?');\n values.push(input.isAutomated ? 1 : 0);\n }\n if (input.projectPath !== undefined) {\n updates.push('project_path = ?');\n values.push(input.projectPath);\n }\n\n values.push(id);\n\n const transaction = db.transaction(() => {\n db.prepare(`UPDATE sessions SET ${updates.join(', ')} WHERE id = ?`).run(...values);\n\n // If messages provided, replace them\n if (input.messages !== undefined) {\n db.prepare('DELETE FROM messages WHERE session_id = ?').run(id);\n const insertMessage = db.prepare(`\n INSERT INTO messages (session_id, role, content, timestamp)\n VALUES (?, ?, ?, ?)\n `);\n for (const msg of input.messages) {\n insertMessage.run(id, msg.role, msg.content, msg.timestamp);\n }\n }\n });\n\n transaction();\n}\n\n/**\n * Get a session by ID\n */\nexport function getSession(id: string): Session | null {\n const db = getDatabase();\n const row = db.prepare(`\n SELECT id, title, custom_title as customTitle, summary,\n created_at as createdAt, updated_at as updatedAt,\n message_count as messageCount, project_path as projectPath,\n source_file as sourceFile, raw_data as rawData,\n is_sidechain as isSidechain, is_automated as isAutomated\n FROM sessions WHERE id = ?\n `).get(id) as SessionRow | undefined;\n\n if (!row) return null;\n return mapSessionRow(row);\n}\n\n/**\n * Get a session by ID prefix (for partial ID matching)\n */\nexport function getSessionByIdPrefix(idPrefix: string): Session | null {\n const db = getDatabase();\n const row = db.prepare(`\n SELECT id, title, custom_title as customTitle, summary,\n created_at as createdAt, updated_at as updatedAt,\n message_count as messageCount, project_path as projectPath,\n source_file as sourceFile, raw_data as rawData,\n is_sidechain as isSidechain, is_automated as isAutomated\n FROM sessions WHERE id LIKE ? 
|| '%'\n LIMIT 1\n `).get(idPrefix) as SessionRow | undefined;\n\n if (!row) return null;\n return mapSessionRow(row);\n}\n\n/**\n * Get a session by source file path\n */\nexport function getSessionBySourceFile(sourceFile: string): Session | null {\n const db = getDatabase();\n const row = db.prepare(`\n SELECT id, title, custom_title as customTitle, summary,\n created_at as createdAt, updated_at as updatedAt,\n message_count as messageCount, project_path as projectPath,\n source_file as sourceFile, raw_data as rawData,\n is_sidechain as isSidechain, is_automated as isAutomated\n FROM sessions WHERE source_file = ?\n `).get(sourceFile) as SessionRow | undefined;\n\n if (!row) return null;\n return mapSessionRow(row);\n}\n\n/**\n * Get messages for a session\n */\nexport function getSessionMessages(sessionId: string): Message[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT id, session_id as sessionId, role, content, timestamp\n FROM messages WHERE session_id = ?\n ORDER BY timestamp ASC\n `).all(sessionId) as Message[];\n\n return rows;\n}\n\ntype SessionRow = {\n isSidechain: number;\n isAutomated: number;\n customTitle: string | null;\n} & Omit<Session, 'isSidechain' | 'isAutomated' | 'customTitle'>;\n\nfunction mapSessionRow(row: SessionRow): Session {\n return {\n ...row,\n customTitle: row.customTitle,\n isSidechain: row.isSidechain === 1,\n isAutomated: row.isAutomated === 1,\n };\n}\n\n/**\n * List all sessions ordered by updated_at desc\n */\nexport function listSessions(limit = 100): Session[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT id, title, custom_title as customTitle, summary,\n created_at as createdAt, updated_at as updatedAt,\n message_count as messageCount, project_path as projectPath,\n source_file as sourceFile, raw_data as rawData,\n is_sidechain as isSidechain, is_automated as isAutomated\n FROM sessions\n ORDER BY updated_at DESC\n LIMIT ?\n `).all(limit) as SessionRow[];\n\n return rows.map(mapSessionRow);\n}\n\n/**\n * Check if a session appears to be an automated/system session rather than a real human conversation\n * Uses metadata stored in the database (isSidechain, isAutomated) which was extracted from the JSONL\n */\nexport function isAutomatedSession(session: Session): boolean {\n // Use the metadata fields from the database\n if (session.isSidechain || session.isAutomated) {\n return true;\n }\n\n return false;\n}\n\n/**\n * List human sessions (filters out automated/system sessions)\n * Filters directly in SQL for better performance\n */\nexport function listHumanSessions(limit = 100): Session[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT id, title, custom_title as customTitle, summary,\n created_at as createdAt, updated_at as updatedAt,\n message_count as messageCount, project_path as projectPath,\n source_file as sourceFile, raw_data as rawData,\n is_sidechain as isSidechain, is_automated as isAutomated\n FROM sessions\n WHERE is_sidechain = 0 AND is_automated = 0\n ORDER BY updated_at DESC\n LIMIT ?\n `).all(limit) as SessionRow[];\n\n return rows.map(mapSessionRow);\n}\n\n/**\n * List human sessions filtered by project path\n * Uses LIKE to match sessions where the project_path starts with the given path\n */\nexport function listHumanSessionsByProject(projectPath: string, limit = 100): Session[] {\n const db = getDatabase();\n // Use LIKE to match sessions in this folder or subfolders\n const rows = db.prepare(`\n SELECT id, title, custom_title as customTitle, summary,\n created_at 
as createdAt, updated_at as updatedAt,\n message_count as messageCount, project_path as projectPath,\n source_file as sourceFile, raw_data as rawData,\n is_sidechain as isSidechain, is_automated as isAutomated\n FROM sessions\n WHERE is_sidechain = 0 AND is_automated = 0\n AND project_path LIKE ? || '%'\n ORDER BY updated_at DESC\n LIMIT ?\n `).all(projectPath, limit) as SessionRow[];\n\n return rows.map(mapSessionRow);\n}\n\n/**\n * Delete a session\n */\nexport function deleteSession(id: string): boolean {\n const db = getDatabase();\n\n const transaction = db.transaction(() => {\n // Delete from vector table first\n db.prepare('DELETE FROM session_embeddings WHERE session_id = ?').run(id);\n // Delete embedding state\n db.prepare('DELETE FROM embedding_state WHERE session_id = ?').run(id);\n // Delete messages (cascade should handle this, but explicit is safer)\n db.prepare('DELETE FROM messages WHERE session_id = ?').run(id);\n // Delete session\n const result = db.prepare('DELETE FROM sessions WHERE id = ?').run(id);\n return result.changes > 0;\n });\n\n return transaction();\n}\n\n/**\n * Update session summary\n */\nexport function updateSessionSummary(id: string, summary: string): void {\n const db = getDatabase();\n db.prepare(`\n UPDATE sessions SET summary = ?, updated_at = ? WHERE id = ?\n `).run(summary, new Date().toISOString(), id);\n}\n\n/**\n * Rename a session (set custom title)\n */\nexport function renameSession(id: string, customTitle: string | null): void {\n const db = getDatabase();\n db.prepare(`\n UPDATE sessions SET custom_title = ?, updated_at = ? WHERE id = ?\n `).run(customTitle, new Date().toISOString(), id);\n}\n\n/**\n * Get storage statistics\n */\nexport function getStats(): {\n sessionCount: number;\n messageCount: number;\n embeddingCount: number;\n} {\n const db = getDatabase();\n\n const sessionCount = (db.prepare('SELECT COUNT(*) as count FROM sessions').get() as { count: number }).count;\n const messageCount = (db.prepare('SELECT COUNT(*) as count FROM messages').get() as { count: number }).count;\n const embeddingCount = (db.prepare('SELECT COUNT(*) as count FROM session_embeddings').get() as { count: number }).count;\n\n return { sessionCount, messageCount, embeddingCount };\n}\n\n/**\n * Check if a session exists by its raw data hash (to avoid duplicates)\n */\nexport function sessionExists(rawData: string): boolean {\n const db = getDatabase();\n const row = db.prepare('SELECT 1 FROM sessions WHERE raw_data = ? 
LIMIT 1').get(rawData);\n return !!row;\n}\n\n/**\n * Get embedding state for a session\n */\nexport function getEmbeddingState(sessionId: string): EmbeddingState | null {\n const db = getDatabase();\n const row = db.prepare(`\n SELECT session_id as sessionId, content_length as contentLength,\n file_mtime as fileMtime, last_embedded_at as lastEmbeddedAt\n FROM embedding_state WHERE session_id = ?\n `).get(sessionId) as EmbeddingState | undefined;\n\n return row || null;\n}\n\n/**\n * Update embedding state for a session\n */\nexport function updateEmbeddingState(\n sessionId: string,\n contentLength: number,\n fileMtime?: string\n): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n db.prepare(`\n INSERT OR REPLACE INTO embedding_state (session_id, content_length, file_mtime, last_embedded_at)\n VALUES (?, ?, ?, ?)\n `).run(sessionId, contentLength, fileMtime || null, now);\n}\n\n/**\n * Check if session needs re-embedding based on content growth\n * Returns true if content has grown by more than threshold chars\n */\nexport function needsReembedding(sessionId: string, currentContentLength: number, threshold = 500): boolean {\n const state = getEmbeddingState(sessionId);\n if (!state) return true; // Never embedded\n return currentContentLength - state.contentLength >= threshold;\n}\n\n/**\n * Get all sessions that need embedding\n */\nexport function getSessionsNeedingEmbedding(): Session[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT s.id, s.title, s.custom_title as customTitle, s.summary,\n s.created_at as createdAt, s.updated_at as updatedAt,\n s.message_count as messageCount, s.project_path as projectPath,\n s.source_file as sourceFile, s.raw_data as rawData,\n s.is_sidechain as isSidechain, s.is_automated as isAutomated\n FROM sessions s\n LEFT JOIN embedding_state e ON s.id = e.session_id\n WHERE e.session_id IS NULL\n ORDER BY s.updated_at DESC\n `).all() as SessionRow[];\n\n return rows.map(mapSessionRow);\n}\n\n// ==================== FAVORITES ====================\n\nexport type FavoriteType = 'session' | 'folder';\n\nexport interface Favorite {\n id: number;\n type: FavoriteType;\n value: string;\n createdAt: string;\n}\n\n/**\n * Add a favorite (session or folder)\n */\nexport function addFavorite(type: FavoriteType, value: string): boolean {\n const db = getDatabase();\n try {\n db.prepare(`\n INSERT OR IGNORE INTO favorites (type, value, created_at)\n VALUES (?, ?, ?)\n `).run(type, value, new Date().toISOString());\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Remove a favorite\n */\nexport function removeFavorite(type: FavoriteType, value: string): boolean {\n const db = getDatabase();\n const result = db.prepare(`\n DELETE FROM favorites WHERE type = ? AND value = ?\n `).run(type, value);\n return result.changes > 0;\n}\n\n/**\n * Toggle a favorite (add if not exists, remove if exists)\n */\nexport function toggleFavorite(type: FavoriteType, value: string): boolean {\n if (isFavorite(type, value)) {\n removeFavorite(type, value);\n return false; // No longer a favorite\n } else {\n addFavorite(type, value);\n return true; // Now a favorite\n }\n}\n\n/**\n * Check if something is a favorite\n */\nexport function isFavorite(type: FavoriteType, value: string): boolean {\n const db = getDatabase();\n const row = db.prepare(`\n SELECT 1 FROM favorites WHERE type = ? 
AND value = ?\n `).get(type, value);\n return !!row;\n}\n\n/**\n * Get all favorites of a type\n */\nexport function getFavorites(type: FavoriteType): Favorite[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT id, type, value, created_at as createdAt\n FROM favorites\n WHERE type = ?\n ORDER BY created_at DESC\n `).all(type) as Favorite[];\n return rows;\n}\n\n/**\n * Get all favorite session IDs\n */\nexport function getFavoriteSessionIds(): Set<string> {\n const favorites = getFavorites('session');\n return new Set(favorites.map(f => f.value));\n}\n\n/**\n * Get all favorite folder paths\n */\nexport function getFavoriteFolderPaths(): Set<string> {\n const favorites = getFavorites('folder');\n return new Set(favorites.map(f => f.value));\n}\n\n/**\n * Check if there are any favorite folders\n */\nexport function hasFavoriteFolders(): boolean {\n const db = getDatabase();\n const row = db.prepare(`\n SELECT 1 FROM favorites WHERE type = 'folder' LIMIT 1\n `).get();\n return !!row;\n}\n\n/**\n * Check if there are any favorite sessions\n */\nexport function hasFavoriteSessions(): boolean {\n const db = getDatabase();\n const row = db.prepare(`\n SELECT 1 FROM favorites WHERE type = 'session' LIMIT 1\n `).get();\n return !!row;\n}\n\n// ==================== PROJECT ORDER ====================\n\nexport interface ProjectOrder {\n path: string;\n sortOrder: number;\n updatedAt: string;\n}\n\n/**\n * Get the sort order for all projects\n * Returns a map of project path -> sort order\n */\nexport function getProjectOrders(): Map<string, number> {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT path, sort_order as sortOrder\n FROM project_order\n ORDER BY sort_order ASC\n `).all() as { path: string; sortOrder: number }[];\n\n const map = new Map<string, number>();\n for (const row of rows) {\n map.set(row.path, row.sortOrder);\n }\n return map;\n}\n\n/**\n * Set the sort order for a project\n */\nexport function setProjectOrder(path: string, sortOrder: number): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n db.prepare(`\n INSERT OR REPLACE INTO project_order (path, sort_order, updated_at)\n VALUES (?, ?, ?)\n `).run(path, sortOrder, now);\n}\n\n/**\n * Update project orders for multiple projects at once (for reordering)\n */\nexport function updateProjectOrders(orders: { path: string; sortOrder: number }[]): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n const stmt = db.prepare(`\n INSERT OR REPLACE INTO project_order (path, sort_order, updated_at)\n VALUES (?, ?, ?)\n `);\n\n const transaction = db.transaction(() => {\n for (const order of orders) {\n stmt.run(order.path, order.sortOrder, now);\n }\n });\n\n transaction();\n}\n\n/**\n * Remove project order (revert to default sorting for this project)\n */\nexport function removeProjectOrder(path: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM project_order WHERE path = ?').run(path);\n}\n\n/**\n * Check if any custom project orders exist\n */\nexport function hasCustomProjectOrder(): boolean {\n const db = getDatabase();\n const row = db.prepare(`\n SELECT 1 FROM project_order LIMIT 1\n `).get();\n return !!row;\n}\n","import { getDatabase } from './index.js';\nimport type { Session } from './sessions.js';\n\n/**\n * Store an embedding for a session\n */\nexport function storeEmbedding(sessionId: string, embedding: number[]): void {\n const db = getDatabase();\n\n // sqlite-vec expects JSON array format\n const 
embeddingJson = JSON.stringify(embedding);\n\n db.prepare(`\n INSERT OR REPLACE INTO session_embeddings (session_id, embedding)\n VALUES (?, ?)\n `).run(sessionId, embeddingJson);\n}\n\n/**\n * Search for similar sessions using vector similarity\n */\nexport function searchSimilar(queryEmbedding: number[], limit = 10): Array<{ sessionId: string; distance: number }> {\n const db = getDatabase();\n\n const embeddingJson = JSON.stringify(queryEmbedding);\n\n // Use vec_distance_L2 for Euclidean distance (lower = more similar)\n const rows = db.prepare(`\n SELECT session_id as sessionId, vec_distance_L2(embedding, ?) as distance\n FROM session_embeddings\n ORDER BY distance ASC\n LIMIT ?\n `).all(embeddingJson, limit) as Array<{ sessionId: string; distance: number }>;\n\n return rows;\n}\n\n/**\n * Search sessions and return full session objects\n */\nexport function searchSessions(queryEmbedding: number[], limit = 10): Session[] {\n const db = getDatabase();\n\n const embeddingJson = JSON.stringify(queryEmbedding);\n\n const rows = db.prepare(`\n SELECT s.id, s.title, s.summary, s.created_at as createdAt, s.updated_at as updatedAt,\n s.message_count as messageCount, s.project_path as projectPath, s.raw_data as rawData,\n vec_distance_L2(e.embedding, ?) as distance\n FROM session_embeddings e\n JOIN sessions s ON e.session_id = s.id\n ORDER BY distance ASC\n LIMIT ?\n `).all(embeddingJson, limit) as Session[];\n\n return rows;\n}\n\n/**\n * Delete embedding for a session\n */\nexport function deleteEmbedding(sessionId: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM session_embeddings WHERE session_id = ?').run(sessionId);\n}\n\n/**\n * Check if session has an embedding\n */\nexport function hasEmbedding(sessionId: string): boolean {\n const db = getDatabase();\n const row = db.prepare('SELECT 1 FROM session_embeddings WHERE session_id = ? 
LIMIT 1').get(sessionId);\n return !!row;\n}\n","import { existsSync } from 'fs';\nimport { join } from 'path';\nimport {\n EMBEDDING_MODEL,\n MODELS_DIR,\n MAX_EMBEDDING_CHARS,\n MAX_MESSAGE_PREVIEW_CHARS,\n ensureModelsDir,\n} from '../utils/config.js';\n\n// Lazy load transformers.js\nlet transformersModule: typeof import('@xenova/transformers') | null = null;\nlet pipeline: any = null;\nlet initialized = false;\n\nasync function getTransformers() {\n if (!transformersModule) {\n transformersModule = await import('@xenova/transformers');\n }\n return transformersModule;\n}\n\nexport interface InitProgress {\n status: 'checking' | 'downloading' | 'loading' | 'ready';\n file?: string;\n progress?: number;\n}\n\n/**\n * Check if the model is already cached locally\n */\nexport function isModelCached(): boolean {\n // Transformers.js stores models at models/org/model-name/\n const modelCachePath = join(MODELS_DIR, EMBEDDING_MODEL);\n return existsSync(modelCachePath);\n}\n\n/**\n * Initialize the embedding pipeline\n * Downloads the model on first use with progress callback\n */\nexport async function initializeEmbeddings(\n onProgress?: (progress: InitProgress) => void\n): Promise<void> {\n if (initialized && pipeline) {\n return;\n }\n\n ensureModelsDir();\n\n const { pipeline: createPipeline, env } = await getTransformers();\n\n // Configure cache directory\n env.cacheDir = MODELS_DIR;\n\n // Check if model is cached\n const cached = isModelCached();\n if (cached) {\n env.allowRemoteModels = false; // Use local cache only\n onProgress?.({ status: 'loading' });\n } else {\n onProgress?.({ status: 'downloading' });\n }\n\n // Create the pipeline with progress callback\n pipeline = await createPipeline('feature-extraction', EMBEDDING_MODEL, {\n progress_callback: onProgress\n ? 
(progress: { status: string; file?: string; progress?: number }) => {\n if (progress.status === 'progress' && progress.file && progress.progress !== undefined) {\n onProgress({\n status: 'downloading',\n file: progress.file,\n progress: progress.progress,\n });\n }\n }\n : undefined,\n });\n\n initialized = true;\n onProgress?.({ status: 'ready' });\n}\n\n/**\n * Check if embeddings are ready to use\n */\nexport function isReady(): boolean {\n return initialized && pipeline !== null;\n}\n\n/**\n * Generate an embedding vector for text\n */\nexport async function getEmbedding(text: string): Promise<number[]> {\n // Initialize if needed (will use cached model if available)\n if (!initialized) {\n await initializeEmbeddings();\n }\n\n // Truncate to max chars\n const truncated = text.slice(0, MAX_EMBEDDING_CHARS);\n\n // Generate embedding\n const output = await pipeline(truncated, {\n pooling: 'mean',\n normalize: true,\n });\n\n // Convert to regular array\n return Array.from(output.data);\n}\n\n/**\n * Create embedding text from session data\n * Combines title, summary, and key messages for semantic search\n */\nexport function createEmbeddingText(\n title: string,\n summary: string | undefined,\n messages: Array<{ role: string; content: string }>\n): string {\n const parts: string[] = [];\n\n // Add title\n parts.push(`Title: ${title}`);\n\n // Add summary if available\n if (summary) {\n parts.push(`Summary: ${summary}`);\n }\n\n // Add first 5 user messages (truncated)\n const userMessages = messages\n .filter((m) => m.role === 'user')\n .slice(0, 5)\n .map((m) => m.content.slice(0, MAX_MESSAGE_PREVIEW_CHARS));\n\n if (userMessages.length > 0) {\n parts.push('User messages:');\n parts.push(...userMessages);\n }\n\n // Add first 3 assistant responses (truncated) for context\n const assistantMessages = messages\n .filter((m) => m.role === 'assistant')\n .slice(0, 3)\n .map((m) => m.content.slice(0, MAX_MESSAGE_PREVIEW_CHARS));\n\n if (assistantMessages.length > 0) {\n parts.push('Assistant responses:');\n parts.push(...assistantMessages);\n }\n\n return parts.join('\\n\\n');\n}\n","/**\n * Format a timestamp into a human-readable relative time\n */\nexport function formatTimeAgo(timestamp: string): string {\n const date = new Date(timestamp);\n const now = new Date();\n const diffMs = now.getTime() - date.getTime();\n const diffSecs = Math.floor(diffMs / 1000);\n const diffMins = Math.floor(diffSecs / 60);\n const diffHours = Math.floor(diffMins / 60);\n const diffDays = Math.floor(diffHours / 24);\n const diffWeeks = Math.floor(diffDays / 7);\n const diffMonths = Math.floor(diffDays / 30);\n\n if (diffSecs < 60) return 'just now';\n if (diffMins < 60) return `${diffMins}m ago`;\n if (diffHours < 24) return `${diffHours}h ago`;\n if (diffDays < 7) return `${diffDays}d ago`;\n if (diffWeeks < 4) return `${diffWeeks}w ago`;\n return `${diffMonths}mo ago`;\n}\n\n/**\n * Truncate text to a maximum length with ellipsis\n */\nexport function truncate(text: string, maxLength: number): string {\n if (text.length <= maxLength) return text;\n return text.slice(0, maxLength - 3) + '...';\n}\n\n/**\n * Generate a short ID from a longer string\n */\nexport function shortId(id: string): string {\n return id.slice(0, 8);\n}\n\n/**\n * Format a number with thousands separator\n */\nexport function formatNumber(num: number): string {\n return num.toLocaleString();\n}\n\n/**\n * Format bytes into human-readable size\n */\nexport function formatBytes(bytes: number): string {\n if (bytes === 0) return '0 
B';\n const k = 1024;\n const sizes = ['B', 'KB', 'MB', 'GB'];\n const i = Math.floor(Math.log(bytes) / Math.log(k));\n return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i];\n}\n\n/**\n * Generate a title from message content\n */\nexport function generateTitle(content: string): string {\n // Take first line or first 50 chars\n const firstLine = content.split('\\n')[0].trim();\n const title = truncate(firstLine, 50);\n return title || 'Untitled Session';\n}\n","/**\n * Lessons Database Operations\n *\n * CRUD operations for lessons and synthesis queue.\n */\n\nimport { randomUUID } from 'crypto';\nimport { getDatabase } from './index.js';\nimport type {\n Lesson,\n CreateLessonInput,\n UpdateLessonInput,\n LessonCategory,\n LessonSearchOptions,\n LessonFeedback,\n SynthesisQueueItem,\n} from '../learning/types.js';\n\n// ==================== LESSON ROW MAPPING ====================\n\ninterface LessonRow {\n id: string;\n project_path: string;\n category: LessonCategory;\n title: string;\n trigger_context: string;\n insight: string;\n reasoning: string | null;\n confidence: number;\n times_applied: number;\n times_validated: number;\n times_rejected: number;\n source_session_id: string | null;\n source_type: string;\n archived: number;\n created_at: string;\n updated_at: string;\n last_applied_at: string | null;\n}\n\nfunction mapLessonRow(row: LessonRow): Lesson {\n return {\n id: row.id,\n projectPath: row.project_path,\n category: row.category,\n title: row.title,\n triggerContext: row.trigger_context,\n insight: row.insight,\n reasoning: row.reasoning ?? undefined,\n confidence: row.confidence,\n timesApplied: row.times_applied,\n timesValidated: row.times_validated,\n timesRejected: row.times_rejected,\n sourceSessionId: row.source_session_id ?? undefined,\n sourceType: row.source_type as Lesson['sourceType'],\n archived: row.archived === 1,\n createdAt: row.created_at,\n updatedAt: row.updated_at,\n lastAppliedAt: row.last_applied_at ?? undefined,\n };\n}\n\n// ==================== LESSON CRUD ====================\n\n/**\n * Create a new lesson\n */\nexport function createLesson(input: CreateLessonInput): Lesson {\n const db = getDatabase();\n const id = randomUUID();\n const now = new Date().toISOString();\n\n db.prepare(`\n INSERT INTO lessons (\n id, project_path, category, title, trigger_context, insight,\n reasoning, confidence, source_session_id, source_type,\n created_at, updated_at\n )\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `).run(\n id,\n input.projectPath,\n input.category,\n input.title,\n input.triggerContext,\n input.insight,\n input.reasoning ?? null,\n input.confidence ?? 0.5,\n input.sourceSessionId ?? null,\n input.sourceType ?? 
'synthesized',\n now,\n now\n );\n\n return getLesson(id)!;\n}\n\n/**\n * Update an existing lesson\n */\nexport function updateLesson(id: string, updates: UpdateLessonInput): Lesson | null {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n const fields: string[] = ['updated_at = ?'];\n const values: unknown[] = [now];\n\n if (updates.category !== undefined) {\n fields.push('category = ?');\n values.push(updates.category);\n }\n if (updates.title !== undefined) {\n fields.push('title = ?');\n values.push(updates.title);\n }\n if (updates.triggerContext !== undefined) {\n fields.push('trigger_context = ?');\n values.push(updates.triggerContext);\n }\n if (updates.insight !== undefined) {\n fields.push('insight = ?');\n values.push(updates.insight);\n }\n if (updates.reasoning !== undefined) {\n fields.push('reasoning = ?');\n values.push(updates.reasoning);\n }\n if (updates.confidence !== undefined) {\n fields.push('confidence = ?');\n values.push(updates.confidence);\n }\n if (updates.archived !== undefined) {\n fields.push('archived = ?');\n values.push(updates.archived ? 1 : 0);\n }\n\n values.push(id);\n\n db.prepare(`\n UPDATE lessons SET ${fields.join(', ')} WHERE id = ?\n `).run(...values);\n\n return getLesson(id);\n}\n\n/**\n * Delete a lesson\n */\nexport function deleteLesson(id: string): boolean {\n const db = getDatabase();\n\n const transaction = db.transaction(() => {\n // Delete embedding\n db.prepare('DELETE FROM lesson_embeddings WHERE lesson_id = ?').run(id);\n // Delete feedback\n db.prepare('DELETE FROM lesson_feedback WHERE lesson_id = ?').run(id);\n // Delete lesson\n const result = db.prepare('DELETE FROM lessons WHERE id = ?').run(id);\n return result.changes > 0;\n });\n\n return transaction();\n}\n\n/**\n * Archive a lesson (soft delete)\n */\nexport function archiveLesson(id: string): void {\n const db = getDatabase();\n db.prepare(`\n UPDATE lessons SET archived = 1, updated_at = ? WHERE id = ?\n `).run(new Date().toISOString(), id);\n}\n\n/**\n * Unarchive a lesson\n */\nexport function unarchiveLesson(id: string): void {\n const db = getDatabase();\n db.prepare(`\n UPDATE lessons SET archived = 0, updated_at = ? WHERE id = ?\n `).run(new Date().toISOString(), id);\n}\n\n/**\n * Get a lesson by ID\n */\nexport function getLesson(id: string): Lesson | null {\n const db = getDatabase();\n const row = db.prepare(`\n SELECT * FROM lessons WHERE id = ?\n `).get(id) as LessonRow | undefined;\n\n return row ? mapLessonRow(row) : null;\n}\n\n/**\n * Get lessons by project path with filtering options\n */\nexport function getLessonsByProject(\n projectPath: string,\n options: LessonSearchOptions = {}\n): Lesson[] {\n const db = getDatabase();\n\n let query = 'SELECT * FROM lessons WHERE project_path = ?';\n const params: unknown[] = [projectPath];\n\n if (options.category) {\n query += ' AND category = ?';\n params.push(options.category);\n }\n\n if (options.archived !== undefined) {\n query += ' AND archived = ?';\n params.push(options.archived ? 
1 : 0);\n } else {\n // Default to non-archived\n query += ' AND archived = 0';\n }\n\n if (options.minConfidence !== undefined) {\n query += ' AND confidence >= ?';\n params.push(options.minConfidence);\n }\n\n query += ' ORDER BY confidence DESC, times_applied DESC';\n\n if (options.limit) {\n query += ' LIMIT ?';\n params.push(options.limit);\n }\n\n const rows = db.prepare(query).all(...params) as LessonRow[];\n return rows.map(mapLessonRow);\n}\n\n/**\n * Get core lessons - high confidence, frequently validated\n */\nexport function getCoreLessons(projectPath: string, limit: number = 3): Lesson[] {\n const db = getDatabase();\n\n const rows = db.prepare(`\n SELECT * FROM lessons\n WHERE project_path = ?\n AND archived = 0\n AND confidence >= 0.7\n ORDER BY\n times_validated DESC,\n confidence DESC,\n times_applied DESC\n LIMIT ?\n `).all(projectPath, limit) as LessonRow[];\n\n return rows.map(mapLessonRow);\n}\n\n/**\n * Get all lessons (for admin/debugging)\n */\nexport function getAllLessons(options: LessonSearchOptions = {}): Lesson[] {\n const db = getDatabase();\n\n let query = 'SELECT * FROM lessons WHERE 1=1';\n const params: unknown[] = [];\n\n if (options.category) {\n query += ' AND category = ?';\n params.push(options.category);\n }\n\n if (options.archived !== undefined) {\n query += ' AND archived = ?';\n params.push(options.archived ? 1 : 0);\n }\n\n if (options.minConfidence !== undefined) {\n query += ' AND confidence >= ?';\n params.push(options.minConfidence);\n }\n\n query += ' ORDER BY updated_at DESC';\n\n if (options.limit) {\n query += ' LIMIT ?';\n params.push(options.limit);\n }\n\n const rows = db.prepare(query).all(...params) as LessonRow[];\n return rows.map(mapLessonRow);\n}\n\n// ==================== LESSON APPLICATION/FEEDBACK ====================\n\n/**\n * Record that a lesson was applied (shown to user)\n */\nexport function recordLessonApplication(id: string): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n db.prepare(`\n UPDATE lessons\n SET times_applied = times_applied + 1,\n last_applied_at = ?,\n updated_at = ?\n WHERE id = ?\n `).run(now, now, id);\n}\n\n/**\n * Record validation feedback for a lesson\n */\nexport function recordLessonValidation(id: string, sessionId?: string, comment?: string): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n const transaction = db.transaction(() => {\n // Update lesson counter\n db.prepare(`\n UPDATE lessons\n SET times_validated = times_validated + 1, updated_at = ?\n WHERE id = ?\n `).run(now, id);\n\n // Record feedback\n db.prepare(`\n INSERT INTO lesson_feedback (lesson_id, session_id, feedback_type, comment)\n VALUES (?, ?, 'validated', ?)\n `).run(id, sessionId ?? null, comment ?? null);\n });\n\n transaction();\n}\n\n/**\n * Record rejection feedback for a lesson\n */\nexport function recordLessonRejection(id: string, sessionId?: string, comment?: string): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n const transaction = db.transaction(() => {\n // Update lesson counter\n db.prepare(`\n UPDATE lessons\n SET times_rejected = times_rejected + 1, updated_at = ?\n WHERE id = ?\n `).run(now, id);\n\n // Record feedback\n db.prepare(`\n INSERT INTO lesson_feedback (lesson_id, session_id, feedback_type, comment)\n VALUES (?, ?, 'rejected', ?)\n `).run(id, sessionId ?? null, comment ?? 
null);\n });\n\n transaction();\n}\n\n/**\n * Get feedback for a lesson\n */\nexport function getLessonFeedback(lessonId: string): LessonFeedback[] {\n const db = getDatabase();\n\n const rows = db.prepare(`\n SELECT\n id,\n lesson_id as lessonId,\n session_id as sessionId,\n feedback_type as feedbackType,\n comment,\n created_at as createdAt\n FROM lesson_feedback\n WHERE lesson_id = ?\n ORDER BY created_at DESC\n `).all(lessonId) as LessonFeedback[];\n\n return rows;\n}\n\n// ==================== LESSON EMBEDDINGS ====================\n\n/**\n * Store embedding for a lesson\n */\nexport function storeLessonEmbedding(lessonId: string, embedding: number[]): void {\n const db = getDatabase();\n\n // Delete existing embedding if any\n db.prepare('DELETE FROM lesson_embeddings WHERE lesson_id = ?').run(lessonId);\n\n // Insert new embedding\n db.prepare(`\n INSERT INTO lesson_embeddings (lesson_id, embedding)\n VALUES (?, ?)\n `).run(lessonId, JSON.stringify(embedding));\n}\n\n/**\n * Lesson with similarity distance (lower is more similar)\n */\nexport interface LessonWithDistance {\n lesson: Lesson;\n distance: number;\n}\n\n/**\n * Search lessons by embedding similarity\n */\nexport function searchLessonsByEmbedding(\n embedding: number[],\n projectPath: string,\n limit: number = 5\n): Lesson[] {\n return searchLessonsByEmbeddingWithDistance(embedding, projectPath, limit)\n .map(r => r.lesson);\n}\n\n/**\n * Search lessons by embedding similarity, returning distance scores\n * Distance is L2 (Euclidean) - lower is more similar\n */\nexport function searchLessonsByEmbeddingWithDistance(\n embedding: number[],\n projectPath: string,\n limit: number = 5\n): LessonWithDistance[] {\n const db = getDatabase();\n\n // Use sqlite-vec for vector similarity search\n const rows = db.prepare(`\n SELECT\n l.*,\n le.distance\n FROM lesson_embeddings le\n JOIN lessons l ON l.id = le.lesson_id\n WHERE l.project_path = ?\n AND l.archived = 0\n AND le.embedding MATCH ?\n ORDER BY le.distance ASC\n LIMIT ?\n `).all(projectPath, JSON.stringify(embedding), limit) as (LessonRow & { distance: number })[];\n\n return rows.map(row => ({\n lesson: mapLessonRow(row),\n distance: row.distance,\n }));\n}\n\n/**\n * Search lessons globally by embedding (across all projects)\n */\nexport function searchAllLessonsByEmbedding(\n embedding: number[],\n limit: number = 10\n): Lesson[] {\n const db = getDatabase();\n\n const rows = db.prepare(`\n SELECT\n l.*,\n le.distance\n FROM lesson_embeddings le\n JOIN lessons l ON l.id = le.lesson_id\n WHERE l.archived = 0\n AND le.embedding MATCH ?\n ORDER BY le.distance ASC\n LIMIT ?\n `).all(JSON.stringify(embedding), limit) as (LessonRow & { distance: number })[];\n\n return rows.map(row => mapLessonRow(row));\n}\n\n// ==================== SYNTHESIS QUEUE ====================\n\n/**\n * Queue a session for synthesis\n */\nexport function queueForSynthesis(sessionId: string, projectPath: string): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n db.prepare(`\n INSERT OR REPLACE INTO synthesis_queue (session_id, project_path, queued_at, status)\n VALUES (?, ?, ?, 'pending')\n `).run(sessionId, projectPath, now);\n}\n\n/**\n * Get pending synthesis queue items\n */\nexport function getPendingSynthesis(limit: number = 5): SynthesisQueueItem[] {\n const db = getDatabase();\n\n const rows = db.prepare(`\n SELECT\n id,\n session_id as sessionId,\n project_path as projectPath,\n queued_at as queuedAt,\n status,\n processed_at as processedAt,\n lessons_created 
as lessonsCreated,\n error\n FROM synthesis_queue\n WHERE status = 'pending'\n ORDER BY queued_at ASC\n LIMIT ?\n `).all(limit) as SynthesisQueueItem[];\n\n return rows;\n}\n\n/**\n * Mark synthesis as in progress\n */\nexport function markSynthesisProcessing(id: number): void {\n const db = getDatabase();\n db.prepare(`\n UPDATE synthesis_queue SET status = 'processing' WHERE id = ?\n `).run(id);\n}\n\n/**\n * Mark synthesis as complete\n */\nexport function markSynthesisComplete(id: number, lessonsCreated: number): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n db.prepare(`\n UPDATE synthesis_queue\n SET status = 'completed', processed_at = ?, lessons_created = ?\n WHERE id = ?\n `).run(now, lessonsCreated, id);\n}\n\n/**\n * Mark synthesis as failed\n */\nexport function markSynthesisFailed(id: number, error: string): void {\n const db = getDatabase();\n const now = new Date().toISOString();\n\n db.prepare(`\n UPDATE synthesis_queue\n SET status = 'failed', processed_at = ?, error = ?\n WHERE id = ?\n `).run(now, error, id);\n}\n\n/**\n * Get synthesis queue stats\n */\nexport function getSynthesisStats(): {\n pending: number;\n processing: number;\n completed: number;\n failed: number;\n totalLessonsCreated: number;\n} {\n const db = getDatabase();\n\n const pending = (db.prepare(`\n SELECT COUNT(*) as count FROM synthesis_queue WHERE status = 'pending'\n `).get() as { count: number }).count;\n\n const processing = (db.prepare(`\n SELECT COUNT(*) as count FROM synthesis_queue WHERE status = 'processing'\n `).get() as { count: number }).count;\n\n const completed = (db.prepare(`\n SELECT COUNT(*) as count FROM synthesis_queue WHERE status = 'completed'\n `).get() as { count: number }).count;\n\n const failed = (db.prepare(`\n SELECT COUNT(*) as count FROM synthesis_queue WHERE status = 'failed'\n `).get() as { count: number }).count;\n\n const totalLessonsCreated = (db.prepare(`\n SELECT COALESCE(SUM(lessons_created), 0) as total FROM synthesis_queue WHERE status = 'completed'\n `).get() as { total: number }).total;\n\n return { pending, processing, completed, failed, totalLessonsCreated };\n}\n\n// ==================== LESSON STATS ====================\n\n/**\n * Get lesson statistics\n */\nexport function getLessonStats(): {\n totalLessons: number;\n activeLessons: number;\n archivedLessons: number;\n byCategory: Record<LessonCategory, number>;\n avgConfidence: number;\n} {\n const db = getDatabase();\n\n const totalLessons = (db.prepare(`\n SELECT COUNT(*) as count FROM lessons\n `).get() as { count: number }).count;\n\n const activeLessons = (db.prepare(`\n SELECT COUNT(*) as count FROM lessons WHERE archived = 0\n `).get() as { count: number }).count;\n\n const archivedLessons = totalLessons - activeLessons;\n\n const categoryRows = db.prepare(`\n SELECT category, COUNT(*) as count\n FROM lessons\n WHERE archived = 0\n GROUP BY category\n `).all() as { category: LessonCategory; count: number }[];\n\n const byCategory: Record<LessonCategory, number> = {\n architecture_decision: 0,\n anti_pattern: 0,\n bug_pattern: 0,\n project_convention: 0,\n dependency_knowledge: 0,\n domain_knowledge: 0,\n workflow: 0,\n other: 0,\n };\n\n for (const row of categoryRows) {\n byCategory[row.category] = row.count;\n }\n\n const avgConfidence = (db.prepare(`\n SELECT COALESCE(AVG(confidence), 0) as avg FROM lessons WHERE archived = 0\n `).get() as { avg: number }).avg;\n\n return {\n totalLessons,\n activeLessons,\n archivedLessons,\n byCategory,\n avgConfidence,\n 
};\n}\n\n/**\n * Get queue status (alias for getSynthesisStats)\n */\nexport function getQueueStatus() {\n return getSynthesisStats();\n}\n\n// ==================== SESSION INJECTION CACHE ====================\n\n/**\n * Get lesson IDs that have already been injected in this session\n */\nexport function getInjectedLessonIds(sessionId: string): Set<string> {\n const db = getDatabase();\n\n const rows = db.prepare(`\n SELECT lesson_id FROM session_injections WHERE session_id = ?\n `).all(sessionId) as { lesson_id: string }[];\n\n return new Set(rows.map(row => row.lesson_id));\n}\n\n/**\n * Record that lessons were injected in a session\n */\nexport function recordInjectedLessons(sessionId: string, lessonIds: string[]): void {\n if (lessonIds.length === 0) return;\n\n const db = getDatabase();\n const now = new Date().toISOString();\n\n const stmt = db.prepare(`\n INSERT OR IGNORE INTO session_injections (session_id, lesson_id, injected_at)\n VALUES (?, ?, ?)\n `);\n\n const transaction = db.transaction(() => {\n for (const lessonId of lessonIds) {\n stmt.run(sessionId, lessonId, now);\n }\n });\n\n transaction();\n}\n\n/**\n * Clear injection cache for a session (e.g., on compaction)\n */\nexport function clearSessionInjections(sessionId: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM session_injections WHERE session_id = ?').run(sessionId);\n}\n\n/**\n * Clean up old session injection records (sessions older than N days)\n */\nexport function cleanupOldInjections(daysOld: number = 7): number {\n const db = getDatabase();\n const cutoff = new Date();\n cutoff.setDate(cutoff.getDate() - daysOld);\n\n const result = db.prepare(`\n DELETE FROM session_injections WHERE injected_at < ?\n `).run(cutoff.toISOString());\n\n return result.changes;\n}\n\n/**\n * Get distinct project paths from lessons\n */\nexport function getDistinctProjects(): string[] {\n const db = getDatabase();\n\n const rows = db.prepare(`\n SELECT DISTINCT project_path\n FROM lessons\n WHERE project_path IS NOT NULL AND project_path != ''\n ORDER BY project_path ASC\n `).all() as { project_path: string }[];\n\n return rows.map(row => 
row.project_path);\n}\n"],"mappings":"[base64 VLQ mapping data omitted — machine-generated source-map encoding, not human-readable]","names":["statSync","existsSync","readFileSync","readdirSync","join","dirname","basename","existsSync","content","existsSync","existsSync","db","db","db","db","existsSync","join","join","existsSync","db","db","dirname","join","existsSync","readdirSync","readFileSync","statSync","basename"]}
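For orientation, the lessons API embedded in this source map (src/db/lessons.ts together with src/embeddings/index.ts) composes into a create → embed → search → feedback loop. The sketch below is a minimal, hypothetical usage example assembled only from the function signatures visible above; the import paths, project path, and lesson text are assumptions, and these internal modules are not necessarily exposed as a public API of the package.

    // sketch.ts — hypothetical usage of the internal lessons API.
    // Import paths assume the package's internal layout and are NOT a documented entry point.
    import {
      createLesson,
      storeLessonEmbedding,
      searchLessonsByEmbeddingWithDistance,
      recordLessonApplication,
    } from './db/lessons.js';
    import { getEmbedding } from './embeddings/index.js';

    async function demo(): Promise<void> {
      // Create a lesson; per the INSERT above, confidence defaults to 0.5
      // and sourceType to 'synthesized' when omitted.
      const lesson = createLesson({
        projectPath: '/home/user/my-project', // hypothetical path
        category: 'project_convention',
        title: 'Use better-sqlite3 transactions for multi-statement writes',
        triggerContext: 'Editing database helper functions',
        insight: 'Wrap related INSERT/DELETE statements in db.transaction().',
      });

      // Embed the lesson text so it becomes searchable via sqlite-vec.
      const vector = await getEmbedding(`${lesson.title}\n${lesson.insight}`);
      storeLessonEmbedding(lesson.id, vector);

      // Later: retrieve the most similar non-archived lessons for the project.
      // Distance is L2 (Euclidean) — lower means more similar.
      const query = await getEmbedding('how should I batch SQLite writes?');
      const hits = searchLessonsByEmbeddingWithDistance(query, '/home/user/my-project', 5);
      for (const { lesson: hit, distance } of hits) {
        console.log(distance.toFixed(3), hit.title);
        recordLessonApplication(hit.id); // increments times_applied
      }
    }

    demo().catch(console.error);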