kontext-engine 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/cli/index.ts","../../src/cli/commands/init.ts","../../src/indexer/discovery.ts","../../src/indexer/incremental.ts","../../src/indexer/parser.ts","../../src/indexer/chunker.ts","../../src/indexer/embedder.ts","../../src/utils/errors.ts","../../src/utils/error-boundary.ts","../../src/utils/logger.ts","../../src/storage/db.ts","../../src/storage/schema.ts","../../src/storage/vectors.ts","../../src/cli/commands/query.ts","../../src/search/vector.ts","../../src/search/fts.ts","../../src/search/ast.ts","../../src/search/path.ts","../../src/search/fusion.ts","../../src/cli/commands/ask.ts","../../src/steering/llm.ts","../../src/cli/commands/find.ts","../../src/cli/commands/update.ts","../../src/cli/commands/watch.ts","../../src/watcher/watcher.ts","../../src/cli/commands/status.ts","../../src/cli/commands/symbols.ts","../../src/cli/commands/deps.ts","../../src/cli/commands/chunk.ts","../../src/cli/commands/config.ts","../../src/cli/commands/auth.ts"],"sourcesContent":["import { Command } from \"commander\";\nimport { registerInitCommand } from \"./commands/init.js\";\nimport { registerQueryCommand } from \"./commands/query.js\";\nimport { registerAskCommand } from \"./commands/ask.js\";\nimport { registerFindCommand } from \"./commands/find.js\";\nimport { registerUpdateCommand } from \"./commands/update.js\";\nimport { registerWatchCommand } from \"./commands/watch.js\";\nimport { registerStatusCommand } from \"./commands/status.js\";\nimport { registerSymbolsCommand } from \"./commands/symbols.js\";\nimport { registerDepsCommand } from \"./commands/deps.js\";\nimport { registerChunkCommand } from \"./commands/chunk.js\";\nimport { registerConfigCommand } from \"./commands/config.js\";\nimport { registerAuthCommand } from \"./commands/auth.js\";\n\nconst program = new Command();\n\nprogram\n .name(\"ctx\")\n .description(\"Kontext — Context engine for AI coding agents\")\n .version(\"0.1.0\")\n .option(\"--verbose\", \"Enable verbose/debug output\");\n\nregisterInitCommand(program);\nregisterQueryCommand(program);\nregisterAskCommand(program);\nregisterFindCommand(program);\nregisterUpdateCommand(program);\nregisterWatchCommand(program);\nregisterStatusCommand(program);\nregisterSymbolsCommand(program);\nregisterDepsCommand(program);\nregisterChunkCommand(program);\nregisterConfigCommand(program);\nregisterAuthCommand(program);\n\nprogram.parse();\n","import type { Command } from \"commander\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { discoverFiles } from \"../../indexer/discovery.js\";\nimport { computeChanges } from \"../../indexer/incremental.js\";\nimport { initParser, parseFile } from \"../../indexer/parser.js\";\nimport { chunkFile } from \"../../indexer/chunker.js\";\nimport type { Chunk } from \"../../indexer/chunker.js\";\nimport { prepareChunkText, createLocalEmbedder } from \"../../indexer/embedder.js\";\nimport type { Embedder } from \"../../indexer/embedder.js\";\nimport { IndexError, ErrorCode } from \"../../utils/errors.js\";\nimport { handleCommandError } from \"../../utils/error-boundary.js\";\nimport { createLogger, LogLevel } from \"../../utils/logger.js\";\nimport { createDatabase } from \"../../storage/db.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** Options for the init pipeline. 
*/\nexport interface InitOptions {\n log?: (msg: string) => void;\n skipEmbedding?: boolean;\n}\n\ninterface IndexStats {\n filesDiscovered: number;\n filesAdded: number;\n filesModified: number;\n filesDeleted: number;\n filesUnchanged: number;\n chunksCreated: number;\n vectorsCreated: number;\n durationMs: number;\n languageCounts: Map<string, number>;\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst CTX_DIR = \".ctx\";\nconst DB_FILENAME = \"index.db\";\nconst CONFIG_FILENAME = \"config.json\";\nconst GITIGNORE_ENTRY = \".ctx/\";\n\n// ── Gitignore management ─────────────────────────────────────────────────────\n\nfunction ensureGitignore(projectRoot: string): void {\n const gitignorePath = path.join(projectRoot, \".gitignore\");\n\n if (fs.existsSync(gitignorePath)) {\n const content = fs.readFileSync(gitignorePath, \"utf-8\");\n if (content.includes(GITIGNORE_ENTRY)) return;\n const suffix = content.endsWith(\"\\n\") ? \"\" : \"\\n\";\n fs.writeFileSync(gitignorePath, `${content}${suffix}${GITIGNORE_ENTRY}\\n`);\n } else {\n fs.writeFileSync(gitignorePath, `${GITIGNORE_ENTRY}\\n`);\n }\n}\n\n// ── Config ───────────────────────────────────────────────────────────────────\n\nfunction ensureConfig(ctxDir: string): void {\n const configPath = path.join(ctxDir, CONFIG_FILENAME);\n if (fs.existsSync(configPath)) return;\n\n const config = {\n version: 1,\n dimensions: 384,\n model: \"all-MiniLM-L6-v2\",\n };\n fs.writeFileSync(configPath, JSON.stringify(config, null, 2) + \"\\n\");\n}\n\n// ── Format helpers ───────────────────────────────────────────────────────────\n\nfunction formatDuration(ms: number): string {\n if (ms < 1000) return `${Math.round(ms)}ms`;\n return `${(ms / 1000).toFixed(1)}s`;\n}\n\nfunction formatBytes(bytes: number): string {\n if (bytes < 1024) return `${bytes} B`;\n if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;\n return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;\n}\n\nfunction formatLanguageSummary(counts: Map<string, number>): string {\n const entries = [...counts.entries()]\n .sort((a, b) => b[1] - a[1])\n .map(([lang, count]) => `${lang}: ${count}`);\n return entries.join(\", \");\n}\n\n// ── Main pipeline ────────────────────────────────────────────────────────────\n\n/** Index a codebase: discover → parse → chunk → embed → store. Runs incrementally on subsequent calls. */\nexport async function runInit(\n projectPath: string,\n options: InitOptions = {},\n): Promise<IndexStats> {\n const log = options.log ?? console.log;\n const absoluteRoot = path.resolve(projectPath);\n const start = performance.now();\n\n log(`Indexing ${absoluteRoot}...`);\n\n // 1. Setup .ctx directory\n const ctxDir = path.join(absoluteRoot, CTX_DIR);\n if (!fs.existsSync(ctxDir)) fs.mkdirSync(ctxDir, { recursive: true });\n\n ensureGitignore(absoluteRoot);\n ensureConfig(ctxDir);\n\n // 2. Open/create database\n const dbPath = path.join(ctxDir, DB_FILENAME);\n const db = createDatabase(dbPath);\n\n try {\n // 3. Discover files\n const discovered = await discoverFiles({\n root: absoluteRoot,\n extraIgnore: [\".ctx/\"],\n });\n\n const languageCounts = new Map<string, number>();\n for (const file of discovered) {\n languageCounts.set(\n file.language,\n (languageCounts.get(file.language) ?? 0) + 1,\n );\n }\n\n log(\n ` Discovered ${discovered.length} files` +\n (discovered.length > 0\n ? ` (${formatLanguageSummary(languageCounts)})`\n : \"\"),\n );\n\n // 4. 
Compute incremental changes\n const changes = await computeChanges(discovered, db);\n\n const filesToProcess = [\n ...changes.added.map((p) => ({ path: p, reason: \"added\" as const })),\n ...changes.modified.map((p) => ({ path: p, reason: \"modified\" as const })),\n ];\n\n if (changes.unchanged.length > 0) {\n log(` ${changes.unchanged.length} unchanged files skipped`);\n }\n if (changes.deleted.length > 0) {\n log(` ${changes.deleted.length} deleted files removed`);\n }\n if (changes.added.length > 0) {\n log(` ${changes.added.length} new files to index`);\n }\n if (changes.modified.length > 0) {\n log(` ${changes.modified.length} modified files to re-index`);\n }\n\n // 5. Delete removed files from DB (CASCADE handles chunks + vectors)\n for (const deletedPath of changes.deleted) {\n db.deleteFile(deletedPath);\n }\n\n // 6. Parse & chunk changed files\n await initParser();\n\n const allChunksWithMeta: {\n fileRelPath: string;\n chunk: Chunk;\n }[] = [];\n\n let filesProcessed = 0;\n\n for (const { path: relPath } of filesToProcess) {\n const discovered_file = discovered.find((f) => f.path === relPath);\n if (!discovered_file) continue;\n\n // Delete old data for modified files\n const existingFile = db.getFile(relPath);\n if (existingFile) {\n db.deleteChunksByFile(existingFile.id);\n }\n\n // Parse\n let nodes;\n try {\n nodes = await parseFile(discovered_file.absolutePath, discovered_file.language);\n } catch {\n log(` ⚠ Skipping ${relPath} (parse error)`);\n continue;\n }\n\n // Chunk\n const chunks = chunkFile(nodes, relPath);\n\n // Upsert file record\n const fileId = db.upsertFile({\n path: relPath,\n language: discovered_file.language,\n hash: changes.hashes.get(relPath) ?? \"\",\n size: discovered_file.size,\n });\n\n // Insert chunks into DB\n const chunkIds = db.insertChunks(\n fileId,\n chunks.map((c) => ({\n lineStart: c.lineStart,\n lineEnd: c.lineEnd,\n type: c.type,\n name: c.name,\n parent: c.parent,\n text: c.text,\n imports: c.imports,\n exports: c.exports,\n hash: c.hash,\n })),\n );\n\n // Pair chunks with their DB IDs for embedding\n for (let i = 0; i < chunks.length; i++) {\n allChunksWithMeta.push({\n fileRelPath: relPath,\n chunk: { ...chunks[i], id: String(chunkIds[i]) },\n });\n }\n\n filesProcessed++;\n if (filesProcessed % 50 === 0 || filesProcessed === filesToProcess.length) {\n log(` Parsing... ${filesProcessed}/${filesToProcess.length}`);\n }\n }\n\n log(` ${allChunksWithMeta.length} chunks created`);\n\n // 7. Embedding\n let vectorsCreated = 0;\n\n if (!options.skipEmbedding && allChunksWithMeta.length > 0) {\n const embedder = await createEmbedder();\n\n const texts = allChunksWithMeta.map((cm) =>\n prepareChunkText(cm.fileRelPath, cm.chunk.parent, cm.chunk.text),\n );\n\n const vectors = await embedder.embed(texts, (done, total) => {\n log(` Embedding... ${done}/${total}`);\n });\n\n // Store vectors\n db.transaction(() => {\n for (let i = 0; i < allChunksWithMeta.length; i++) {\n const chunkDbId = parseInt(allChunksWithMeta[i].chunk.id, 10);\n db.insertVector(chunkDbId, vectors[i]);\n }\n });\n\n vectorsCreated = vectors.length;\n }\n\n // 8. Summary\n const durationMs = performance.now() - start;\n const dbSize = fs.existsSync(dbPath) ? fs.statSync(dbPath).size : 0;\n\n log(\"\");\n log(`✓ Indexed in ${formatDuration(durationMs)}`);\n log(\n ` ${discovered.length} files → ${allChunksWithMeta.length} chunks` +\n (vectorsCreated > 0 ? 
` → ${vectorsCreated} vectors` : \"\"),\n );\n log(` Database: ${CTX_DIR}/${DB_FILENAME} (${formatBytes(dbSize)})`);\n\n return {\n filesDiscovered: discovered.length,\n filesAdded: changes.added.length,\n filesModified: changes.modified.length,\n filesDeleted: changes.deleted.length,\n filesUnchanged: changes.unchanged.length,\n chunksCreated: allChunksWithMeta.length,\n vectorsCreated,\n durationMs,\n languageCounts,\n };\n } finally {\n db.close();\n }\n}\n\n// ── Embedder factory (separated for testability) ─────────────────────────────\n\nasync function createEmbedder(): Promise<Embedder> {\n return createLocalEmbedder();\n}\n\n// ── CLI registration ─────────────────────────────────────────────────────────\n\nexport function registerInitCommand(program: Command): void {\n program\n .command(\"init [path]\")\n .description(\"Index current directory or specified path\")\n .action(async (inputPath?: string) => {\n const projectPath = inputPath ?? process.cwd();\n const verbose = program.opts()[\"verbose\"] === true;\n const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });\n\n try {\n await runInit(projectPath);\n } catch (err) {\n const wrapped = err instanceof IndexError ? err\n : new IndexError(\n err instanceof Error ? err.message : String(err),\n ErrorCode.INDEX_FAILED,\n err instanceof Error ? err : undefined,\n );\n process.exitCode = handleCommandError(wrapped, logger, verbose);\n }\n });\n}\n","import type { Dirent, Stats } from \"node:fs\";\nimport fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport ignore from \"ignore\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** A source file discovered during scanning, with its detected language. */\nexport interface DiscoveredFile {\n path: string; // relative to project root\n absolutePath: string;\n language: string;\n size: number; // bytes\n lastModified: number; // unix timestamp ms\n}\n\nexport interface DiscoverOptions {\n root: string;\n extraIgnore?: string[];\n followSymlinks?: boolean; // default true\n}\n\n// ── Language extension map ───────────────────────────────────────────────────\n\n/** Maps file extensions to language names. Used for filtering and language detection. 
*/\nexport const LANGUAGE_MAP: Record<string, string> = {\n \".ts\": \"typescript\",\n \".tsx\": \"typescript\",\n \".js\": \"javascript\",\n \".jsx\": \"javascript\",\n \".mjs\": \"javascript\",\n \".cjs\": \"javascript\",\n \".py\": \"python\",\n \".go\": \"go\",\n \".rs\": \"rust\",\n \".java\": \"java\",\n \".rb\": \"ruby\",\n \".php\": \"php\",\n \".swift\": \"swift\",\n \".kt\": \"kotlin\",\n \".c\": \"c\",\n \".h\": \"c\",\n \".cpp\": \"cpp\",\n \".hpp\": \"cpp\",\n \".cc\": \"cpp\",\n \".cxx\": \"cpp\",\n \".json\": \"json\",\n \".yaml\": \"yaml\",\n \".yml\": \"yaml\",\n \".toml\": \"toml\",\n \".md\": \"markdown\",\n \".mdx\": \"markdown\",\n \".env\": \"env\",\n};\n\n// ── Built-in ignore patterns ─────────────────────────────────────────────────\n\nconst BUILTIN_IGNORE = [\n \"node_modules\",\n \".git\",\n \"dist\",\n \"build\",\n \"*.lock\",\n \"package-lock.json\",\n \"*.png\",\n \"*.jpg\",\n \"*.jpeg\",\n \"*.gif\",\n \"*.webp\",\n \"*.ico\",\n \"*.bmp\",\n \"*.svg\",\n \"*.woff\",\n \"*.woff2\",\n \"*.ttf\",\n \"*.eot\",\n \"*.mp3\",\n \"*.mp4\",\n \"*.wav\",\n \"*.avi\",\n \"*.mov\",\n \"*.zip\",\n \"*.tar\",\n \"*.gz\",\n \"*.rar\",\n \"*.7z\",\n \"*.pdf\",\n \"*.exe\",\n \"*.dll\",\n \"*.so\",\n \"*.dylib\",\n \"*.o\",\n \"*.a\",\n \"*.wasm\",\n \"*.pyc\",\n \"*.class\",\n];\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nfunction getLanguage(filePath: string): string | null {\n const basename = path.basename(filePath);\n\n // Handle dotfiles like .env\n if (basename.startsWith(\".\") && !basename.includes(\".\", 1)) {\n const dotExt = basename; // e.g. \".env\"\n return LANGUAGE_MAP[dotExt] ?? null;\n }\n\n const ext = path.extname(filePath).toLowerCase();\n return LANGUAGE_MAP[ext] ?? null;\n}\n\nasync function readIgnoreFile(filePath: string): Promise<string[]> {\n try {\n const content = await fs.readFile(filePath, \"utf-8\");\n return content\n .split(\"\\n\")\n .map((line) => line.trim())\n .filter((line) => line.length > 0 && !line.startsWith(\"#\"));\n } catch {\n return [];\n }\n}\n\nasync function statSafe(\n filePath: string,\n followSymlinks: boolean,\n): Promise<Stats | null> {\n try {\n return followSymlinks\n ? await fs.stat(filePath)\n : await fs.lstat(filePath);\n } catch {\n return null;\n }\n}\n\n// ── Main ─────────────────────────────────────────────────────────────────────\n\n/** Recursively scan a directory for source files, respecting .gitignore and .ctxignore. 
*/\nexport async function discoverFiles(\n options: DiscoverOptions,\n): Promise<DiscoveredFile[]> {\n const { root, extraIgnore = [], followSymlinks = true } = options;\n const absoluteRoot = path.resolve(root);\n\n // Build ignore filter\n const ig = ignore();\n ig.add(BUILTIN_IGNORE);\n\n // Load .gitignore\n const gitignoreRules = await readIgnoreFile(\n path.join(absoluteRoot, \".gitignore\"),\n );\n ig.add(gitignoreRules);\n\n // Load .ctxignore\n const ctxignoreRules = await readIgnoreFile(\n path.join(absoluteRoot, \".ctxignore\"),\n );\n ig.add(ctxignoreRules);\n\n // Add extra ignore patterns\n ig.add(extraIgnore);\n\n const results: DiscoveredFile[] = [];\n await walkDirectory(absoluteRoot, absoluteRoot, ig, followSymlinks, results);\n return results.sort((a, b) => a.path.localeCompare(b.path));\n}\n\nasync function walkDirectory(\n dir: string,\n root: string,\n ig: ReturnType<typeof ignore>,\n followSymlinks: boolean,\n results: DiscoveredFile[],\n): Promise<void> {\n let entries: Dirent[];\n try {\n entries = await fs.readdir(dir, { withFileTypes: true });\n } catch {\n // Permission denied or other error — skip silently\n return;\n }\n\n for (const entry of entries) {\n const absolutePath = path.join(dir, entry.name);\n const relativePath = path.relative(root, absolutePath);\n\n // Normalize to forward slashes for ignore matching\n const normalizedRelative = relativePath.split(path.sep).join(\"/\");\n\n // Check if ignored — directories need trailing slash for ignore\n if (entry.isDirectory() || entry.isSymbolicLink()) {\n const stat = await statSafe(absolutePath, followSymlinks);\n if (!stat) continue;\n\n if (stat.isDirectory()) {\n if (ig.ignores(normalizedRelative + \"/\") || ig.ignores(normalizedRelative)) {\n continue;\n }\n await walkDirectory(absolutePath, root, ig, followSymlinks, results);\n continue;\n }\n\n // Symlink to file — fall through to file handling\n if (!stat.isFile()) continue;\n }\n\n if (!entry.isFile() && !entry.isSymbolicLink()) continue;\n\n // Check ignore for files\n if (ig.ignores(normalizedRelative)) continue;\n\n // Get language from extension\n const language = getLanguage(relativePath);\n if (language === null) continue;\n\n // Stat for metadata\n const stat = await statSafe(absolutePath, followSymlinks);\n if (!stat || !stat.isFile()) continue;\n\n results.push({\n path: normalizedRelative,\n absolutePath,\n language,\n size: stat.size,\n lastModified: stat.mtimeMs,\n });\n }\n}\n","import { createHash } from \"node:crypto\";\nimport fs from \"node:fs/promises\";\nimport type { DiscoveredFile } from \"./discovery.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** Result of incremental change detection: files categorized by status. 
*/\nexport interface IncrementalResult {\n added: string[];\n modified: string[];\n deleted: string[];\n unchanged: string[];\n /** SHA-256 content hashes for added + modified files */\n hashes: Map<string, string>;\n /** Wall-clock duration in milliseconds */\n duration: number;\n}\n\n/** Minimal DB surface needed for change detection */\nexport interface ChangeDetectionDb {\n getFile(filePath: string): { hash: string } | null;\n getAllFilePaths(): string[];\n}\n\n// ── File hashing ─────────────────────────────────────────────────────────────\n\nexport async function hashFileContent(absolutePath: string): Promise<string> {\n const content = await fs.readFile(absolutePath);\n return createHash(\"sha256\").update(content).digest(\"hex\");\n}\n\n// ── Change detection ─────────────────────────────────────────────────────────\n\n/** Compare discovered files against stored hashes to detect adds, modifies, and deletes. */\nexport async function computeChanges(\n discovered: DiscoveredFile[],\n db: ChangeDetectionDb,\n): Promise<IncrementalResult> {\n const start = performance.now();\n\n const added: string[] = [];\n const modified: string[] = [];\n const unchanged: string[] = [];\n const hashes = new Map<string, string>();\n\n // Build set of discovered paths for fast lookup\n const discoveredPaths = new Set(discovered.map((f) => f.path));\n\n // Classify each discovered file\n await Promise.all(\n discovered.map(async (file) => {\n const contentHash = await hashFileContent(file.absolutePath);\n const existing = db.getFile(file.path);\n\n if (!existing) {\n added.push(file.path);\n hashes.set(file.path, contentHash);\n } else if (existing.hash !== contentHash) {\n modified.push(file.path);\n hashes.set(file.path, contentHash);\n } else {\n unchanged.push(file.path);\n }\n }),\n );\n\n // Find deleted files: in DB but not discovered\n const dbPaths = db.getAllFilePaths();\n const deleted = dbPaths.filter((p) => !discoveredPaths.has(p));\n\n // Sort for deterministic output\n added.sort();\n modified.sort();\n deleted.sort();\n unchanged.sort();\n\n return {\n added,\n modified,\n deleted,\n unchanged,\n hashes,\n duration: performance.now() - start,\n };\n}\n","import fs from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { createRequire } from \"node:module\";\nimport Parser from \"web-tree-sitter\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** An extracted AST node: function, class, method, type, import, or constant. 
*/\nexport interface ASTNode {\n type: \"function\" | \"class\" | \"method\" | \"import\" | \"export\" | \"type\" | \"constant\";\n name: string | null;\n lineStart: number;\n lineEnd: number;\n language: string;\n parent: string | null;\n params?: string[];\n returnType?: string;\n docstring?: string;\n imports?: string[];\n exports?: boolean;\n text: string;\n}\n\n// ── Language grammar mapping ─────────────────────────────────────────────────\n\nconst GRAMMAR_FILES: Record<string, string> = {\n typescript: \"tree-sitter-typescript.wasm\",\n javascript: \"tree-sitter-javascript.wasm\",\n python: \"tree-sitter-python.wasm\",\n};\n\n// ── Parser cache ─────────────────────────────────────────────────────────────\n\nconst require = createRequire(import.meta.url);\nlet initialized = false;\nconst languageCache = new Map<string, Parser.Language>();\n\nfunction resolveWasmPath(filename: string): string {\n if (filename === \"tree-sitter.wasm\") {\n return path.join(path.dirname(require.resolve(\"web-tree-sitter\")), filename);\n }\n return path.join(path.dirname(require.resolve(\"tree-sitter-wasms/package.json\")), \"out\", filename);\n}\n\n/** Initialize the Tree-sitter WebAssembly parser. Must be called before parseFile. */\nexport async function initParser(): Promise<void> {\n if (initialized) return;\n await Parser.init({\n locateFile: (scriptName: string) => resolveWasmPath(scriptName),\n });\n initialized = true;\n}\n\nasync function getLanguage(language: string): Promise<Parser.Language | null> {\n const grammarFile = GRAMMAR_FILES[language];\n if (!grammarFile) return null;\n\n const cached = languageCache.get(language);\n if (cached) return cached;\n\n const wasmPath = resolveWasmPath(grammarFile);\n const lang = await Parser.Language.load(wasmPath);\n languageCache.set(language, lang);\n return lang;\n}\n\n// ── Docstring extraction ─────────────────────────────────────────────────────\n\nfunction extractDocstring(\n node: Parser.SyntaxNode,\n language: string,\n): string | undefined {\n if (language === \"python\") {\n // Python docstrings: first child expression_statement containing a string\n const body = node.childForFieldName(\"body\");\n if (body) {\n const firstStmt = body.namedChildren[0];\n if (firstStmt?.type === \"expression_statement\") {\n const strNode = firstStmt.namedChildren[0];\n if (strNode?.type === \"string\") {\n // Strip surrounding quotes (\"\"\" or ')\n const raw = strNode.text;\n return raw.replace(/^[\"']{1,3}|[\"']{1,3}$/g, \"\").trim();\n }\n }\n }\n return undefined;\n }\n\n // JS/TS: look for a comment preceding the node\n const prev = findPrecedingComment(node);\n if (prev) return cleanJSDocComment(prev.text);\n return undefined;\n}\n\nfunction findPrecedingComment(node: Parser.SyntaxNode): Parser.SyntaxNode | null {\n // Walk backward through previous siblings\n let candidate: Parser.SyntaxNode | null = node.previousNamedSibling;\n\n // If this node is wrapped in export_statement, check before the export\n if (node.parent?.type === \"export_statement\") {\n candidate = node.parent.previousNamedSibling;\n }\n\n if (candidate?.type === \"comment\") return candidate;\n return null;\n}\n\nfunction cleanJSDocComment(text: string): string {\n return text\n .replace(/^\\/\\*\\*?\\s*/, \"\")\n .replace(/\\s*\\*\\/$/, \"\")\n .replace(/^\\s*\\* ?/gm, \"\")\n .trim();\n}\n\n// ── Parameter extraction ─────────────────────────────────────────────────────\n\nfunction extractParams(\n node: Parser.SyntaxNode,\n language: string,\n): string[] | undefined {\n const 
paramsNode =\n node.childForFieldName(\"parameters\") ??\n node.childForFieldName(\"formal_parameters\");\n\n if (!paramsNode) return undefined;\n\n if (language === \"python\") {\n return paramsNode.namedChildren\n .filter((c) => c.type !== \"comment\")\n .map((c) => c.text)\n .filter((t) => t !== \"self\" && t !== \"cls\");\n }\n\n // JS/TS\n return paramsNode.namedChildren\n .filter((c) => c.type !== \"comment\")\n .map((c) => c.text);\n}\n\nfunction extractReturnType(\n node: Parser.SyntaxNode,\n language: string,\n): string | undefined {\n if (language === \"python\") {\n const retType = node.childForFieldName(\"return_type\");\n return retType?.text;\n }\n\n // TS: return type annotation comes after parameters\n const retType = node.childForFieldName(\"return_type\");\n if (retType) {\n // Strip leading \": \" from the type annotation\n const text = retType.text;\n return text.startsWith(\":\") ? text.slice(1).trim() : text;\n }\n return undefined;\n}\n\n// ── Node extraction per language ─────────────────────────────────────────────\n\nfunction isExported(node: Parser.SyntaxNode): boolean {\n return node.parent?.type === \"export_statement\";\n}\n\nfunction extractTopLevelNode(node: Parser.SyntaxNode): Parser.SyntaxNode {\n // If wrapped in export_statement, return the export_statement for text/range\n if (node.parent?.type === \"export_statement\") return node.parent;\n return node;\n}\n\nfunction extractTypeScript(\n rootNode: Parser.SyntaxNode,\n source: string,\n language: string,\n): ASTNode[] {\n const nodes: ASTNode[] = [];\n\n function walk(node: Parser.SyntaxNode, parentClassName: string | null): void {\n for (const child of node.namedChildren) {\n // Unwrap export_statement to get the inner declaration\n const inner =\n child.type === \"export_statement\"\n ? (child.namedChildren.find(\n (c) =>\n c.type === \"function_declaration\" ||\n c.type === \"class_declaration\" ||\n c.type === \"lexical_declaration\" ||\n c.type === \"interface_declaration\" ||\n c.type === \"type_alias_declaration\" ||\n c.type === \"abstract_class_declaration\",\n ) ?? child)\n : child;\n\n switch (inner.type) {\n case \"import_statement\": {\n nodes.push({\n type: \"import\",\n name: null,\n lineStart: inner.startPosition.row + 1,\n lineEnd: inner.endPosition.row + 1,\n language,\n parent: null,\n text: inner.text,\n });\n break;\n }\n\n case \"function_declaration\": {\n const topNode = extractTopLevelNode(inner);\n const name = inner.childForFieldName(\"name\")?.text ?? null;\n nodes.push({\n type: parentClassName ? \"method\" : \"function\",\n name,\n lineStart: topNode.startPosition.row + 1,\n lineEnd: topNode.endPosition.row + 1,\n language,\n parent: parentClassName,\n params: extractParams(inner, language),\n returnType: extractReturnType(inner, language),\n docstring: extractDocstring(inner, language),\n exports: isExported(inner),\n text: topNode.text,\n });\n break;\n }\n\n case \"class_declaration\":\n case \"abstract_class_declaration\": {\n const topNode = extractTopLevelNode(inner);\n const className = inner.childForFieldName(\"name\")?.text ?? 
null;\n nodes.push({\n type: \"class\",\n name: className,\n lineStart: topNode.startPosition.row + 1,\n lineEnd: topNode.endPosition.row + 1,\n language,\n parent: null,\n docstring: extractDocstring(inner, language),\n exports: isExported(inner),\n text: topNode.text,\n });\n\n // Extract methods from class body\n const classBody = inner.childForFieldName(\"body\");\n if (classBody) {\n for (const member of classBody.namedChildren) {\n if (member.type === \"method_definition\") {\n const methodName = member.childForFieldName(\"name\")?.text ?? null;\n nodes.push({\n type: \"method\",\n name: methodName,\n lineStart: member.startPosition.row + 1,\n lineEnd: member.endPosition.row + 1,\n language,\n parent: className,\n params: extractParams(member, language),\n returnType: extractReturnType(member, language),\n docstring: extractDocstring(member, language),\n exports: isExported(inner),\n text: member.text,\n });\n }\n }\n }\n break;\n }\n\n case \"interface_declaration\": {\n const topNode = extractTopLevelNode(inner);\n const name = inner.childForFieldName(\"name\")?.text ?? null;\n nodes.push({\n type: \"type\",\n name,\n lineStart: topNode.startPosition.row + 1,\n lineEnd: topNode.endPosition.row + 1,\n language,\n parent: null,\n docstring: extractDocstring(inner, language),\n exports: isExported(inner),\n text: topNode.text,\n });\n break;\n }\n\n case \"type_alias_declaration\": {\n const topNode = extractTopLevelNode(inner);\n const name = inner.childForFieldName(\"name\")?.text ?? null;\n nodes.push({\n type: \"type\",\n name,\n lineStart: topNode.startPosition.row + 1,\n lineEnd: topNode.endPosition.row + 1,\n language,\n parent: null,\n docstring: extractDocstring(inner, language),\n exports: isExported(inner),\n text: topNode.text,\n });\n break;\n }\n\n case \"lexical_declaration\": {\n const topNode = extractTopLevelNode(inner);\n // Extract variable name from declarators\n const declarator = inner.namedChildren.find(\n (c) => c.type === \"variable_declarator\",\n );\n const name = declarator?.childForFieldName(\"name\")?.text ?? null;\n nodes.push({\n type: \"constant\",\n name,\n lineStart: topNode.startPosition.row + 1,\n lineEnd: topNode.endPosition.row + 1,\n language,\n parent: parentClassName,\n docstring: extractDocstring(inner, language),\n exports: isExported(inner),\n text: topNode.text,\n });\n break;\n }\n\n default:\n // Other top-level node types are not extracted\n break;\n }\n }\n }\n\n walk(rootNode, null);\n return nodes;\n}\n\nfunction extractPython(\n rootNode: Parser.SyntaxNode,\n _source: string,\n language: string,\n): ASTNode[] {\n const nodes: ASTNode[] = [];\n\n function walk(node: Parser.SyntaxNode, parentClassName: string | null): void {\n for (const child of node.namedChildren) {\n switch (child.type) {\n case \"import_statement\":\n case \"import_from_statement\": {\n nodes.push({\n type: \"import\",\n name: null,\n lineStart: child.startPosition.row + 1,\n lineEnd: child.endPosition.row + 1,\n language,\n parent: null,\n text: child.text,\n });\n break;\n }\n\n case \"function_definition\": {\n const name = child.childForFieldName(\"name\")?.text ?? null;\n nodes.push({\n type: parentClassName ? 
\"method\" : \"function\",\n name,\n lineStart: child.startPosition.row + 1,\n lineEnd: child.endPosition.row + 1,\n language,\n parent: parentClassName,\n params: extractParams(child, language),\n returnType: extractReturnType(child, language),\n docstring: extractDocstring(child, language),\n text: child.text,\n });\n break;\n }\n\n case \"decorated_definition\": {\n // Unwrap decorated definition to get the inner function/class\n const innerDef = child.namedChildren.find(\n (c) =>\n c.type === \"function_definition\" || c.type === \"class_definition\",\n );\n if (innerDef) {\n // Process as if it were the inner node, but use the decorated range\n const name = innerDef.childForFieldName(\"name\")?.text ?? null;\n\n if (innerDef.type === \"function_definition\") {\n nodes.push({\n type: parentClassName ? \"method\" : \"function\",\n name,\n lineStart: child.startPosition.row + 1,\n lineEnd: child.endPosition.row + 1,\n language,\n parent: parentClassName,\n params: extractParams(innerDef, language),\n returnType: extractReturnType(innerDef, language),\n docstring: extractDocstring(innerDef, language),\n text: child.text,\n });\n } else if (innerDef.type === \"class_definition\") {\n nodes.push({\n type: \"class\",\n name,\n lineStart: child.startPosition.row + 1,\n lineEnd: child.endPosition.row + 1,\n language,\n parent: null,\n docstring: extractDocstring(innerDef, language),\n text: child.text,\n });\n\n // Extract methods from class body\n const body = innerDef.childForFieldName(\"body\");\n if (body) walk(body, name);\n }\n }\n break;\n }\n\n case \"class_definition\": {\n const name = child.childForFieldName(\"name\")?.text ?? null;\n nodes.push({\n type: \"class\",\n name,\n lineStart: child.startPosition.row + 1,\n lineEnd: child.endPosition.row + 1,\n language,\n parent: null,\n docstring: extractDocstring(child, language),\n text: child.text,\n });\n\n // Extract methods from class body\n const body = child.childForFieldName(\"body\");\n if (body) walk(body, name);\n break;\n }\n\n case \"expression_statement\": {\n // Top-level assignments → constants\n const assignment = child.namedChildren.find(\n (c) => c.type === \"assignment\",\n );\n if (assignment && parentClassName === null) {\n const left = assignment.childForFieldName(\"left\");\n if (left?.type === \"identifier\") {\n nodes.push({\n type: \"constant\",\n name: left.text,\n lineStart: child.startPosition.row + 1,\n lineEnd: child.endPosition.row + 1,\n language,\n parent: null,\n text: child.text,\n });\n }\n }\n break;\n }\n\n default:\n break;\n }\n }\n }\n\n walk(rootNode, null);\n return nodes;\n}\n\n// ── Main entry point ─────────────────────────────────────────────────────────\n\n/** Parse a source file with Tree-sitter and extract AST nodes. 
*/\nexport async function parseFile(\n filePath: string,\n language: string,\n): Promise<ASTNode[]> {\n await initParser();\n\n const lang = await getLanguage(language);\n if (!lang) return [];\n\n let source: string;\n try {\n source = await fs.readFile(filePath, \"utf-8\");\n } catch {\n return [];\n }\n\n const parser = new Parser();\n parser.setLanguage(lang);\n\n const tree = parser.parse(source);\n if (!tree) return [];\n\n try {\n if (language === \"python\") {\n return extractPython(tree.rootNode, source, language);\n }\n // TypeScript and JavaScript share the same extraction logic\n return extractTypeScript(tree.rootNode, source, language);\n } finally {\n tree.delete();\n parser.delete();\n }\n}\n","import { createHash } from \"node:crypto\";\nimport type { ASTNode } from \"./parser.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** A logical code chunk with content, location, and a deterministic content hash. */\nexport interface Chunk {\n id: string;\n filePath: string;\n lineStart: number;\n lineEnd: number;\n language: string;\n type: \"function\" | \"class\" | \"method\" | \"type\" | \"import\" | \"constant\" | \"config\";\n name: string | null;\n parent: string | null;\n text: string;\n imports: string[];\n exports: boolean;\n hash: string;\n}\n\nexport interface ChunkOptions {\n maxTokens?: number;\n overlapLines?: number;\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst DEFAULT_MAX_TOKENS = 500;\nconst MERGE_THRESHOLD = 50;\nconst TOKEN_MULTIPLIER = 1.3;\n\n// ── Token estimation ─────────────────────────────────────────────────────────\n\n/** Rough token count estimate (~1.3 tokens per whitespace-delimited word). */\nexport function estimateTokens(text: string): number {\n const wordCount = text.split(/\\s+/).filter((w) => w.length > 0).length;\n return Math.ceil(wordCount * TOKEN_MULTIPLIER);\n}\n\n// ── Hashing ──────────────────────────────────────────────────────────────────\n\nfunction makeChunkId(filePath: string, lineStart: number, lineEnd: number): string {\n const input = `${filePath}:${lineStart}:${lineEnd}`;\n return createHash(\"sha256\").update(input).digest(\"hex\").slice(0, 16);\n}\n\nfunction makeContentHash(text: string): string {\n return createHash(\"sha256\").update(text).digest(\"hex\").slice(0, 16);\n}\n\n// ── Splitting large nodes ────────────────────────────────────────────────────\n\ninterface SubChunk {\n lineStart: number;\n lineEnd: number;\n text: string;\n}\n\nfunction splitLargeNode(node: ASTNode, maxTokens: number): SubChunk[] {\n const lines = node.text.split(\"\\n\");\n const chunks: SubChunk[] = [];\n let currentLines: string[] = [];\n let currentStartOffset = 0;\n\n for (let i = 0; i < lines.length; i++) {\n currentLines.push(lines[i]);\n const currentText = currentLines.join(\"\\n\");\n const tokens = estimateTokens(currentText);\n\n if (tokens >= maxTokens && currentLines.length > 1) {\n // Emit what we have so far (excluding the current line)\n currentLines.pop();\n chunks.push({\n lineStart: node.lineStart + currentStartOffset,\n lineEnd: node.lineStart + currentStartOffset + currentLines.length - 1,\n text: currentLines.join(\"\\n\"),\n });\n\n currentStartOffset = i;\n currentLines = [lines[i]];\n }\n }\n\n // Emit remaining lines\n if (currentLines.length > 0) {\n chunks.push({\n lineStart: node.lineStart + currentStartOffset,\n lineEnd: node.lineStart + currentStartOffset + currentLines.length - 1,\n text: currentLines.join(\"\\n\"),\n });\n }\n\n return 
chunks;\n}\n\n// ── Import grouping ──────────────────────────────────────────────────────────\n\nfunction groupImports(imports: ASTNode[]): ASTNode | null {\n if (imports.length === 0) return null;\n\n const sorted = [...imports].sort((a, b) => a.lineStart - b.lineStart);\n return {\n type: \"import\",\n name: null,\n lineStart: sorted[0].lineStart,\n lineEnd: sorted[sorted.length - 1].lineEnd,\n language: sorted[0].language,\n parent: null,\n text: sorted.map((n) => n.text).join(\"\\n\"),\n };\n}\n\n// ── Merging small chunks ─────────────────────────────────────────────────────\n\n// Types that are semantically distinct and should never be merged together\nconst UNMERGEABLE_TYPES = new Set<Chunk[\"type\"]>([\n \"function\",\n \"method\",\n \"class\",\n \"type\",\n \"import\",\n]);\n\nfunction canMerge(a: Chunk, b: Chunk): boolean {\n // Never merge semantically distinct node types\n if (UNMERGEABLE_TYPES.has(a.type) || UNMERGEABLE_TYPES.has(b.type)) return false;\n // Only merge chunks of the same type\n if (a.type !== b.type) return false;\n return true;\n}\n\nfunction mergeSmallChunks(chunks: Chunk[], maxTokens: number): Chunk[] {\n if (chunks.length <= 1) return chunks;\n\n const merged: Chunk[] = [];\n let accumulator: Chunk | null = null;\n\n for (const chunk of chunks) {\n const chunkTokens = estimateTokens(chunk.text);\n\n if (accumulator === null) {\n if (chunkTokens < MERGE_THRESHOLD && !UNMERGEABLE_TYPES.has(chunk.type)) {\n accumulator = { ...chunk };\n } else {\n merged.push(chunk);\n }\n continue;\n }\n\n const accTokens = estimateTokens(accumulator.text);\n const combinedTokens = accTokens + chunkTokens;\n\n // Merge if both are small, same type, and combined fits\n if (\n chunkTokens < MERGE_THRESHOLD &&\n combinedTokens <= maxTokens &&\n canMerge(accumulator, chunk)\n ) {\n const combinedText = accumulator.text + \"\\n\" + chunk.text;\n accumulator = {\n ...accumulator,\n lineEnd: chunk.lineEnd,\n text: combinedText,\n name: accumulator.name ?? chunk.name,\n id: makeChunkId(accumulator.filePath, accumulator.lineStart, chunk.lineEnd),\n hash: makeContentHash(combinedText),\n };\n } else {\n // Flush accumulator and start fresh\n merged.push(accumulator);\n accumulator =\n chunkTokens < MERGE_THRESHOLD && !UNMERGEABLE_TYPES.has(chunk.type)\n ? { ...chunk }\n : null;\n if (accumulator === null) {\n merged.push(chunk);\n }\n }\n }\n\n if (accumulator) {\n merged.push(accumulator);\n }\n\n return merged;\n}\n\n// ── Extract import text list for context ─────────────────────────────────────\n\nfunction collectImportTexts(nodes: ASTNode[]): string[] {\n return nodes.filter((n) => n.type === \"import\").map((n) => n.text);\n}\n\n// ── Main entry point ─────────────────────────────────────────────────────────\n\n/** Split AST nodes into logical chunks. Merges small constants; keeps functions/classes whole. */\nexport function chunkFile(\n nodes: ASTNode[],\n filePath: string,\n options?: ChunkOptions,\n): Chunk[] {\n if (nodes.length === 0) return [];\n\n const maxTokens = options?.maxTokens ?? 
DEFAULT_MAX_TOKENS;\n const language = nodes[0].language;\n const importTexts = collectImportTexts(nodes);\n\n // Sort nodes by line position\n const sorted = [...nodes].sort((a, b) => a.lineStart - b.lineStart);\n\n // Separate imports from other nodes\n const importNodes = sorted.filter((n) => n.type === \"import\");\n const nonImportNodes = sorted.filter((n) => n.type !== \"import\");\n\n // Track which classes have separate method nodes\n const classesWithMethods = new Set<string>();\n for (const node of nonImportNodes) {\n if (node.type === \"method\" && node.parent) {\n classesWithMethods.add(node.parent);\n }\n }\n\n const rawChunks: Chunk[] = [];\n\n // 1. Group imports into a single chunk\n const groupedImport = groupImports(importNodes);\n if (groupedImport) {\n rawChunks.push({\n id: makeChunkId(filePath, groupedImport.lineStart, groupedImport.lineEnd),\n filePath,\n lineStart: groupedImport.lineStart,\n lineEnd: groupedImport.lineEnd,\n language,\n type: \"import\",\n name: null,\n parent: null,\n text: groupedImport.text,\n imports: [],\n exports: false,\n hash: makeContentHash(groupedImport.text),\n });\n }\n\n // 2. Process non-import nodes\n for (const node of nonImportNodes) {\n // Skip class node if it has separate method nodes — methods are chunked individually\n if (node.type === \"class\" && node.name && classesWithMethods.has(node.name)) {\n continue;\n }\n\n const tokenCount = estimateTokens(node.text);\n const nodeExports = node.exports ?? false;\n\n if (tokenCount <= maxTokens) {\n // Single chunk\n rawChunks.push({\n id: makeChunkId(filePath, node.lineStart, node.lineEnd),\n filePath,\n lineStart: node.lineStart,\n lineEnd: node.lineEnd,\n language,\n type: node.type === \"export\" ? \"constant\" : node.type,\n name: node.name,\n parent: node.parent,\n text: node.text,\n imports: node.type !== \"import\" ? importTexts : [],\n exports: nodeExports,\n hash: makeContentHash(node.text),\n });\n } else {\n // Split large node\n const subChunks = splitLargeNode(node, maxTokens);\n for (const sub of subChunks) {\n rawChunks.push({\n id: makeChunkId(filePath, sub.lineStart, sub.lineEnd),\n filePath,\n lineStart: sub.lineStart,\n lineEnd: sub.lineEnd,\n language,\n type: node.type === \"export\" ? \"constant\" : node.type,\n name: node.name,\n parent: node.parent,\n text: sub.text,\n imports: importTexts,\n exports: nodeExports,\n hash: makeContentHash(sub.text),\n });\n }\n }\n }\n\n // 3. Sort by line position\n rawChunks.sort((a, b) => a.lineStart - b.lineStart);\n\n // 4. Merge very small adjacent chunks\n return mergeSmallChunks(rawChunks, maxTokens);\n}\n","// ── Types ────────────────────────────────────────────────────────────────────\n\nexport type ProgressCallback = (done: number, total: number) => void;\n\nexport interface EmbeddingResult {\n chunkId: string;\n vector: Float32Array;\n dimensions: number;\n}\n\n/** Embedding provider: generates vector representations of text. 
*/\nexport interface Embedder {\n readonly name: string;\n readonly dimensions: number;\n embed(\n texts: string[],\n onProgress?: ProgressCallback,\n ): Promise<Float32Array[]>;\n embedSingle(text: string): Promise<Float32Array>;\n}\n\n// ── Vector utilities ─────────────────────────────────────────────────────────\n\nexport function normalizeVector(vec: Float32Array): Float32Array {\n let sumSq = 0;\n for (const v of vec) sumSq += v * v;\n const norm = Math.sqrt(sumSq);\n if (norm === 0) return vec;\n return vec.map((v) => v / norm);\n}\n\nexport function cosineSimilarity(a: Float32Array, b: Float32Array): number {\n let dot = 0;\n let normA = 0;\n let normB = 0;\n for (let i = 0; i < a.length; i++) {\n dot += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n const denom = Math.sqrt(normA) * Math.sqrt(normB);\n return denom === 0 ? 0 : dot / denom;\n}\n\n// ── Text preparation ─────────────────────────────────────────────────────────\n\n/** Format a chunk into embedding-friendly text with file path and name prefix. */\nexport function prepareChunkText(\n filePath: string,\n parent: string | null,\n text: string,\n): string {\n const parts = [filePath];\n if (parent) parts.push(parent);\n parts.push(text);\n return parts.join(\"\\n\");\n}\n\n// ── Retry with exponential backoff ───────────────────────────────────────────\n\nconst MAX_RETRIES = 3;\nconst BASE_DELAY_MS = 500;\n\nasync function fetchWithRetry(\n url: string,\n init: RequestInit,\n): Promise<Response> {\n let lastError: Error | null = null;\n\n for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {\n const response = await fetch(url, init);\n\n if (response.ok) return response;\n\n if (response.status === 429 && attempt < MAX_RETRIES) {\n const delay = BASE_DELAY_MS * Math.pow(2, attempt);\n await new Promise((resolve) => setTimeout(resolve, delay));\n lastError = new Error(`HTTP ${response.status}: ${response.statusText}`);\n continue;\n }\n\n throw new Error(\n `Embedding API error: HTTP ${response.status} ${response.statusText}`,\n );\n }\n\n throw lastError ?? new Error(\"Embedding API request failed after retries\");\n}\n\n// ── Local embedder (Xenova/all-MiniLM-L6-v2 via @huggingface/transformers) ──\n\nconst LOCAL_MODEL_ID = \"Xenova/all-MiniLM-L6-v2\";\nconst LOCAL_DIMENSIONS = 384;\nconst LOCAL_BATCH_SIZE = 32;\n\ntype FeatureExtractionPipeline = (\n texts: string | string[],\n options: { pooling: string; normalize: boolean },\n) => Promise<{ dims: number[]; data: Float32Array }>;\n\nlet pipelineInstance: FeatureExtractionPipeline | null = null;\n\nasync function getLocalPipeline(): Promise<FeatureExtractionPipeline> {\n if (pipelineInstance) return pipelineInstance;\n\n const { pipeline, env } = await import(\"@huggingface/transformers\");\n env.cacheDir = getCacheDir();\n\n pipelineInstance = (await pipeline(\"feature-extraction\", LOCAL_MODEL_ID, {\n dtype: \"fp32\",\n })) as unknown as FeatureExtractionPipeline;\n\n return pipelineInstance;\n}\n\nfunction getCacheDir(): string {\n const home =\n process.env[\"HOME\"] ?? process.env[\"USERPROFILE\"] ?? \"/tmp\";\n return `${home}/.cache/kontext/models`;\n}\n\n/** Create a local embedder using Xenova/all-MiniLM-L6-v2 (384 dims, ONNX Runtime). 
*/\nexport async function createLocalEmbedder(): Promise<Embedder> {\n const pipe = await getLocalPipeline();\n\n return {\n name: \"all-MiniLM-L6-v2\",\n dimensions: LOCAL_DIMENSIONS,\n\n async embed(\n texts: string[],\n onProgress?: ProgressCallback,\n ): Promise<Float32Array[]> {\n const results: Float32Array[] = [];\n\n for (let i = 0; i < texts.length; i += LOCAL_BATCH_SIZE) {\n const batch = texts.slice(i, i + LOCAL_BATCH_SIZE);\n const output = await pipe(batch, {\n pooling: \"mean\",\n normalize: true,\n });\n\n // Output shape: [batchSize, dimensions]\n for (let j = 0; j < batch.length; j++) {\n const offset = j * LOCAL_DIMENSIONS;\n const vec = new Float32Array(\n output.data.buffer,\n output.data.byteOffset + offset * 4,\n LOCAL_DIMENSIONS,\n );\n results.push(normalizeVector(vec));\n }\n\n onProgress?.(Math.min(i + batch.length, texts.length), texts.length);\n }\n\n return results;\n },\n\n async embedSingle(text: string): Promise<Float32Array> {\n const output = await pipe(text, {\n pooling: \"mean\",\n normalize: true,\n });\n\n const vec = new Float32Array(\n output.data.buffer,\n output.data.byteOffset,\n LOCAL_DIMENSIONS,\n );\n return normalizeVector(vec);\n },\n };\n}\n\n// ── Voyage embedder (VoyageCode3) ────────────────────────────────────────────\n\nconst VOYAGE_API_URL = \"https://api.voyageai.com/v1/embeddings\";\nconst VOYAGE_MODEL = \"voyage-code-3\";\nconst VOYAGE_DIMENSIONS = 1024;\nconst VOYAGE_BATCH_SIZE = 128;\n\n/** Create an embedder using Voyage AI's code embedding API. */\nexport function createVoyageEmbedder(apiKey: string): Embedder {\n return {\n name: VOYAGE_MODEL,\n dimensions: VOYAGE_DIMENSIONS,\n\n async embed(\n texts: string[],\n onProgress?: ProgressCallback,\n ): Promise<Float32Array[]> {\n const results: Float32Array[] = [];\n\n for (let i = 0; i < texts.length; i += VOYAGE_BATCH_SIZE) {\n const batch = texts.slice(i, i + VOYAGE_BATCH_SIZE);\n const vectors = await callVoyageAPI(apiKey, batch);\n results.push(...vectors);\n onProgress?.(Math.min(i + batch.length, texts.length), texts.length);\n }\n\n return results;\n },\n\n async embedSingle(text: string): Promise<Float32Array> {\n const vectors = await callVoyageAPI(apiKey, [text]);\n return vectors[0];\n },\n };\n}\n\ninterface EmbeddingAPIResponse {\n data: { embedding: number[] }[];\n}\n\nasync function callVoyageAPI(\n apiKey: string,\n texts: string[],\n): Promise<Float32Array[]> {\n const response = await fetchWithRetry(VOYAGE_API_URL, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify({\n model: VOYAGE_MODEL,\n input: texts,\n input_type: \"document\",\n }),\n });\n\n const json = (await response.json()) as EmbeddingAPIResponse;\n return json.data.map((d) => normalizeVector(new Float32Array(d.embedding)));\n}\n\n// ── OpenAI embedder (text-embedding-3-large) ─────────────────────────────────\n\nconst OPENAI_API_URL = \"https://api.openai.com/v1/embeddings\";\nconst OPENAI_MODEL = \"text-embedding-3-large\";\nconst OPENAI_DIMENSIONS = 1024; // truncated from 3072 for efficiency\nconst OPENAI_BATCH_SIZE = 128;\n\n/** Create an embedder using OpenAI's text-embedding-3-large API. 
*/\nexport function createOpenAIEmbedder(apiKey: string): Embedder {\n return {\n name: OPENAI_MODEL,\n dimensions: OPENAI_DIMENSIONS,\n\n async embed(\n texts: string[],\n onProgress?: ProgressCallback,\n ): Promise<Float32Array[]> {\n const results: Float32Array[] = [];\n\n for (let i = 0; i < texts.length; i += OPENAI_BATCH_SIZE) {\n const batch = texts.slice(i, i + OPENAI_BATCH_SIZE);\n const vectors = await callOpenAIAPI(apiKey, batch);\n results.push(...vectors);\n onProgress?.(Math.min(i + batch.length, texts.length), texts.length);\n }\n\n return results;\n },\n\n async embedSingle(text: string): Promise<Float32Array> {\n const vectors = await callOpenAIAPI(apiKey, [text]);\n return vectors[0];\n },\n };\n}\n\nasync function callOpenAIAPI(\n apiKey: string,\n texts: string[],\n): Promise<Float32Array[]> {\n const response = await fetchWithRetry(OPENAI_API_URL, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify({\n model: OPENAI_MODEL,\n input: texts,\n dimensions: OPENAI_DIMENSIONS,\n }),\n });\n\n const json = (await response.json()) as EmbeddingAPIResponse;\n return json.data.map((d) => normalizeVector(new Float32Array(d.embedding)));\n}\n","// ── Error codes ──────────────────────────────────────────────────────────────\n\n/** String constants for all Kontext error codes. */\nexport const ErrorCode = {\n NOT_INITIALIZED: \"NOT_INITIALIZED\",\n INDEX_FAILED: \"INDEX_FAILED\",\n PARSE_FAILED: \"PARSE_FAILED\",\n CHUNK_FAILED: \"CHUNK_FAILED\",\n EMBEDDER_FAILED: \"EMBEDDER_FAILED\",\n SEARCH_FAILED: \"SEARCH_FAILED\",\n CONFIG_INVALID: \"CONFIG_INVALID\",\n DB_CORRUPTED: \"DB_CORRUPTED\",\n DB_WRITE_FAILED: \"DB_WRITE_FAILED\",\n WATCHER_FAILED: \"WATCHER_FAILED\",\n LLM_FAILED: \"LLM_FAILED\",\n} as const;\n\n/** Union type of all error code string values. */\nexport type ErrorCodeValue = (typeof ErrorCode)[keyof typeof ErrorCode];\n\n// ── Base error ───────────────────────────────────────────────────────────────\n\n/** Base error class for all Kontext errors. Carries a typed `code` and optional `cause`. */\nexport class KontextError extends Error {\n readonly code: ErrorCodeValue;\n\n constructor(message: string, code: ErrorCodeValue, cause?: Error) {\n super(message, { cause });\n this.name = \"KontextError\";\n this.code = code;\n }\n}\n\n// ── Subclasses ───────────────────────────────────────────────────────────────\n\n/** Error during indexing: file discovery, parsing, chunking, or embedding. */\nexport class IndexError extends KontextError {\n constructor(message: string, code: ErrorCodeValue, cause?: Error) {\n super(message, code, cause);\n this.name = \"IndexError\";\n }\n}\n\n/** Error during search: vector, FTS, AST, path, or fusion. */\nexport class SearchError extends KontextError {\n constructor(message: string, code: ErrorCodeValue, cause?: Error) {\n super(message, code, cause);\n this.name = \"SearchError\";\n }\n}\n\n/** Error reading, writing, or validating configuration. */\nexport class ConfigError extends KontextError {\n constructor(message: string, code: ErrorCodeValue, cause?: Error) {\n super(message, code, cause);\n this.name = \"ConfigError\";\n }\n}\n\n/** Error in SQLite storage operations. 
*/\nexport class DatabaseError extends KontextError {\n constructor(message: string, code: ErrorCodeValue, cause?: Error) {\n super(message, code, cause);\n this.name = \"DatabaseError\";\n }\n}\n","import { KontextError } from \"./errors.js\";\nimport type { Logger } from \"./logger.js\";\n\n/** Standard error handler for CLI commands. Returns exit code: 1 for KontextError, 2 for unexpected. */\nexport function handleCommandError(\n err: unknown,\n logger: Logger,\n verbose: boolean,\n): number {\n if (err instanceof KontextError) {\n logger.error(`${err.message} [${err.code}]`);\n if (verbose && err.cause) {\n logger.debug(\"Cause:\", String(err.cause));\n }\n return 1;\n }\n\n if (err instanceof Error) {\n logger.error(`Unexpected error: ${err.message}`);\n if (verbose && err.stack) {\n logger.debug(err.stack);\n }\n } else {\n logger.error(`Unexpected error: ${String(err)}`);\n }\n\n return 2;\n}\n","// ── Log levels ───────────────────────────────────────────────────────────────\n\n/** Numeric log level constants. Lower = more verbose. */\nexport const LogLevel = {\n DEBUG: 0,\n INFO: 1,\n WARN: 2,\n ERROR: 3,\n SILENT: 4,\n} as const;\n\n/** Union type of all log level numeric values. */\nexport type LogLevelValue = (typeof LogLevel)[keyof typeof LogLevel];\n\n// ── Logger interface ─────────────────────────────────────────────────────────\n\n/** Leveled logger that writes to stderr. */\nexport interface Logger {\n debug(msg: string, ...args: unknown[]): void;\n info(msg: string, ...args: unknown[]): void;\n warn(msg: string, ...args: unknown[]): void;\n error(msg: string, ...args: unknown[]): void;\n}\n\n// ── Options ──────────────────────────────────────────────────────────────────\n\n/** Options for creating a logger instance. */\nexport interface LoggerOptions {\n level?: LogLevelValue;\n}\n\n// ── Factory ──────────────────────────────────────────────────────────────────\n\nfunction resolveLevel(options?: LoggerOptions): LogLevelValue {\n if (options?.level !== undefined) return options.level;\n if (process.env[\"CTX_DEBUG\"] === \"1\") return LogLevel.DEBUG;\n return LogLevel.INFO;\n}\n\nfunction formatArgs(args: unknown[]): string {\n return args.map((a) => (typeof a === \"string\" ? a : String(a))).join(\" \");\n}\n\nfunction write(level: string, msg: string, args: unknown[]): void {\n const extra = args.length > 0 ? ` ${formatArgs(args)}` : \"\";\n process.stderr.write(`[${level}] ${msg}${extra}\\n`);\n}\n\n/** Create a logger. Respects `CTX_DEBUG=1` env var for debug output. 
*/\nexport function createLogger(options?: LoggerOptions): Logger {\n const minLevel = resolveLevel(options);\n\n return {\n debug(msg: string, ...args: unknown[]): void {\n if (minLevel <= LogLevel.DEBUG) write(\"debug\", msg, args);\n },\n info(msg: string, ...args: unknown[]): void {\n if (minLevel <= LogLevel.INFO) write(\"info\", msg, args);\n },\n warn(msg: string, ...args: unknown[]): void {\n if (minLevel <= LogLevel.WARN) write(\"warn\", msg, args);\n },\n error(msg: string, ...args: unknown[]): void {\n if (minLevel <= LogLevel.ERROR) write(\"error\", msg, args);\n },\n };\n}\n","import path from \"node:path\";\nimport fs from \"node:fs\";\nimport BetterSqlite3 from \"better-sqlite3\";\nimport * as sqliteVec from \"sqlite-vec\";\nimport {\n SCHEMA_SQL,\n FTS_SQL,\n FTS_TRIGGERS_SQL,\n VECTOR_TABLE_SQL,\n SCHEMA_VERSION as SCHEMA_V,\n} from \"./schema.js\";\nimport {\n getVectorCount,\n insertVector as vecInsert,\n deleteVectorsByChunkIds,\n searchVectors as vecSearch,\n} from \"./vectors.js\";\nimport type { VectorResult } from \"./vectors.js\";\n\nexport { SCHEMA_VERSION } from \"./schema.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\nexport interface FileInput {\n path: string;\n language: string;\n hash: string;\n size: number;\n}\n\nexport interface FileRecord {\n id: number;\n path: string;\n language: string;\n hash: string;\n lastIndexed: number;\n size: number;\n}\n\nexport interface ChunkInput {\n lineStart: number;\n lineEnd: number;\n type: string;\n name: string | null;\n parent: string | null;\n text: string;\n imports: string[];\n exports: boolean;\n hash: string;\n}\n\nexport interface ChunkRecord {\n id: number;\n fileId: number;\n lineStart: number;\n lineEnd: number;\n type: string;\n name: string | null;\n parent: string | null;\n text: string;\n imports: string[];\n exports: boolean;\n hash: string;\n}\n\nexport interface ChunkWithFile {\n id: number;\n fileId: number;\n filePath: string;\n language: string;\n lineStart: number;\n lineEnd: number;\n type: string;\n name: string | null;\n parent: string | null;\n text: string;\n}\n\nexport interface ChunkSearchFilters {\n name?: string;\n nameMode?: \"exact\" | \"prefix\" | \"contains\";\n type?: string;\n parent?: string;\n language?: string;\n}\n\nexport interface FTSResult {\n chunkId: number;\n name: string | null;\n rank: number;\n}\n\n/** Main database interface. Provides CRUD for files, chunks, vectors, FTS, and stats. 
*/\nexport interface KontextDatabase {\n // Files\n upsertFile(file: FileInput): number;\n getFile(filePath: string): FileRecord | null;\n getFilesByHash(hashes: Map<string, string>): Map<string, FileRecord>;\n deleteFile(filePath: string): void;\n\n // Chunks\n insertChunks(fileId: number, chunks: ChunkInput[]): number[];\n getChunksByFile(fileId: number): ChunkRecord[];\n getChunksByIds(ids: number[]): ChunkWithFile[];\n deleteChunksByFile(fileId: number): void;\n\n // Dependencies\n insertDependency(sourceChunkId: number, targetChunkId: number, type: string): void;\n getDependencies(chunkId: number): { targetChunkId: number; type: string }[];\n getReverseDependencies(chunkId: number): { sourceChunkId: number; type: string }[];\n\n // Vectors\n insertVector(chunkId: number, vector: Float32Array): void;\n searchVectors(query: Float32Array, limit: number): VectorResult[];\n\n // AST / structured search\n searchChunks(filters: ChunkSearchFilters, limit: number): ChunkWithFile[];\n\n // FTS\n searchFTS(query: string, limit: number): FTSResult[];\n\n // All file paths (for incremental diff)\n getAllFilePaths(): string[];\n\n // Stats\n getFileCount(): number;\n getChunkCount(): number;\n getVectorCount(): number;\n getLanguageBreakdown(): Map<string, number>;\n getLastIndexed(): string | null;\n\n // Transactions\n transaction<T>(fn: () => T): T;\n\n // Maintenance\n vacuum(): void;\n close(): void;\n getSchemaVersion(): number;\n pragma(key: string): string;\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst DEFAULT_DIMENSIONS = 384;\n\n// ── Factory ──────────────────────────────────────────────────────────────────\n\n/** Create or open a SQLite database at the given path. Initializes schema and loads sqlite-vec. */\nexport function createDatabase(\n dbPath: string,\n dimensions: number = DEFAULT_DIMENSIONS,\n): KontextDatabase {\n // Ensure directory exists\n const dir = path.dirname(dbPath);\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir, { recursive: true });\n }\n\n const db = new BetterSqlite3(dbPath);\n\n // Enable WAL mode and foreign keys\n db.pragma(\"journal_mode = WAL\");\n db.pragma(\"foreign_keys = ON\");\n\n // Load sqlite-vec extension\n sqliteVec.load(db);\n\n // Run schema migrations\n initializeSchema(db, dimensions);\n\n // ── Prepared statements ──────────────────────────────────────────────────\n\n const stmtUpsertFile = db.prepare(`\n INSERT INTO files (path, language, hash, last_indexed, size)\n VALUES (@path, @language, @hash, @lastIndexed, @size)\n ON CONFLICT(path) DO UPDATE SET\n language = excluded.language,\n hash = excluded.hash,\n last_indexed = excluded.last_indexed,\n size = excluded.size\n `);\n\n const stmtGetFile = db.prepare(\n \"SELECT id, path, language, hash, last_indexed as lastIndexed, size FROM files WHERE path = ?\",\n );\n\n const stmtDeleteFile = db.prepare(\"DELETE FROM files WHERE path = ?\");\n\n const stmtInsertChunk = db.prepare(`\n INSERT INTO chunks (file_id, line_start, line_end, type, name, parent, text, imports, exports, hash)\n VALUES (@fileId, @lineStart, @lineEnd, @type, @name, @parent, @text, @imports, @exports, @hash)\n `);\n\n const stmtGetChunksByFile = db.prepare(\n \"SELECT id, file_id as fileId, line_start as lineStart, line_end as lineEnd, type, name, parent, text, imports, exports, hash FROM chunks WHERE file_id = ? 
ORDER BY line_start\",\n );\n\n const stmtGetChunkIdsByFile = db.prepare(\n \"SELECT id FROM chunks WHERE file_id = ?\",\n );\n\n const stmtDeleteChunksByFile = db.prepare(\n \"DELETE FROM chunks WHERE file_id = ?\",\n );\n\n const stmtSearchFTS = db.prepare(\n \"SELECT rowid as chunkId, name, rank FROM chunks_fts WHERE chunks_fts MATCH ? ORDER BY rank LIMIT ?\",\n );\n\n const stmtGetAllFiles = db.prepare(\n \"SELECT id, path, language, hash, last_indexed as lastIndexed, size FROM files\",\n );\n\n const stmtInsertDep = db.prepare(\n \"INSERT INTO dependencies (source_chunk_id, target_chunk_id, type) VALUES (?, ?, ?)\",\n );\n\n const stmtGetDeps = db.prepare(\n \"SELECT target_chunk_id as targetChunkId, type FROM dependencies WHERE source_chunk_id = ?\",\n );\n\n const stmtGetReverseDeps = db.prepare(\n \"SELECT source_chunk_id as sourceChunkId, type FROM dependencies WHERE target_chunk_id = ?\",\n );\n\n const stmtFileCount = db.prepare(\"SELECT COUNT(*) as count FROM files\");\n const stmtChunkCount = db.prepare(\"SELECT COUNT(*) as count FROM chunks\");\n const stmtLanguageBreakdown = db.prepare(\n \"SELECT language, COUNT(*) as count FROM files GROUP BY language ORDER BY count DESC\",\n );\n const stmtLastIndexed = db.prepare(\n \"SELECT MAX(last_indexed) as lastIndexed FROM files\",\n );\n\n // ── Implementation ───────────────────────────────────────────────────────\n\n return {\n upsertFile(file: FileInput): number {\n stmtUpsertFile.run({\n path: file.path,\n language: file.language,\n hash: file.hash,\n lastIndexed: Date.now(),\n size: file.size,\n });\n // last_insert_rowid() is only set by the INSERT arm of the upsert, so it\n // is stale when the ON CONFLICT update path fires; always fetch the id.\n const existing = stmtGetFile.get(file.path) as FileRecord | undefined;\n return existing?.id ?? 0;\n },\n\n getFile(filePath: string): FileRecord | null {\n const row = stmtGetFile.get(filePath) as FileRecord | undefined;\n return row ?? 
null;\n },\n\n getFilesByHash(hashes: Map<string, string>): Map<string, FileRecord> {\n const result = new Map<string, FileRecord>();\n const allFiles = stmtGetAllFiles.all() as FileRecord[];\n for (const file of allFiles) {\n const expectedHash = hashes.get(file.path);\n if (expectedHash !== undefined && expectedHash === file.hash) {\n result.set(file.path, file);\n }\n }\n return result;\n },\n\n getAllFilePaths(): string[] {\n const rows = stmtGetAllFiles.all() as FileRecord[];\n return rows.map((r) => r.path);\n },\n\n getFileCount(): number {\n return (stmtFileCount.get() as { count: number }).count;\n },\n\n getChunkCount(): number {\n return (stmtChunkCount.get() as { count: number }).count;\n },\n\n getVectorCount(): number {\n return getVectorCount(db);\n },\n\n getLanguageBreakdown(): Map<string, number> {\n const rows = stmtLanguageBreakdown.all() as { language: string; count: number }[];\n const map = new Map<string, number>();\n for (const row of rows) {\n map.set(row.language, row.count);\n }\n return map;\n },\n\n getLastIndexed(): string | null {\n const row = stmtLastIndexed.get() as { lastIndexed: string | null };\n return row.lastIndexed;\n },\n\n deleteFile(filePath: string): void {\n // Get chunk ids first for vector cleanup\n const file = stmtGetFile.get(filePath) as FileRecord | undefined;\n if (file) {\n const chunkRows = stmtGetChunkIdsByFile.all(file.id) as { id: number }[];\n const chunkIds = chunkRows.map((r) => r.id);\n if (chunkIds.length > 0) {\n deleteVectorsByChunkIds(db, chunkIds);\n }\n }\n // CASCADE will handle chunks and FTS triggers\n stmtDeleteFile.run(filePath);\n },\n\n insertChunks(fileId: number, chunks: ChunkInput[]): number[] {\n const ids: number[] = [];\n for (const chunk of chunks) {\n const result = stmtInsertChunk.run({\n fileId,\n lineStart: chunk.lineStart,\n lineEnd: chunk.lineEnd,\n type: chunk.type,\n name: chunk.name,\n parent: chunk.parent,\n text: chunk.text,\n imports: JSON.stringify(chunk.imports),\n exports: chunk.exports ? 1 : 0,\n hash: chunk.hash,\n });\n ids.push(Number(result.lastInsertRowid));\n }\n return ids;\n },\n\n getChunksByFile(fileId: number): ChunkRecord[] {\n const rows = stmtGetChunksByFile.all(fileId) as {\n id: number;\n fileId: number;\n lineStart: number;\n lineEnd: number;\n type: string;\n name: string | null;\n parent: string | null;\n text: string;\n imports: string;\n exports: number;\n hash: string;\n }[];\n\n return rows.map((r) => ({\n ...r,\n imports: JSON.parse(r.imports) as string[],\n exports: r.exports === 1,\n }));\n },\n\n getChunksByIds(ids: number[]): ChunkWithFile[] {\n if (ids.length === 0) return [];\n const placeholders = ids.map(() => \"?\").join(\",\");\n const rows = db\n .prepare(\n `SELECT c.id, c.file_id as fileId, f.path as filePath, f.language,\n c.line_start as lineStart, c.line_end as lineEnd,\n c.type, c.name, c.parent, c.text\n FROM chunks c\n JOIN files f ON f.id = c.file_id\n WHERE c.id IN (${placeholders})`,\n )\n .all(...ids) as ChunkWithFile[];\n return rows;\n },\n\n searchChunks(filters: ChunkSearchFilters, limit: number): ChunkWithFile[] {\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (filters.name) {\n switch (filters.nameMode ?? \"contains\") {\n case \"exact\":\n conditions.push(\"c.name = ?\");\n params.push(filters.name);\n break;\n case \"prefix\":\n conditions.push(\"c.name LIKE ? || '%'\");\n params.push(filters.name);\n break;\n case \"contains\":\n conditions.push(\"c.name LIKE '%' || ? 
|| '%'\");\n params.push(filters.name);\n break;\n }\n }\n\n if (filters.type) {\n conditions.push(\"c.type = ?\");\n params.push(filters.type);\n }\n\n if (filters.parent) {\n conditions.push(\"c.parent = ?\");\n params.push(filters.parent);\n }\n\n if (filters.language) {\n conditions.push(\"f.language = ?\");\n params.push(filters.language);\n }\n\n const where = conditions.length > 0 ? `WHERE ${conditions.join(\" AND \")}` : \"\";\n\n const sql = `\n SELECT c.id, c.file_id as fileId, f.path as filePath, f.language,\n c.line_start as lineStart, c.line_end as lineEnd,\n c.type, c.name, c.parent, c.text\n FROM chunks c\n JOIN files f ON f.id = c.file_id\n ${where}\n ORDER BY c.name, c.line_start\n LIMIT ?\n `;\n\n params.push(limit);\n return db.prepare(sql).all(...params) as ChunkWithFile[];\n },\n\n deleteChunksByFile(fileId: number): void {\n const chunkRows = stmtGetChunkIdsByFile.all(fileId) as { id: number }[];\n const chunkIds = chunkRows.map((r) => r.id);\n if (chunkIds.length > 0) {\n deleteVectorsByChunkIds(db, chunkIds);\n }\n stmtDeleteChunksByFile.run(fileId);\n },\n\n insertDependency(sourceChunkId: number, targetChunkId: number, type: string): void {\n stmtInsertDep.run(sourceChunkId, targetChunkId, type);\n },\n\n getDependencies(chunkId: number): { targetChunkId: number; type: string }[] {\n return stmtGetDeps.all(chunkId) as { targetChunkId: number; type: string }[];\n },\n\n getReverseDependencies(chunkId: number): { sourceChunkId: number; type: string }[] {\n return stmtGetReverseDeps.all(chunkId) as { sourceChunkId: number; type: string }[];\n },\n\n insertVector(chunkId: number, vector: Float32Array): void {\n vecInsert(db, chunkId, vector);\n },\n\n searchVectors(query: Float32Array, limit: number): VectorResult[] {\n return vecSearch(db, query, limit);\n },\n\n searchFTS(query: string, limit: number): FTSResult[] {\n const rows = stmtSearchFTS.all(query, limit) as {\n chunkId: number;\n name: string | null;\n rank: number;\n }[];\n return rows;\n },\n\n transaction<T>(fn: () => T): T {\n return db.transaction(fn)();\n },\n\n vacuum(): void {\n db.exec(\"VACUUM\");\n },\n\n close(): void {\n db.close();\n },\n\n getSchemaVersion(): number {\n const row = db\n .prepare(\"SELECT value FROM meta WHERE key = 'schema_version'\")\n .get() as { value: string } | undefined;\n return row ? parseInt(row.value, 10) : 0;\n },\n\n pragma(key: string): string {\n const result = db.pragma(key) as { journal_mode: string }[];\n if (Array.isArray(result) && result.length > 0) {\n return Object.values(result[0])[0] as string;\n }\n return String(result);\n },\n };\n}\n\n// ── Schema initialization ────────────────────────────────────────────────────\n\nfunction initializeSchema(\n db: BetterSqlite3.Database,\n dimensions: number,\n): void {\n const currentVersion = getMetaVersion(db);\n\n if (currentVersion >= SCHEMA_V) return;\n\n db.exec(SCHEMA_SQL);\n db.exec(VECTOR_TABLE_SQL(dimensions));\n db.exec(FTS_SQL);\n db.exec(FTS_TRIGGERS_SQL);\n\n db.prepare(\n \"INSERT OR REPLACE INTO meta (key, value) VALUES ('schema_version', ?)\",\n ).run(String(SCHEMA_V));\n}\n\nfunction getMetaVersion(db: BetterSqlite3.Database): number {\n try {\n const row = db\n .prepare(\"SELECT value FROM meta WHERE key = 'schema_version'\")\n .get() as { value: string } | undefined;\n return row ? 
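\n\n// ── Illustrative usage (not part of the original source) ────────────────────\n// Sketch of the typical write path, with made-up values: open a database,\n// upsert a file row, and replace its chunks inside one transaction.\nconst example = createDatabase(\"/tmp/demo/.ctx/index.db\");\nexample.transaction(() => {\n const fileId = example.upsertFile({ path: \"src/foo.ts\", language: \"typescript\", hash: \"abc123\", size: 512 });\n example.deleteChunksByFile(fileId); // re-index: drop stale chunks first\n example.insertChunks(fileId, [{ lineStart: 1, lineEnd: 3, type: \"function\", name: \"foo\", parent: null, text: \"export function foo() {}\", imports: [], exports: true, hash: \"def456\" }]);\n});\nconsole.log(example.getFileCount(), example.getChunkCount());\nexample.close();\n\n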
parseInt(row.value, 10) : 0;\n } catch {\n // meta table doesn't exist yet\n return 0;\n }\n}\n","export const SCHEMA_VERSION = 1;\n\nexport const SCHEMA_SQL = `\n CREATE TABLE IF NOT EXISTS meta (\n key TEXT PRIMARY KEY,\n value TEXT\n );\n\n CREATE TABLE IF NOT EXISTS files (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n path TEXT UNIQUE NOT NULL,\n language TEXT NOT NULL,\n hash TEXT NOT NULL,\n last_indexed INTEGER NOT NULL,\n size INTEGER NOT NULL\n );\n\n CREATE TABLE IF NOT EXISTS chunks (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n file_id INTEGER NOT NULL REFERENCES files(id) ON DELETE CASCADE,\n line_start INTEGER NOT NULL,\n line_end INTEGER NOT NULL,\n type TEXT NOT NULL,\n name TEXT,\n parent TEXT,\n text TEXT NOT NULL,\n imports JSON,\n exports INTEGER DEFAULT 0,\n hash TEXT NOT NULL\n );\n\n CREATE TABLE IF NOT EXISTS dependencies (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n source_chunk_id INTEGER NOT NULL REFERENCES chunks(id) ON DELETE CASCADE,\n target_chunk_id INTEGER NOT NULL REFERENCES chunks(id) ON DELETE CASCADE,\n type TEXT NOT NULL\n );\n\n CREATE INDEX IF NOT EXISTS idx_chunks_file ON chunks(file_id);\n CREATE INDEX IF NOT EXISTS idx_chunks_name ON chunks(name);\n CREATE INDEX IF NOT EXISTS idx_deps_source ON dependencies(source_chunk_id);\n CREATE INDEX IF NOT EXISTS idx_deps_target ON dependencies(target_chunk_id);\n`;\n\nexport const FTS_SQL = `\n CREATE VIRTUAL TABLE IF NOT EXISTS chunks_fts USING fts5(\n name, text, parent,\n content=chunks,\n content_rowid=id\n );\n`;\n\nexport const FTS_TRIGGERS_SQL = `\n CREATE TRIGGER IF NOT EXISTS chunks_fts_ai AFTER INSERT ON chunks BEGIN\n INSERT INTO chunks_fts(rowid, name, text, parent)\n VALUES (new.id, new.name, new.text, new.parent);\n END;\n\n CREATE TRIGGER IF NOT EXISTS chunks_fts_ad AFTER DELETE ON chunks BEGIN\n INSERT INTO chunks_fts(chunks_fts, rowid, name, text, parent)\n VALUES ('delete', old.id, old.name, old.text, old.parent);\n END;\n\n CREATE TRIGGER IF NOT EXISTS chunks_fts_au AFTER UPDATE ON chunks BEGIN\n INSERT INTO chunks_fts(chunks_fts, rowid, name, text, parent)\n VALUES ('delete', old.id, old.name, old.text, old.parent);\n INSERT INTO chunks_fts(rowid, name, text, parent)\n VALUES (new.id, new.name, new.text, new.parent);\n END;\n`;\n\nexport const VECTOR_TABLE_SQL = (dimensions: number): string =>\n `CREATE VIRTUAL TABLE IF NOT EXISTS chunk_vectors USING vec0(\n embedding float[${dimensions}]\n );`;\n","import type BetterSqlite3 from \"better-sqlite3\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\nexport interface VectorResult {\n chunkId: number;\n distance: number;\n}\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nfunction vecToBuffer(vec: Float32Array): Buffer {\n return Buffer.from(vec.buffer, vec.byteOffset, vec.byteLength);\n}\n\n// ── Operations ───────────────────────────────────────────────────────────────\n\nexport function insertVector(\n db: BetterSqlite3.Database,\n chunkId: number,\n vector: Float32Array,\n): void {\n // sqlite-vec requires literal integer rowid — parameterized rowid fails\n db.prepare(\n `INSERT INTO chunk_vectors(rowid, embedding) VALUES (${chunkId}, ?)`,\n ).run(vecToBuffer(vector));\n}\n\nexport function deleteVectorsByChunkIds(\n db: BetterSqlite3.Database,\n chunkIds: number[],\n): void {\n if (chunkIds.length === 0) return;\n const placeholders = chunkIds.map(() => \"?\").join(\",\");\n db.prepare(\n `DELETE FROM chunk_vectors WHERE rowid IN (${placeholders})`,\n 
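\n\n// ── Illustrative usage (not part of the original source) ────────────────────\n// The triggers in schema.ts keep the external-content chunks_fts table in\n// sync with chunks, so a plain MATCH is enough. Sketch, assuming an open\n// better-sqlite3 handle:\ndeclare const db: BetterSqlite3.Database;\nconst hits = db\n .prepare(\"SELECT rowid, name, rank FROM chunks_fts WHERE chunks_fts MATCH ? ORDER BY rank LIMIT ?\")\n .all(\"parseFile\", 5);\n// FTS5 bm25 rank is negative; more negative means a better match.\n\n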
).run(...chunkIds);\n}\n\nexport function getVectorCount(db: BetterSqlite3.Database): number {\n const row = db.prepare(\"SELECT COUNT(*) as count FROM chunk_vectors\").get() as {\n count: number;\n };\n return row.count;\n}\n\nexport function searchVectors(\n db: BetterSqlite3.Database,\n query: Float32Array,\n limit: number,\n): VectorResult[] {\n const rows = db\n .prepare(\n `SELECT rowid, distance\n FROM chunk_vectors\n WHERE embedding MATCH ?\n AND k = ${limit}\n ORDER BY distance`,\n )\n .all(vecToBuffer(query)) as { rowid: number; distance: number }[];\n\n return rows.map((r) => ({\n chunkId: r.rowid,\n distance: r.distance,\n }));\n}\n","import type { Command } from \"commander\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { createDatabase } from \"../../storage/db.js\";\nimport type { KontextDatabase } from \"../../storage/db.js\";\nimport { vectorSearch } from \"../../search/vector.js\";\nimport { ftsSearch } from \"../../search/fts.js\";\nimport { astSearch } from \"../../search/ast.js\";\nimport { KontextError, SearchError, ErrorCode } from \"../../utils/errors.js\";\nimport { handleCommandError } from \"../../utils/error-boundary.js\";\nimport { createLogger, LogLevel } from \"../../utils/logger.js\";\nimport { pathSearch } from \"../../search/path.js\";\nimport { fusionMerge } from \"../../search/fusion.js\";\nimport type { StrategyResult, StrategyName } from \"../../search/fusion.js\";\nimport type { SearchResult } from \"../../search/types.js\";\nimport { createLocalEmbedder } from \"../../indexer/embedder.js\";\nimport type { Embedder } from \"../../indexer/embedder.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** Options for the query command. */\nexport interface QueryOptions {\n limit: number;\n strategies: StrategyName[];\n language?: string;\n format: \"json\" | \"text\";\n}\n\nexport interface QueryOutputResult {\n file: string;\n lines: [number, number];\n name: string | null;\n type: string;\n score: number;\n snippet: string;\n language: string;\n}\n\nexport interface QueryOutput {\n query: string;\n results: QueryOutputResult[];\n stats: {\n strategies: string[];\n totalResults: number;\n searchTimeMs: number;\n };\n text?: string;\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst CTX_DIR = \".ctx\";\nconst DB_FILENAME = \"index.db\";\nconst SNIPPET_MAX_LENGTH = 200;\n\nconst STRATEGY_WEIGHTS: Record<StrategyName, number> = {\n vector: 1.0,\n fts: 0.8,\n ast: 0.9,\n path: 0.7,\n dependency: 0.6,\n};\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nfunction truncateSnippet(text: string): string {\n const oneLine = text.replace(/\\n/g, \" \").replace(/\\s+/g, \" \").trim();\n if (oneLine.length <= SNIPPET_MAX_LENGTH) return oneLine;\n return oneLine.slice(0, SNIPPET_MAX_LENGTH) + \"...\";\n}\n\nfunction toOutputResult(r: SearchResult): QueryOutputResult {\n return {\n file: r.filePath,\n lines: [r.lineStart, r.lineEnd],\n name: r.name,\n type: r.type,\n score: Math.round(r.score * 100) / 100,\n snippet: truncateSnippet(r.text),\n language: r.language,\n };\n}\n\nfunction formatTextOutput(query: string, results: QueryOutputResult[]): string {\n if (results.length === 0) {\n return `No results for \"${query}\"`;\n }\n\n const lines = [`Results for \"${query}\":\\n`];\n\n for (let i = 0; i < results.length; i++) {\n const r = results[i];\n const nameLabel = r.name ? 
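\n\n// ── Illustrative usage (not part of the original source) ────────────────────\n// Round-trip sketch for the vec0 helpers above: embeddings are stored as raw\n// float32 buffers keyed by chunk rowid, and KNN search returns the k nearest\n// rows by distance. Assumes a 384-dim embedding, the helpers imported, and an\n// open handle named db.\nconst embedding = new Float32Array(384).fill(0.1);\ninsertVector(db, 42, embedding);\nconst nearest = searchVectors(db, embedding, 5);\n// → [{ chunkId: 42, distance: 0 }, ...]; identical vectors have distance 0\n\n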
`${r.name} [${r.type}]` : `[${r.type}]`;\n lines.push(`${i + 1}. ${r.file}:${r.lines[0]}-${r.lines[1]} (score: ${r.score})`);\n lines.push(` ${nameLabel}`);\n lines.push(` ${r.snippet}`);\n lines.push(\"\");\n }\n\n return lines.join(\"\\n\");\n}\n\n/** Heuristic: extract likely symbol names from a query string */\nfunction extractSymbolNames(query: string): string[] {\n // Match snake_case, camelCase, and PascalCase identifiers. The snake_case\n // alternative is listed first: regex alternation is ordered, so putting it\n // later would let the camelCase branch split \"chunk_size\" into \"chunk\".\n const matches = query.match(/[a-z]+(?:_[a-z]+)+|[A-Z]?[a-z]+(?:[A-Z][a-z]+)*|[A-Z][a-zA-Z]+/g);\n return matches ?? [];\n}\n\n/** Heuristic: check if query looks like a file path pattern */\nfunction isPathLike(query: string): boolean {\n return query.includes(\"/\") || query.includes(\"*\") || query.includes(\".\");\n}\n\n// ── Main query function ──────────────────────────────────────────────────────\n\n/** Execute a multi-strategy search with RRF fusion. Returns ranked results. */\nexport async function runQuery(\n projectPath: string,\n query: string,\n options: QueryOptions,\n): Promise<QueryOutput> {\n const absoluteRoot = path.resolve(projectPath);\n const dbPath = path.join(absoluteRoot, CTX_DIR, DB_FILENAME);\n\n if (!fs.existsSync(dbPath)) {\n throw new KontextError(\n `Project not initialized. Run \"ctx init\" first. (${CTX_DIR}/${DB_FILENAME} not found)`,\n ErrorCode.NOT_INITIALIZED,\n );\n }\n\n const start = performance.now();\n const db = createDatabase(dbPath);\n\n try {\n const strategyResults = await runStrategies(db, query, options);\n const fused = fusionMerge(strategyResults, options.limit);\n const outputResults = fused.map(toOutputResult);\n\n const searchTimeMs = Math.round(performance.now() - start);\n const text =\n options.format === \"text\"\n ? formatTextOutput(query, outputResults)\n : undefined;\n\n return {\n query,\n results: outputResults,\n stats: {\n strategies: strategyResults.map((s) => s.strategy),\n totalResults: outputResults.length,\n searchTimeMs,\n },\n text,\n };\n } finally {\n db.close();\n }\n}\n\n// ── Strategy dispatch ────────────────────────────────────────────────────────\n\nasync function runStrategies(\n db: KontextDatabase,\n query: string,\n options: QueryOptions,\n): Promise<StrategyResult[]> {\n const results: StrategyResult[] = [];\n const filters = options.language ? 
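\n\n// ── Illustrative behavior (not part of the original source) ─────────────────\n// What the two heuristics above return for sample queries:\n//   extractSymbolNames(\"where does parseFile handle chunk_size?\")\n//     → [\"where\", \"does\", \"parseFile\", \"handle\", \"chunk_size\"]\n//     (plain lowercase words match too; fusion down-weights that noise)\n//   isPathLike(\"src/indexer/*.ts\") → true\n//   isPathLike(\"error handling\")   → false\n\n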
{ language: options.language } : undefined;\n const limit = options.limit * 3; // Fetch extra for fusion\n\n for (const strategy of options.strategies) {\n const weight = STRATEGY_WEIGHTS[strategy];\n const searchResults = await executeStrategy(\n db,\n strategy,\n query,\n limit,\n filters,\n );\n\n if (searchResults.length > 0) {\n results.push({ strategy, weight, results: searchResults });\n }\n }\n\n return results;\n}\n\nasync function executeStrategy(\n db: KontextDatabase,\n strategy: StrategyName,\n query: string,\n limit: number,\n filters?: { language?: string },\n): Promise<SearchResult[]> {\n switch (strategy) {\n case \"vector\": {\n const embedder = await loadEmbedder();\n return vectorSearch(db, embedder, query, limit, filters);\n }\n\n case \"fts\":\n return ftsSearch(db, query, limit, filters);\n\n case \"ast\": {\n const symbols = extractSymbolNames(query);\n if (symbols.length === 0) return [];\n\n const allResults: SearchResult[] = [];\n for (const name of symbols) {\n const results = astSearch(\n db,\n { name, language: filters?.language },\n limit,\n );\n allResults.push(...results);\n }\n\n // Deduplicate by chunkId\n const seen = new Set<number>();\n return allResults.filter((r) => {\n if (seen.has(r.chunkId)) return false;\n seen.add(r.chunkId);\n return true;\n });\n }\n\n case \"path\": {\n if (!isPathLike(query)) return [];\n return pathSearch(db, query, limit);\n }\n\n case \"dependency\":\n return [];\n }\n}\n\n// ── Embedder singleton ───────────────────────────────────────────────────────\n\nlet embedderInstance: Embedder | null = null;\n\nasync function loadEmbedder(): Promise<Embedder> {\n if (embedderInstance) return embedderInstance;\n embedderInstance = await createLocalEmbedder();\n return embedderInstance;\n}\n\n// ── CLI registration ─────────────────────────────────────────────────────────\n\nexport function registerQueryCommand(program: Command): void {\n program\n .command(\"query <query>\")\n .description(\"Multi-strategy code search\")\n .option(\"-l, --limit <n>\", \"Max results\", \"10\")\n .option(\n \"-s, --strategy <list>\",\n \"Comma-separated strategies: vector,fts,ast,path\",\n \"fts,ast\",\n )\n .option(\"--language <lang>\", \"Filter by language\")\n .option(\"-f, --format <fmt>\", \"Output format: json|text\", \"json\")\n .option(\"--no-vectors\", \"Skip vector search\")\n .action(async (query: string, opts: Record<string, string>) => {\n const projectPath = process.cwd();\n const verbose = program.opts()[\"verbose\"] === true;\n const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });\n const strategies = (opts[\"strategy\"] ?? \"fts,ast\")\n .split(\",\")\n .map((s) => s.trim()) as StrategyName[];\n\n try {\n const output = await runQuery(projectPath, query, {\n limit: parseInt(opts[\"limit\"] ?? \"10\", 10),\n strategies,\n language: opts[\"language\"] as string | undefined,\n format: (opts[\"format\"] ?? \"json\") as \"json\" | \"text\",\n });\n\n if (output.text) {\n console.log(output.text);\n } else {\n console.log(JSON.stringify(output, null, 2));\n }\n } catch (err) {\n const wrapped = err instanceof KontextError ? err\n : new SearchError(\n err instanceof Error ? err.message : String(err),\n ErrorCode.SEARCH_FAILED,\n err instanceof Error ? 
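\n\n// ── Illustrative usage (not part of the original source) ────────────────────\n// Programmatic equivalent of `ctx query \"token refresh\" -s fts,ast -l 5`:\nconst out = await runQuery(process.cwd(), \"token refresh\", {\n limit: 5,\n strategies: [\"fts\", \"ast\"],\n format: \"json\",\n});\nconsole.log(out.stats); // { strategies, totalResults, searchTimeMs }\n\n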
err : undefined,\n );\n process.exitCode = handleCommandError(wrapped, logger, verbose);\n }\n });\n}\n","import type { KontextDatabase, ChunkWithFile } from \"../storage/db.js\";\nimport type { Embedder } from \"../indexer/embedder.js\";\nimport type { SearchResult, SearchFilters } from \"./types.js\";\n\nexport type { SearchResult, SearchFilters } from \"./types.js\";\n\n// ── Score normalization ──────────────────────────────────────────────────────\n\nfunction distanceToScore(distance: number): number {\n return 1 / (1 + distance);\n}\n\n// ── Vector search ────────────────────────────────────────────────────────────\n\n/** KNN vector similarity search. Scores normalized as 1/(1+distance). */\nexport async function vectorSearch(\n db: KontextDatabase,\n embedder: Embedder,\n query: string,\n limit: number,\n filters?: SearchFilters,\n): Promise<SearchResult[]> {\n // 1. Embed the query\n const queryVec = await embedder.embedSingle(query);\n\n // 2. KNN search — fetch extra if filtering, to compensate for post-filter losses\n const fetchLimit = filters?.language ? limit * 3 : limit;\n const vectorResults = db.searchVectors(queryVec, fetchLimit);\n\n if (vectorResults.length === 0) return [];\n\n // 3. Fetch chunk + file metadata for all returned IDs\n const chunkIds = vectorResults.map((r) => r.chunkId);\n const chunks = db.getChunksByIds(chunkIds);\n\n // Build lookup map for O(1) access\n const chunkMap = new Map<number, ChunkWithFile>();\n for (const chunk of chunks) {\n chunkMap.set(chunk.id, chunk);\n }\n\n // 4. Join vector results with chunk metadata, apply filters\n const results: SearchResult[] = [];\n\n for (const vr of vectorResults) {\n const chunk = chunkMap.get(vr.chunkId);\n if (!chunk) continue;\n\n // Post-filter by language\n if (filters?.language && chunk.language !== filters.language) continue;\n\n results.push({\n chunkId: vr.chunkId,\n filePath: chunk.filePath,\n lineStart: chunk.lineStart,\n lineEnd: chunk.lineEnd,\n name: chunk.name,\n type: chunk.type,\n text: chunk.text,\n score: distanceToScore(vr.distance),\n language: chunk.language,\n });\n }\n\n // 5. Sort by score descending (highest first) and enforce limit\n results.sort((a, b) => b.score - a.score);\n return results.slice(0, limit);\n}\n
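\n\n// ── Illustrative behavior (not part of the original source) ─────────────────\n// How the 1/(1+distance) normalization above behaves:\n//   distance 0.0 → score 1.00 (identical vectors)\n//   distance 1.0 → score 0.50\n//   distance 4.0 → score 0.20\n// Smaller distances always win after the descending sort in vectorSearch.\n\n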
","import type { KontextDatabase, ChunkWithFile } from \"../storage/db.js\";\nimport type { SearchResult, SearchFilters } from \"./types.js\";\n\nexport type { SearchResult, SearchFilters } from \"./types.js\";\n\n// ── Score normalization ──────────────────────────────────────────────────────\n\nfunction bm25ToScore(rank: number): number {\n // FTS5 rank is negative and more negative means a better match, so a plain\n // 1/(1+|rank|) would rank the best matches last. |rank|/(1+|rank|) grows\n // toward 1 as the bm25 match improves, preserving the engine's ordering.\n return Math.abs(rank) / (1 + Math.abs(rank));\n}\n\n// ── FTS search ───────────────────────────────────────────────────────────────\n\n/** Full-text search via SQLite FTS5 with BM25 ranking. Scores normalized as |rank|/(1+|rank|). */\nexport function ftsSearch(\n db: KontextDatabase,\n query: string,\n limit: number,\n filters?: SearchFilters,\n): SearchResult[] {\n // 1. FTS5 search — fetch extra if filtering\n const fetchLimit = filters?.language ? limit * 3 : limit;\n const ftsResults = db.searchFTS(query, fetchLimit);\n\n if (ftsResults.length === 0) return [];\n\n // 2. Fetch chunk + file metadata\n const chunkIds = ftsResults.map((r) => r.chunkId);\n const chunks = db.getChunksByIds(chunkIds);\n\n const chunkMap = new Map<number, ChunkWithFile>();\n for (const chunk of chunks) {\n chunkMap.set(chunk.id, chunk);\n }\n\n // 3. Join FTS results with metadata, apply filters\n const results: SearchResult[] = [];\n\n for (const fts of ftsResults) {\n const chunk = chunkMap.get(fts.chunkId);\n if (!chunk) continue;\n\n if (filters?.language && chunk.language !== filters.language) continue;\n\n results.push({\n chunkId: fts.chunkId,\n filePath: chunk.filePath,\n lineStart: chunk.lineStart,\n lineEnd: chunk.lineEnd,\n name: chunk.name,\n type: chunk.type,\n text: chunk.text,\n score: bm25ToScore(fts.rank),\n language: chunk.language,\n });\n }\n\n // 4. Sort by score descending and enforce limit\n results.sort((a, b) => b.score - a.score);\n return results.slice(0, limit);\n}\n","import type { KontextDatabase } from \"../storage/db.js\";\nimport type { SearchResult } from \"./types.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\nexport interface ASTFilters {\n name?: string;\n type?: string;\n parent?: string;\n language?: string;\n matchMode?: \"exact\" | \"prefix\" | \"fuzzy\";\n}\n\n// ── Score constants ──────────────────────────────────────────────────────────\n\nconst SCORE_EXACT = 1.0;\nconst SCORE_PREFIX = 0.8;\nconst SCORE_FUZZY = 0.5;\n\n// ── AST search ───────────────────────────────────────────────────────────────\n\n/** AST-aware symbol search by name, type, parent, and language. Supports exact/prefix/fuzzy matching. */\nexport function astSearch(\n db: KontextDatabase,\n filters: ASTFilters,\n limit: number,\n): SearchResult[] {\n const matchMode = filters.matchMode ?? \"fuzzy\";\n\n const nameMode =\n matchMode === \"exact\"\n ? (\"exact\" as const)\n : matchMode === \"prefix\"\n ? (\"prefix\" as const)\n : (\"contains\" as const);\n\n const score =\n matchMode === \"exact\"\n ? SCORE_EXACT\n : matchMode === \"prefix\"\n ? SCORE_PREFIX\n : SCORE_FUZZY;\n\n const chunks = db.searchChunks(\n {\n name: filters.name,\n nameMode,\n type: filters.type,\n parent: filters.parent,\n language: filters.language,\n },\n limit,\n );\n\n return chunks.map((chunk) => ({\n chunkId: chunk.id,\n filePath: chunk.filePath,\n lineStart: chunk.lineStart,\n lineEnd: chunk.lineEnd,\n name: chunk.name,\n type: chunk.type,\n text: chunk.text,\n score,\n language: chunk.language,\n }));\n}\n","import type { KontextDatabase } from \"../storage/db.js\";\nimport type { SearchResult } from \"./types.js\";\n\n// ── Glob matching ────────────────────────────────────────────────────────────\n\n/**\n * Convert a simple glob pattern to a RegExp.\n * Supports: ** (any path), * (any segment chars), ? (single char)\n */\nfunction globToRegExp(pattern: string): RegExp {\n let re = \"\";\n let i = 0;\n\n while (i < pattern.length) {\n const ch = pattern[i];\n\n if (ch === \"*\" && pattern[i + 1] === \"*\") {\n // ** matches any number of path segments\n re += \".*\";\n i += 2;\n // Skip trailing slash after **\n if (pattern[i] === \"/\") i++;\n } else if (ch === \"*\") {\n // * matches anything except /\n re += \"[^/]*\";\n i++;\n } else if (ch === \"?\") {\n re += \"[^/]\";\n i++;\n } else if (\".+^${}()|[]\\\\\".includes(ch)) {\n re += \"\\\\\" + ch;\n i++;\n } else {\n re += ch;\n i++;\n }\n }\n\n return new RegExp(`^${re}$`);\n}\n\n// ── Path search ──────────────────────────────────────────────────────────────\n\n/** Search files by glob pattern. Converts the glob to a RegExp and filters all indexed paths in memory. 
*/\nexport function pathSearch(\n db: KontextDatabase,\n pattern: string,\n limit: number,\n): SearchResult[] {\n const allPaths = db.getAllFilePaths();\n const regex = globToRegExp(pattern);\n const matchingPaths = allPaths.filter((p) => regex.test(p));\n\n if (matchingPaths.length === 0) return [];\n\n // Get all chunks for matching files\n const results: SearchResult[] = [];\n\n for (const filePath of matchingPaths) {\n if (results.length >= limit) break;\n\n const file = db.getFile(filePath);\n if (!file) continue;\n\n const chunks = db.getChunksByFile(file.id);\n for (const chunk of chunks) {\n if (results.length >= limit) break;\n\n results.push({\n chunkId: chunk.id,\n filePath: file.path,\n lineStart: chunk.lineStart,\n lineEnd: chunk.lineEnd,\n name: chunk.name,\n type: chunk.type,\n text: chunk.text,\n score: 1.0,\n language: file.language,\n });\n }\n }\n\n return results;\n}\n\n// ── Dependency trace (BFS) ───────────────────────────────────────────────────\n\nconst DEPTH_SCORE_BASE = 1.0;\nconst DEPTH_SCORE_DECAY = 0.2;\n\n/** BFS traversal of the import/dependency graph. Scores decay with depth. */\nexport function dependencyTrace(\n db: KontextDatabase,\n chunkId: number,\n direction: \"imports\" | \"importedBy\",\n depth: number,\n): SearchResult[] {\n const visited = new Set<number>();\n visited.add(chunkId); // Don't include the starting chunk itself\n\n const results: SearchResult[] = [];\n let frontier = [chunkId];\n\n for (let d = 0; d < depth; d++) {\n const nextFrontier: number[] = [];\n\n for (const currentId of frontier) {\n const neighbors = getNeighbors(db, currentId, direction);\n\n for (const neighborId of neighbors) {\n if (visited.has(neighborId)) continue;\n visited.add(neighborId);\n nextFrontier.push(neighborId);\n }\n }\n\n if (nextFrontier.length === 0) break;\n\n // Fetch metadata for this depth level\n const chunks = db.getChunksByIds(nextFrontier);\n const score = DEPTH_SCORE_BASE - d * DEPTH_SCORE_DECAY;\n\n for (const chunk of chunks) {\n results.push({\n chunkId: chunk.id,\n filePath: chunk.filePath,\n lineStart: chunk.lineStart,\n lineEnd: chunk.lineEnd,\n name: chunk.name,\n type: chunk.type,\n text: chunk.text,\n score,\n language: chunk.language,\n });\n }\n\n frontier = nextFrontier;\n }\n\n return results;\n}\n\nfunction getNeighbors(\n db: KontextDatabase,\n chunkId: number,\n direction: \"imports\" | \"importedBy\",\n): number[] {\n if (direction === \"imports\") {\n return db.getDependencies(chunkId).map((d) => d.targetChunkId);\n }\n return db.getReverseDependencies(chunkId).map((d) => d.sourceChunkId);\n}\n","import type { SearchResult } from \"./types.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** Names of available search strategies. */\nexport type StrategyName = \"vector\" | \"fts\" | \"ast\" | \"path\" | \"dependency\";\n\n/** Results from a single search strategy, ready for fusion. */\nexport interface StrategyResult {\n strategy: StrategyName;\n weight: number;\n results: SearchResult[];\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\n/** Standard RRF damping constant */\nconst K = 60;\n\n// ── Reciprocal Rank Fusion ───────────────────────────────────────────────────\n\n/** Merge results from multiple strategies using Reciprocal Rank Fusion (K=60). 
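\n\n// ── Illustrative usage (not part of the original source) ────────────────────\n// Depth scoring in dependencyTrace above: with DEPTH_SCORE_BASE 1.0 and decay\n// 0.2, direct neighbors score 1.0, their neighbors 0.8, then 0.6, and so on.\n// Sketch, assuming dependencyTrace is imported and chunk 7 exists in an open\n// KontextDatabase named db:\nconst upstream = dependencyTrace(db, 7, \"imports\", 2);\n// depth-1 results carry score 1.0, depth-2 results carry score 0.8\n\n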
*/\nexport function fusionMerge(\n strategyResults: StrategyResult[],\n limit: number,\n): SearchResult[] {\n // Accumulate RRF scores per chunkId\n const scoreMap = new Map<number, number>();\n const resultMap = new Map<number, SearchResult>();\n\n for (const { weight, results } of strategyResults) {\n for (let rank = 0; rank < results.length; rank++) {\n const result = results[rank];\n const rrfScore = weight * (1 / (K + rank + 1)); // rank is 1-indexed in formula\n\n const existing = scoreMap.get(result.chunkId) ?? 0;\n scoreMap.set(result.chunkId, existing + rrfScore);\n\n // Keep the first occurrence's metadata\n if (!resultMap.has(result.chunkId)) {\n resultMap.set(result.chunkId, result);\n }\n }\n }\n\n if (scoreMap.size === 0) return [];\n\n // Build sorted results\n const entries = [...scoreMap.entries()].sort((a, b) => b[1] - a[1]);\n\n // Normalize scores to 0-1\n const maxScore = entries[0][1];\n\n const results: SearchResult[] = [];\n for (const [chunkId, rawScore] of entries.slice(0, limit)) {\n const base = resultMap.get(chunkId);\n if (!base) continue;\n results.push({\n ...base,\n score: maxScore > 0 ? rawScore / maxScore : 0,\n });\n }\n return results;\n}\n","import type { Command } from \"commander\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { createDatabase } from \"../../storage/db.js\";\nimport type { KontextDatabase } from \"../../storage/db.js\";\nimport { vectorSearch } from \"../../search/vector.js\";\nimport { ftsSearch } from \"../../search/fts.js\";\nimport { astSearch } from \"../../search/ast.js\";\nimport { pathSearch } from \"../../search/path.js\";\nimport { fusionMerge } from \"../../search/fusion.js\";\nimport { KontextError, SearchError, ErrorCode } from \"../../utils/errors.js\";\nimport { handleCommandError } from \"../../utils/error-boundary.js\";\nimport { createLogger, LogLevel } from \"../../utils/logger.js\";\nimport type { StrategyResult } from \"../../search/fusion.js\";\nimport type { SearchResult } from \"../../search/types.js\";\nimport {\n createGeminiProvider,\n createOpenAIProvider,\n createAnthropicProvider,\n steer,\n planSearch,\n} from \"../../steering/llm.js\";\nimport type { LLMProvider, StrategyPlan, SearchExecutor } from \"../../steering/llm.js\";\nimport { createLocalEmbedder } from \"../../indexer/embedder.js\";\nimport type { Embedder } from \"../../indexer/embedder.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** Options for the LLM-steered ask command. */\nexport interface AskOptions {\n limit: number;\n format: \"json\" | \"text\";\n provider?: LLMProvider;\n providerName?: string;\n noExplain?: boolean;\n}\n\nexport interface AskOutputResult {\n file: string;\n lines: [number, number];\n name: string | null;\n type: string;\n score: number;\n snippet: string;\n language: string;\n}\n\nexport interface AskOutput {\n query: string;\n interpretation: string;\n results: AskOutputResult[];\n explanation: string;\n stats: {\n strategies: string[];\n tokensUsed: number;\n costEstimate: number;\n totalResults: number;\n };\n fallback?: boolean;\n text?: string;\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst CTX_DIR = \".ctx\";\nconst DB_FILENAME = \"index.db\";\nconst SNIPPET_MAX_LENGTH = 200;\n\nconst FALLBACK_NOTICE =\n \"No LLM provider configured. Set CTX_GEMINI_KEY, CTX_OPENAI_KEY, or CTX_ANTHROPIC_KEY. 
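\n\n// ── Worked example (not part of the original source) ────────────────────────\n// RRF in fusionMerge above, with K = 60: a chunk ranked 1st by fts (weight\n// 0.8) and 3rd by vector (weight 1.0) accumulates\n//   0.8 * 1/(60 + 1) + 1.0 * 1/(60 + 3) ≈ 0.0131 + 0.0159 ≈ 0.0290\n// before division by the top raw score rescales everything to 0-1.\n\n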
Running basic search instead.\";\n\n// ── Provider detection ───────────────────────────────────────────────────────\n\nconst PROVIDER_ENV_MAP: Record<string, string> = {\n gemini: \"CTX_GEMINI_KEY\",\n openai: \"CTX_OPENAI_KEY\",\n anthropic: \"CTX_ANTHROPIC_KEY\",\n};\n\nconst PROVIDER_FACTORIES: Record<string, (key: string) => LLMProvider> = {\n gemini: createGeminiProvider,\n openai: createOpenAIProvider,\n anthropic: createAnthropicProvider,\n};\n\nconst DETECTION_ORDER = [\"gemini\", \"openai\", \"anthropic\"];\n\n/** Auto-detect LLM provider from env vars (CTX_GEMINI_KEY → CTX_OPENAI_KEY → CTX_ANTHROPIC_KEY). */\nexport function detectProvider(explicit?: string): LLMProvider | null {\n if (explicit) {\n const envVar = PROVIDER_ENV_MAP[explicit];\n const apiKey = envVar ? process.env[envVar] : undefined;\n if (!apiKey) return null;\n const factory = PROVIDER_FACTORIES[explicit];\n return factory ? factory(apiKey) : null;\n }\n\n for (const name of DETECTION_ORDER) {\n const envVar = PROVIDER_ENV_MAP[name];\n const apiKey = envVar ? process.env[envVar] : undefined;\n if (apiKey) {\n const factory = PROVIDER_FACTORIES[name];\n if (factory) return factory(apiKey);\n }\n }\n\n return null;\n}\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nfunction truncateSnippet(text: string): string {\n const oneLine = text.replace(/\\n/g, \" \").replace(/\\s+/g, \" \").trim();\n if (oneLine.length <= SNIPPET_MAX_LENGTH) return oneLine;\n return oneLine.slice(0, SNIPPET_MAX_LENGTH) + \"...\";\n}\n\nfunction toOutputResult(r: SearchResult): AskOutputResult {\n return {\n file: r.filePath,\n lines: [r.lineStart, r.lineEnd],\n name: r.name,\n type: r.type,\n score: Math.round(r.score * 100) / 100,\n snippet: truncateSnippet(r.text),\n language: r.language,\n };\n}\n\nfunction formatTextOutput(output: AskOutput): string {\n const lines: string[] = [];\n\n if (output.fallback) {\n lines.push(FALLBACK_NOTICE);\n lines.push(\"\");\n }\n\n if (output.interpretation) {\n lines.push(`Understanding: ${output.interpretation}`);\n lines.push(\"\");\n }\n\n if (output.results.length === 0) {\n lines.push(`No results found for \"${output.query}\"`);\n } else {\n lines.push(`Found ${output.results.length} relevant location(s):`);\n lines.push(\"\");\n\n for (let i = 0; i < output.results.length; i++) {\n const r = output.results[i];\n const nameLabel = r.name ? `${r.name} [${r.type}]` : `[${r.type}]`;\n lines.push(`${i + 1}. 
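\n\n// ── Illustrative behavior (not part of the original source) ─────────────────\n// Detection order is gemini → openai → anthropic; the first configured env\n// var wins. With a hypothetical key:\nprocess.env[\"CTX_OPENAI_KEY\"] = \"sk-example\";\nconst detected = detectProvider(); // → openai provider (CTX_GEMINI_KEY unset)\nconst forced = detectProvider(\"anthropic\"); // → null unless CTX_ANTHROPIC_KEY is set\n\n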
${r.file}:${r.lines[0]}-${r.lines[1]} (score: ${r.score})`);\n lines.push(` ${nameLabel}`);\n lines.push(` ${r.snippet}`);\n lines.push(\"\");\n }\n }\n\n if (output.explanation) {\n lines.push(\"Explanation:\");\n lines.push(output.explanation);\n lines.push(\"\");\n }\n\n lines.push(\"─────────\");\n const cost = output.stats.costEstimate.toFixed(4);\n lines.push(\n `Tokens: ${output.stats.tokensUsed.toLocaleString()} | Cost: ~$${cost} | Strategies: ${output.stats.strategies.join(\", \")}`,\n );\n\n return lines.join(\"\\n\");\n}\n\n// ── Search executor factory ──────────────────────────────────────────────────\n\nfunction createSearchExecutor(db: KontextDatabase): SearchExecutor {\n return async (strategies: StrategyPlan[], limit: number): Promise<SearchResult[]> => {\n const strategyResults: StrategyResult[] = [];\n const fetchLimit = limit * 3;\n\n for (const plan of strategies) {\n const results = await executeStrategy(db, plan, fetchLimit);\n if (results.length > 0) {\n strategyResults.push({\n strategy: plan.strategy,\n weight: plan.weight,\n results,\n });\n }\n }\n\n return fusionMerge(strategyResults, limit);\n };\n}\n\nasync function executeStrategy(\n db: KontextDatabase,\n plan: StrategyPlan,\n limit: number,\n): Promise<SearchResult[]> {\n switch (plan.strategy) {\n case \"vector\": {\n const embedder = await loadEmbedder();\n return vectorSearch(db, embedder, plan.query, limit);\n }\n case \"fts\":\n return ftsSearch(db, plan.query, limit);\n case \"ast\":\n return astSearch(db, { name: plan.query }, limit);\n case \"path\":\n return pathSearch(db, plan.query, limit);\n case \"dependency\":\n return [];\n }\n}\n\n// ── Embedder singleton ───────────────────────────────────────────────────────\n\nlet embedderInstance: Embedder | null = null;\n\nasync function loadEmbedder(): Promise<Embedder> {\n if (embedderInstance) return embedderInstance;\n embedderInstance = await createLocalEmbedder();\n return embedderInstance;\n}\n\n// ── Fallback search (no LLM) ────────────────────────────────────────────────\n\nasync function fallbackSearch(\n db: KontextDatabase,\n query: string,\n limit: number,\n): Promise<AskOutput> {\n const executor = createSearchExecutor(db);\n const fallbackStrategies: StrategyPlan[] = [\n { strategy: \"fts\", query, weight: 0.8, reason: \"fallback keyword search\" },\n { strategy: \"ast\", query, weight: 0.9, reason: \"fallback structural search\" },\n ];\n\n const results = await executor(fallbackStrategies, limit);\n\n return {\n query,\n interpretation: \"\",\n results: results.map(toOutputResult),\n explanation: \"\",\n stats: {\n strategies: fallbackStrategies.map((s) => s.strategy),\n tokensUsed: 0,\n costEstimate: 0,\n totalResults: results.length,\n },\n fallback: true,\n };\n}\n\n// ── Main ask function ────────────────────────────────────────────────────────\n\n/** LLM-steered natural language search. Falls back to basic multi-strategy search without API key. */\nexport async function runAsk(\n projectPath: string,\n query: string,\n options: AskOptions,\n): Promise<AskOutput> {\n const absoluteRoot = path.resolve(projectPath);\n const dbPath = path.join(absoluteRoot, CTX_DIR, DB_FILENAME);\n\n if (!fs.existsSync(dbPath)) {\n throw new KontextError(\n `Project not initialized. Run \"ctx init\" first. (${CTX_DIR}/${DB_FILENAME} not found)`,\n ErrorCode.NOT_INITIALIZED,\n );\n }\n\n const db = createDatabase(dbPath);\n\n try {\n const provider = options.provider ?? 
null;\n\n if (!provider) {\n const output = await fallbackSearch(db, query, options.limit);\n if (options.format === \"text\") {\n output.text = formatTextOutput(output);\n }\n return output;\n }\n\n const executor = createSearchExecutor(db);\n\n if (options.noExplain) {\n return await runNoExplain(provider, query, options, executor);\n }\n\n return await runWithSteering(provider, query, options, executor);\n } finally {\n db.close();\n }\n}\n\nasync function runNoExplain(\n provider: LLMProvider,\n query: string,\n options: AskOptions,\n executor: SearchExecutor,\n): Promise<AskOutput> {\n const plan = await planSearch(provider, query);\n const results = await executor(plan.strategies, options.limit);\n\n const output: AskOutput = {\n query,\n interpretation: plan.interpretation,\n results: results.map(toOutputResult),\n explanation: \"\",\n stats: {\n strategies: plan.strategies.map((s) => s.strategy),\n tokensUsed: 0,\n costEstimate: 0,\n totalResults: results.length,\n },\n };\n\n if (options.format === \"text\") {\n output.text = formatTextOutput(output);\n }\n\n return output;\n}\n\nasync function runWithSteering(\n provider: LLMProvider,\n query: string,\n options: AskOptions,\n executor: SearchExecutor,\n): Promise<AskOutput> {\n const result = await steer(provider, query, options.limit, executor);\n\n const output: AskOutput = {\n query,\n interpretation: result.interpretation,\n results: result.results.map(toOutputResult),\n explanation: result.explanation,\n stats: {\n strategies: result.strategies.map((s) => s.strategy),\n tokensUsed: result.tokensUsed,\n costEstimate: result.costEstimate,\n totalResults: result.results.length,\n },\n };\n\n if (options.format === \"text\") {\n output.text = formatTextOutput(output);\n }\n\n return output;\n}\n\n// ── CLI registration ─────────────────────────────────────────────────────────\n\nexport function registerAskCommand(program: Command): void {\n program\n .command(\"ask <query>\")\n .description(\"LLM-steered natural language code search\")\n .option(\"-l, --limit <n>\", \"Max results\", \"10\")\n .option(\"-p, --provider <name>\", \"LLM provider: gemini|openai|anthropic\")\n .option(\"-f, --format <fmt>\", \"Output format: json|text\", \"text\")\n .option(\"--no-explain\", \"Skip explanation, just return search results\")\n .action(async (query: string, opts: Record<string, string | boolean>) => {\n const projectPath = process.cwd();\n const verbose = program.opts()[\"verbose\"] === true;\n const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });\n const providerName = opts[\"provider\"] as string | undefined;\n const provider = detectProvider(providerName);\n\n try {\n const output = await runAsk(projectPath, query, {\n limit: parseInt(String(opts[\"limit\"] ?? \"10\"), 10),\n format: (opts[\"format\"] ?? \"text\") as \"json\" | \"text\",\n provider: provider ?? undefined,\n noExplain: opts[\"explain\"] === false,\n });\n\n if (output.text) {\n console.log(output.text);\n } else {\n console.log(JSON.stringify(output, null, 2));\n }\n } catch (err) {\n const wrapped = err instanceof KontextError ? err\n : new SearchError(\n err instanceof Error ? err.message : String(err),\n ErrorCode.SEARCH_FAILED,\n err instanceof Error ? 
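\n\n// ── Illustrative usage (not part of the original source) ────────────────────\n// Programmatic equivalent of `ctx ask \"where are auth tokens refreshed?\"`.\n// With no provider configured this takes the fallback path (fts + ast only):\nconst answer = await runAsk(process.cwd(), \"where are auth tokens refreshed?\", {\n limit: 10,\n format: \"text\",\n provider: detectProvider() ?? undefined,\n});\nconsole.log(answer.text ?? JSON.stringify(answer, null, 2));\n\n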
err : undefined,\n );\n process.exitCode = handleCommandError(wrapped, logger, verbose);\n }\n });\n}\n","import type { SearchResult } from \"../search/types.js\";\nimport type { StrategyName } from \"../search/fusion.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\nexport interface ChatMessage {\n role: \"system\" | \"user\" | \"assistant\";\n content: string;\n}\n\n/** LLM provider for the steering layer. Wraps Gemini, OpenAI, or Anthropic chat APIs. */\nexport interface LLMProvider {\n name: string;\n chat(messages: ChatMessage[]): Promise<string>;\n}\n\nexport interface StrategyPlan {\n strategy: StrategyName;\n query: string;\n weight: number;\n reason: string;\n}\n\nexport interface SearchPlan {\n interpretation: string;\n strategies: StrategyPlan[];\n}\n\n/** Full result from LLM-steered search: plan, results, explanation, and cost. */\nexport interface SteeringResult {\n interpretation: string;\n strategies: StrategyPlan[];\n results: SearchResult[];\n explanation: string;\n tokensUsed: number;\n costEstimate: number;\n}\n\nexport type SearchExecutor = (\n strategies: StrategyPlan[],\n limit: number,\n) => Promise<SearchResult[]>;\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst GEMINI_URL =\n \"https://generativelanguage.googleapis.com/v1beta/models/gemini-3-flash-preview:generateContent\";\nconst OPENAI_URL = \"https://api.openai.com/v1/responses\";\nconst ANTHROPIC_URL = \"https://api.anthropic.com/v1/messages\";\n\nconst PLAN_SYSTEM_PROMPT = `You are a code search strategy planner. Given a user query about code, output a JSON object with:\n- \"interpretation\": a one-line summary of what the user is looking for\n- \"strategies\": an array of search strategy objects, each with:\n - \"strategy\": one of \"vector\", \"fts\", \"ast\", \"path\", \"dependency\"\n - \"query\": the optimized query string for that strategy\n - \"weight\": a number 0-1 indicating importance\n - \"reason\": brief explanation of why this strategy is used\n\nChoose strategies based on query type:\n- Conceptual/natural language → vector (semantic search)\n- Keywords/identifiers → fts (full-text search)\n- Symbol names (functions, classes) → ast (structural search)\n- File paths or patterns → path (path glob search)\n- Import/dependency chains → dependency\n\nOutput ONLY valid JSON, no markdown.`;\n\nconst SYNTHESIZE_SYSTEM_PROMPT = `You are a code search assistant. Given search results, write a brief, helpful explanation of what was found. Be concise (2-4 sentences). Reference specific files and function names. Do not use markdown.`;\n\n// ── Gemini provider ──────────────────────────────────────────────────────────\n\nexport function createGeminiProvider(apiKey: string): LLMProvider {\n return {\n name: \"gemini\",\n async chat(messages: ChatMessage[]): Promise<string> {\n const contents = messages.map((m) => ({\n role: m.role === \"assistant\" ? 
\"model\" : \"user\",\n parts: [{ text: m.content }],\n }));\n\n const systemInstruction = messages.find((m) => m.role === \"system\");\n const nonSystemContents = contents.filter(\n (_, i) => messages[i].role !== \"system\",\n );\n\n const body: Record<string, unknown> = {\n contents: nonSystemContents,\n generationConfig: {\n temperature: 0.1,\n maxOutputTokens: 6000,\n },\n };\n\n if (systemInstruction) {\n body[\"systemInstruction\"] = {\n parts: [{ text: systemInstruction.content }],\n };\n }\n\n const response = await fetch(`${GEMINI_URL}?key=${apiKey}`, {\n method: \"POST\",\n headers: { \"Content-Type\": \"application/json\" },\n body: JSON.stringify(body),\n });\n\n if (!response.ok) {\n const errorText = await response.text();\n throw new Error(`Gemini API error (${response.status}): ${errorText}`);\n }\n\n const data = (await response.json()) as {\n candidates: { content: { parts: { text: string }[] } }[];\n };\n\n return data.candidates[0].content.parts[0].text;\n },\n };\n}\n\n// ── OpenAI provider ──────────────────────────────────────────────────────────\n\nexport function createOpenAIProvider(apiKey: string): LLMProvider {\n return {\n name: \"openai\",\n async chat(messages: ChatMessage[]): Promise<string> {\n const systemMessage = messages.find((m) => m.role === \"system\");\n const userMessages = messages.filter((m) => m.role !== \"system\");\n const userInput = userMessages.map((m) => m.content).join(\"\\n\\n\");\n\n const body: Record<string, unknown> = {\n model: \"gpt-5-mini\",\n input: userInput,\n max_output_tokens: 6000,\n reasoning: { effort: \"low\" },\n };\n\n if (systemMessage) {\n body[\"instructions\"] = systemMessage.content;\n }\n\n const response = await fetch(OPENAI_URL, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n Authorization: `Bearer ${apiKey}`,\n },\n body: JSON.stringify(body),\n });\n\n if (!response.ok) {\n const errorText = await response.text();\n throw new Error(`OpenAI API error (${response.status}): ${errorText}`);\n }\n\n const data = (await response.json()) as { output_text: string };\n\n return data.output_text;\n },\n };\n}\n\n// ── Anthropic provider ───────────────────────────────────────────────────────\n\nexport function createAnthropicProvider(apiKey: string): LLMProvider {\n return {\n name: \"anthropic\",\n async chat(messages: ChatMessage[]): Promise<string> {\n const systemMessage = messages.find((m) => m.role === \"system\");\n const nonSystemMessages = messages\n .filter((m) => m.role !== \"system\")\n .map((m) => ({ role: m.role, content: m.content }));\n\n const body: Record<string, unknown> = {\n model: \"claude-3-5-haiku-20241022\",\n max_tokens: 6000,\n temperature: 0.1,\n messages: nonSystemMessages,\n };\n\n if (systemMessage) {\n body[\"system\"] = systemMessage.content;\n }\n\n const response = await fetch(ANTHROPIC_URL, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n \"x-api-key\": apiKey,\n \"anthropic-version\": \"2023-06-01\",\n },\n body: JSON.stringify(body),\n });\n\n if (!response.ok) {\n const errorText = await response.text();\n throw new Error(\n `Anthropic API error (${response.status}): ${errorText}`,\n );\n }\n\n const data = (await response.json()) as {\n content: { type: string; text: string }[];\n };\n\n return data.content[0].text;\n },\n };\n}\n\n// ── Plan step ────────────────────────────────────────────────────────────────\n\nconst VALID_STRATEGIES = new Set<string>([\n \"vector\",\n \"fts\",\n \"ast\",\n \"path\",\n 
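\n\n// ── Illustrative usage (not part of the original source) ────────────────────\n// All three providers above expose the same chat() surface, so the steering\n// layer stays provider-agnostic. Sketch with a hypothetical API key:\nconst llm = createGeminiProvider(\"example-key\");\nconst reply = await llm.chat([\n { role: \"system\", content: \"Answer in one word.\" },\n { role: \"user\", content: \"Capital of France?\" },\n]);\n\n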
\"dependency\",\n]);\n\nfunction buildFallbackPlan(query: string): SearchPlan {\n const strategies: StrategyPlan[] = [\n { strategy: \"fts\", query, weight: 0.8, reason: \"Full-text keyword search\" },\n { strategy: \"ast\", query, weight: 0.9, reason: \"Structural symbol search\" },\n ];\n\n return {\n interpretation: `Searching for: ${query}`,\n strategies,\n };\n}\n\nfunction parseSearchPlan(raw: string, query: string): SearchPlan {\n // Try to extract JSON from the response (may contain markdown fences)\n const jsonMatch = raw.match(/\\{[\\s\\S]*\\}/);\n if (!jsonMatch) return buildFallbackPlan(query);\n\n const parsed = JSON.parse(jsonMatch[0]) as {\n interpretation?: string;\n strategies?: StrategyPlan[];\n };\n\n if (\n !parsed.interpretation ||\n !Array.isArray(parsed.strategies) ||\n parsed.strategies.length === 0\n ) {\n return buildFallbackPlan(query);\n }\n\n // Validate strategy names\n const validStrategies = parsed.strategies.filter((s) =>\n VALID_STRATEGIES.has(s.strategy),\n );\n\n if (validStrategies.length === 0) return buildFallbackPlan(query);\n\n return {\n interpretation: parsed.interpretation,\n strategies: validStrategies,\n };\n}\n\n/** Ask the LLM to interpret a query and plan which search strategies to use. */\nexport async function planSearch(\n provider: LLMProvider,\n query: string,\n): Promise<SearchPlan> {\n try {\n const response = await provider.chat([\n { role: \"system\", content: PLAN_SYSTEM_PROMPT },\n { role: \"user\", content: query },\n ]);\n\n return parseSearchPlan(response, query);\n } catch {\n return buildFallbackPlan(query);\n }\n}\n\n// ── Synthesize step ──────────────────────────────────────────────────────────\n\nfunction formatResultsForLLM(results: SearchResult[]): string {\n return results\n .slice(0, 10)\n .map(\n (r, i) =>\n `${i + 1}. ${r.filePath}:${r.lineStart}-${r.lineEnd} ${r.name ?? \"(unnamed)\"} [${r.type}] (score: ${r.score.toFixed(2)})\\n ${r.text.slice(0, 150)}`,\n )\n .join(\"\\n\\n\");\n}\n\nexport async function synthesizeExplanation(\n provider: LLMProvider,\n query: string,\n results: SearchResult[],\n): Promise<string> {\n if (results.length === 0) {\n return `No results found for \"${query}\".`;\n }\n\n const formattedResults = formatResultsForLLM(results);\n\n const response = await provider.chat([\n { role: \"system\", content: SYNTHESIZE_SYSTEM_PROMPT },\n {\n role: \"user\",\n content: `Query: \"${query}\"\\n\\nSearch results:\\n${formattedResults}`,\n },\n ]);\n\n return response;\n}\n\n// ── Steer (full pipeline) ────────────────────────────────────────────────────\n\nfunction estimateTokens(text: string): number {\n // Rough estimate: ~4 chars per token\n return Math.ceil(text.length / 4);\n}\n\n/** Full steering pipeline: plan → search → synthesize. Falls back to basic search on failure. 
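\n\n// ── Illustrative plan (not part of the original source) ─────────────────────\n// A well-formed reply to PLAN_SYSTEM_PROMPT for \"where is the JWT refreshed?\"\n// might parse into:\nconst examplePlan: SearchPlan = {\n interpretation: \"Find the code path that refreshes JWT auth tokens\",\n strategies: [\n { strategy: \"fts\", query: \"jwt refresh token\", weight: 0.8, reason: \"keyword match\" },\n { strategy: \"ast\", query: \"refreshToken\", weight: 0.9, reason: \"likely symbol name\" },\n ],\n};\n\n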
*/\nexport async function steer(\n provider: LLMProvider,\n query: string,\n limit: number,\n searchExecutor: SearchExecutor,\n): Promise<SteeringResult> {\n let totalTokens = 0;\n\n // Step 1: Plan\n const plan = await planSearch(provider, query);\n totalTokens += estimateTokens(PLAN_SYSTEM_PROMPT + query);\n totalTokens += estimateTokens(JSON.stringify(plan));\n\n // Step 2: Execute search\n const results = await searchExecutor(plan.strategies, limit);\n\n // Step 3: Synthesize\n let explanation: string;\n try {\n explanation = await synthesizeExplanation(provider, query, results);\n totalTokens += estimateTokens(SYNTHESIZE_SYSTEM_PROMPT + query);\n totalTokens += estimateTokens(explanation);\n } catch {\n explanation = results.length > 0\n ? `Found ${results.length} result(s) for \"${query}\".`\n : `No results found for \"${query}\".`;\n }\n\n // Rough cost estimate (assuming ~$0.15/1M input tokens for budget models)\n const costEstimate = (totalTokens / 1_000_000) * 0.15;\n\n return {\n interpretation: plan.interpretation,\n strategies: plan.strategies,\n results,\n explanation,\n tokensUsed: totalTokens,\n costEstimate,\n };\n}\n","import type { Command } from \"commander\";\n\nexport function registerFindCommand(program: Command): void {\n program\n .command(\"find <query>\")\n .description(\"Natural language code search\")\n .option(\"--full\", \"Include source code in output\")\n .option(\"--json\", \"Machine-readable JSON output\")\n .option(\"--no-llm\", \"Skip steering LLM, raw vector search only\")\n .option(\"-l, --limit <n>\", \"Max results\", \"5\")\n .option(\"--language <lang>\", \"Filter by language\")\n .action((_query: string, _options: Record<string, unknown>) => {\n console.log(\"ctx find — not yet implemented\");\n });\n}\n","import type { Command } from \"commander\";\n\nexport function registerUpdateCommand(program: Command): void {\n program\n .command(\"update\")\n .description(\"Incremental re-index of changed files\")\n .action(() => {\n console.log(\"ctx update — not yet implemented\");\n });\n}\n","import type { Command } from \"commander\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { createDatabase } from \"../../storage/db.js\";\nimport type { KontextDatabase } from \"../../storage/db.js\";\nimport { createWatcher } from \"../../watcher/watcher.js\";\nimport type { FileChange, WatcherHandle } from \"../../watcher/watcher.js\";\nimport { initParser, parseFile } from \"../../indexer/parser.js\";\nimport { chunkFile } from \"../../indexer/chunker.js\";\nimport type { Chunk } from \"../../indexer/chunker.js\";\nimport { prepareChunkText, createLocalEmbedder } from \"../../indexer/embedder.js\";\nimport type { Embedder } from \"../../indexer/embedder.js\";\nimport { KontextError, IndexError, ErrorCode } from \"../../utils/errors.js\";\nimport { handleCommandError } from \"../../utils/error-boundary.js\";\nimport { createLogger, LogLevel } from \"../../utils/logger.js\";\nimport { LANGUAGE_MAP } from \"../../indexer/discovery.js\";\nimport { runInit } from \"./init.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** Options for the watch command. */\nexport interface WatchOptions {\n init?: boolean;\n debounceMs?: number;\n log?: (msg: string) => void;\n skipEmbedding?: boolean;\n}\n\n/** Handle for a running watch session. Call stop() for graceful shutdown. 
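Editor's note: the accounting in steer() is deliberately rough: estimateTokens assumes ~4 characters per token, and the cost line assumes ~$0.15 per 1M input tokens for budget models. A worked example of that arithmetic:

const promptChars = 2000;                    // a ~2,000-character prompt
const tokens = Math.ceil(promptChars / 4);   // 500 tokens
const cost = (tokens / 1_000_000) * 0.15;    // 0.000075, i.e. less than a hundredth of a cent
console.log(`${tokens} tokens, est. $${cost.toFixed(6)}`);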
*/\nexport interface WatchHandle {\n stop(): Promise<void>;\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst CTX_DIR = \".ctx\";\nconst DB_FILENAME = \"index.db\";\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nfunction timestamp(): string {\n return new Date().toLocaleTimeString(\"en-GB\", { hour12: false });\n}\n\nfunction detectLanguage(filePath: string): string | null {\n const ext = path.extname(filePath).toLowerCase();\n return LANGUAGE_MAP[ext] ?? null;\n}\n\nfunction formatDuration(ms: number): string {\n if (ms < 1000) return `${Math.round(ms)}ms`;\n return `${(ms / 1000).toFixed(1)}s`;\n}\n\nasync function hashFile(absolutePath: string): Promise<string> {\n const { createHash } = await import(\"node:crypto\");\n const content = fs.readFileSync(absolutePath);\n return createHash(\"sha256\").update(content).digest(\"hex\");\n}\n\n// ── Re-index pipeline ────────────────────────────────────────────────────────\n\ninterface ReindexResult {\n filesProcessed: number;\n chunksUpdated: number;\n durationMs: number;\n}\n\nasync function reindexChanges(\n db: KontextDatabase,\n changes: FileChange[],\n projectPath: string,\n options: { skipEmbedding?: boolean; log: (msg: string) => void },\n): Promise<ReindexResult> {\n const start = performance.now();\n const log = options.log;\n\n let filesProcessed = 0;\n let chunksUpdated = 0;\n\n const allChunksWithMeta: { fileRelPath: string; chunk: Chunk }[] = [];\n\n for (const change of changes) {\n const absolutePath = path.join(projectPath, change.path);\n const language = detectLanguage(change.path);\n\n if (change.type === \"unlink\") {\n log(`[${timestamp()}] Deleted: ${change.path}`);\n const existingFile = db.getFile(change.path);\n if (existingFile) {\n db.deleteFile(change.path);\n }\n filesProcessed++;\n continue;\n }\n\n if (!language) continue;\n if (!fs.existsSync(absolutePath)) continue;\n\n const label = change.type === \"add\" ? 
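Editor's note: hashFile above gives the re-index pipeline sha256 content hashes, which makes a cheap unchanged-file check possible before parsing. A sketch under one assumption: that the record returned by db.getFile() exposes the hash field that upsertFile stores (the record shape is not visible in this diff).

async function isUnchanged(
  db: KontextDatabase,
  relPath: string,
  absPath: string,
): Promise<boolean> {
  // Assumption: the stored file record carries the sha256 written by upsertFile.
  const existing = db.getFile(relPath) as { hash?: string } | undefined;
  if (!existing?.hash) return false;
  return existing.hash === (await hashFile(absPath));
}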
\"Added\" : \"Changed\";\n log(`[${timestamp()}] ${label}: ${change.path}`);\n\n // Delete old chunks for this file\n const existingFile = db.getFile(change.path);\n if (existingFile) {\n db.deleteChunksByFile(existingFile.id);\n }\n\n // Parse\n let nodes;\n try {\n nodes = await parseFile(absolutePath, language);\n } catch {\n log(`[${timestamp()}] ⚠ Skipping ${change.path} (parse error)`);\n continue;\n }\n\n // Chunk\n const chunks = chunkFile(nodes, change.path);\n\n // Compute file hash\n const hash = await hashFile(absolutePath);\n const size = fs.statSync(absolutePath).size;\n\n // Upsert file record\n const fileId = db.upsertFile({\n path: change.path,\n language,\n hash,\n size,\n });\n\n // Insert chunks\n const chunkIds = db.insertChunks(\n fileId,\n chunks.map((c) => ({\n lineStart: c.lineStart,\n lineEnd: c.lineEnd,\n type: c.type,\n name: c.name,\n parent: c.parent,\n text: c.text,\n imports: c.imports,\n exports: c.exports,\n hash: c.hash,\n })),\n );\n\n for (let i = 0; i < chunks.length; i++) {\n allChunksWithMeta.push({\n fileRelPath: change.path,\n chunk: { ...chunks[i], id: String(chunkIds[i]) },\n });\n }\n\n chunksUpdated += chunks.length;\n filesProcessed++;\n }\n\n // Embedding (if enabled)\n if (!options.skipEmbedding && allChunksWithMeta.length > 0) {\n const embedder = await loadEmbedder();\n\n const texts = allChunksWithMeta.map((cm) =>\n prepareChunkText(cm.fileRelPath, cm.chunk.parent, cm.chunk.text),\n );\n\n const vectors = await embedder.embed(texts);\n\n db.transaction(() => {\n for (let i = 0; i < allChunksWithMeta.length; i++) {\n const chunkDbId = parseInt(allChunksWithMeta[i].chunk.id, 10);\n db.insertVector(chunkDbId, vectors[i]);\n }\n });\n }\n\n const durationMs = performance.now() - start;\n return { filesProcessed, chunksUpdated, durationMs };\n}\n\n// ── Embedder singleton ───────────────────────────────────────────────────────\n\nlet embedderInstance: Embedder | null = null;\n\nasync function loadEmbedder(): Promise<Embedder> {\n if (embedderInstance) return embedderInstance;\n embedderInstance = await createLocalEmbedder();\n return embedderInstance;\n}\n\n// ── Main watch function ──────────────────────────────────────────────────────\n\n/** Start watching a project for file changes. Re-indexes incrementally on each change batch. */\nexport async function runWatch(\n projectPath: string,\n options: WatchOptions = {},\n): Promise<WatchHandle> {\n const absoluteRoot = path.resolve(projectPath);\n const dbPath = path.join(absoluteRoot, CTX_DIR, DB_FILENAME);\n const log = options.log ?? console.log;\n\n // Optionally run init first\n if (options.init) {\n await runInit(absoluteRoot, { log, skipEmbedding: options.skipEmbedding });\n }\n\n // Validate .ctx/ exists\n if (!fs.existsSync(dbPath)) {\n throw new KontextError(\n `Project not initialized. Run \"ctx init\" first or use --init flag. 
(${CTX_DIR}/${DB_FILENAME} not found)`,\n ErrorCode.NOT_INITIALIZED,\n );\n }\n\n // Initialize parser\n await initParser();\n\n // Open DB\n const db = createDatabase(dbPath);\n\n // Create watcher\n let watcherHandle: WatcherHandle | null = null;\n\n const watcher = createWatcher(\n {\n projectPath: absoluteRoot,\n dbPath,\n debounceMs: options.debounceMs,\n },\n {\n onChange: (changes: FileChange[]) => {\n void (async () => {\n try {\n const result = await reindexChanges(db, changes, absoluteRoot, {\n skipEmbedding: options.skipEmbedding,\n log,\n });\n\n if (result.filesProcessed > 0) {\n log(\n `[${timestamp()}] Re-indexed: ${result.filesProcessed} file(s), ${result.chunksUpdated} chunks updated (${formatDuration(result.durationMs)})`,\n );\n }\n } catch (err) {\n log(\n `[${timestamp()}] Error: ${err instanceof Error ? err.message : String(err)}`,\n );\n }\n })();\n },\n onError: (err) => {\n log(`[${timestamp()}] Watcher error: ${err.message}`);\n },\n },\n );\n\n // Start watching\n await watcher.start();\n watcherHandle = watcher;\n\n log(`Watching ${absoluteRoot} for changes...`);\n\n return {\n async stop(): Promise<void> {\n if (watcherHandle) {\n await watcherHandle.stop();\n watcherHandle = null;\n }\n db.close();\n log(\"Stopped watching. Database saved.\");\n },\n };\n}\n\n// ── CLI registration ─────────────────────────────────────────────────────────\n\nexport function registerWatchCommand(program: Command): void {\n program\n .command(\"watch [path]\")\n .description(\"Watch mode — re-index on file changes\")\n .option(\"--init\", \"Run init before starting watch\")\n .option(\"--debounce <ms>\", \"Debounce interval in ms\", \"500\")\n .option(\"--embed\", \"Enable embedding during watch (slower)\")\n .action(async (inputPath: string | undefined, opts: Record<string, string | boolean>) => {\n const projectPath = inputPath ?? process.cwd();\n const verbose = program.opts()[\"verbose\"] === true;\n const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });\n const skipEmbedding = opts[\"embed\"] !== true;\n\n try {\n const handle = await runWatch(projectPath, {\n init: opts[\"init\"] === true,\n debounceMs: parseInt(String(opts[\"debounce\"] ?? \"500\"), 10),\n skipEmbedding,\n });\n\n // Handle Ctrl+C\n const shutdown = () => {\n void handle.stop().then(() => process.exit(0));\n };\n process.on(\"SIGINT\", shutdown);\n process.on(\"SIGTERM\", shutdown);\n } catch (err) {\n const wrapped = err instanceof KontextError ? err\n : new IndexError(\n err instanceof Error ? err.message : String(err),\n ErrorCode.WATCHER_FAILED,\n err instanceof Error ? err : undefined,\n );\n process.exitCode = handleCommandError(wrapped, logger, verbose);\n }\n });\n}\n","import { watch, type FSWatcher } from \"chokidar\";\nimport path from \"node:path\";\nimport { LANGUAGE_MAP } from \"../indexer/discovery.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** A single file change event from the watcher. */\nexport interface FileChange {\n type: \"add\" | \"change\" | \"unlink\";\n path: string;\n}\n\nexport interface WatcherOptions {\n projectPath: string;\n dbPath?: string;\n debounceMs?: number;\n ignored?: string[];\n}\n\nexport interface WatcherEvents {\n onChange: (changes: FileChange[]) => void;\n onError: (error: Error) => void;\n}\n\n/** Handle returned by createWatcher. Call start() to begin, stop() to clean up. 
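Editor's note: runWatch is also usable programmatically; the WatchHandle it returns owns both the chokidar watcher and the database connection. A usage sketch (top-level await, e.g. in a test harness, against an already-initialized project):

const handle = await runWatch(process.cwd(), {
  debounceMs: 250,
  skipEmbedding: true,
  log: (msg) => console.error(msg),
});
// ... later, on teardown: stops the watcher and closes the DB.
await handle.stop();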
*/\nexport interface WatcherHandle {\n start(): Promise<void>;\n stop(): Promise<void>;\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst DEFAULT_DEBOUNCE_MS = 500;\n\nconst ALWAYS_IGNORED_DIRS = new Set([\n \"node_modules\",\n \".git\",\n \".ctx\",\n \"dist\",\n \"build\",\n \"__pycache__\",\n]);\n\nconst WATCHED_EXTENSIONS = new Set(Object.keys(LANGUAGE_MAP));\n\n// ── Implementation ───────────────────────────────────────────────────────────\n\nfunction isWatchedFile(filePath: string): boolean {\n const ext = path.extname(filePath).toLowerCase();\n return WATCHED_EXTENSIONS.has(ext);\n}\n\n/** Create a file watcher that debounces changes and filters by code extensions. */\nexport function createWatcher(\n options: WatcherOptions,\n events: WatcherEvents,\n): WatcherHandle {\n const debounceMs = options.debounceMs ?? DEFAULT_DEBOUNCE_MS;\n const projectPath = path.resolve(options.projectPath);\n\n const extraIgnored = new Set(options.ignored ?? []);\n\n function isIgnored(filePath: string): boolean {\n const segments = filePath.split(path.sep);\n for (const seg of segments) {\n if (ALWAYS_IGNORED_DIRS.has(seg)) return true;\n if (extraIgnored.has(seg)) return true;\n }\n return false;\n }\n\n let watcher: FSWatcher | null = null;\n let pendingChanges = new Map<string, FileChange>();\n let debounceTimer: ReturnType<typeof setTimeout> | null = null;\n\n function flush(): void {\n if (pendingChanges.size === 0) return;\n\n const batch = [...pendingChanges.values()];\n pendingChanges = new Map();\n\n events.onChange(batch);\n }\n\n function scheduleFlush(): void {\n if (debounceTimer) clearTimeout(debounceTimer);\n debounceTimer = setTimeout(flush, debounceMs);\n }\n\n function handleEvent(type: \"add\" | \"change\" | \"unlink\", filePath: string): void {\n if (!isWatchedFile(filePath)) return;\n\n // filePath is relative to cwd (chokidar cwd option)\n pendingChanges.set(filePath, { type, path: filePath });\n scheduleFlush();\n }\n\n return {\n start(): Promise<void> {\n return new Promise<void>((resolve) => {\n watcher = watch(\".\", {\n cwd: projectPath,\n ignored: (fp: string) => isIgnored(fp),\n ignoreInitial: true,\n persistent: true,\n });\n\n watcher.on(\"add\", (fp) => handleEvent(\"add\", fp));\n watcher.on(\"change\", (fp) => handleEvent(\"change\", fp));\n watcher.on(\"unlink\", (fp) => handleEvent(\"unlink\", fp));\n watcher.on(\"error\", (err: unknown) => {\n events.onError(err instanceof Error ? err : new Error(String(err)));\n });\n watcher.on(\"ready\", () => resolve());\n });\n },\n\n async stop(): Promise<void> {\n if (debounceTimer) {\n clearTimeout(debounceTimer);\n debounceTimer = null;\n }\n pendingChanges.clear();\n\n if (watcher) {\n await watcher.close();\n watcher = null;\n }\n },\n };\n}\n","import type { Command } from \"commander\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { createDatabase } from \"../../storage/db.js\";\nimport { handleCommandError } from \"../../utils/error-boundary.js\";\nimport { createLogger, LogLevel } from \"../../utils/logger.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\ninterface ProjectConfig {\n model: string;\n dimensions: number;\n}\n\n/** Structured output from the status command. 
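Editor's note: the heart of createWatcher above is the Map-keyed debounce: rapid events for the same path coalesce, and only the latest event per path survives the flush. The same pattern in isolation, independent of chokidar (a sketch using the FileChange shape defined above):

function makeBatcher(
  flushMs: number,
  onFlush: (changes: FileChange[]) => void,
): (change: FileChange) => void {
  const pending = new Map<string, FileChange>();
  let timer: ReturnType<typeof setTimeout> | null = null;
  return (change) => {
    pending.set(change.path, change);          // last event per path wins
    if (timer) clearTimeout(timer);
    timer = setTimeout(() => {
      onFlush([...pending.values()]);
      pending.clear();
    }, flushMs);
  };
}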
*/\nexport interface StatusOutput {\n initialized: boolean;\n fileCount: number;\n chunkCount: number;\n vectorCount: number;\n dbSizeBytes: number;\n lastIndexed: string | null;\n languages: Map<string, number>;\n config: ProjectConfig | null;\n text: string;\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst CTX_DIR = \".ctx\";\nconst DB_FILENAME = \"index.db\";\nconst CONFIG_FILENAME = \"config.json\";\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nfunction formatBytes(bytes: number): string {\n if (bytes < 1024) return `${bytes} B`;\n if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;\n return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;\n}\n\nfunction formatTimestamp(raw: string): string {\n const num = Number(raw);\n if (Number.isNaN(num)) return raw;\n const date = new Date(num);\n return date.toISOString().replace(\"T\", \" \").replace(/\\.\\d+Z$/, \"\");\n}\n\nfunction capitalize(s: string): string {\n return s.charAt(0).toUpperCase() + s.slice(1);\n}\n\nfunction readConfig(ctxDir: string): ProjectConfig | null {\n const configPath = path.join(ctxDir, CONFIG_FILENAME);\n if (!fs.existsSync(configPath)) return null;\n\n try {\n const raw = fs.readFileSync(configPath, \"utf-8\");\n const parsed = JSON.parse(raw) as { model?: string; dimensions?: number };\n return {\n model: parsed.model ?? \"unknown\",\n dimensions: parsed.dimensions ?? 0,\n };\n } catch {\n return null;\n }\n}\n\n// ── Format text output ───────────────────────────────────────────────────────\n\nfunction formatNotInitialized(projectPath: string): string {\n return [\n `Kontext Status — ${projectPath}`,\n \"\",\n ' Not initialized. Run \"ctx init\" first.',\n \"\",\n ].join(\"\\n\");\n}\n\nfunction formatStatus(projectPath: string, output: StatusOutput): string {\n const lines: string[] = [\n `Kontext Status — ${projectPath}`,\n \"\",\n ` Initialized: Yes`,\n ` Database: ${CTX_DIR}/${DB_FILENAME} (${formatBytes(output.dbSizeBytes)})`,\n ];\n\n if (output.lastIndexed) {\n lines.push(` Last indexed: ${formatTimestamp(output.lastIndexed)}`);\n }\n\n lines.push(\"\");\n lines.push(` Files: ${output.fileCount.toLocaleString()}`);\n lines.push(` Chunks: ${output.chunkCount.toLocaleString()}`);\n lines.push(` Vectors: ${output.vectorCount.toLocaleString()}`);\n\n if (output.languages.size > 0) {\n lines.push(\"\");\n lines.push(\" Languages:\");\n\n const maxLangLen = Math.max(\n ...[...output.languages.keys()].map((k) => capitalize(k).length),\n );\n\n for (const [lang, count] of output.languages) {\n const label = capitalize(lang).padEnd(maxLangLen + 2);\n lines.push(` ${label}${count} file${count !== 1 ? \"s\" : \"\"}`);\n }\n }\n\n if (output.config) {\n lines.push(\"\");\n lines.push(\n ` Embedder: local (${output.config.model}, ${output.config.dimensions} dims)`,\n );\n }\n\n lines.push(\"\");\n return lines.join(\"\\n\");\n}\n\n// ── Main status function ─────────────────────────────────────────────────────\n\n/** Gather index statistics: file/chunk/vector counts, languages, DB size, config. 
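Editor's note: for reference, the 1024-based thresholds in formatBytes above produce:

console.log(formatBytes(512));             // "512 B"
console.log(formatBytes(2048));            // "2.0 KB"
console.log(formatBytes(5 * 1024 * 1024)); // "5.0 MB"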
*/\nexport async function runStatus(projectPath: string): Promise<StatusOutput> {\n const absoluteRoot = path.resolve(projectPath);\n const ctxDir = path.join(absoluteRoot, CTX_DIR);\n const dbPath = path.join(ctxDir, DB_FILENAME);\n\n if (!fs.existsSync(dbPath)) {\n const output: StatusOutput = {\n initialized: false,\n fileCount: 0,\n chunkCount: 0,\n vectorCount: 0,\n dbSizeBytes: 0,\n lastIndexed: null,\n languages: new Map(),\n config: null,\n text: formatNotInitialized(absoluteRoot),\n };\n return output;\n }\n\n const db = createDatabase(dbPath);\n\n try {\n const fileCount = db.getFileCount();\n const chunkCount = db.getChunkCount();\n const vectorCount = db.getVectorCount();\n const languages = db.getLanguageBreakdown();\n const lastIndexed = db.getLastIndexed();\n const config = readConfig(ctxDir);\n const dbSizeBytes = fs.statSync(dbPath).size;\n\n const output: StatusOutput = {\n initialized: true,\n fileCount,\n chunkCount,\n vectorCount,\n dbSizeBytes,\n lastIndexed,\n languages,\n config,\n text: \"\",\n };\n\n output.text = formatStatus(absoluteRoot, output);\n return output;\n } finally {\n db.close();\n }\n}\n\n// ── CLI registration ─────────────────────────────────────────────────────────\n\nexport function registerStatusCommand(program: Command): void {\n program\n .command(\"status [path]\")\n .description(\"Show index statistics\")\n .action(async (inputPath?: string) => {\n const projectPath = inputPath ?? process.cwd();\n const verbose = program.opts()[\"verbose\"] === true;\n const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });\n\n try {\n const output = await runStatus(projectPath);\n console.log(output.text);\n } catch (err) {\n process.exitCode = handleCommandError(err, logger, verbose);\n }\n });\n}\n","import type { Command } from \"commander\";\n\nexport function registerSymbolsCommand(program: Command): void {\n program\n .command(\"symbols\")\n .description(\"List all indexed symbols\")\n .option(\"--type <type>\", \"Filter by symbol type (function, class, etc.)\")\n .action((_options: Record<string, unknown>) => {\n console.log(\"ctx symbols — not yet implemented\");\n });\n}\n","import type { Command } from \"commander\";\n\nexport function registerDepsCommand(program: Command): void {\n program\n .command(\"deps <file>\")\n .description(\"Show dependency graph for a file\")\n .action((_file: string) => {\n console.log(\"ctx deps — not yet implemented\");\n });\n}\n","import type { Command } from \"commander\";\n\nexport function registerChunkCommand(program: Command): void {\n program\n .command(\"chunk <location>\")\n .description(\"Show the chunk containing a file:line location\")\n .action((_location: string) => {\n console.log(\"ctx chunk — not yet implemented\");\n });\n}\n","import type { Command } from \"commander\";\nimport fs from \"node:fs\";\nimport path from \"node:path\";\nimport { ConfigError, ErrorCode } from \"../../utils/errors.js\";\nimport { handleCommandError } from \"../../utils/error-boundary.js\";\nimport { createLogger, LogLevel } from \"../../utils/logger.js\";\n\n// ── Types ────────────────────────────────────────────────────────────────────\n\n/** Project-level configuration stored in .ctx/config.json. 
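Editor's note: because runStatus returns structured fields alongside the preformatted text, it can back other frontends (JSON output, editor integrations) without reparsing. A consumption sketch:

const status = await runStatus(process.cwd());
if (!status.initialized) {
  console.error('Run "ctx init" first.');
} else {
  console.log(`${status.fileCount} files, ${status.chunkCount} chunks, ${status.vectorCount} vectors`);
  for (const [lang, count] of status.languages) console.log(`  ${lang}: ${count}`);
}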
*/\nexport interface KontextConfig {\n embedder: {\n provider: string;\n model: string;\n dimensions: number;\n };\n search: {\n defaultLimit: number;\n strategies: string[];\n weights: Record<string, number>;\n };\n watch: {\n debounceMs: number;\n ignored: string[];\n };\n llm: {\n provider: string | null;\n model: string | null;\n };\n}\n\nexport interface ConfigShowOutput {\n config: KontextConfig;\n text: string;\n}\n\n// ── Constants ────────────────────────────────────────────────────────────────\n\nconst CTX_DIR = \".ctx\";\nconst CONFIG_FILENAME = \"config.json\";\n\n/** Default configuration values for a new project. */\nexport const DEFAULT_CONFIG: KontextConfig = {\n embedder: {\n provider: \"local\",\n model: \"Xenova/all-MiniLM-L6-v2\",\n dimensions: 384,\n },\n search: {\n defaultLimit: 10,\n strategies: [\"vector\", \"fts\", \"ast\", \"path\"],\n weights: { vector: 1.0, fts: 0.8, ast: 0.9, path: 0.7, dependency: 0.6 },\n },\n watch: {\n debounceMs: 500,\n ignored: [],\n },\n llm: {\n provider: null,\n model: null,\n },\n};\n\n// ── Validation ───────────────────────────────────────────────────────────────\n\nconst VALID_EMBEDDER_PROVIDERS = new Set([\"local\", \"voyage\", \"openai\"]);\nconst VALID_LLM_PROVIDERS = new Set([\"gemini\", \"openai\", \"anthropic\"]);\n\ninterface ValidationRule {\n validate: (value: unknown) => boolean;\n message: string;\n}\n\nconst VALIDATION_RULES: Record<string, ValidationRule> = {\n \"embedder.provider\": {\n validate: (v) => typeof v === \"string\" && VALID_EMBEDDER_PROVIDERS.has(v),\n message: `Must be one of: ${[...VALID_EMBEDDER_PROVIDERS].join(\", \")}`,\n },\n \"embedder.dimensions\": {\n validate: (v) => typeof v === \"number\" && v > 0 && Number.isInteger(v),\n message: \"Must be a positive integer\",\n },\n \"search.defaultLimit\": {\n validate: (v) => typeof v === \"number\" && v > 0 && Number.isInteger(v),\n message: \"Must be a positive integer\",\n },\n \"watch.debounceMs\": {\n validate: (v) => typeof v === \"number\" && v >= 0 && Number.isInteger(v),\n message: \"Must be a non-negative integer\",\n },\n \"llm.provider\": {\n validate: (v) => v === null || (typeof v === \"string\" && VALID_LLM_PROVIDERS.has(v)),\n message: `Must be null or one of: ${[...VALID_LLM_PROVIDERS].join(\", \")}`,\n },\n};\n\n// ── Helpers ──────────────────────────────────────────────────────────────────\n\nfunction resolveCtxDir(projectPath: string): string {\n const absoluteRoot = path.resolve(projectPath);\n const ctxDir = path.join(absoluteRoot, CTX_DIR);\n\n if (!fs.existsSync(ctxDir)) {\n throw new ConfigError(\n `Project not initialized. Run \"ctx init\" first. 
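Editor's note: keys without an entry in VALIDATION_RULES are written as-is, so guarding a new key means adding a rule in the same shape. A hypothetical rule for embedder.model (not in the package, purely illustrative):

VALIDATION_RULES["embedder.model"] = {
  validate: (v) => typeof v === "string" && v.length > 0,
  message: "Must be a non-empty string",
};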
(${CTX_DIR}/ not found)`,\n ErrorCode.NOT_INITIALIZED,\n );\n }\n\n return ctxDir;\n}\n\nfunction configPath(ctxDir: string): string {\n return path.join(ctxDir, CONFIG_FILENAME);\n}\n\nfunction readConfig(ctxDir: string): KontextConfig {\n const filePath = configPath(ctxDir);\n\n if (!fs.existsSync(filePath)) {\n // Create default config\n writeConfig(ctxDir, DEFAULT_CONFIG);\n return structuredClone(DEFAULT_CONFIG);\n }\n\n const raw = fs.readFileSync(filePath, \"utf-8\");\n const parsed = JSON.parse(raw) as Partial<KontextConfig>;\n\n // Merge with defaults to fill missing keys\n return mergeWithDefaults(parsed);\n}\n\nfunction writeConfig(ctxDir: string, config: KontextConfig): void {\n fs.writeFileSync(\n configPath(ctxDir),\n JSON.stringify(config, null, 2) + \"\\n\",\n );\n}\n\nfunction mergeWithDefaults(partial: Partial<KontextConfig>): KontextConfig {\n return {\n embedder: { ...DEFAULT_CONFIG.embedder, ...partial.embedder },\n search: {\n ...DEFAULT_CONFIG.search,\n ...partial.search,\n weights: { ...DEFAULT_CONFIG.search.weights, ...partial.search?.weights },\n },\n watch: { ...DEFAULT_CONFIG.watch, ...partial.watch },\n llm: { ...DEFAULT_CONFIG.llm, ...partial.llm },\n };\n}\n\nfunction getNestedValue(obj: Record<string, unknown>, key: string): unknown {\n const parts = key.split(\".\");\n let current: unknown = obj;\n\n for (const part of parts) {\n if (current === null || current === undefined || typeof current !== \"object\") {\n return undefined;\n }\n current = (current as Record<string, unknown>)[part];\n }\n\n return current;\n}\n\nfunction setNestedValue(obj: Record<string, unknown>, key: string, value: unknown): void {\n const parts = key.split(\".\");\n let current: Record<string, unknown> = obj;\n\n for (let i = 0; i < parts.length - 1; i++) {\n const part = parts[i];\n if (typeof current[part] !== \"object\" || current[part] === null) {\n current[part] = {};\n }\n current = current[part] as Record<string, unknown>;\n }\n\n current[parts[parts.length - 1]] = value;\n}\n\nfunction parseValue(rawValue: string): unknown {\n // Handle null\n if (rawValue === \"null\") return null;\n\n // Handle boolean\n if (rawValue === \"true\") return true;\n if (rawValue === \"false\") return false;\n\n // Handle numbers\n const num = Number(rawValue);\n if (!Number.isNaN(num) && rawValue.trim() !== \"\") return num;\n\n // Handle JSON arrays/objects\n if (rawValue.startsWith(\"[\") || rawValue.startsWith(\"{\")) {\n try {\n return JSON.parse(rawValue) as unknown;\n } catch {\n // Fall through to string\n }\n }\n\n // Default: string\n return rawValue;\n}\n\n// ── Public API ───────────────────────────────────────────────────────────────\n\n/** Read and return the full project configuration. Creates defaults if missing. */\nexport function runConfigShow(projectPath: string): ConfigShowOutput {\n const ctxDir = resolveCtxDir(projectPath);\n const config = readConfig(ctxDir);\n\n return {\n config,\n text: JSON.stringify(config, null, 2),\n };\n}\n\n/** Get a config value by dot-notation key (e.g., \"search.defaultLimit\"). */\nexport function runConfigGet(projectPath: string, key: string): unknown {\n const ctxDir = resolveCtxDir(projectPath);\n const config = readConfig(ctxDir);\n return getNestedValue(config as unknown as Record<string, unknown>, key);\n}\n\n/** Set a config value by dot-notation key. Validates against known rules. 
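Editor's note: parseValue is what gives the set subcommand typed writes from plain CLI strings; all of the coercions below follow directly from the code above:

parseValue("null");              // null
parseValue("true");              // true
parseValue("500");               // 500 (number)
parseValue('["vector","fts"]');  // ["vector", "fts"] (parsed JSON array)
parseValue("local");             // "local" (falls through to string)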
*/\nexport function runConfigSet(\n projectPath: string,\n key: string,\n rawValue: string,\n): void {\n const ctxDir = resolveCtxDir(projectPath);\n const config = readConfig(ctxDir);\n const value = parseValue(rawValue);\n\n // Validate if rule exists\n const rule = VALIDATION_RULES[key];\n if (rule && !rule.validate(value)) {\n throw new ConfigError(`Invalid value for \"${key}\": ${rule.message}`, ErrorCode.CONFIG_INVALID);\n }\n\n setNestedValue(config as unknown as Record<string, unknown>, key, value);\n writeConfig(ctxDir, config);\n}\n\n/** Reset all configuration to defaults. */\nexport function runConfigReset(projectPath: string): void {\n const ctxDir = resolveCtxDir(projectPath);\n writeConfig(ctxDir, structuredClone(DEFAULT_CONFIG));\n}\n\n// ── CLI registration ─────────────────────────────────────────────────────────\n\nexport function registerConfigCommand(program: Command): void {\n const cmd = program\n .command(\"config\")\n .description(\"Show or modify configuration\");\n\n function configErrorHandler(err: unknown): void {\n const verbose = program.opts()[\"verbose\"] === true;\n const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });\n process.exitCode = handleCommandError(err, logger, verbose);\n }\n\n cmd\n .command(\"show\")\n .description(\"Show current configuration\")\n .action(() => {\n try {\n const output = runConfigShow(process.cwd());\n console.log(output.text);\n } catch (err) {\n configErrorHandler(err);\n }\n });\n\n cmd\n .command(\"get <key>\")\n .description(\"Get a configuration value (dot notation)\")\n .action((key: string) => {\n try {\n const value = runConfigGet(process.cwd(), key);\n console.log(\n typeof value === \"object\" ? JSON.stringify(value, null, 2) : String(value),\n );\n } catch (err) {\n configErrorHandler(err);\n }\n });\n\n cmd\n .command(\"set <key> <value>\")\n .description(\"Set a configuration value (dot notation)\")\n .action((key: string, value: string) => {\n try {\n runConfigSet(process.cwd(), key, value);\n console.log(`Set ${key} = ${value}`);\n } catch (err) {\n configErrorHandler(err);\n }\n });\n\n cmd\n .command(\"reset\")\n .description(\"Reset configuration to defaults\")\n .action(() => {\n try {\n runConfigReset(process.cwd());\n console.log(\"Configuration reset to defaults.\");\n } catch (err) {\n configErrorHandler(err);\n }\n });\n}\n","import type { Command } from \"commander\";\n\nexport function registerAuthCommand(program: Command): void {\n program\n .command(\"auth\")\n .description(\"Set API keys for LLM and embedding providers\")\n .action(() => {\n console.log(\"ctx auth — not yet implemented\");\n 
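Editor's note: the exported config functions compose into a full round trip; a sketch against an initialized project, equivalent to running `ctx config set search.defaultLimit 20` and friends from the CLI:

runConfigSet(process.cwd(), "search.defaultLimit", "20"); // validated: positive integer
console.log(runConfigGet(process.cwd(), "search.defaultLimit")); // 20
runConfigReset(process.cwd()); // rewrites .ctx/config.json from DEFAULT_CONFIG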
});\n}\n"],"mappings":";;;AAAA,SAAS,eAAe;;;ACCxB,OAAOA,SAAQ;AACf,OAAOC,WAAU;;;ACDjB,OAAO,QAAQ;AACf,OAAO,UAAU;AACjB,OAAO,YAAY;AAsBZ,IAAM,eAAuC;AAAA,EAClD,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,OAAO;AAAA,EACP,OAAO;AAAA,EACP,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,MAAM;AAAA,EACN,MAAM;AAAA,EACN,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,SAAS;AAAA,EACT,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,OAAO;AAAA,EACP,QAAQ;AAAA,EACR,QAAQ;AACV;AAIA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAIA,SAAS,YAAY,UAAiC;AACpD,QAAM,WAAW,KAAK,SAAS,QAAQ;AAGvC,MAAI,SAAS,WAAW,GAAG,KAAK,CAAC,SAAS,SAAS,KAAK,CAAC,GAAG;AAC1D,UAAM,SAAS;AACf,WAAO,aAAa,MAAM,KAAK;AAAA,EACjC;AAEA,QAAM,MAAM,KAAK,QAAQ,QAAQ,EAAE,YAAY;AAC/C,SAAO,aAAa,GAAG,KAAK;AAC9B;AAEA,eAAe,eAAe,UAAqC;AACjE,MAAI;AACF,UAAM,UAAU,MAAM,GAAG,SAAS,UAAU,OAAO;AACnD,WAAO,QACJ,MAAM,IAAI,EACV,IAAI,CAAC,SAAS,KAAK,KAAK,CAAC,EACzB,OAAO,CAAC,SAAS,KAAK,SAAS,KAAK,CAAC,KAAK,WAAW,GAAG,CAAC;AAAA,EAC9D,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAEA,eAAe,SACb,UACA,gBACuB;AACvB,MAAI;AACF,WAAO,iBACH,MAAM,GAAG,KAAK,QAAQ,IACtB,MAAM,GAAG,MAAM,QAAQ;AAAA,EAC7B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,cACpB,SAC2B;AAC3B,QAAM,EAAE,MAAM,cAAc,CAAC,GAAG,iBAAiB,KAAK,IAAI;AAC1D,QAAM,eAAe,KAAK,QAAQ,IAAI;AAGtC,QAAM,KAAK,OAAO;AAClB,KAAG,IAAI,cAAc;AAGrB,QAAM,iBAAiB,MAAM;AAAA,IAC3B,KAAK,KAAK,cAAc,YAAY;AAAA,EACtC;AACA,KAAG,IAAI,cAAc;AAGrB,QAAM,iBAAiB,MAAM;AAAA,IAC3B,KAAK,KAAK,cAAc,YAAY;AAAA,EACtC;AACA,KAAG,IAAI,cAAc;AAGrB,KAAG,IAAI,WAAW;AAElB,QAAM,UAA4B,CAAC;AACnC,QAAM,cAAc,cAAc,cAAc,IAAI,gBAAgB,OAAO;AAC3E,SAAO,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,KAAK,cAAc,EAAE,IAAI,CAAC;AAC5D;AAEA,eAAe,cACb,KACA,MACA,IACA,gBACA,SACe;AACf,MAAI;AACJ,MAAI;AACF,cAAU,MAAM,GAAG,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAAA,EACzD,QAAQ;AAEN;AAAA,EACF;AAEA,aAAW,SAAS,SAAS;AAC3B,UAAM,eAAe,KAAK,KAAK,KAAK,MAAM,IAAI;AAC9C,UAAM,eAAe,KAAK,SAAS,MAAM,YAAY;AAGrD,UAAM,qBAAqB,aAAa,MAAM,KAAK,GAAG,EAAE,KAAK,GAAG;AAGhE,QAAI,MAAM,YAAY,KAAK,MAAM,eAAe,GAAG;AACjD,YAAMC,QAAO,MAAM,SAAS,cAAc,cAAc;AACxD,UAAI,CAACA,MAAM;AAEX,UAAIA,MAAK,YAAY,GAAG;AACtB,YAAI,GAAG,QAAQ,qBAAqB,GAAG,KAAK,GAAG,QAAQ,kBAAkB,GAAG;AAC1E;AAAA,QACF;AACA,cAAM,cAAc,cAAc,MAAM,IAAI,gBAAgB,OAAO;AACnE;AAAA,MACF;AAGA,UAAI,CAACA,MAAK,OAAO,EAAG;AAAA,IACtB;AAEA,QAAI,CAAC,MAAM,OAAO,KAAK,CAAC,MAAM,eAAe,EAAG;AAGhD,QAAI,GAAG,QAAQ,kBAAkB,EAAG;AAGpC,UAAM,WAAW,YAAY,YAAY;AACzC,QAAI,aAAa,KAAM;AAGvB,UAAM,OAAO,MAAM,SAAS,cAAc,cAAc;AACxD,QAAI,CAAC,QAAQ,CAAC,KAAK,OAAO,EAAG;AAE7B,YAAQ,KAAK;AAAA,MACX,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,MAAM,KAAK;AAAA,MACX,cAAc,KAAK;AAAA,IACrB,CAAC;AAAA,EACH;AACF;;;ACvOA,SAAS,kBAAkB;AAC3B,OAAOC,SAAQ;AAyBf,eAAsB,gBAAgB,cAAuC;AAC3E,QAAM,UAAU,MAAMA,IAAG,SAAS,YAAY;AAC9C,SAAO,WAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAC1D;AAKA,eAAsB,eACpB,YACA,IAC4B;AAC5B,QAAM,QAAQ,YAAY,IAAI;AAE9B,QAAM,QAAkB,CAAC;AACzB,QAAM,WAAqB,CAAC;AAC5B,QAAM,YAAsB,CAAC;AAC7B,QAAM,SAAS,oBAAI,IAAoB;AAGvC,QAAM,kBAAkB,IAAI,IAAI,WAAW,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AAG7D,QAAM,QAAQ;AAAA,IACZ,WAAW,IAAI,OAAO,SAAS;AAC7B,YAAM,cAAc,MAAM,gBAAgB,KAAK,YAAY;AAC3D,YAAM,WAAW,GAAG,QAAQ,KAAK,IAAI;AAErC,UAAI,CAAC,UAAU;AACb,cAAM,KAAK,KAAK,IAAI;AACpB,eAAO,IAAI,KAAK,MAAM,WAAW;AAAA,MACnC,WAAW,S
AAS,SAAS,aAAa;AACxC,iBAAS,KAAK,KAAK,IAAI;AACvB,eAAO,IAAI,KAAK,MAAM,WAAW;AAAA,MACnC,OAAO;AACL,kBAAU,KAAK,KAAK,IAAI;AAAA,MAC1B;AAAA,IACF,CAAC;AAAA,EACH;AAGA,QAAM,UAAU,GAAG,gBAAgB;AACnC,QAAM,UAAU,QAAQ,OAAO,CAAC,MAAM,CAAC,gBAAgB,IAAI,CAAC,CAAC;AAG7D,QAAM,KAAK;AACX,WAAS,KAAK;AACd,UAAQ,KAAK;AACb,YAAU,KAAK;AAEf,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,YAAY,IAAI,IAAI;AAAA,EAChC;AACF;;;ACpFA,OAAOC,SAAQ;AACf,OAAOC,WAAU;AACjB,SAAS,qBAAqB;AAC9B,OAAO,YAAY;AAsBnB,IAAM,gBAAwC;AAAA,EAC5C,YAAY;AAAA,EACZ,YAAY;AAAA,EACZ,QAAQ;AACV;AAIA,IAAMC,WAAU,cAAc,YAAY,GAAG;AAC7C,IAAI,cAAc;AAClB,IAAM,gBAAgB,oBAAI,IAA6B;AAEvD,SAAS,gBAAgB,UAA0B;AACjD,MAAI,aAAa,oBAAoB;AACnC,WAAOD,MAAK,KAAKA,MAAK,QAAQC,SAAQ,QAAQ,iBAAiB,CAAC,GAAG,QAAQ;AAAA,EAC7E;AACA,SAAOD,MAAK,KAAKA,MAAK,QAAQC,SAAQ,QAAQ,gCAAgC,CAAC,GAAG,OAAO,QAAQ;AACnG;AAGA,eAAsB,aAA4B;AAChD,MAAI,YAAa;AACjB,QAAM,OAAO,KAAK;AAAA,IAChB,YAAY,CAAC,eAAuB,gBAAgB,UAAU;AAAA,EAChE,CAAC;AACD,gBAAc;AAChB;AAEA,eAAeC,aAAY,UAAmD;AAC5E,QAAM,cAAc,cAAc,QAAQ;AAC1C,MAAI,CAAC,YAAa,QAAO;AAEzB,QAAM,SAAS,cAAc,IAAI,QAAQ;AACzC,MAAI,OAAQ,QAAO;AAEnB,QAAM,WAAW,gBAAgB,WAAW;AAC5C,QAAM,OAAO,MAAM,OAAO,SAAS,KAAK,QAAQ;AAChD,gBAAc,IAAI,UAAU,IAAI;AAChC,SAAO;AACT;AAIA,SAAS,iBACP,MACA,UACoB;AACpB,MAAI,aAAa,UAAU;AAEzB,UAAM,OAAO,KAAK,kBAAkB,MAAM;AAC1C,QAAI,MAAM;AACR,YAAM,YAAY,KAAK,cAAc,CAAC;AACtC,UAAI,WAAW,SAAS,wBAAwB;AAC9C,cAAM,UAAU,UAAU,cAAc,CAAC;AACzC,YAAI,SAAS,SAAS,UAAU;AAE9B,gBAAM,MAAM,QAAQ;AACpB,iBAAO,IAAI,QAAQ,0BAA0B,EAAE,EAAE,KAAK;AAAA,QACxD;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAGA,QAAM,OAAO,qBAAqB,IAAI;AACtC,MAAI,KAAM,QAAO,kBAAkB,KAAK,IAAI;AAC5C,SAAO;AACT;AAEA,SAAS,qBAAqB,MAAmD;AAE/E,MAAI,YAAsC,KAAK;AAG/C,MAAI,KAAK,QAAQ,SAAS,oBAAoB;AAC5C,gBAAY,KAAK,OAAO;AAAA,EAC1B;AAEA,MAAI,WAAW,SAAS,UAAW,QAAO;AAC1C,SAAO;AACT;AAEA,SAAS,kBAAkB,MAAsB;AAC/C,SAAO,KACJ,QAAQ,eAAe,EAAE,EACzB,QAAQ,YAAY,EAAE,EACtB,QAAQ,cAAc,EAAE,EACxB,KAAK;AACV;AAIA,SAAS,cACP,MACA,UACsB;AACtB,QAAM,aACJ,KAAK,kBAAkB,YAAY,KACnC,KAAK,kBAAkB,mBAAmB;AAE5C,MAAI,CAAC,WAAY,QAAO;AAExB,MAAI,aAAa,UAAU;AACzB,WAAO,WAAW,cACf,OAAO,CAAC,MAAM,EAAE,SAAS,SAAS,EAClC,IAAI,CAAC,MAAM,EAAE,IAAI,EACjB,OAAO,CAAC,MAAM,MAAM,UAAU,MAAM,KAAK;AAAA,EAC9C;AAGA,SAAO,WAAW,cACf,OAAO,CAAC,MAAM,EAAE,SAAS,SAAS,EAClC,IAAI,CAAC,MAAM,EAAE,IAAI;AACtB;AAEA,SAAS,kBACP,MACA,UACoB;AACpB,MAAI,aAAa,UAAU;AACzB,UAAMC,WAAU,KAAK,kBAAkB,aAAa;AACpD,WAAOA,UAAS;AAAA,EAClB;AAGA,QAAM,UAAU,KAAK,kBAAkB,aAAa;AACpD,MAAI,SAAS;AAEX,UAAM,OAAO,QAAQ;AACrB,WAAO,KAAK,WAAW,GAAG,IAAI,KAAK,MAAM,CAAC,EAAE,KAAK,IAAI;AAAA,EACvD;AACA,SAAO;AACT;AAIA,SAAS,WAAW,MAAkC;AACpD,SAAO,KAAK,QAAQ,SAAS;AAC/B;AAEA,SAAS,oBAAoB,MAA4C;AAEvE,MAAI,KAAK,QAAQ,SAAS,mBAAoB,QAAO,KAAK;AAC1D,SAAO;AACT;AAEA,SAAS,kBACP,UACA,QACA,UACW;AACX,QAAM,QAAmB,CAAC;AAE1B,WAAS,KAAK,MAAyB,iBAAsC;AAC3E,eAAW,SAAS,KAAK,eAAe;AAEtC,YAAM,QACJ,MAAM,SAAS,qBACV,MAAM,cAAc;AAAA,QACnB,CAAC,MACC,EAAE,SAAS,0BACX,EAAE,SAAS,uBACX,EAAE,SAAS,yBACX,EAAE,SAAS,2BACX,EAAE,SAAS,4BACX,EAAE,SAAS;AAAA,MACf,KAAK,QACL;AAEN,cAAQ,MAAM,MAAM;AAAA,QAClB,KAAK,oBAAoB;AACvB,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM;AAAA,YACN,WAAW,MAAM,cAAc,MAAM;AAAA,YACrC,SAAS,MAAM,YAAY,MAAM;AAAA,YACjC;AAAA,YACA,QAAQ;AAAA,YACR,MAAM,MAAM;AAAA,UACd,CAAC;AACD;AAAA,QACF;AAAA,QAEA,KAAK,wBAAwB;AAC3B,gBAAM,UAAU,oBAAoB,KAAK;AACzC,gBAAM,OAAO,MAAM,kBAAkB,MAAM,GAAG,QAAQ;AACtD,gBAAM,KAAK;AAAA,YACT,MAAM,kBAAkB,WAAW;AAAA,YACnC;AAAA,YACA,WAAW,QAAQ,cAAc,MAAM;AAAA,YACvC,SAAS,QAAQ,YAAY,MAAM;AAAA,YACnC;AAAA,YACA,QAAQ;AAAA,YACR,QAAQ,cAAc,OAAO,QAAQ;AAAA,YACrC,YAAY,kBAAkB,OAAO,QAAQ;AAAA,YAC7C,WAAW,iBAAiB,OAAO,QAAQ;AAAA,YAC3C,SAAS,WAAW,KAAK;AAAA,YACzB,MAAM,QAAQ;AAAA,UAChB,CAAC;AACD;AAAA,QACF;AAAA,QAEA,KAAK;AAAA
,QACL,KAAK,8BAA8B;AACjC,gBAAM,UAAU,oBAAoB,KAAK;AACzC,gBAAM,YAAY,MAAM,kBAAkB,MAAM,GAAG,QAAQ;AAC3D,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM;AAAA,YACN,WAAW,QAAQ,cAAc,MAAM;AAAA,YACvC,SAAS,QAAQ,YAAY,MAAM;AAAA,YACnC;AAAA,YACA,QAAQ;AAAA,YACR,WAAW,iBAAiB,OAAO,QAAQ;AAAA,YAC3C,SAAS,WAAW,KAAK;AAAA,YACzB,MAAM,QAAQ;AAAA,UAChB,CAAC;AAGD,gBAAM,YAAY,MAAM,kBAAkB,MAAM;AAChD,cAAI,WAAW;AACb,uBAAW,UAAU,UAAU,eAAe;AAC5C,kBAAI,OAAO,SAAS,qBAAqB;AACvC,sBAAM,aAAa,OAAO,kBAAkB,MAAM,GAAG,QAAQ;AAC7D,sBAAM,KAAK;AAAA,kBACT,MAAM;AAAA,kBACN,MAAM;AAAA,kBACN,WAAW,OAAO,cAAc,MAAM;AAAA,kBACtC,SAAS,OAAO,YAAY,MAAM;AAAA,kBAClC;AAAA,kBACA,QAAQ;AAAA,kBACR,QAAQ,cAAc,QAAQ,QAAQ;AAAA,kBACtC,YAAY,kBAAkB,QAAQ,QAAQ;AAAA,kBAC9C,WAAW,iBAAiB,QAAQ,QAAQ;AAAA,kBAC5C,SAAS,WAAW,KAAK;AAAA,kBACzB,MAAM,OAAO;AAAA,gBACf,CAAC;AAAA,cACH;AAAA,YACF;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAEA,KAAK,yBAAyB;AAC5B,gBAAM,UAAU,oBAAoB,KAAK;AACzC,gBAAM,OAAO,MAAM,kBAAkB,MAAM,GAAG,QAAQ;AACtD,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN;AAAA,YACA,WAAW,QAAQ,cAAc,MAAM;AAAA,YACvC,SAAS,QAAQ,YAAY,MAAM;AAAA,YACnC;AAAA,YACA,QAAQ;AAAA,YACR,WAAW,iBAAiB,OAAO,QAAQ;AAAA,YAC3C,SAAS,WAAW,KAAK;AAAA,YACzB,MAAM,QAAQ;AAAA,UAChB,CAAC;AACD;AAAA,QACF;AAAA,QAEA,KAAK,0BAA0B;AAC7B,gBAAM,UAAU,oBAAoB,KAAK;AACzC,gBAAM,OAAO,MAAM,kBAAkB,MAAM,GAAG,QAAQ;AACtD,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN;AAAA,YACA,WAAW,QAAQ,cAAc,MAAM;AAAA,YACvC,SAAS,QAAQ,YAAY,MAAM;AAAA,YACnC;AAAA,YACA,QAAQ;AAAA,YACR,WAAW,iBAAiB,OAAO,QAAQ;AAAA,YAC3C,SAAS,WAAW,KAAK;AAAA,YACzB,MAAM,QAAQ;AAAA,UAChB,CAAC;AACD;AAAA,QACF;AAAA,QAEA,KAAK,uBAAuB;AAC1B,gBAAM,UAAU,oBAAoB,KAAK;AAEzC,gBAAM,aAAa,MAAM,cAAc;AAAA,YACrC,CAAC,MAAM,EAAE,SAAS;AAAA,UACpB;AACA,gBAAM,OAAO,YAAY,kBAAkB,MAAM,GAAG,QAAQ;AAC5D,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN;AAAA,YACA,WAAW,QAAQ,cAAc,MAAM;AAAA,YACvC,SAAS,QAAQ,YAAY,MAAM;AAAA,YACnC;AAAA,YACA,QAAQ;AAAA,YACR,WAAW,iBAAiB,OAAO,QAAQ;AAAA,YAC3C,SAAS,WAAW,KAAK;AAAA,YACzB,MAAM,QAAQ;AAAA,UAChB,CAAC;AACD;AAAA,QACF;AAAA,QAEA;AAEE,cAAI,MAAM,SAAS,oBAAoB;AAAA,UAEvC;AACA;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAEA,OAAK,UAAU,IAAI;AACnB,SAAO;AACT;AAEA,SAAS,cACP,UACA,SACA,UACW;AACX,QAAM,QAAmB,CAAC;AAE1B,WAAS,KAAK,MAAyB,iBAAsC;AAC3E,eAAW,SAAS,KAAK,eAAe;AACtC,cAAQ,MAAM,MAAM;AAAA,QAClB,KAAK;AAAA,QACL,KAAK,yBAAyB;AAC5B,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN,MAAM;AAAA,YACN,WAAW,MAAM,cAAc,MAAM;AAAA,YACrC,SAAS,MAAM,YAAY,MAAM;AAAA,YACjC;AAAA,YACA,QAAQ;AAAA,YACR,MAAM,MAAM;AAAA,UACd,CAAC;AACD;AAAA,QACF;AAAA,QAEA,KAAK,uBAAuB;AAC1B,gBAAM,OAAO,MAAM,kBAAkB,MAAM,GAAG,QAAQ;AACtD,gBAAM,KAAK;AAAA,YACT,MAAM,kBAAkB,WAAW;AAAA,YACnC;AAAA,YACA,WAAW,MAAM,cAAc,MAAM;AAAA,YACrC,SAAS,MAAM,YAAY,MAAM;AAAA,YACjC;AAAA,YACA,QAAQ;AAAA,YACR,QAAQ,cAAc,OAAO,QAAQ;AAAA,YACrC,YAAY,kBAAkB,OAAO,QAAQ;AAAA,YAC7C,WAAW,iBAAiB,OAAO,QAAQ;AAAA,YAC3C,MAAM,MAAM;AAAA,UACd,CAAC;AACD;AAAA,QACF;AAAA,QAEA,KAAK,wBAAwB;AAE3B,gBAAM,WAAW,MAAM,cAAc;AAAA,YACnC,CAAC,MACC,EAAE,SAAS,yBAAyB,EAAE,SAAS;AAAA,UACnD;AACA,cAAI,UAAU;AAEZ,kBAAM,OAAO,SAAS,kBAAkB,MAAM,GAAG,QAAQ;AAEzD,gBAAI,SAAS,SAAS,uBAAuB;AAC3C,oBAAM,KAAK;AAAA,gBACT,MAAM,kBAAkB,WAAW;AAAA,gBACnC;AAAA,gBACA,WAAW,MAAM,cAAc,MAAM;AAAA,gBACrC,SAAS,MAAM,YAAY,MAAM;AAAA,gBACjC;AAAA,gBACA,QAAQ;AAAA,gBACR,QAAQ,cAAc,UAAU,QAAQ;AAAA,gBACxC,YAAY,kBAAkB,UAAU,QAAQ;AAAA,gBAChD,WAAW,iBAAiB,UAAU,QAAQ;AAAA,gBAC9C,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH,WAAW,SAAS,SAAS,oBAAoB;AAC/C,oBAAM,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN;AAAA,gBACA,WAAW,MAAM,cAAc,MAAM;AAAA,gBACrC,SAAS,MAAM,YAAY,MAAM;AAAA,gBACjC;AAAA,gBACA,QAAQ;AAAA,gBACR,WAAW,iBAAiB,UAAU,QAAQ;AAAA,gBAC9C,MAAM,MAAM;AAAA,cACd,CAAC;AAGD,oBAAM,OAAO,SAAS,kBAAkB,MAAM;AAC9C,kBAAI,KAAM,MAAK,MAAM,IAAI;AAAA,YAC3B;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAEA,KAAK,oBAAoB;AACvB,
gBAAM,OAAO,MAAM,kBAAkB,MAAM,GAAG,QAAQ;AACtD,gBAAM,KAAK;AAAA,YACT,MAAM;AAAA,YACN;AAAA,YACA,WAAW,MAAM,cAAc,MAAM;AAAA,YACrC,SAAS,MAAM,YAAY,MAAM;AAAA,YACjC;AAAA,YACA,QAAQ;AAAA,YACR,WAAW,iBAAiB,OAAO,QAAQ;AAAA,YAC3C,MAAM,MAAM;AAAA,UACd,CAAC;AAGD,gBAAM,OAAO,MAAM,kBAAkB,MAAM;AAC3C,cAAI,KAAM,MAAK,MAAM,IAAI;AACzB;AAAA,QACF;AAAA,QAEA,KAAK,wBAAwB;AAE3B,gBAAM,aAAa,MAAM,cAAc;AAAA,YACrC,CAAC,MAAM,EAAE,SAAS;AAAA,UACpB;AACA,cAAI,cAAc,oBAAoB,MAAM;AAC1C,kBAAM,OAAO,WAAW,kBAAkB,MAAM;AAChD,gBAAI,MAAM,SAAS,cAAc;AAC/B,oBAAM,KAAK;AAAA,gBACT,MAAM;AAAA,gBACN,MAAM,KAAK;AAAA,gBACX,WAAW,MAAM,cAAc,MAAM;AAAA,gBACrC,SAAS,MAAM,YAAY,MAAM;AAAA,gBACjC;AAAA,gBACA,QAAQ;AAAA,gBACR,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAAA,UACF;AACA;AAAA,QACF;AAAA,QAEA;AACE;AAAA,MACJ;AAAA,IACF;AAAA,EACF;AAEA,OAAK,UAAU,IAAI;AACnB,SAAO;AACT;AAKA,eAAsB,UACpB,UACA,UACoB;AACpB,QAAM,WAAW;AAEjB,QAAM,OAAO,MAAMD,aAAY,QAAQ;AACvC,MAAI,CAAC,KAAM,QAAO,CAAC;AAEnB,MAAI;AACJ,MAAI;AACF,aAAS,MAAMH,IAAG,SAAS,UAAU,OAAO;AAAA,EAC9C,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,SAAS,IAAI,OAAO;AAC1B,SAAO,YAAY,IAAI;AAEvB,QAAM,OAAO,OAAO,MAAM,MAAM;AAChC,MAAI,CAAC,KAAM,QAAO,CAAC;AAEnB,MAAI;AACF,QAAI,aAAa,UAAU;AACzB,aAAO,cAAc,KAAK,UAAU,QAAQ,QAAQ;AAAA,IACtD;AAEA,WAAO,kBAAkB,KAAK,UAAU,QAAQ,QAAQ;AAAA,EAC1D,UAAE;AACA,SAAK,OAAO;AACZ,WAAO,OAAO;AAAA,EAChB;AACF;;;AC5fA,SAAS,cAAAK,mBAAkB;AA4B3B,IAAM,qBAAqB;AAC3B,IAAM,kBAAkB;AACxB,IAAM,mBAAmB;AAKlB,SAAS,eAAe,MAAsB;AACnD,QAAM,YAAY,KAAK,MAAM,KAAK,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;AAChE,SAAO,KAAK,KAAK,YAAY,gBAAgB;AAC/C;AAIA,SAAS,YAAY,UAAkB,WAAmB,SAAyB;AACjF,QAAM,QAAQ,GAAG,QAAQ,IAAI,SAAS,IAAI,OAAO;AACjD,SAAOA,YAAW,QAAQ,EAAE,OAAO,KAAK,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,EAAE;AACrE;AAEA,SAAS,gBAAgB,MAAsB;AAC7C,SAAOA,YAAW,QAAQ,EAAE,OAAO,IAAI,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,EAAE;AACpE;AAUA,SAAS,eAAe,MAAe,WAA+B;AACpE,QAAM,QAAQ,KAAK,KAAK,MAAM,IAAI;AAClC,QAAM,SAAqB,CAAC;AAC5B,MAAI,eAAyB,CAAC;AAC9B,MAAI,qBAAqB;AAEzB,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,iBAAa,KAAK,MAAM,CAAC,CAAC;AAC1B,UAAM,cAAc,aAAa,KAAK,IAAI;AAC1C,UAAM,SAAS,eAAe,WAAW;AAEzC,QAAI,UAAU,aAAa,aAAa,SAAS,GAAG;AAElD,mBAAa,IAAI;AACjB,aAAO,KAAK;AAAA,QACV,WAAW,KAAK,YAAY;AAAA,QAC5B,SAAS,KAAK,YAAY,qBAAqB,aAAa,SAAS;AAAA,QACrE,MAAM,aAAa,KAAK,IAAI;AAAA,MAC9B,CAAC;AAED,2BAAqB;AACrB,qBAAe,CAAC,MAAM,CAAC,CAAC;AAAA,IAC1B;AAAA,EACF;AAGA,MAAI,aAAa,SAAS,GAAG;AAC3B,WAAO,KAAK;AAAA,MACV,WAAW,KAAK,YAAY;AAAA,MAC5B,SAAS,KAAK,YAAY,qBAAqB,aAAa,SAAS;AAAA,MACrE,MAAM,aAAa,KAAK,IAAI;AAAA,IAC9B,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAIA,SAAS,aAAa,SAAoC;AACxD,MAAI,QAAQ,WAAW,EAAG,QAAO;AAEjC,QAAM,SAAS,CAAC,GAAG,OAAO,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS;AACpE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM;AAAA,IACN,WAAW,OAAO,CAAC,EAAE;AAAA,IACrB,SAAS,OAAO,OAAO,SAAS,CAAC,EAAE;AAAA,IACnC,UAAU,OAAO,CAAC,EAAE;AAAA,IACpB,QAAQ;AAAA,IACR,MAAM,OAAO,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE,KAAK,IAAI;AAAA,EAC3C;AACF;AAKA,IAAM,oBAAoB,oBAAI,IAAmB;AAAA,EAC/C;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAED,SAAS,SAAS,GAAU,GAAmB;AAE7C,MAAI,kBAAkB,IAAI,EAAE,IAAI,KAAK,kBAAkB,IAAI,EAAE,IAAI,EAAG,QAAO;AAE3E,MAAI,EAAE,SAAS,EAAE,KAAM,QAAO;AAC9B,SAAO;AACT;AAEA,SAAS,iBAAiB,QAAiB,WAA4B;AACrE,MAAI,OAAO,UAAU,EAAG,QAAO;AAE/B,QAAM,SAAkB,CAAC;AACzB,MAAI,cAA4B;AAEhC,aAAW,SAAS,QAAQ;AAC1B,UAAM,cAAc,eAAe,MAAM,IAAI;AAE7C,QAAI,gBAAgB,MAAM;AACxB,UAAI,cAAc,mBAAmB,CAAC,kBAAkB,IAAI,MAAM,IAAI,GAAG;AACvE,sBAAc,EAAE,GAAG,MAAM;AAAA,MAC3B,OAAO;AACL,eAAO,KAAK,KAAK;AAAA,MACnB;AACA;AAAA,IACF;AAEA,UAAM,YAAY,eAAe,YAAY,IAAI;AACjD,UAAM,iBAAiB,YAAY;AAGnC,QACE,cAAc,mBACd,kBAAkB,aAClB,SAAS,aAAa,KAAK,GAC3B;AACA,YAAM,eAAe,YAAY,OAAO,OAAO,MAAM;AACrD,oBAAc;AAAA,QACZ,GAAG;AAAA,QACH,SAAS,MAAM;AAAA,QACf
,MAAM;AAAA,QACN,MAAM,YAAY,QAAQ,MAAM;AAAA,QAChC,IAAI,YAAY,YAAY,UAAU,YAAY,WAAW,MAAM,OAAO;AAAA,QAC1E,MAAM,gBAAgB,YAAY;AAAA,MACpC;AAAA,IACF,OAAO;AAEL,aAAO,KAAK,WAAW;AACvB,oBACE,cAAc,mBAAmB,CAAC,kBAAkB,IAAI,MAAM,IAAI,IAC9D,EAAE,GAAG,MAAM,IACX;AACN,UAAI,gBAAgB,MAAM;AACxB,eAAO,KAAK,KAAK;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,aAAa;AACf,WAAO,KAAK,WAAW;AAAA,EACzB;AAEA,SAAO;AACT;AAIA,SAAS,mBAAmB,OAA4B;AACtD,SAAO,MAAM,OAAO,CAAC,MAAM,EAAE,SAAS,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI;AACnE;AAKO,SAAS,UACd,OACA,UACA,SACS;AACT,MAAI,MAAM,WAAW,EAAG,QAAO,CAAC;AAEhC,QAAM,YAAY,SAAS,aAAa;AACxC,QAAM,WAAW,MAAM,CAAC,EAAE;AAC1B,QAAM,cAAc,mBAAmB,KAAK;AAG5C,QAAM,SAAS,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS;AAGlE,QAAM,cAAc,OAAO,OAAO,CAAC,MAAM,EAAE,SAAS,QAAQ;AAC5D,QAAM,iBAAiB,OAAO,OAAO,CAAC,MAAM,EAAE,SAAS,QAAQ;AAG/D,QAAM,qBAAqB,oBAAI,IAAY;AAC3C,aAAW,QAAQ,gBAAgB;AACjC,QAAI,KAAK,SAAS,YAAY,KAAK,QAAQ;AACzC,yBAAmB,IAAI,KAAK,MAAM;AAAA,IACpC;AAAA,EACF;AAEA,QAAM,YAAqB,CAAC;AAG5B,QAAM,gBAAgB,aAAa,WAAW;AAC9C,MAAI,eAAe;AACjB,cAAU,KAAK;AAAA,MACb,IAAI,YAAY,UAAU,cAAc,WAAW,cAAc,OAAO;AAAA,MACxE;AAAA,MACA,WAAW,cAAc;AAAA,MACzB,SAAS,cAAc;AAAA,MACvB;AAAA,MACA,MAAM;AAAA,MACN,MAAM;AAAA,MACN,QAAQ;AAAA,MACR,MAAM,cAAc;AAAA,MACpB,SAAS,CAAC;AAAA,MACV,SAAS;AAAA,MACT,MAAM,gBAAgB,cAAc,IAAI;AAAA,IAC1C,CAAC;AAAA,EACH;AAGA,aAAW,QAAQ,gBAAgB;AAEjC,QAAI,KAAK,SAAS,WAAW,KAAK,QAAQ,mBAAmB,IAAI,KAAK,IAAI,GAAG;AAC3E;AAAA,IACF;AAEA,UAAM,aAAa,eAAe,KAAK,IAAI;AAC3C,UAAM,cAAc,KAAK,WAAW;AAEpC,QAAI,cAAc,WAAW;AAE3B,gBAAU,KAAK;AAAA,QACb,IAAI,YAAY,UAAU,KAAK,WAAW,KAAK,OAAO;AAAA,QACtD;AAAA,QACA,WAAW,KAAK;AAAA,QAChB,SAAS,KAAK;AAAA,QACd;AAAA,QACA,MAAM,KAAK,SAAS,WAAW,aAAa,KAAK;AAAA,QACjD,MAAM,KAAK;AAAA,QACX,QAAQ,KAAK;AAAA,QACb,MAAM,KAAK;AAAA,QACX,SAAS,KAAK,SAAS,WAAW,cAAc,CAAC;AAAA,QACjD,SAAS;AAAA,QACT,MAAM,gBAAgB,KAAK,IAAI;AAAA,MACjC,CAAC;AAAA,IACH,OAAO;AAEL,YAAM,YAAY,eAAe,MAAM,SAAS;AAChD,iBAAW,OAAO,WAAW;AAC3B,kBAAU,KAAK;AAAA,UACb,IAAI,YAAY,UAAU,IAAI,WAAW,IAAI,OAAO;AAAA,UACpD;AAAA,UACA,WAAW,IAAI;AAAA,UACf,SAAS,IAAI;AAAA,UACb;AAAA,UACA,MAAM,KAAK,SAAS,WAAW,aAAa,KAAK;AAAA,UACjD,MAAM,KAAK;AAAA,UACX,QAAQ,KAAK;AAAA,UACb,MAAM,IAAI;AAAA,UACV,SAAS;AAAA,UACT,SAAS;AAAA,UACT,MAAM,gBAAgB,IAAI,IAAI;AAAA,QAChC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAGA,YAAU,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS;AAGlD,SAAO,iBAAiB,WAAW,SAAS;AAC9C;;;AClRO,SAAS,gBAAgB,KAAiC;AAC/D,MAAI,QAAQ;AACZ,aAAW,KAAK,IAAK,UAAS,IAAI;AAClC,QAAM,OAAO,KAAK,KAAK,KAAK;AAC5B,MAAI,SAAS,EAAG,QAAO;AACvB,SAAO,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI;AAChC;AAkBO,SAAS,iBACd,UACA,QACA,MACQ;AACR,QAAM,QAAQ,CAAC,QAAQ;AACvB,MAAI,OAAQ,OAAM,KAAK,MAAM;AAC7B,QAAM,KAAK,IAAI;AACf,SAAO,MAAM,KAAK,IAAI;AACxB;AAmCA,IAAM,iBAAiB;AACvB,IAAM,mBAAmB;AACzB,IAAM,mBAAmB;AAOzB,IAAI,mBAAqD;AAEzD,eAAe,mBAAuD;AACpE,MAAI,iBAAkB,QAAO;AAE7B,QAAM,EAAE,UAAU,IAAI,IAAI,MAAM,OAAO,2BAA2B;AAClE,MAAI,WAAW,YAAY;AAE3B,qBAAoB,MAAM,SAAS,sBAAsB,gBAAgB;AAAA,IACvE,OAAO;AAAA,EACT,CAAC;AAED,SAAO;AACT;AAEA,SAAS,cAAsB;AAC7B,QAAM,OACJ,QAAQ,IAAI,MAAM,KAAK,QAAQ,IAAI,aAAa,KAAK;AACvD,SAAO,GAAG,IAAI;AAChB;AAGA,eAAsB,sBAAyC;AAC7D,QAAM,OAAO,MAAM,iBAAiB;AAEpC,SAAO;AAAA,IACL,MAAM;AAAA,IACN,YAAY;AAAA,IAEZ,MAAM,MACJ,OACA,YACyB;AACzB,YAAM,UAA0B,CAAC;AAEjC,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,kBAAkB;AACvD,cAAM,QAAQ,MAAM,MAAM,GAAG,IAAI,gBAAgB;AACjD,cAAM,SAAS,MAAM,KAAK,OAAO;AAAA,UAC/B,SAAS;AAAA,UACT,WAAW;AAAA,QACb,CAAC;AAGD,iBAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,gBAAM,SAAS,IAAI;AACnB,gBAAM,MAAM,IAAI;AAAA,YACd,OAAO,KAAK;AAAA,YACZ,OAAO,KAAK,aAAa,SAAS;AAAA,YAClC;AAAA,UACF;AACA,kBAAQ,KAAK,gBAAgB,GAAG,CAAC;AAAA,QACnC;AAEA,qBAAa,KAAK,IAAI,IAAI,MAAM,QAAQ,MAAM,MAAM,GAAG,MAAM,MAAM;AAAA,MACrE;AAEA,aAAO;AA
AA,IACT;AAAA,IAEA,MAAM,YAAY,MAAqC;AACrD,YAAM,SAAS,MAAM,KAAK,MAAM;AAAA,QAC9B,SAAS;AAAA,QACT,WAAW;AAAA,MACb,CAAC;AAED,YAAM,MAAM,IAAI;AAAA,QACd,OAAO,KAAK;AAAA,QACZ,OAAO,KAAK;AAAA,QACZ;AAAA,MACF;AACA,aAAO,gBAAgB,GAAG;AAAA,IAC5B;AAAA,EACF;AACF;;;AC1KO,IAAM,YAAY;AAAA,EACvB,iBAAiB;AAAA,EACjB,cAAc;AAAA,EACd,cAAc;AAAA,EACd,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,eAAe;AAAA,EACf,gBAAgB;AAAA,EAChB,cAAc;AAAA,EACd,iBAAiB;AAAA,EACjB,gBAAgB;AAAA,EAChB,YAAY;AACd;AAQO,IAAM,eAAN,cAA2B,MAAM;AAAA,EAC7B;AAAA,EAET,YAAY,SAAiB,MAAsB,OAAe;AAChE,UAAM,SAAS,EAAE,MAAM,CAAC;AACxB,SAAK,OAAO;AACZ,SAAK,OAAO;AAAA,EACd;AACF;AAKO,IAAM,aAAN,cAAyB,aAAa;AAAA,EAC3C,YAAY,SAAiB,MAAsB,OAAe;AAChE,UAAM,SAAS,MAAM,KAAK;AAC1B,SAAK,OAAO;AAAA,EACd;AACF;AAGO,IAAM,cAAN,cAA0B,aAAa;AAAA,EAC5C,YAAY,SAAiB,MAAsB,OAAe;AAChE,UAAM,SAAS,MAAM,KAAK;AAC1B,SAAK,OAAO;AAAA,EACd;AACF;AAGO,IAAM,cAAN,cAA0B,aAAa;AAAA,EAC5C,YAAY,SAAiB,MAAsB,OAAe;AAChE,UAAM,SAAS,MAAM,KAAK;AAC1B,SAAK,OAAO;AAAA,EACd;AACF;;;ACrDO,SAAS,mBACd,KACA,QACA,SACQ;AACR,MAAI,eAAe,cAAc;AAC/B,WAAO,MAAM,GAAG,IAAI,OAAO,KAAK,IAAI,IAAI,GAAG;AAC3C,QAAI,WAAW,IAAI,OAAO;AACxB,aAAO,MAAM,UAAU,OAAO,IAAI,KAAK,CAAC;AAAA,IAC1C;AACA,WAAO;AAAA,EACT;AAEA,MAAI,eAAe,OAAO;AACxB,WAAO,MAAM,qBAAqB,IAAI,OAAO,EAAE;AAC/C,QAAI,WAAW,IAAI,OAAO;AACxB,aAAO,MAAM,IAAI,KAAK;AAAA,IACxB;AAAA,EACF,OAAO;AACL,WAAO,MAAM,qBAAqB,OAAO,GAAG,CAAC,EAAE;AAAA,EACjD;AAEA,SAAO;AACT;;;ACxBO,IAAM,WAAW;AAAA,EACtB,OAAO;AAAA,EACP,MAAM;AAAA,EACN,MAAM;AAAA,EACN,OAAO;AAAA,EACP,QAAQ;AACV;AAwBA,SAAS,aAAa,SAAwC;AAC5D,MAAI,SAAS,UAAU,OAAW,QAAO,QAAQ;AACjD,MAAI,QAAQ,IAAI,WAAW,MAAM,IAAK,QAAO,SAAS;AACtD,SAAO,SAAS;AAClB;AAEA,SAAS,WAAW,MAAyB;AAC3C,SAAO,KAAK,IAAI,CAAC,MAAO,OAAO,MAAM,WAAW,IAAI,OAAO,CAAC,CAAE,EAAE,KAAK,GAAG;AAC1E;AAEA,SAAS,MAAM,OAAe,KAAa,MAAuB;AAChE,QAAM,QAAQ,KAAK,SAAS,IAAI,IAAI,WAAW,IAAI,CAAC,KAAK;AACzD,UAAQ,OAAO,MAAM,IAAI,KAAK,KAAK,GAAG,GAAG,KAAK;AAAA,CAAI;AACpD;AAGO,SAAS,aAAa,SAAiC;AAC5D,QAAM,WAAW,aAAa,OAAO;AAErC,SAAO;AAAA,IACL,MAAM,QAAgB,MAAuB;AAC3C,UAAI,YAAY,SAAS,MAAO,OAAM,SAAS,KAAK,IAAI;AAAA,IAC1D;AAAA,IACA,KAAK,QAAgB,MAAuB;AAC1C,UAAI,YAAY,SAAS,KAAM,OAAM,QAAQ,KAAK,IAAI;AAAA,IACxD;AAAA,IACA,KAAK,QAAgB,MAAuB;AAC1C,UAAI,YAAY,SAAS,KAAM,OAAM,QAAQ,KAAK,IAAI;AAAA,IACxD;AAAA,IACA,MAAM,QAAgB,MAAuB;AAC3C,UAAI,YAAY,SAAS,MAAO,OAAM,SAAS,KAAK,IAAI;AAAA,IAC1D;AAAA,EACF;AACF;;;AClEA,OAAOC,WAAU;AACjB,OAAOC,SAAQ;AACf,OAAO,mBAAmB;AAC1B,YAAY,eAAe;;;ACHpB,IAAM,iBAAiB;AAEvB,IAAM,aAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA0CnB,IAAM,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAQhB,IAAM,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmBzB,IAAM,mBAAmB,CAAC,eAC/B;AAAA,sBACoB,UAAU;AAAA;;;AC9DhC,SAAS,YAAY,KAA2B;AAC9C,SAAO,OAAO,KAAK,IAAI,QAAQ,IAAI,YAAY,IAAI,UAAU;AAC/D;AAIO,SAAS,aACd,IACA,SACA,QACM;AAEN,KAAG;AAAA,IACD,uDAAuD,OAAO;AAAA,EAChE,EAAE,IAAI,YAAY,MAAM,CAAC;AAC3B;AAEO,SAAS,wBACd,IACA,UACM;AACN,MAAI,SAAS,WAAW,EAAG;AAC3B,QAAM,eAAe,SAAS,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AACrD,KAAG;AAAA,IACD,6CAA6C,YAAY;AAAA,EAC3D,EAAE,IAAI,GAAG,QAAQ;AACnB;AAEO,SAAS,eAAe,IAAoC;AACjE,QAAM,MAAM,GAAG,QAAQ,6CAA6C,EAAE,IAAI;AAG1E,SAAO,IAAI;AACb;AAEO,SAAS,cACd,IACA,OACA,OACgB;AAChB,QAAM,OAAO,GACV;AAAA,IACC;AAAA;AAAA;AAAA,mBAGa,KAAK;AAAA;AAAA,EAEpB,EACC,IAAI,YAAY,KAAK,CAAC;AAEzB,SAAO,KAAK,IAAI,CAAC,OAAO;AAAA,IACtB,SAAS,EAAE;AAAA,IACX,UAAU,EAAE;AAAA,EACd,EAAE;AACJ;;;AF8EA,IAAM,qBAAqB;AAKpB,SAAS,eACd,QACA,aAAqB,oBACJ;AAEjB,QAAM,MAAMC,MAAK,QAAQ,MAAM;AAC/B,MAAI,CAACC,IAAG,WAAW,GAAG,GAAG;AACvB,IAAAA,IAAG,UAAU,
KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,EACvC;AAEA,QAAM,KAAK,IAAI,cAAc,MAAM;AAGnC,KAAG,OAAO,oBAAoB;AAC9B,KAAG,OAAO,mBAAmB;AAG7B,EAAU,eAAK,EAAE;AAGjB,mBAAiB,IAAI,UAAU;AAI/B,QAAM,iBAAiB,GAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAQjC;AAED,QAAM,cAAc,GAAG;AAAA,IACrB;AAAA,EACF;AAEA,QAAM,iBAAiB,GAAG,QAAQ,kCAAkC;AAEpE,QAAM,kBAAkB,GAAG,QAAQ;AAAA;AAAA;AAAA,GAGlC;AAED,QAAM,sBAAsB,GAAG;AAAA,IAC7B;AAAA,EACF;AAEA,QAAM,wBAAwB,GAAG;AAAA,IAC/B;AAAA,EACF;AAEA,QAAM,yBAAyB,GAAG;AAAA,IAChC;AAAA,EACF;AAEA,QAAM,gBAAgB,GAAG;AAAA,IACvB;AAAA,EACF;AAEA,QAAM,kBAAkB,GAAG;AAAA,IACzB;AAAA,EACF;AAEA,QAAM,gBAAgB,GAAG;AAAA,IACvB;AAAA,EACF;AAEA,QAAM,cAAc,GAAG;AAAA,IACrB;AAAA,EACF;AAEA,QAAM,qBAAqB,GAAG;AAAA,IAC5B;AAAA,EACF;AAEA,QAAM,gBAAgB,GAAG,QAAQ,qCAAqC;AACtE,QAAM,iBAAiB,GAAG,QAAQ,sCAAsC;AACxE,QAAM,wBAAwB,GAAG;AAAA,IAC/B;AAAA,EACF;AACA,QAAM,kBAAkB,GAAG;AAAA,IACzB;AAAA,EACF;AAIA,SAAO;AAAA,IACL,WAAW,MAAyB;AAClC,YAAM,SAAS,eAAe,IAAI;AAAA,QAChC,MAAM,KAAK;AAAA,QACX,UAAU,KAAK;AAAA,QACf,MAAM,KAAK;AAAA,QACX,aAAa,KAAK,IAAI;AAAA,QACtB,MAAM,KAAK;AAAA,MACb,CAAC;AACD,UAAI,OAAO,UAAU,KAAK,OAAO,iBAAiB;AAChD,eAAO,OAAO,OAAO,eAAe;AAAA,MACtC;AAEA,YAAM,WAAW,YAAY,IAAI,KAAK,IAAI;AAC1C,aAAO,UAAU,MAAM;AAAA,IACzB;AAAA,IAEA,QAAQ,UAAqC;AAC3C,YAAM,MAAM,YAAY,IAAI,QAAQ;AACpC,aAAO,OAAO;AAAA,IAChB;AAAA,IAEA,eAAe,QAAsD;AACnE,YAAM,SAAS,oBAAI,IAAwB;AAC3C,YAAM,WAAW,gBAAgB,IAAI;AACrC,iBAAW,QAAQ,UAAU;AAC3B,cAAM,eAAe,OAAO,IAAI,KAAK,IAAI;AACzC,YAAI,iBAAiB,UAAa,iBAAiB,KAAK,MAAM;AAC5D,iBAAO,IAAI,KAAK,MAAM,IAAI;AAAA,QAC5B;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,IAEA,kBAA4B;AAC1B,YAAM,OAAO,gBAAgB,IAAI;AACjC,aAAO,KAAK,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,IAC/B;AAAA,IAEA,eAAuB;AACrB,aAAQ,cAAc,IAAI,EAAwB;AAAA,IACpD;AAAA,IAEA,gBAAwB;AACtB,aAAQ,eAAe,IAAI,EAAwB;AAAA,IACrD;AAAA,IAEA,iBAAyB;AACvB,aAAO,eAAe,EAAE;AAAA,IAC1B;AAAA,IAEA,uBAA4C;AAC1C,YAAM,OAAO,sBAAsB,IAAI;AACvC,YAAM,MAAM,oBAAI,IAAoB;AACpC,iBAAW,OAAO,MAAM;AACtB,YAAI,IAAI,IAAI,UAAU,IAAI,KAAK;AAAA,MACjC;AACA,aAAO;AAAA,IACT;AAAA,IAEA,iBAAgC;AAC9B,YAAM,MAAM,gBAAgB,IAAI;AAChC,aAAO,IAAI;AAAA,IACb;AAAA,IAEA,WAAW,UAAwB;AAEjC,YAAM,OAAO,YAAY,IAAI,QAAQ;AACrC,UAAI,MAAM;AACR,cAAM,YAAY,sBAAsB,IAAI,KAAK,EAAE;AACnD,cAAM,WAAW,UAAU,IAAI,CAAC,MAAM,EAAE,EAAE;AAC1C,YAAI,SAAS,SAAS,GAAG;AACvB,kCAAwB,IAAI,QAAQ;AAAA,QACtC;AAAA,MACF;AAEA,qBAAe,IAAI,QAAQ;AAAA,IAC7B;AAAA,IAEA,aAAa,QAAgB,QAAgC;AAC3D,YAAM,MAAgB,CAAC;AACvB,iBAAW,SAAS,QAAQ;AAC1B,cAAM,SAAS,gBAAgB,IAAI;AAAA,UACjC;AAAA,UACA,WAAW,MAAM;AAAA,UACjB,SAAS,MAAM;AAAA,UACf,MAAM,MAAM;AAAA,UACZ,MAAM,MAAM;AAAA,UACZ,QAAQ,MAAM;AAAA,UACd,MAAM,MAAM;AAAA,UACZ,SAAS,KAAK,UAAU,MAAM,OAAO;AAAA,UACrC,SAAS,MAAM,UAAU,IAAI;AAAA,UAC7B,MAAM,MAAM;AAAA,QACd,CAAC;AACD,YAAI,KAAK,OAAO,OAAO,eAAe,CAAC;AAAA,MACzC;AACA,aAAO;AAAA,IACT;AAAA,IAEA,gBAAgB,QAA+B;AAC7C,YAAM,OAAO,oBAAoB,IAAI,MAAM;AAc3C,aAAO,KAAK,IAAI,CAAC,OAAO;AAAA,QACtB,GAAG;AAAA,QACH,SAAS,KAAK,MAAM,EAAE,OAAO;AAAA,QAC7B,SAAS,EAAE,YAAY;AAAA,MACzB,EAAE;AAAA,IACJ;AAAA,IAEA,eAAe,KAAgC;AAC7C,UAAI,IAAI,WAAW,EAAG,QAAO,CAAC;AAC9B,YAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,GAAG;AAChD,YAAM,OAAO,GACV;AAAA,QACC;AAAA;AAAA;AAAA;AAAA;AAAA,4BAKkB,YAAY;AAAA,MAChC,EACC,IAAI,GAAG,GAAG;AACb,aAAO;AAAA,IACT;AAAA,IAEA,aAAa,SAA6B,OAAgC;AACxE,YAAM,aAAuB,CAAC;AAC9B,YAAM,SAAoB,CAAC;AAE3B,UAAI,QAAQ,MAAM;AAChB,gBAAQ,QAAQ,YAAY,YAAY;AAAA,UACtC,KAAK;AACH,uBAAW,KAAK,YAAY;AAC5B,mBAAO,KAAK,QAAQ,IAAI;AACxB;AAAA,UACF,KAAK;AACH,uBAAW,KAAK,sBAAsB;AACtC,mBAAO,KAAK,QAAQ,IAAI;AACxB;AAAA,UACF,KAAK;AACH,uBAAW,KAAK,6BAA6B;AAC7C,mBAAO,KAAK,QAAQ,IAAI;AACxB;AAAA,QACJ;AAAA,MACF;AAEA,UAAI,QAAQ,MAAM;AAChB,mBAAW,KAAK,YAAY;AAC5B,eAAO,KAAK,QAAQ,IAAI;AAAA,MAC1B;AAEA,UAAI,QAAQ,QAAQ;AAClB,mBAAW,KAAK,cAAc;AAC9B,eAAO,KAAK,QAAQ,MAAM;A
AAA,MAC5B;AAEA,UAAI,QAAQ,UAAU;AACpB,mBAAW,KAAK,gBAAgB;AAChC,eAAO,KAAK,QAAQ,QAAQ;AAAA,MAC9B;AAEA,YAAM,QAAQ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAE5E,YAAM,MAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAMR,KAAK;AAAA;AAAA;AAAA;AAKT,aAAO,KAAK,KAAK;AACjB,aAAO,GAAG,QAAQ,GAAG,EAAE,IAAI,GAAG,MAAM;AAAA,IACtC;AAAA,IAEA,mBAAmB,QAAsB;AACvC,YAAM,YAAY,sBAAsB,IAAI,MAAM;AAClD,YAAM,WAAW,UAAU,IAAI,CAAC,MAAM,EAAE,EAAE;AAC1C,UAAI,SAAS,SAAS,GAAG;AACvB,gCAAwB,IAAI,QAAQ;AAAA,MACtC;AACA,6BAAuB,IAAI,MAAM;AAAA,IACnC;AAAA,IAEA,iBAAiB,eAAuB,eAAuB,MAAoB;AACjF,oBAAc,IAAI,eAAe,eAAe,IAAI;AAAA,IACtD;AAAA,IAEA,gBAAgB,SAA4D;AAC1E,aAAO,YAAY,IAAI,OAAO;AAAA,IAChC;AAAA,IAEA,uBAAuB,SAA4D;AACjF,aAAO,mBAAmB,IAAI,OAAO;AAAA,IACvC;AAAA,IAEA,aAAa,SAAiB,QAA4B;AACxD,mBAAU,IAAI,SAAS,MAAM;AAAA,IAC/B;AAAA,IAEA,cAAc,OAAqB,OAA+B;AAChE,aAAO,cAAU,IAAI,OAAO,KAAK;AAAA,IACnC;AAAA,IAEA,UAAU,OAAe,OAA4B;AACnD,YAAM,OAAO,cAAc,IAAI,OAAO,KAAK;AAK3C,aAAO;AAAA,IACT;AAAA,IAEA,YAAe,IAAgB;AAC7B,aAAO,GAAG,YAAY,EAAE,EAAE;AAAA,IAC5B;AAAA,IAEA,SAAe;AACb,SAAG,KAAK,QAAQ;AAAA,IAClB;AAAA,IAEA,QAAc;AACZ,SAAG,MAAM;AAAA,IACX;AAAA,IAEA,mBAA2B;AACzB,YAAM,MAAM,GACT,QAAQ,qDAAqD,EAC7D,IAAI;AACP,aAAO,MAAM,SAAS,IAAI,OAAO,EAAE,IAAI;AAAA,IACzC;AAAA,IAEA,OAAO,KAAqB;AAC1B,YAAM,SAAS,GAAG,OAAO,GAAG;AAC5B,UAAI,MAAM,QAAQ,MAAM,KAAK,OAAO,SAAS,GAAG;AAC9C,eAAO,OAAO,OAAO,OAAO,CAAC,CAAC,EAAE,CAAC;AAAA,MACnC;AACA,aAAO,OAAO,MAAM;AAAA,IACtB;AAAA,EACF;AACF;AAIA,SAAS,iBACP,IACA,YACM;AACN,QAAM,iBAAiB,eAAe,EAAE;AAExC,MAAI,kBAAkB,eAAU;AAEhC,KAAG,KAAK,UAAU;AAClB,KAAG,KAAK,iBAAiB,UAAU,CAAC;AACpC,KAAG,KAAK,OAAO;AACf,KAAG,KAAK,gBAAgB;AAExB,KAAG;AAAA,IACD;AAAA,EACF,EAAE,IAAI,OAAO,cAAQ,CAAC;AACxB;AAEA,SAAS,eAAe,IAAoC;AAC1D,MAAI;AACF,UAAM,MAAM,GACT,QAAQ,qDAAqD,EAC7D,IAAI;AACP,WAAO,MAAM,SAAS,IAAI,OAAO,EAAE,IAAI;AAAA,EACzC,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;;;ATteA,IAAM,UAAU;AAChB,IAAM,cAAc;AACpB,IAAM,kBAAkB;AACxB,IAAM,kBAAkB;AAIxB,SAAS,gBAAgB,aAA2B;AAClD,QAAM,gBAAgBC,MAAK,KAAK,aAAa,YAAY;AAEzD,MAAIC,IAAG,WAAW,aAAa,GAAG;AAChC,UAAM,UAAUA,IAAG,aAAa,eAAe,OAAO;AACtD,QAAI,QAAQ,SAAS,eAAe,EAAG;AACvC,UAAM,SAAS,QAAQ,SAAS,IAAI,IAAI,KAAK;AAC7C,IAAAA,IAAG,cAAc,eAAe,GAAG,OAAO,GAAG,MAAM,GAAG,eAAe;AAAA,CAAI;AAAA,EAC3E,OAAO;AACL,IAAAA,IAAG,cAAc,eAAe,GAAG,eAAe;AAAA,CAAI;AAAA,EACxD;AACF;AAIA,SAAS,aAAa,QAAsB;AAC1C,QAAMC,cAAaF,MAAK,KAAK,QAAQ,eAAe;AACpD,MAAIC,IAAG,WAAWC,WAAU,EAAG;AAE/B,QAAM,SAAS;AAAA,IACb,SAAS;AAAA,IACT,YAAY;AAAA,IACZ,OAAO;AAAA,EACT;AACA,EAAAD,IAAG,cAAcC,aAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI,IAAI;AACrE;AAIA,SAAS,eAAe,IAAoB;AAC1C,MAAI,KAAK,IAAM,QAAO,GAAG,KAAK,MAAM,EAAE,CAAC;AACvC,SAAO,IAAI,KAAK,KAAM,QAAQ,CAAC,CAAC;AAClC;AAEA,SAAS,YAAY,OAAuB;AAC1C,MAAI,QAAQ,KAAM,QAAO,GAAG,KAAK;AACjC,MAAI,QAAQ,OAAO,KAAM,QAAO,IAAI,QAAQ,MAAM,QAAQ,CAAC,CAAC;AAC5D,SAAO,IAAI,SAAS,OAAO,OAAO,QAAQ,CAAC,CAAC;AAC9C;AAEA,SAAS,sBAAsB,QAAqC;AAClE,QAAM,UAAU,CAAC,GAAG,OAAO,QAAQ,CAAC,EACjC,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B,IAAI,CAAC,CAAC,MAAM,KAAK,MAAM,GAAG,IAAI,KAAK,KAAK,EAAE;AAC7C,SAAO,QAAQ,KAAK,IAAI;AAC1B;AAKA,eAAsB,QACpB,aACA,UAAuB,CAAC,GACH;AACrB,QAAM,MAAM,QAAQ,OAAO,QAAQ;AACnC,QAAM,eAAeF,MAAK,QAAQ,WAAW;AAC7C,QAAM,QAAQ,YAAY,IAAI;AAE9B,MAAI,YAAY,YAAY,KAAK;AAGjC,QAAM,SAASA,MAAK,KAAK,cAAc,OAAO;AAC9C,MAAI,CAACC,IAAG,WAAW,MAAM,EAAG,CAAAA,IAAG,UAAU,QAAQ,EAAE,WAAW,KAAK,CAAC;AAEpE,kBAAgB,YAAY;AAC5B,eAAa,MAAM;AAGnB,QAAM,SAASD,MAAK,KAAK,QAAQ,WAAW;AAC5C,QAAM,KAAK,eAAe,MAAM;AAEhC,MAAI;AAEF,UAAM,aAAa,MAAM,cAAc;AAAA,MACrC,MAAM;AAAA,MACN,aAAa,CAAC,OAAO;AAAA,IACvB,CAAC;AAED,UAAM,iBAAiB,oBAAI,IAAoB;AAC/C,eAAW,QAAQ,YAAY;AAC7B,qBAAe;AAAA,QACb,KAAK;AAAA,SACJ,eAAe,IAAI,KAAK,QAAQ,KAAK,KAAK;AAAA,MAC7C;AAAA,IACF;AAEA;AAAA,MACE,gBAAgB,WAAW,MAAM,YAC9B,WAAW,
SAAS,IACjB,KAAK,sBAAsB,cAAc,CAAC,MAC1C;AAAA,IACR;AAGA,UAAM,UAAU,MAAM,eAAe,YAAY,EAAE;AAEnD,UAAM,iBAAiB;AAAA,MACrB,GAAG,QAAQ,MAAM,IAAI,CAAC,OAAO,EAAE,MAAM,GAAG,QAAQ,QAAiB,EAAE;AAAA,MACnE,GAAG,QAAQ,SAAS,IAAI,CAAC,OAAO,EAAE,MAAM,GAAG,QAAQ,WAAoB,EAAE;AAAA,IAC3E;AAEA,QAAI,QAAQ,UAAU,SAAS,GAAG;AAChC,UAAI,KAAK,QAAQ,UAAU,MAAM,0BAA0B;AAAA,IAC7D;AACA,QAAI,QAAQ,QAAQ,SAAS,GAAG;AAC9B,UAAI,KAAK,QAAQ,QAAQ,MAAM,wBAAwB;AAAA,IACzD;AACA,QAAI,QAAQ,MAAM,SAAS,GAAG;AAC5B,UAAI,KAAK,QAAQ,MAAM,MAAM,qBAAqB;AAAA,IACpD;AACA,QAAI,QAAQ,SAAS,SAAS,GAAG;AAC/B,UAAI,KAAK,QAAQ,SAAS,MAAM,6BAA6B;AAAA,IAC/D;AAGA,eAAW,eAAe,QAAQ,SAAS;AACzC,SAAG,WAAW,WAAW;AAAA,IAC3B;AAGA,UAAM,WAAW;AAEjB,UAAM,oBAGA,CAAC;AAEP,QAAI,iBAAiB;AAErB,eAAW,EAAE,MAAM,QAAQ,KAAK,gBAAgB;AAC9C,YAAM,kBAAkB,WAAW,KAAK,CAAC,MAAM,EAAE,SAAS,OAAO;AACjE,UAAI,CAAC,gBAAiB;AAGtB,YAAM,eAAe,GAAG,QAAQ,OAAO;AACvC,UAAI,cAAc;AAChB,WAAG,mBAAmB,aAAa,EAAE;AAAA,MACvC;AAGA,UAAI;AACJ,UAAI;AACF,gBAAQ,MAAM,UAAU,gBAAgB,cAAc,gBAAgB,QAAQ;AAAA,MAChF,QAAQ;AACN,YAAI,qBAAgB,OAAO,gBAAgB;AAC3C;AAAA,MACF;AAGA,YAAM,SAAS,UAAU,OAAO,OAAO;AAGvC,YAAM,SAAS,GAAG,WAAW;AAAA,QAC3B,MAAM;AAAA,QACN,UAAU,gBAAgB;AAAA,QAC1B,MAAM,QAAQ,OAAO,IAAI,OAAO,KAAK;AAAA,QACrC,MAAM,gBAAgB;AAAA,MACxB,CAAC;AAGD,YAAM,WAAW,GAAG;AAAA,QAClB;AAAA,QACA,OAAO,IAAI,CAAC,OAAO;AAAA,UACjB,WAAW,EAAE;AAAA,UACb,SAAS,EAAE;AAAA,UACX,MAAM,EAAE;AAAA,UACR,MAAM,EAAE;AAAA,UACR,QAAQ,EAAE;AAAA,UACV,MAAM,EAAE;AAAA,UACR,SAAS,EAAE;AAAA,UACX,SAAS,EAAE;AAAA,UACX,MAAM,EAAE;AAAA,QACV,EAAE;AAAA,MACJ;AAGA,eAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,0BAAkB,KAAK;AAAA,UACrB,aAAa;AAAA,UACb,OAAO,EAAE,GAAG,OAAO,CAAC,GAAG,IAAI,OAAO,SAAS,CAAC,CAAC,EAAE;AAAA,QACjD,CAAC;AAAA,MACH;AAEA;AACA,UAAI,iBAAiB,OAAO,KAAK,mBAAmB,eAAe,QAAQ;AACzE,YAAI,gBAAgB,cAAc,IAAI,eAAe,MAAM,EAAE;AAAA,MAC/D;AAAA,IACF;AAEA,QAAI,KAAK,kBAAkB,MAAM,iBAAiB;AAGlD,QAAI,iBAAiB;AAErB,QAAI,CAAC,QAAQ,iBAAiB,kBAAkB,SAAS,GAAG;AAC1D,YAAM,WAAW,MAAM,eAAe;AAEtC,YAAM,QAAQ,kBAAkB;AAAA,QAAI,CAAC,OACnC,iBAAiB,GAAG,aAAa,GAAG,MAAM,QAAQ,GAAG,MAAM,IAAI;AAAA,MACjE;AAEA,YAAM,UAAU,MAAM,SAAS,MAAM,OAAO,CAAC,MAAM,UAAU;AAC3D,YAAI,kBAAkB,IAAI,IAAI,KAAK,EAAE;AAAA,MACvC,CAAC;AAGD,SAAG,YAAY,MAAM;AACnB,iBAAS,IAAI,GAAG,IAAI,kBAAkB,QAAQ,KAAK;AACjD,gBAAM,YAAY,SAAS,kBAAkB,CAAC,EAAE,MAAM,IAAI,EAAE;AAC5D,aAAG,aAAa,WAAW,QAAQ,CAAC,CAAC;AAAA,QACvC;AAAA,MACF,CAAC;AAED,uBAAiB,QAAQ;AAAA,IAC3B;AAGA,UAAM,aAAa,YAAY,IAAI,IAAI;AACvC,UAAM,SAASC,IAAG,WAAW,MAAM,IAAIA,IAAG,SAAS,MAAM,EAAE,OAAO;AAElE,QAAI,EAAE;AACN,QAAI,qBAAgB,eAAe,UAAU,CAAC,EAAE;AAChD;AAAA,MACE,KAAK,WAAW,MAAM,iBAAY,kBAAkB,MAAM,aACvD,iBAAiB,IAAI,WAAM,cAAc,aAAa;AAAA,IAC3D;AACA,QAAI,eAAe,OAAO,IAAI,WAAW,KAAK,YAAY,MAAM,CAAC,GAAG;AAEpE,WAAO;AAAA,MACL,iBAAiB,WAAW;AAAA,MAC5B,YAAY,QAAQ,MAAM;AAAA,MAC1B,eAAe,QAAQ,SAAS;AAAA,MAChC,cAAc,QAAQ,QAAQ;AAAA,MAC9B,gBAAgB,QAAQ,UAAU;AAAA,MAClC,eAAe,kBAAkB;AAAA,MACjC;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF,UAAE;AACA,OAAG,MAAM;AAAA,EACX;AACF;AAIA,eAAe,iBAAoC;AACjD,SAAO,oBAAoB;AAC7B;AAIO,SAAS,oBAAoBE,UAAwB;AAC1D,EAAAA,SACG,QAAQ,aAAa,EACrB,YAAY,2CAA2C,EACvD,OAAO,OAAO,cAAuB;AACpC,UAAM,cAAc,aAAa,QAAQ,IAAI;AAC7C,UAAM,UAAUA,SAAQ,KAAK,EAAE,SAAS,MAAM;AAC9C,UAAM,SAAS,aAAa,EAAE,OAAO,UAAU,SAAS,QAAQ,SAAS,KAAK,CAAC;AAE/E,QAAI;AACF,YAAM,QAAQ,WAAW;AAAA,IAC3B,SAAS,KAAK;AACZ,YAAM,UAAU,eAAe,aAAa,MACxC,IAAI;AAAA,QACF,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,QAC/C,UAAU;AAAA,QACV,eAAe,QAAQ,MAAM;AAAA,MAC/B;AACJ,cAAQ,WAAW,mBAAmB,SAAS,QAAQ,OAAO;AAAA,IAChE;AAAA,EACF,CAAC;AACL;;;AY5TA,OAAOC,SAAQ;AACf,OAAOC,WAAU;;;ACMjB,SAAS,gBAAgB,UAA0B;AACjD,SAAO,KAAK,IAAI;AAClB;AAKA,eAAsB,aACpB,IACA,UACA,OACA,OACA,SACyB;AAEzB,QAAM,WAAW,MAAM,SAAS,YAAY,KAAK;AAGjD,QAAM,aAAa,SAAS,WAAW,QAAQ,IAAI;AACnD,QAAM,gBAAgB,GAAG,cA
Ac,UAAU,UAAU;AAE3D,MAAI,cAAc,WAAW,EAAG,QAAO,CAAC;AAGxC,QAAM,WAAW,cAAc,IAAI,CAAC,MAAM,EAAE,OAAO;AACnD,QAAM,SAAS,GAAG,eAAe,QAAQ;AAGzC,QAAM,WAAW,oBAAI,IAA2B;AAChD,aAAW,SAAS,QAAQ;AAC1B,aAAS,IAAI,MAAM,IAAI,KAAK;AAAA,EAC9B;AAGA,QAAM,UAA0B,CAAC;AAEjC,aAAW,MAAM,eAAe;AAC9B,UAAM,QAAQ,SAAS,IAAI,GAAG,OAAO;AACrC,QAAI,CAAC,MAAO;AAGZ,QAAI,SAAS,YAAY,MAAM,aAAa,QAAQ,SAAU;AAE9D,YAAQ,KAAK;AAAA,MACX,SAAS,GAAG;AAAA,MACZ,UAAU,MAAM;AAAA,MAChB,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM;AAAA,MACf,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,OAAO,gBAAgB,GAAG,QAAQ;AAAA,MAClC,UAAU,MAAM;AAAA,IAClB,CAAC;AAAA,EACH;AAGA,UAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AACxC,SAAO,QAAQ,MAAM,GAAG,KAAK;AAC/B;;;AC5DA,SAAS,YAAY,MAAsB;AAEzC,SAAO,KAAK,IAAI,KAAK,IAAI,IAAI;AAC/B;AAKO,SAAS,UACd,IACA,OACA,OACA,SACgB;AAEhB,QAAM,aAAa,SAAS,WAAW,QAAQ,IAAI;AACnD,QAAM,aAAa,GAAG,UAAU,OAAO,UAAU;AAEjD,MAAI,WAAW,WAAW,EAAG,QAAO,CAAC;AAGrC,QAAM,WAAW,WAAW,IAAI,CAAC,MAAM,EAAE,OAAO;AAChD,QAAM,SAAS,GAAG,eAAe,QAAQ;AAEzC,QAAM,WAAW,oBAAI,IAA2B;AAChD,aAAW,SAAS,QAAQ;AAC1B,aAAS,IAAI,MAAM,IAAI,KAAK;AAAA,EAC9B;AAGA,QAAM,UAA0B,CAAC;AAEjC,aAAW,OAAO,YAAY;AAC5B,UAAM,QAAQ,SAAS,IAAI,IAAI,OAAO;AACtC,QAAI,CAAC,MAAO;AAEZ,QAAI,SAAS,YAAY,MAAM,aAAa,QAAQ,SAAU;AAE9D,YAAQ,KAAK;AAAA,MACX,SAAS,IAAI;AAAA,MACb,UAAU,MAAM;AAAA,MAChB,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM;AAAA,MACf,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,OAAO,YAAY,IAAI,IAAI;AAAA,MAC3B,UAAU,MAAM;AAAA,IAClB,CAAC;AAAA,EACH;AAGA,UAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AACxC,SAAO,QAAQ,MAAM,GAAG,KAAK;AAC/B;;;AC9CA,IAAM,cAAc;AACpB,IAAM,eAAe;AACrB,IAAM,cAAc;AAKb,SAAS,UACd,IACA,SACA,OACgB;AAChB,QAAM,YAAY,QAAQ,aAAa;AAEvC,QAAM,WACJ,cAAc,UACT,UACD,cAAc,WACX,WACA;AAET,QAAM,QACJ,cAAc,UACV,cACA,cAAc,WACZ,eACA;AAER,QAAM,SAAS,GAAG;AAAA,IAChB;AAAA,MACE,MAAM,QAAQ;AAAA,MACd;AAAA,MACA,MAAM,QAAQ;AAAA,MACd,QAAQ,QAAQ;AAAA,MAChB,UAAU,QAAQ;AAAA,IACpB;AAAA,IACA;AAAA,EACF;AAEA,SAAO,OAAO,IAAI,CAAC,WAAW;AAAA,IAC5B,SAAS,MAAM;AAAA,IACf,UAAU,MAAM;AAAA,IAChB,WAAW,MAAM;AAAA,IACjB,SAAS,MAAM;AAAA,IACf,MAAM,MAAM;AAAA,IACZ,MAAM,MAAM;AAAA,IACZ,MAAM,MAAM;AAAA,IACZ;AAAA,IACA,UAAU,MAAM;AAAA,EAClB,EAAE;AACJ;;;ACxDA,SAAS,aAAa,SAAyB;AAC7C,MAAI,KAAK;AACT,MAAI,IAAI;AAER,SAAO,IAAI,QAAQ,QAAQ;AACzB,UAAM,KAAK,QAAQ,CAAC;AAEpB,QAAI,OAAO,OAAO,QAAQ,IAAI,CAAC,MAAM,KAAK;AAExC,YAAM;AACN,WAAK;AAEL,UAAI,QAAQ,CAAC,MAAM,IAAK;AAAA,IAC1B,WAAW,OAAO,KAAK;AAErB,YAAM;AACN;AAAA,IACF,WAAW,OAAO,KAAK;AACrB,YAAM;AACN;AAAA,IACF,WAAW,gBAAgB,SAAS,EAAE,GAAG;AACvC,YAAM,OAAO;AACb;AAAA,IACF,OAAO;AACL,YAAM;AACN;AAAA,IACF;AAAA,EACF;AAEA,SAAO,IAAI,OAAO,IAAI,EAAE,GAAG;AAC7B;AAKO,SAAS,WACd,IACA,SACA,OACgB;AAChB,QAAM,WAAW,GAAG,gBAAgB;AACpC,QAAM,QAAQ,aAAa,OAAO;AAClC,QAAM,gBAAgB,SAAS,OAAO,CAAC,MAAM,MAAM,KAAK,CAAC,CAAC;AAE1D,MAAI,cAAc,WAAW,EAAG,QAAO,CAAC;AAGxC,QAAM,UAA0B,CAAC;AAEjC,aAAW,YAAY,eAAe;AACpC,QAAI,QAAQ,UAAU,MAAO;AAE7B,UAAM,OAAO,GAAG,QAAQ,QAAQ;AAChC,QAAI,CAAC,KAAM;AAEX,UAAM,SAAS,GAAG,gBAAgB,KAAK,EAAE;AACzC,eAAW,SAAS,QAAQ;AAC1B,UAAI,QAAQ,UAAU,MAAO;AAE7B,cAAQ,KAAK;AAAA,QACX,SAAS,MAAM;AAAA,QACf,UAAU,KAAK;AAAA,QACf,WAAW,MAAM;AAAA,QACjB,SAAS,MAAM;AAAA,QACf,MAAM,MAAM;AAAA,QACZ,MAAM,MAAM;AAAA,QACZ,MAAM,MAAM;AAAA,QACZ,OAAO;AAAA,QACP,UAAU,KAAK;AAAA,MACjB,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;;;AClEA,IAAM,IAAI;AAKH,SAAS,YACd,iBACA,OACgB;AAEhB,QAAM,WAAW,oBAAI,IAAoB;AACzC,QAAM,YAAY,oBAAI,IAA0B;AAEhD,aAAW,EAAE,QAAQ,SAAAC,SAAQ,KAAK,iBAAiB;AACjD,aAAS,OAAO,GAAG,OAAOA,SAAQ,QAAQ,QAAQ;AAChD,YAAM,SAASA,SAAQ,IAAI;AAC3B,YAAM,WAAW,UAAU,KAAK,IAAI,OAAO;AAE3C,YAAM,WAAW,SAAS,IAAI,OAAO,OAAO,KAAK;AACjD,eAAS,IAAI,OAAO,SAAS,WAAW,QAAQ;AAGhD,UAAI,CAAC,UAAU,IAAI,OAAO,OAAO,GAAG;AAClC,kBAAU,IAAI,OAAO,SAAS,MAAM
;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAEA,MAAI,SAAS,SAAS,EAAG,QAAO,CAAC;AAGjC,QAAM,UAAU,CAAC,GAAG,SAAS,QAAQ,CAAC,EAAE,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC;AAGlE,QAAM,WAAW,QAAQ,CAAC,EAAE,CAAC;AAE7B,QAAM,UAA0B,CAAC;AACjC,aAAW,CAAC,SAAS,QAAQ,KAAK,QAAQ,MAAM,GAAG,KAAK,GAAG;AACzD,UAAM,OAAO,UAAU,IAAI,OAAO;AAClC,QAAI,CAAC,KAAM;AACX,YAAQ,KAAK;AAAA,MACX,GAAG;AAAA,MACH,OAAO,WAAW,IAAI,WAAW,WAAW;AAAA,IAC9C,CAAC;AAAA,EACH;AACA,SAAO;AACT;;;ALZA,IAAMC,WAAU;AAChB,IAAMC,eAAc;AACpB,IAAM,qBAAqB;AAE3B,IAAM,mBAAiD;AAAA,EACrD,QAAQ;AAAA,EACR,KAAK;AAAA,EACL,KAAK;AAAA,EACL,MAAM;AAAA,EACN,YAAY;AACd;AAIA,SAAS,gBAAgB,MAAsB;AAC7C,QAAM,UAAU,KAAK,QAAQ,OAAO,GAAG,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AACnE,MAAI,QAAQ,UAAU,mBAAoB,QAAO;AACjD,SAAO,QAAQ,MAAM,GAAG,kBAAkB,IAAI;AAChD;AAEA,SAAS,eAAe,GAAoC;AAC1D,SAAO;AAAA,IACL,MAAM,EAAE;AAAA,IACR,OAAO,CAAC,EAAE,WAAW,EAAE,OAAO;AAAA,IAC9B,MAAM,EAAE;AAAA,IACR,MAAM,EAAE;AAAA,IACR,OAAO,KAAK,MAAM,EAAE,QAAQ,GAAG,IAAI;AAAA,IACnC,SAAS,gBAAgB,EAAE,IAAI;AAAA,IAC/B,UAAU,EAAE;AAAA,EACd;AACF;AAEA,SAAS,iBAAiB,OAAe,SAAsC;AAC7E,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,mBAAmB,KAAK;AAAA,EACjC;AAEA,QAAM,QAAQ,CAAC,gBAAgB,KAAK;AAAA,CAAM;AAE1C,WAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,UAAM,IAAI,QAAQ,CAAC;AACnB,UAAM,YAAY,EAAE,OAAO,GAAG,EAAE,IAAI,KAAK,EAAE,IAAI,MAAM,IAAI,EAAE,IAAI;AAC/D,UAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,IAAI,IAAI,EAAE,MAAM,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC,YAAY,EAAE,KAAK,GAAG;AAChF,UAAM,KAAK,MAAM,SAAS,EAAE;AAC5B,UAAM,KAAK,MAAM,EAAE,OAAO,EAAE;AAC5B,UAAM,KAAK,EAAE;AAAA,EACf;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAGA,SAAS,mBAAmB,OAAyB;AAEnD,QAAM,UAAU,MAAM,MAAM,iEAAiE;AAC7F,SAAO,WAAW,CAAC;AACrB;AAGA,SAAS,WAAW,OAAwB;AAC1C,SAAO,MAAM,SAAS,GAAG,KAAK,MAAM,SAAS,GAAG,KAAK,MAAM,SAAS,GAAG;AACzE;AAKA,eAAsB,SACpB,aACA,OACA,SACsB;AACtB,QAAM,eAAeC,MAAK,QAAQ,WAAW;AAC7C,QAAM,SAASA,MAAK,KAAK,cAAcF,UAASC,YAAW;AAE3D,MAAI,CAACE,IAAG,WAAW,MAAM,GAAG;AAC1B,UAAM,IAAI;AAAA,MACR,mDAAmDH,QAAO,IAAIC,YAAW;AAAA,MACzE,UAAU;AAAA,IACZ;AAAA,EACF;AAEA,QAAM,QAAQ,YAAY,IAAI;AAC9B,QAAM,KAAK,eAAe,MAAM;AAEhC,MAAI;AACF,UAAM,kBAAkB,MAAM,cAAc,IAAI,OAAO,OAAO;AAC9D,UAAM,QAAQ,YAAY,iBAAiB,QAAQ,KAAK;AACxD,UAAM,gBAAgB,MAAM,IAAI,cAAc;AAE9C,UAAM,eAAe,KAAK,MAAM,YAAY,IAAI,IAAI,KAAK;AACzD,UAAM,OACJ,QAAQ,WAAW,SACf,iBAAiB,OAAO,aAAa,IACrC;AAEN,WAAO;AAAA,MACL;AAAA,MACA,SAAS;AAAA,MACT,OAAO;AAAA,QACL,YAAY,gBAAgB,IAAI,CAAC,MAAM,EAAE,QAAQ;AAAA,QACjD,cAAc,cAAc;AAAA,QAC5B;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF,UAAE;AACA,OAAG,MAAM;AAAA,EACX;AACF;AAIA,eAAe,cACb,IACA,OACA,SAC2B;AAC3B,QAAM,UAA4B,CAAC;AACnC,QAAM,UAAU,QAAQ,WAAW,EAAE,UAAU,QAAQ,SAAS,IAAI;AACpE,QAAM,QAAQ,QAAQ,QAAQ;AAE9B,aAAW,YAAY,QAAQ,YAAY;AACzC,UAAM,SAAS,iBAAiB,QAAQ;AACxC,UAAM,gBAAgB,MAAM;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,QAAI,cAAc,SAAS,GAAG;AAC5B,cAAQ,KAAK,EAAE,UAAU,QAAQ,SAAS,cAAc,CAAC;AAAA,IAC3D;AAAA,EACF;AAEA,SAAO;AACT;AAEA,eAAe,gBACb,IACA,UACA,OACA,OACA,SACyB;AACzB,UAAQ,UAAU;AAAA,IAChB,KAAK,UAAU;AACb,YAAM,WAAW,MAAM,aAAa;AACpC,aAAO,aAAa,IAAI,UAAU,OAAO,OAAO,OAAO;AAAA,IACzD;AAAA,IAEA,KAAK;AACH,aAAO,UAAU,IAAI,OAAO,OAAO,OAAO;AAAA,IAE5C,KAAK,OAAO;AACV,YAAM,UAAU,mBAAmB,KAAK;AACxC,UAAI,QAAQ,WAAW,EAAG,QAAO,CAAC;AAElC,YAAM,aAA6B,CAAC;AACpC,iBAAW,QAAQ,SAAS;AAC1B,cAAM,UAAU;AAAA,UACd;AAAA,UACA,EAAE,MAAM,UAAU,SAAS,SAAS;AAAA,UACpC;AAAA,QACF;AACA,mBAAW,KAAK,GAAG,OAAO;AAAA,MAC5B;AAGA,YAAM,OAAO,oBAAI,IAAY;AAC7B,aAAO,WAAW,OAAO,CAAC,MAAM;AAC9B,YAAI,KAAK,IAAI,EAAE,OAAO,EAAG,QAAO;AAChC,aAAK,IAAI,EAAE,OAAO;AAClB,eAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,IAEA,KAAK,QAAQ;AACX,UAAI,CAAC,WAAW,KAAK,EAAG,QAAO,CAAC;AAChC,aAAO,WAAW,IAAI,OAAO,KAAK;AAAA,IACpC;AAAA,IAEA,KAAK;AACH,aAAO,CAAC;AAAA,EACZ;AACF;AAIA,IAAI,mBAAo
C;AAExC,eAAe,eAAkC;AAC/C,MAAI,iBAAkB,QAAO;AAC7B,qBAAmB,MAAM,oBAAoB;AAC7C,SAAO;AACT;AAIO,SAAS,qBAAqBG,UAAwB;AAC3D,EAAAA,SACG,QAAQ,eAAe,EACvB,YAAY,4BAA4B,EACxC,OAAO,mBAAmB,eAAe,IAAI,EAC7C;AAAA,IACC;AAAA,IACA;AAAA,IACA;AAAA,EACF,EACC,OAAO,qBAAqB,oBAAoB,EAChD,OAAO,sBAAsB,4BAA4B,MAAM,EAC/D,OAAO,gBAAgB,oBAAoB,EAC3C,OAAO,OAAO,OAAe,SAAiC;AAC7D,UAAM,cAAc,QAAQ,IAAI;AAChC,UAAM,UAAUA,SAAQ,KAAK,EAAE,SAAS,MAAM;AAC9C,UAAM,SAAS,aAAa,EAAE,OAAO,UAAU,SAAS,QAAQ,SAAS,KAAK,CAAC;AAC/E,UAAM,cAAc,KAAK,UAAU,KAAK,WACrC,MAAM,GAAG,EACT,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAEtB,QAAI;AACF,YAAM,SAAS,MAAM,SAAS,aAAa,OAAO;AAAA,QAChD,OAAO,SAAS,KAAK,OAAO,KAAK,MAAM,EAAE;AAAA,QACzC;AAAA,QACA,UAAU,KAAK,UAAU;AAAA,QACzB,QAAS,KAAK,QAAQ,KAAK;AAAA,MAC7B,CAAC;AAED,UAAI,OAAO,MAAM;AACf,gBAAQ,IAAI,OAAO,IAAI;AAAA,MACzB,OAAO;AACL,gBAAQ,IAAI,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA,MAC7C;AAAA,IACF,SAAS,KAAK;AACZ,YAAM,UAAU,eAAe,eAAe,MAC1C,IAAI;AAAA,QACF,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,QAC/C,UAAU;AAAA,QACV,eAAe,QAAQ,MAAM;AAAA,MAC/B;AACJ,cAAQ,WAAW,mBAAmB,SAAS,QAAQ,OAAO;AAAA,IAChE;AAAA,EACF,CAAC;AACL;;;AMtSA,OAAOC,SAAQ;AACf,OAAOC,WAAU;;;AC2CjB,IAAM,aACJ;AACF,IAAM,aAAa;AACnB,IAAM,gBAAgB;AAEtB,IAAM,qBAAqB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiB3B,IAAM,2BAA2B;AAI1B,SAAS,qBAAqB,QAA6B;AAChE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM,KAAK,UAA0C;AACnD,YAAM,WAAW,SAAS,IAAI,CAAC,OAAO;AAAA,QACpC,MAAM,EAAE,SAAS,cAAc,UAAU;AAAA,QACzC,OAAO,CAAC,EAAE,MAAM,EAAE,QAAQ,CAAC;AAAA,MAC7B,EAAE;AAEF,YAAM,oBAAoB,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ;AAClE,YAAM,oBAAoB,SAAS;AAAA,QACjC,CAAC,GAAG,MAAM,SAAS,CAAC,EAAE,SAAS;AAAA,MACjC;AAEA,YAAM,OAAgC;AAAA,QACpC,UAAU;AAAA,QACV,kBAAkB;AAAA,UAChB,aAAa;AAAA,UACb,iBAAiB;AAAA,QACnB;AAAA,MACF;AAEA,UAAI,mBAAmB;AACrB,aAAK,mBAAmB,IAAI;AAAA,UAC1B,OAAO,CAAC,EAAE,MAAM,kBAAkB,QAAQ,CAAC;AAAA,QAC7C;AAAA,MACF;AAEA,YAAM,WAAW,MAAM,MAAM,GAAG,UAAU,QAAQ,MAAM,IAAI;AAAA,QAC1D,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,MAC3B,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,YAAY,MAAM,SAAS,KAAK;AACtC,cAAM,IAAI,MAAM,qBAAqB,SAAS,MAAM,MAAM,SAAS,EAAE;AAAA,MACvE;AAEA,YAAM,OAAQ,MAAM,SAAS,KAAK;AAIlC,aAAO,KAAK,WAAW,CAAC,EAAE,QAAQ,MAAM,CAAC,EAAE;AAAA,IAC7C;AAAA,EACF;AACF;AAIO,SAAS,qBAAqB,QAA6B;AAChE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM,KAAK,UAA0C;AACnD,YAAM,gBAAgB,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ;AAC9D,YAAM,eAAe,SAAS,OAAO,CAAC,MAAM,EAAE,SAAS,QAAQ;AAC/D,YAAM,YAAY,aAAa,IAAI,CAAC,MAAM,EAAE,OAAO,EAAE,KAAK,MAAM;AAEhE,YAAM,OAAgC;AAAA,QACpC,OAAO;AAAA,QACP,OAAO;AAAA,QACP,mBAAmB;AAAA,QACnB,WAAW,EAAE,QAAQ,MAAM;AAAA,MAC7B;AAEA,UAAI,eAAe;AACjB,aAAK,cAAc,IAAI,cAAc;AAAA,MACvC;AAEA,YAAM,WAAW,MAAM,MAAM,YAAY;AAAA,QACvC,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,eAAe,UAAU,MAAM;AAAA,QACjC;AAAA,QACA,MAAM,KAAK,UAAU,IAAI;AAAA,MAC3B,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,YAAY,MAAM,SAAS,KAAK;AACtC,cAAM,IAAI,MAAM,qBAAqB,SAAS,MAAM,MAAM,SAAS,EAAE;AAAA,MACvE;AAEA,YAAM,OAAQ,MAAM,SAAS,KAAK;AAElC,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AACF;AAIO,SAAS,wBAAwB,QAA6B;AACnE,SAAO;AAAA,IACL,MAAM;AAAA,IACN,MAAM,KAAK,UAA0C;AACnD,YAAM,gBAAgB,SAAS,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ;AAC9D,YAAM,oBAAoB,SACvB,OAAO,CAAC,MAAM,EAAE,SAAS,QAAQ,EACjC,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,QAAQ,EAAE;AAEpD,YAAM,OAAgC;AAAA,QACpC,OAAO;AAAA,QACP,YAAY;AAAA,QACZ,aAAa;AAAA,QACb,UAAU;AAAA,MACZ;AAEA,UAAI,eAAe;AACjB,aAAK,QAAQ,IAAI,cAAc;AAAA,MACjC;AAEA,YAAM,WAAW,MAAM,MAAM,eAAe;AAAA,QAC1C,QAAQ;AAAA,QACR,SAAS;AAAA,UACP,gBAAgB;AAAA,UAChB,aAAa;AAAA,UACb,qBAAqB;AAAA,QACvB;AAAA,QACA,MAAM,KAAK,UAAU,IAAI;AAAA,MAC3B,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,YAAY,MAAM,SAAS,KAAK;AACtC,cAAM,IAAI;A
AAA,UACR,wBAAwB,SAAS,MAAM,MAAM,SAAS;AAAA,QACxD;AAAA,MACF;AAEA,YAAM,OAAQ,MAAM,SAAS,KAAK;AAIlC,aAAO,KAAK,QAAQ,CAAC,EAAE;AAAA,IACzB;AAAA,EACF;AACF;AAIA,IAAM,mBAAmB,oBAAI,IAAY;AAAA,EACvC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAED,SAAS,kBAAkB,OAA2B;AACpD,QAAM,aAA6B;AAAA,IACjC,EAAE,UAAU,OAAO,OAAO,QAAQ,KAAK,QAAQ,2BAA2B;AAAA,IAC1E,EAAE,UAAU,OAAO,OAAO,QAAQ,KAAK,QAAQ,2BAA2B;AAAA,EAC5E;AAEA,SAAO;AAAA,IACL,gBAAgB,kBAAkB,KAAK;AAAA,IACvC;AAAA,EACF;AACF;AAEA,SAAS,gBAAgB,KAAa,OAA2B;AAE/D,QAAM,YAAY,IAAI,MAAM,aAAa;AACzC,MAAI,CAAC,UAAW,QAAO,kBAAkB,KAAK;AAE9C,QAAM,SAAS,KAAK,MAAM,UAAU,CAAC,CAAC;AAKtC,MACE,CAAC,OAAO,kBACR,CAAC,MAAM,QAAQ,OAAO,UAAU,KAChC,OAAO,WAAW,WAAW,GAC7B;AACA,WAAO,kBAAkB,KAAK;AAAA,EAChC;AAGA,QAAM,kBAAkB,OAAO,WAAW;AAAA,IAAO,CAAC,MAChD,iBAAiB,IAAI,EAAE,QAAQ;AAAA,EACjC;AAEA,MAAI,gBAAgB,WAAW,EAAG,QAAO,kBAAkB,KAAK;AAEhE,SAAO;AAAA,IACL,gBAAgB,OAAO;AAAA,IACvB,YAAY;AAAA,EACd;AACF;AAGA,eAAsB,WACpB,UACA,OACqB;AACrB,MAAI;AACF,UAAM,WAAW,MAAM,SAAS,KAAK;AAAA,MACnC,EAAE,MAAM,UAAU,SAAS,mBAAmB;AAAA,MAC9C,EAAE,MAAM,QAAQ,SAAS,MAAM;AAAA,IACjC,CAAC;AAED,WAAO,gBAAgB,UAAU,KAAK;AAAA,EACxC,QAAQ;AACN,WAAO,kBAAkB,KAAK;AAAA,EAChC;AACF;AAIA,SAAS,oBAAoB,SAAiC;AAC5D,SAAO,QACJ,MAAM,GAAG,EAAE,EACX;AAAA,IACC,CAAC,GAAG,MACF,GAAG,IAAI,CAAC,KAAK,EAAE,QAAQ,IAAI,EAAE,SAAS,IAAI,EAAE,OAAO,IAAI,EAAE,QAAQ,WAAW,KAAK,EAAE,IAAI,aAAa,EAAE,MAAM,QAAQ,CAAC,CAAC;AAAA,KAAS,EAAE,KAAK,MAAM,GAAG,GAAG,CAAC;AAAA,EACvJ,EACC,KAAK,MAAM;AAChB;AAEA,eAAsB,sBACpB,UACA,OACA,SACiB;AACjB,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,yBAAyB,KAAK;AAAA,EACvC;AAEA,QAAM,mBAAmB,oBAAoB,OAAO;AAEpD,QAAM,WAAW,MAAM,SAAS,KAAK;AAAA,IACnC,EAAE,MAAM,UAAU,SAAS,yBAAyB;AAAA,IACpD;AAAA,MACE,MAAM;AAAA,MACN,SAAS,WAAW,KAAK;AAAA;AAAA;AAAA,EAAyB,gBAAgB;AAAA,IACpE;AAAA,EACF,CAAC;AAED,SAAO;AACT;AAIA,SAASC,gBAAe,MAAsB;AAE5C,SAAO,KAAK,KAAK,KAAK,SAAS,CAAC;AAClC;AAGA,eAAsB,MACpB,UACA,OACA,OACA,gBACyB;AACzB,MAAI,cAAc;AAGlB,QAAM,OAAO,MAAM,WAAW,UAAU,KAAK;AAC7C,iBAAeA,gBAAe,qBAAqB,KAAK;AACxD,iBAAeA,gBAAe,KAAK,UAAU,IAAI,CAAC;AAGlD,QAAM,UAAU,MAAM,eAAe,KAAK,YAAY,KAAK;AAG3D,MAAI;AACJ,MAAI;AACF,kBAAc,MAAM,sBAAsB,UAAU,OAAO,OAAO;AAClE,mBAAeA,gBAAe,2BAA2B,KAAK;AAC9D,mBAAeA,gBAAe,WAAW;AAAA,EAC3C,QAAQ;AACN,kBAAc,QAAQ,SAAS,IAC3B,SAAS,QAAQ,MAAM,mBAAmB,KAAK,OAC/C,yBAAyB,KAAK;AAAA,EACpC;AAGA,QAAM,eAAgB,cAAc,MAAa;AAEjD,SAAO;AAAA,IACL,gBAAgB,KAAK;AAAA,IACrB,YAAY,KAAK;AAAA,IACjB;AAAA,IACA;AAAA,IACA,YAAY;AAAA,IACZ;AAAA,EACF;AACF;;;ADxSA,IAAMC,WAAU;AAChB,IAAMC,eAAc;AACpB,IAAMC,sBAAqB;AAE3B,IAAM,kBACJ;AAIF,IAAM,mBAA2C;AAAA,EAC/C,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,WAAW;AACb;AAEA,IAAM,qBAAmE;AAAA,EACvE,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,WAAW;AACb;AAEA,IAAM,kBAAkB,CAAC,UAAU,UAAU,WAAW;AAGjD,SAAS,eAAe,UAAuC;AACpE,MAAI,UAAU;AACZ,UAAM,SAAS,iBAAiB,QAAQ;AACxC,UAAM,SAAS,SAAS,QAAQ,IAAI,MAAM,IAAI;AAC9C,QAAI,CAAC,OAAQ,QAAO;AACpB,UAAM,UAAU,mBAAmB,QAAQ;AAC3C,WAAO,UAAU,QAAQ,MAAM,IAAI;AAAA,EACrC;AAEA,aAAW,QAAQ,iBAAiB;AAClC,UAAM,SAAS,iBAAiB,IAAI;AACpC,UAAM,SAAS,SAAS,QAAQ,IAAI,MAAM,IAAI;AAC9C,QAAI,QAAQ;AACV,YAAM,UAAU,mBAAmB,IAAI;AACvC,UAAI,QAAS,QAAO,QAAQ,MAAM;AAAA,IACpC;AAAA,EACF;AAEA,SAAO;AACT;AAIA,SAASC,iBAAgB,MAAsB;AAC7C,QAAM,UAAU,KAAK,QAAQ,OAAO,GAAG,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AACnE,MAAI,QAAQ,UAAUD,oBAAoB,QAAO;AACjD,SAAO,QAAQ,MAAM,GAAGA,mBAAkB,IAAI;AAChD;AAEA,SAASE,gBAAe,GAAkC;AACxD,SAAO;AAAA,IACL,MAAM,EAAE;AAAA,IACR,OAAO,CAAC,EAAE,WAAW,EAAE,OAAO;AAAA,IAC9B,MAAM,EAAE;AAAA,IACR,MAAM,EAAE;AAAA,IACR,OAAO,KAAK,MAAM,EAAE,QAAQ,GAAG,IAAI;AAAA,IACnC,SAASD,iBAAgB,EAAE,IAAI;AAAA,IAC/B,UAAU,EAAE;AAAA,EACd;AACF;AAEA,SAASE,kBAAiB,QAA2B;AACnD,QAAM,QAAkB,CAAC;AAEzB,MAAI,OAAO,UAAU;AACnB,UAAM,KAAK,eAAe;AAC1B,UAAM,KAAK,EAAE;AAAA,EACf;AAEA,MAAI,OAAO,gBAAgB
;AACzB,UAAM,KAAK,kBAAkB,OAAO,cAAc,EAAE;AACpD,UAAM,KAAK,EAAE;AAAA,EACf;AAEA,MAAI,OAAO,QAAQ,WAAW,GAAG;AAC/B,UAAM,KAAK,yBAAyB,OAAO,KAAK,GAAG;AAAA,EACrD,OAAO;AACL,UAAM,KAAK,SAAS,OAAO,QAAQ,MAAM,wBAAwB;AACjE,UAAM,KAAK,EAAE;AAEb,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,QAAQ,KAAK;AAC9C,YAAM,IAAI,OAAO,QAAQ,CAAC;AAC1B,YAAM,YAAY,EAAE,OAAO,GAAG,EAAE,IAAI,KAAK,EAAE,IAAI,MAAM,IAAI,EAAE,IAAI;AAC/D,YAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,IAAI,IAAI,EAAE,MAAM,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC,YAAY,EAAE,KAAK,GAAG;AAChF,YAAM,KAAK,MAAM,SAAS,EAAE;AAC5B,YAAM,KAAK,MAAM,EAAE,OAAO,EAAE;AAC5B,YAAM,KAAK,EAAE;AAAA,IACf;AAAA,EACF;AAEA,MAAI,OAAO,aAAa;AACtB,UAAM,KAAK,cAAc;AACzB,UAAM,KAAK,OAAO,WAAW;AAC7B,UAAM,KAAK,EAAE;AAAA,EACf;AAEA,QAAM,KAAK,wDAAW;AACtB,QAAM,OAAO,OAAO,MAAM,aAAa,QAAQ,CAAC;AAChD,QAAM;AAAA,IACJ,WAAW,OAAO,MAAM,WAAW,eAAe,CAAC,cAAc,IAAI,kBAAkB,OAAO,MAAM,WAAW,KAAK,IAAI,CAAC;AAAA,EAC3H;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAIA,SAAS,qBAAqB,IAAqC;AACjE,SAAO,OAAO,YAA4B,UAA2C;AACnF,UAAM,kBAAoC,CAAC;AAC3C,UAAM,aAAa,QAAQ;AAE3B,eAAW,QAAQ,YAAY;AAC7B,YAAM,UAAU,MAAMC,iBAAgB,IAAI,MAAM,UAAU;AAC1D,UAAI,QAAQ,SAAS,GAAG;AACtB,wBAAgB,KAAK;AAAA,UACnB,UAAU,KAAK;AAAA,UACf,QAAQ,KAAK;AAAA,UACb;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAEA,WAAO,YAAY,iBAAiB,KAAK;AAAA,EAC3C;AACF;AAEA,eAAeA,iBACb,IACA,MACA,OACyB;AACzB,UAAQ,KAAK,UAAU;AAAA,IACrB,KAAK,UAAU;AACb,YAAM,WAAW,MAAMC,cAAa;AACpC,aAAO,aAAa,IAAI,UAAU,KAAK,OAAO,KAAK;AAAA,IACrD;AAAA,IACA,KAAK;AACH,aAAO,UAAU,IAAI,KAAK,OAAO,KAAK;AAAA,IACxC,KAAK;AACH,aAAO,UAAU,IAAI,EAAE,MAAM,KAAK,MAAM,GAAG,KAAK;AAAA,IAClD,KAAK;AACH,aAAO,WAAW,IAAI,KAAK,OAAO,KAAK;AAAA,IACzC,KAAK;AACH,aAAO,CAAC;AAAA,EACZ;AACF;AAIA,IAAIC,oBAAoC;AAExC,eAAeD,gBAAkC;AAC/C,MAAIC,kBAAkB,QAAOA;AAC7B,EAAAA,oBAAmB,MAAM,oBAAoB;AAC7C,SAAOA;AACT;AAIA,eAAe,eACb,IACA,OACA,OACoB;AACpB,QAAM,WAAW,qBAAqB,EAAE;AACxC,QAAM,qBAAqC;AAAA,IACzC,EAAE,UAAU,OAAO,OAAO,QAAQ,KAAK,QAAQ,0BAA0B;AAAA,IACzE,EAAE,UAAU,OAAO,OAAO,QAAQ,KAAK,QAAQ,6BAA6B;AAAA,EAC9E;AAEA,QAAM,UAAU,MAAM,SAAS,oBAAoB,KAAK;AAExD,SAAO;AAAA,IACL;AAAA,IACA,gBAAgB;AAAA,IAChB,SAAS,QAAQ,IAAIJ,eAAc;AAAA,IACnC,aAAa;AAAA,IACb,OAAO;AAAA,MACL,YAAY,mBAAmB,IAAI,CAAC,MAAM,EAAE,QAAQ;AAAA,MACpD,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,cAAc,QAAQ;AAAA,IACxB;AAAA,IACA,UAAU;AAAA,EACZ;AACF;AAKA,eAAsB,OACpB,aACA,OACA,SACoB;AACpB,QAAM,eAAeK,MAAK,QAAQ,WAAW;AAC7C,QAAM,SAASA,MAAK,KAAK,cAAcT,UAASC,YAAW;AAE3D,MAAI,CAACS,IAAG,WAAW,MAAM,GAAG;AAC1B,UAAM,IAAI;AAAA,MACR,mDAAmDV,QAAO,IAAIC,YAAW;AAAA,MACzE,UAAU;AAAA,IACZ;AAAA,EACF;AAEA,QAAM,KAAK,eAAe,MAAM;AAEhC,MAAI;AACF,UAAM,WAAW,QAAQ,YAAY;AAErC,QAAI,CAAC,UAAU;AACb,YAAM,SAAS,MAAM,eAAe,IAAI,OAAO,QAAQ,KAAK;AAC5D,UAAI,QAAQ,WAAW,QAAQ;AAC7B,eAAO,OAAOI,kBAAiB,MAAM;AAAA,MACvC;AACA,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,qBAAqB,EAAE;AAExC,QAAI,QAAQ,WAAW;AACrB,aAAO,MAAM,aAAa,UAAU,OAAO,SAAS,QAAQ;AAAA,IAC9D;AAEA,WAAO,MAAM,gBAAgB,UAAU,OAAO,SAAS,QAAQ;AAAA,EACjE,UAAE;AACA,OAAG,MAAM;AAAA,EACX;AACF;AAEA,eAAe,aACb,UACA,OACA,SACA,UACoB;AACpB,QAAM,OAAO,MAAM,WAAW,UAAU,KAAK;AAC7C,QAAM,UAAU,MAAM,SAAS,KAAK,YAAY,QAAQ,KAAK;AAE7D,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA,gBAAgB,KAAK;AAAA,IACrB,SAAS,QAAQ,IAAID,eAAc;AAAA,IACnC,aAAa;AAAA,IACb,OAAO;AAAA,MACL,YAAY,KAAK,WAAW,IAAI,CAAC,MAAM,EAAE,QAAQ;AAAA,MACjD,YAAY;AAAA,MACZ,cAAc;AAAA,MACd,cAAc,QAAQ;AAAA,IACxB;AAAA,EACF;AAEA,MAAI,QAAQ,WAAW,QAAQ;AAC7B,WAAO,OAAOC,kBAAiB,MAAM;AAAA,EACvC;AAEA,SAAO;AACT;AAEA,eAAe,gBACb,UACA,OACA,SACA,UACoB;AACpB,QAAM,SAAS,MAAM,MAAM,UAAU,OAAO,QAAQ,OAAO,QAAQ;AAEnE,QAAM,SAAoB;AAAA,IACxB;AAAA,IACA,gBAAgB,OAAO;AAAA,IACvB,SAAS,OAAO,QAAQ,IAAID,eAAc;AAAA,IAC1C,aAAa,OAAO;AAAA,IACpB,OAAO;AAAA,MACL,YAAY,OAAO,WAAW,IAAI,CAAC,MAAM,EAAE,QAAQ;AAAA,MACnD,YAAY,OAAO;AAAA,MACnB,cAAc,OAAO;AAAA,MACrB,c
AAc,OAAO,QAAQ;AAAA,IAC/B;AAAA,EACF;AAEA,MAAI,QAAQ,WAAW,QAAQ;AAC7B,WAAO,OAAOC,kBAAiB,MAAM;AAAA,EACvC;AAEA,SAAO;AACT;AAIO,SAAS,mBAAmBM,UAAwB;AACzD,EAAAA,SACG,QAAQ,aAAa,EACrB,YAAY,0CAA0C,EACtD,OAAO,mBAAmB,eAAe,IAAI,EAC7C,OAAO,yBAAyB,uCAAuC,EACvE,OAAO,sBAAsB,4BAA4B,MAAM,EAC/D,OAAO,gBAAgB,8CAA8C,EACrE,OAAO,OAAO,OAAe,SAA2C;AACvE,UAAM,cAAc,QAAQ,IAAI;AAChC,UAAM,UAAUA,SAAQ,KAAK,EAAE,SAAS,MAAM;AAC9C,UAAM,SAAS,aAAa,EAAE,OAAO,UAAU,SAAS,QAAQ,SAAS,KAAK,CAAC;AAC/E,UAAM,eAAe,KAAK,UAAU;AACpC,UAAM,WAAW,eAAe,YAAY;AAE5C,QAAI;AACF,YAAM,SAAS,MAAM,OAAO,aAAa,OAAO;AAAA,QAC9C,OAAO,SAAS,OAAO,KAAK,OAAO,KAAK,IAAI,GAAG,EAAE;AAAA,QACjD,QAAS,KAAK,QAAQ,KAAK;AAAA,QAC3B,UAAU,YAAY;AAAA,QACtB,WAAW,KAAK,SAAS,MAAM;AAAA,MACjC,CAAC;AAED,UAAI,OAAO,MAAM;AACf,gBAAQ,IAAI,OAAO,IAAI;AAAA,MACzB,OAAO;AACL,gBAAQ,IAAI,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA,MAC7C;AAAA,IACF,SAAS,KAAK;AACZ,YAAM,UAAU,eAAe,eAAe,MAC1C,IAAI;AAAA,QACF,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,QAC/C,UAAU;AAAA,QACV,eAAe,QAAQ,MAAM;AAAA,MAC/B;AACJ,cAAQ,WAAW,mBAAmB,SAAS,QAAQ,OAAO;AAAA,IAChE;AAAA,EACF,CAAC;AACL;;;AE1YO,SAAS,oBAAoBC,UAAwB;AAC1D,EAAAA,SACG,QAAQ,cAAc,EACtB,YAAY,8BAA8B,EAC1C,OAAO,UAAU,+BAA+B,EAChD,OAAO,UAAU,8BAA8B,EAC/C,OAAO,YAAY,2CAA2C,EAC9D,OAAO,mBAAmB,eAAe,GAAG,EAC5C,OAAO,qBAAqB,oBAAoB,EAChD,OAAO,CAAC,QAAgB,aAAsC;AAC7D,YAAQ,IAAI,qCAAgC;AAAA,EAC9C,CAAC;AACL;;;ACZO,SAAS,sBAAsBC,UAAwB;AAC5D,EAAAA,SACG,QAAQ,QAAQ,EAChB,YAAY,uCAAuC,EACnD,OAAO,MAAM;AACZ,YAAQ,IAAI,uCAAkC;AAAA,EAChD,CAAC;AACL;;;ACRA,OAAOC,SAAQ;AACf,OAAOC,WAAU;;;ACFjB,SAAS,aAA6B;AACtC,OAAOC,WAAU;AA+BjB,IAAM,sBAAsB;AAE5B,IAAM,sBAAsB,oBAAI,IAAI;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAED,IAAM,qBAAqB,IAAI,IAAI,OAAO,KAAK,YAAY,CAAC;AAI5D,SAAS,cAAc,UAA2B;AAChD,QAAM,MAAMC,MAAK,QAAQ,QAAQ,EAAE,YAAY;AAC/C,SAAO,mBAAmB,IAAI,GAAG;AACnC;AAGO,SAAS,cACd,SACA,QACe;AACf,QAAM,aAAa,QAAQ,cAAc;AACzC,QAAM,cAAcA,MAAK,QAAQ,QAAQ,WAAW;AAEpD,QAAM,eAAe,IAAI,IAAI,QAAQ,WAAW,CAAC,CAAC;AAElD,WAAS,UAAU,UAA2B;AAC5C,UAAM,WAAW,SAAS,MAAMA,MAAK,GAAG;AACxC,eAAW,OAAO,UAAU;AAC1B,UAAI,oBAAoB,IAAI,GAAG,EAAG,QAAO;AACzC,UAAI,aAAa,IAAI,GAAG,EAAG,QAAO;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAEA,MAAI,UAA4B;AAChC,MAAI,iBAAiB,oBAAI,IAAwB;AACjD,MAAI,gBAAsD;AAE1D,WAAS,QAAc;AACrB,QAAI,eAAe,SAAS,EAAG;AAE/B,UAAM,QAAQ,CAAC,GAAG,eAAe,OAAO,CAAC;AACzC,qBAAiB,oBAAI,IAAI;AAEzB,WAAO,SAAS,KAAK;AAAA,EACvB;AAEA,WAAS,gBAAsB;AAC7B,QAAI,cAAe,cAAa,aAAa;AAC7C,oBAAgB,WAAW,OAAO,UAAU;AAAA,EAC9C;AAEA,WAAS,YAAY,MAAmC,UAAwB;AAC9E,QAAI,CAAC,cAAc,QAAQ,EAAG;AAG9B,mBAAe,IAAI,UAAU,EAAE,MAAM,MAAM,SAAS,CAAC;AACrD,kBAAc;AAAA,EAChB;AAEA,SAAO;AAAA,IACL,QAAuB;AACrB,aAAO,IAAI,QAAc,CAAC,YAAY;AACpC,kBAAU,MAAM,KAAK;AAAA,UACnB,KAAK;AAAA,UACL,SAAS,CAAC,OAAe,UAAU,EAAE;AAAA,UACrC,eAAe;AAAA,UACf,YAAY;AAAA,QACd,CAAC;AAED,gBAAQ,GAAG,OAAO,CAAC,OAAO,YAAY,OAAO,EAAE,CAAC;AAChD,gBAAQ,GAAG,UAAU,CAAC,OAAO,YAAY,UAAU,EAAE,CAAC;AACtD,gBAAQ,GAAG,UAAU,CAAC,OAAO,YAAY,UAAU,EAAE,CAAC;AACtD,gBAAQ,GAAG,SAAS,CAAC,QAAiB;AACpC,iBAAO,QAAQ,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC,CAAC;AAAA,QACpE,CAAC;AACD,gBAAQ,GAAG,SAAS,MAAM,QAAQ,CAAC;AAAA,MACrC,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,OAAsB;AAC1B,UAAI,eAAe;AACjB,qBAAa,aAAa;AAC1B,wBAAgB;AAAA,MAClB;AACA,qBAAe,MAAM;AAErB,UAAI,SAAS;AACX,cAAM,QAAQ,MAAM;AACpB,kBAAU;AAAA,MACZ;AAAA,IACF;AAAA,EACF;AACF;;;AD/FA,IAAMC,WAAU;AAChB,IAAMC,eAAc;AAIpB,SAAS,YAAoB;AAC3B,UAAO,oBAAI,KAAK,GAAE,mBAAmB,SAAS,EAAE,QAAQ,MAAM,CAAC;AACjE;AAEA,SAAS,eAAe,UAAiC;AACvD,QAAM,MAAMC,MAAK,QAAQ,QAAQ,EAAE,YAAY;AAC/C,SAAO,aAAa,GAAG,KAAK;AAC9B;AAEA,SAASC,gBAAe,IAAoB;AAC1C,MAAI,KAAK,IAAM,QAAO,GAAG,KAAK,MAAM,EAAE,CAAC;AACvC,SAAO,IAAI,KAAK,KAAM,QAAQ,CAAC,CAAC;AAClC;AAEA,eAAe,SAAS,cAAuC;AAC7D,QAAM,EAAE,YAAAC,
YAAW,IAAI,MAAM,OAAO,QAAa;AACjD,QAAM,UAAUC,IAAG,aAAa,YAAY;AAC5C,SAAOD,YAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAC1D;AAUA,eAAe,eACb,IACA,SACA,aACA,SACwB;AACxB,QAAM,QAAQ,YAAY,IAAI;AAC9B,QAAM,MAAM,QAAQ;AAEpB,MAAI,iBAAiB;AACrB,MAAI,gBAAgB;AAEpB,QAAM,oBAA6D,CAAC;AAEpE,aAAW,UAAU,SAAS;AAC5B,UAAM,eAAeF,MAAK,KAAK,aAAa,OAAO,IAAI;AACvD,UAAM,WAAW,eAAe,OAAO,IAAI;AAE3C,QAAI,OAAO,SAAS,UAAU;AAC5B,UAAI,IAAI,UAAU,CAAC,cAAc,OAAO,IAAI,EAAE;AAC9C,YAAMI,gBAAe,GAAG,QAAQ,OAAO,IAAI;AAC3C,UAAIA,eAAc;AAChB,WAAG,WAAW,OAAO,IAAI;AAAA,MAC3B;AACA;AACA;AAAA,IACF;AAEA,QAAI,CAAC,SAAU;AACf,QAAI,CAACD,IAAG,WAAW,YAAY,EAAG;AAElC,UAAM,QAAQ,OAAO,SAAS,QAAQ,UAAU;AAChD,QAAI,IAAI,UAAU,CAAC,KAAK,KAAK,KAAK,OAAO,IAAI,EAAE;AAG/C,UAAM,eAAe,GAAG,QAAQ,OAAO,IAAI;AAC3C,QAAI,cAAc;AAChB,SAAG,mBAAmB,aAAa,EAAE;AAAA,IACvC;AAGA,QAAI;AACJ,QAAI;AACF,cAAQ,MAAM,UAAU,cAAc,QAAQ;AAAA,IAChD,QAAQ;AACN,UAAI,IAAI,UAAU,CAAC,qBAAgB,OAAO,IAAI,gBAAgB;AAC9D;AAAA,IACF;AAGA,UAAM,SAAS,UAAU,OAAO,OAAO,IAAI;AAG3C,UAAM,OAAO,MAAM,SAAS,YAAY;AACxC,UAAM,OAAOA,IAAG,SAAS,YAAY,EAAE;AAGvC,UAAM,SAAS,GAAG,WAAW;AAAA,MAC3B,MAAM,OAAO;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAGD,UAAM,WAAW,GAAG;AAAA,MAClB;AAAA,MACA,OAAO,IAAI,CAAC,OAAO;AAAA,QACjB,WAAW,EAAE;AAAA,QACb,SAAS,EAAE;AAAA,QACX,MAAM,EAAE;AAAA,QACR,MAAM,EAAE;AAAA,QACR,QAAQ,EAAE;AAAA,QACV,MAAM,EAAE;AAAA,QACR,SAAS,EAAE;AAAA,QACX,SAAS,EAAE;AAAA,QACX,MAAM,EAAE;AAAA,MACV,EAAE;AAAA,IACJ;AAEA,aAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,wBAAkB,KAAK;AAAA,QACrB,aAAa,OAAO;AAAA,QACpB,OAAO,EAAE,GAAG,OAAO,CAAC,GAAG,IAAI,OAAO,SAAS,CAAC,CAAC,EAAE;AAAA,MACjD,CAAC;AAAA,IACH;AAEA,qBAAiB,OAAO;AACxB;AAAA,EACF;AAGA,MAAI,CAAC,QAAQ,iBAAiB,kBAAkB,SAAS,GAAG;AAC1D,UAAM,WAAW,MAAME,cAAa;AAEpC,UAAM,QAAQ,kBAAkB;AAAA,MAAI,CAAC,OACnC,iBAAiB,GAAG,aAAa,GAAG,MAAM,QAAQ,GAAG,MAAM,IAAI;AAAA,IACjE;AAEA,UAAM,UAAU,MAAM,SAAS,MAAM,KAAK;AAE1C,OAAG,YAAY,MAAM;AACnB,eAAS,IAAI,GAAG,IAAI,kBAAkB,QAAQ,KAAK;AACjD,cAAM,YAAY,SAAS,kBAAkB,CAAC,EAAE,MAAM,IAAI,EAAE;AAC5D,WAAG,aAAa,WAAW,QAAQ,CAAC,CAAC;AAAA,MACvC;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,aAAa,YAAY,IAAI,IAAI;AACvC,SAAO,EAAE,gBAAgB,eAAe,WAAW;AACrD;AAIA,IAAIC,oBAAoC;AAExC,eAAeD,gBAAkC;AAC/C,MAAIC,kBAAkB,QAAOA;AAC7B,EAAAA,oBAAmB,MAAM,oBAAoB;AAC7C,SAAOA;AACT;AAKA,eAAsB,SACpB,aACA,UAAwB,CAAC,GACH;AACtB,QAAM,eAAeN,MAAK,QAAQ,WAAW;AAC7C,QAAM,SAASA,MAAK,KAAK,cAAcF,UAASC,YAAW;AAC3D,QAAM,MAAM,QAAQ,OAAO,QAAQ;AAGnC,MAAI,QAAQ,MAAM;AAChB,UAAM,QAAQ,cAAc,EAAE,KAAK,eAAe,QAAQ,cAAc,CAAC;AAAA,EAC3E;AAGA,MAAI,CAACI,IAAG,WAAW,MAAM,GAAG;AAC1B,UAAM,IAAI;AAAA,MACR,sEAAsEL,QAAO,IAAIC,YAAW;AAAA,MAC5F,UAAU;AAAA,IACZ;AAAA,EACF;AAGA,QAAM,WAAW;AAGjB,QAAM,KAAK,eAAe,MAAM;AAGhC,MAAI,gBAAsC;AAE1C,QAAM,UAAU;AAAA,IACd;AAAA,MACE,aAAa;AAAA,MACb;AAAA,MACA,YAAY,QAAQ;AAAA,IACtB;AAAA,IACA;AAAA,MACE,UAAU,CAAC,YAA0B;AACnC,cAAM,YAAY;AAChB,cAAI;AACF,kBAAM,SAAS,MAAM,eAAe,IAAI,SAAS,cAAc;AAAA,cAC7D,eAAe,QAAQ;AAAA,cACvB;AAAA,YACF,CAAC;AAED,gBAAI,OAAO,iBAAiB,GAAG;AAC7B;AAAA,gBACE,IAAI,UAAU,CAAC,iBAAiB,OAAO,cAAc,aAAa,OAAO,aAAa,oBAAoBE,gBAAe,OAAO,UAAU,CAAC;AAAA,cAC7I;AAAA,YACF;AAAA,UACF,SAAS,KAAK;AACZ;AAAA,cACE,IAAI,UAAU,CAAC,YAAY,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,YAC7E;AAAA,UACF;AAAA,QACF,GAAG;AAAA,MACL;AAAA,MACA,SAAS,CAAC,QAAQ;AAChB,YAAI,IAAI,UAAU,CAAC,oBAAoB,IAAI,OAAO,EAAE;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AAGA,QAAM,QAAQ,MAAM;AACpB,kBAAgB;AAEhB,MAAI,YAAY,YAAY,iBAAiB;AAE7C,SAAO;AAAA,IACL,MAAM,OAAsB;AAC1B,UAAI,eAAe;AACjB,cAAM,cAAc,KAAK;AACzB,wBAAgB;AAAA,MAClB;AACA,SAAG,MAAM;AACT,UAAI,mCAAmC;AAAA,IACzC;AAAA,EACF;AACF;AAIO,SAAS,qBAAqBM,UAAwB;AAC3D,EAAAA,SACG,QAAQ,cAAc,EACtB,YAAY,4CAAuC,EACnD,OAAO,UAAU,gCAAgC,EACjD,OAAO,mBAAmB,2BAA2B,KAAK,EAC1D,OAAO,WAAW,wCAAwC,EAC1D,OAAO,OAAO,WAA+B,
SAA2C;AACvF,UAAM,cAAc,aAAa,QAAQ,IAAI;AAC7C,UAAM,UAAUA,SAAQ,KAAK,EAAE,SAAS,MAAM;AAC9C,UAAM,SAAS,aAAa,EAAE,OAAO,UAAU,SAAS,QAAQ,SAAS,KAAK,CAAC;AAC/E,UAAM,gBAAgB,KAAK,OAAO,MAAM;AAExC,QAAI;AACF,YAAM,SAAS,MAAM,SAAS,aAAa;AAAA,QACzC,MAAM,KAAK,MAAM,MAAM;AAAA,QACvB,YAAY,SAAS,OAAO,KAAK,UAAU,KAAK,KAAK,GAAG,EAAE;AAAA,QAC1D;AAAA,MACF,CAAC;AAGD,YAAM,WAAW,MAAM;AACrB,aAAK,OAAO,KAAK,EAAE,KAAK,MAAM,QAAQ,KAAK,CAAC,CAAC;AAAA,MAC/C;AACA,cAAQ,GAAG,UAAU,QAAQ;AAC7B,cAAQ,GAAG,WAAW,QAAQ;AAAA,IAChC,SAAS,KAAK;AACZ,YAAM,UAAU,eAAe,eAAe,MAC1C,IAAI;AAAA,QACF,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,QAC/C,UAAU;AAAA,QACV,eAAe,QAAQ,MAAM;AAAA,MAC/B;AACJ,cAAQ,WAAW,mBAAmB,SAAS,QAAQ,OAAO;AAAA,IAChE;AAAA,EACF,CAAC;AACL;;;AExTA,OAAOC,SAAQ;AACf,OAAOC,WAAU;AA2BjB,IAAMC,WAAU;AAChB,IAAMC,eAAc;AACpB,IAAMC,mBAAkB;AAIxB,SAASC,aAAY,OAAuB;AAC1C,MAAI,QAAQ,KAAM,QAAO,GAAG,KAAK;AACjC,MAAI,QAAQ,OAAO,KAAM,QAAO,IAAI,QAAQ,MAAM,QAAQ,CAAC,CAAC;AAC5D,SAAO,IAAI,SAAS,OAAO,OAAO,QAAQ,CAAC,CAAC;AAC9C;AAEA,SAAS,gBAAgB,KAAqB;AAC5C,QAAM,MAAM,OAAO,GAAG;AACtB,MAAI,OAAO,MAAM,GAAG,EAAG,QAAO;AAC9B,QAAM,OAAO,IAAI,KAAK,GAAG;AACzB,SAAO,KAAK,YAAY,EAAE,QAAQ,KAAK,GAAG,EAAE,QAAQ,WAAW,EAAE;AACnE;AAEA,SAAS,WAAW,GAAmB;AACrC,SAAO,EAAE,OAAO,CAAC,EAAE,YAAY,IAAI,EAAE,MAAM,CAAC;AAC9C;AAEA,SAAS,WAAW,QAAsC;AACxD,QAAMC,cAAaC,MAAK,KAAK,QAAQH,gBAAe;AACpD,MAAI,CAACI,IAAG,WAAWF,WAAU,EAAG,QAAO;AAEvC,MAAI;AACF,UAAM,MAAME,IAAG,aAAaF,aAAY,OAAO;AAC/C,UAAM,SAAS,KAAK,MAAM,GAAG;AAC7B,WAAO;AAAA,MACL,OAAO,OAAO,SAAS;AAAA,MACvB,YAAY,OAAO,cAAc;AAAA,IACnC;AAAA,EACF,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAIA,SAAS,qBAAqB,aAA6B;AACzD,SAAO;AAAA,IACL,yBAAoB,WAAW;AAAA,IAC/B;AAAA,IACA;AAAA,IACA;AAAA,EACF,EAAE,KAAK,IAAI;AACb;AAEA,SAAS,aAAa,aAAqB,QAA8B;AACvE,QAAM,QAAkB;AAAA,IACtB,yBAAoB,WAAW;AAAA,IAC/B;AAAA,IACA;AAAA,IACA,mBAAmBJ,QAAO,IAAIC,YAAW,KAAKE,aAAY,OAAO,WAAW,CAAC;AAAA,EAC/E;AAEA,MAAI,OAAO,aAAa;AACtB,UAAM,KAAK,mBAAmB,gBAAgB,OAAO,WAAW,CAAC,EAAE;AAAA,EACrE;AAEA,QAAM,KAAK,EAAE;AACb,QAAM,KAAK,eAAe,OAAO,UAAU,eAAe,CAAC,EAAE;AAC7D,QAAM,KAAK,eAAe,OAAO,WAAW,eAAe,CAAC,EAAE;AAC9D,QAAM,KAAK,eAAe,OAAO,YAAY,eAAe,CAAC,EAAE;AAE/D,MAAI,OAAO,UAAU,OAAO,GAAG;AAC7B,UAAM,KAAK,EAAE;AACb,UAAM,KAAK,cAAc;AAEzB,UAAM,aAAa,KAAK;AAAA,MACtB,GAAG,CAAC,GAAG,OAAO,UAAU,KAAK,CAAC,EAAE,IAAI,CAAC,MAAM,WAAW,CAAC,EAAE,MAAM;AAAA,IACjE;AAEA,eAAW,CAAC,MAAM,KAAK,KAAK,OAAO,WAAW;AAC5C,YAAM,QAAQ,WAAW,IAAI,EAAE,OAAO,aAAa,CAAC;AACpD,YAAM,KAAK,OAAO,KAAK,GAAG,KAAK,QAAQ,UAAU,IAAI,MAAM,EAAE,EAAE;AAAA,IACjE;AAAA,EACF;AAEA,MAAI,OAAO,QAAQ;AACjB,UAAM,KAAK,EAAE;AACb,UAAM;AAAA,MACJ,sBAAsB,OAAO,OAAO,KAAK,KAAK,OAAO,OAAO,UAAU;AAAA,IACxE;AAAA,EACF;AAEA,QAAM,KAAK,EAAE;AACb,SAAO,MAAM,KAAK,IAAI;AACxB;AAKA,eAAsB,UAAU,aAA4C;AAC1E,QAAM,eAAeE,MAAK,QAAQ,WAAW;AAC7C,QAAM,SAASA,MAAK,KAAK,cAAcL,QAAO;AAC9C,QAAM,SAASK,MAAK,KAAK,QAAQJ,YAAW;AAE5C,MAAI,CAACK,IAAG,WAAW,MAAM,GAAG;AAC1B,UAAM,SAAuB;AAAA,MAC3B,aAAa;AAAA,MACb,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,aAAa;AAAA,MACb,aAAa;AAAA,MACb,aAAa;AAAA,MACb,WAAW,oBAAI,IAAI;AAAA,MACnB,QAAQ;AAAA,MACR,MAAM,qBAAqB,YAAY;AAAA,IACzC;AACA,WAAO;AAAA,EACT;AAEA,QAAM,KAAK,eAAe,MAAM;AAEhC,MAAI;AACF,UAAM,YAAY,GAAG,aAAa;AAClC,UAAM,aAAa,GAAG,cAAc;AACpC,UAAM,cAAc,GAAG,eAAe;AACtC,UAAM,YAAY,GAAG,qBAAqB;AAC1C,UAAM,cAAc,GAAG,eAAe;AACtC,UAAM,SAAS,WAAW,MAAM;AAChC,UAAM,cAAcA,IAAG,SAAS,MAAM,EAAE;AAExC,UAAM,SAAuB;AAAA,MAC3B,aAAa;AAAA,MACb;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,MAAM;AAAA,IACR;AAEA,WAAO,OAAO,aAAa,cAAc,MAAM;AAC/C,WAAO;AAAA,EACT,UAAE;AACA,OAAG,MAAM;AAAA,EACX;AACF;AAIO,SAAS,sBAAsBC,UAAwB;AAC5D,EAAAA,SACG,QAAQ,eAAe,EACvB,YAAY,uBAAuB,EACnC,OAAO,OAAO,cAAuB;AACpC,UAAM,cAAc,aAAa,QAAQ,IAAI;AAC7C,UAAM,UAAUA,SAAQ,KAAK,EAAE,SAAS,MAAM;AAC9C,UAAM,SAAS
,aAAa,EAAE,OAAO,UAAU,SAAS,QAAQ,SAAS,KAAK,CAAC;AAE/E,QAAI;AACF,YAAM,SAAS,MAAM,UAAU,WAAW;AAC1C,cAAQ,IAAI,OAAO,IAAI;AAAA,IACzB,SAAS,KAAK;AACZ,cAAQ,WAAW,mBAAmB,KAAK,QAAQ,OAAO;AAAA,IAC5D;AAAA,EACF,CAAC;AACL;;;AC9LO,SAAS,uBAAuBC,UAAwB;AAC7D,EAAAA,SACG,QAAQ,SAAS,EACjB,YAAY,0BAA0B,EACtC,OAAO,iBAAiB,+CAA+C,EACvE,OAAO,CAAC,aAAsC;AAC7C,YAAQ,IAAI,wCAAmC;AAAA,EACjD,CAAC;AACL;;;ACRO,SAAS,oBAAoBC,UAAwB;AAC1D,EAAAA,SACG,QAAQ,aAAa,EACrB,YAAY,kCAAkC,EAC9C,OAAO,CAAC,UAAkB;AACzB,YAAQ,IAAI,qCAAgC;AAAA,EAC9C,CAAC;AACL;;;ACPO,SAAS,qBAAqBC,UAAwB;AAC3D,EAAAA,SACG,QAAQ,kBAAkB,EAC1B,YAAY,gDAAgD,EAC5D,OAAO,CAAC,cAAsB;AAC7B,YAAQ,IAAI,sCAAiC;AAAA,EAC/C,CAAC;AACL;;;ACRA,OAAOC,UAAQ;AACf,OAAOC,YAAU;AAoCjB,IAAMC,WAAU;AAChB,IAAMC,mBAAkB;AAGjB,IAAM,iBAAgC;AAAA,EAC3C,UAAU;AAAA,IACR,UAAU;AAAA,IACV,OAAO;AAAA,IACP,YAAY;AAAA,EACd;AAAA,EACA,QAAQ;AAAA,IACN,cAAc;AAAA,IACd,YAAY,CAAC,UAAU,OAAO,OAAO,MAAM;AAAA,IAC3C,SAAS,EAAE,QAAQ,GAAK,KAAK,KAAK,KAAK,KAAK,MAAM,KAAK,YAAY,IAAI;AAAA,EACzE;AAAA,EACA,OAAO;AAAA,IACL,YAAY;AAAA,IACZ,SAAS,CAAC;AAAA,EACZ;AAAA,EACA,KAAK;AAAA,IACH,UAAU;AAAA,IACV,OAAO;AAAA,EACT;AACF;AAIA,IAAM,2BAA2B,oBAAI,IAAI,CAAC,SAAS,UAAU,QAAQ,CAAC;AACtE,IAAM,sBAAsB,oBAAI,IAAI,CAAC,UAAU,UAAU,WAAW,CAAC;AAOrE,IAAM,mBAAmD;AAAA,EACvD,qBAAqB;AAAA,IACnB,UAAU,CAAC,MAAM,OAAO,MAAM,YAAY,yBAAyB,IAAI,CAAC;AAAA,IACxE,SAAS,mBAAmB,CAAC,GAAG,wBAAwB,EAAE,KAAK,IAAI,CAAC;AAAA,EACtE;AAAA,EACA,uBAAuB;AAAA,IACrB,UAAU,CAAC,MAAM,OAAO,MAAM,YAAY,IAAI,KAAK,OAAO,UAAU,CAAC;AAAA,IACrE,SAAS;AAAA,EACX;AAAA,EACA,uBAAuB;AAAA,IACrB,UAAU,CAAC,MAAM,OAAO,MAAM,YAAY,IAAI,KAAK,OAAO,UAAU,CAAC;AAAA,IACrE,SAAS;AAAA,EACX;AAAA,EACA,oBAAoB;AAAA,IAClB,UAAU,CAAC,MAAM,OAAO,MAAM,YAAY,KAAK,KAAK,OAAO,UAAU,CAAC;AAAA,IACtE,SAAS;AAAA,EACX;AAAA,EACA,gBAAgB;AAAA,IACd,UAAU,CAAC,MAAM,MAAM,QAAS,OAAO,MAAM,YAAY,oBAAoB,IAAI,CAAC;AAAA,IAClF,SAAS,2BAA2B,CAAC,GAAG,mBAAmB,EAAE,KAAK,IAAI,CAAC;AAAA,EACzE;AACF;AAIA,SAAS,cAAc,aAA6B;AAClD,QAAM,eAAeC,OAAK,QAAQ,WAAW;AAC7C,QAAM,SAASA,OAAK,KAAK,cAAcF,QAAO;AAE9C,MAAI,CAACG,KAAG,WAAW,MAAM,GAAG;AAC1B,UAAM,IAAI;AAAA,MACR,mDAAmDH,QAAO;AAAA,MAC1D,UAAU;AAAA,IACZ;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,WAAW,QAAwB;AAC1C,SAAOE,OAAK,KAAK,QAAQD,gBAAe;AAC1C;AAEA,SAASG,YAAW,QAA+B;AACjD,QAAM,WAAW,WAAW,MAAM;AAElC,MAAI,CAACD,KAAG,WAAW,QAAQ,GAAG;AAE5B,gBAAY,QAAQ,cAAc;AAClC,WAAO,gBAAgB,cAAc;AAAA,EACvC;AAEA,QAAM,MAAMA,KAAG,aAAa,UAAU,OAAO;AAC7C,QAAM,SAAS,KAAK,MAAM,GAAG;AAG7B,SAAO,kBAAkB,MAAM;AACjC;AAEA,SAAS,YAAY,QAAgB,QAA6B;AAChE,EAAAA,KAAG;AAAA,IACD,WAAW,MAAM;AAAA,IACjB,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI;AAAA,EACpC;AACF;AAEA,SAAS,kBAAkB,SAAgD;AACzE,SAAO;AAAA,IACL,UAAU,EAAE,GAAG,eAAe,UAAU,GAAG,QAAQ,SAAS;AAAA,IAC5D,QAAQ;AAAA,MACN,GAAG,eAAe;AAAA,MAClB,GAAG,QAAQ;AAAA,MACX,SAAS,EAAE,GAAG,eAAe,OAAO,SAAS,GAAG,QAAQ,QAAQ,QAAQ;AAAA,IAC1E;AAAA,IACA,OAAO,EAAE,GAAG,eAAe,OAAO,GAAG,QAAQ,MAAM;AAAA,IACnD,KAAK,EAAE,GAAG,eAAe,KAAK,GAAG,QAAQ,IAAI;AAAA,EAC/C;AACF;AAEA,SAAS,eAAe,KAA8B,KAAsB;AAC1E,QAAM,QAAQ,IAAI,MAAM,GAAG;AAC3B,MAAI,UAAmB;AAEvB,aAAW,QAAQ,OAAO;AACxB,QAAI,YAAY,QAAQ,YAAY,UAAa,OAAO,YAAY,UAAU;AAC5E,aAAO;AAAA,IACT;AACA,cAAW,QAAoC,IAAI;AAAA,EACrD;AAEA,SAAO;AACT;AAEA,SAAS,eAAe,KAA8B,KAAa,OAAsB;AACvF,QAAM,QAAQ,IAAI,MAAM,GAAG;AAC3B,MAAI,UAAmC;AAEvC,WAAS,IAAI,GAAG,IAAI,MAAM,SAAS,GAAG,KAAK;AACzC,UAAM,OAAO,MAAM,CAAC;AACpB,QAAI,OAAO,QAAQ,IAAI,MAAM,YAAY,QAAQ,IAAI,MAAM,MAAM;AAC/D,cAAQ,IAAI,IAAI,CAAC;AAAA,IACnB;AACA,cAAU,QAAQ,IAAI;AAAA,EACxB;AAEA,UAAQ,MAAM,MAAM,SAAS,CAAC,CAAC,IAAI;AACrC;AAEA,SAAS,WAAW,UAA2B;AAE7C,MAAI,aAAa,OAAQ,QAAO;AAGhC,MAAI,aAAa,OAAQ,QAAO;AAChC,MAAI,aAAa,QAAS,QAAO;AAGjC,QAAM,MAAM,OAAO,QAAQ;AAC3B,MAAI,CAAC,OAAO,MAAM,GAAG,KAAK,SAAS,KAAK,MAAM,GAAI,QAAO;AAGzD,MAAI,SAAS,WAAW,GAAG,KAAK,SAAS,WAA
W,GAAG,GAAG;AACxD,QAAI;AACF,aAAO,KAAK,MAAM,QAAQ;AAAA,IAC5B,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,SAAO;AACT;AAKO,SAAS,cAAc,aAAuC;AACnE,QAAM,SAAS,cAAc,WAAW;AACxC,QAAM,SAASC,YAAW,MAAM;AAEhC,SAAO;AAAA,IACL;AAAA,IACA,MAAM,KAAK,UAAU,QAAQ,MAAM,CAAC;AAAA,EACtC;AACF;AAGO,SAAS,aAAa,aAAqB,KAAsB;AACtE,QAAM,SAAS,cAAc,WAAW;AACxC,QAAM,SAASA,YAAW,MAAM;AAChC,SAAO,eAAe,QAA8C,GAAG;AACzE;AAGO,SAAS,aACd,aACA,KACA,UACM;AACN,QAAM,SAAS,cAAc,WAAW;AACxC,QAAM,SAASA,YAAW,MAAM;AAChC,QAAM,QAAQ,WAAW,QAAQ;AAGjC,QAAM,OAAO,iBAAiB,GAAG;AACjC,MAAI,QAAQ,CAAC,KAAK,SAAS,KAAK,GAAG;AACjC,UAAM,IAAI,YAAY,sBAAsB,GAAG,MAAM,KAAK,OAAO,IAAI,UAAU,cAAc;AAAA,EAC/F;AAEA,iBAAe,QAA8C,KAAK,KAAK;AACvE,cAAY,QAAQ,MAAM;AAC5B;AAGO,SAAS,eAAe,aAA2B;AACxD,QAAM,SAAS,cAAc,WAAW;AACxC,cAAY,QAAQ,gBAAgB,cAAc,CAAC;AACrD;AAIO,SAAS,sBAAsBC,UAAwB;AAC5D,QAAM,MAAMA,SACT,QAAQ,QAAQ,EAChB,YAAY,8BAA8B;AAE7C,WAAS,mBAAmB,KAAoB;AAC9C,UAAM,UAAUA,SAAQ,KAAK,EAAE,SAAS,MAAM;AAC9C,UAAM,SAAS,aAAa,EAAE,OAAO,UAAU,SAAS,QAAQ,SAAS,KAAK,CAAC;AAC/E,YAAQ,WAAW,mBAAmB,KAAK,QAAQ,OAAO;AAAA,EAC5D;AAEA,MACG,QAAQ,MAAM,EACd,YAAY,4BAA4B,EACxC,OAAO,MAAM;AACZ,QAAI;AACF,YAAM,SAAS,cAAc,QAAQ,IAAI,CAAC;AAC1C,cAAQ,IAAI,OAAO,IAAI;AAAA,IACzB,SAAS,KAAK;AACZ,yBAAmB,GAAG;AAAA,IACxB;AAAA,EACF,CAAC;AAEH,MACG,QAAQ,WAAW,EACnB,YAAY,0CAA0C,EACtD,OAAO,CAAC,QAAgB;AACvB,QAAI;AACF,YAAM,QAAQ,aAAa,QAAQ,IAAI,GAAG,GAAG;AAC7C,cAAQ;AAAA,QACN,OAAO,UAAU,WAAW,KAAK,UAAU,OAAO,MAAM,CAAC,IAAI,OAAO,KAAK;AAAA,MAC3E;AAAA,IACF,SAAS,KAAK;AACZ,yBAAmB,GAAG;AAAA,IACxB;AAAA,EACF,CAAC;AAEH,MACG,QAAQ,mBAAmB,EAC3B,YAAY,0CAA0C,EACtD,OAAO,CAAC,KAAa,UAAkB;AACtC,QAAI;AACF,mBAAa,QAAQ,IAAI,GAAG,KAAK,KAAK;AACtC,cAAQ,IAAI,OAAO,GAAG,MAAM,KAAK,EAAE;AAAA,IACrC,SAAS,KAAK;AACZ,yBAAmB,GAAG;AAAA,IACxB;AAAA,EACF,CAAC;AAEH,MACG,QAAQ,OAAO,EACf,YAAY,iCAAiC,EAC7C,OAAO,MAAM;AACZ,QAAI;AACF,qBAAe,QAAQ,IAAI,CAAC;AAC5B,cAAQ,IAAI,kCAAkC;AAAA,IAChD,SAAS,KAAK;AACZ,yBAAmB,GAAG;AAAA,IACxB;AAAA,EACF,CAAC;AACL;;;ACxTO,SAAS,oBAAoBC,UAAwB;AAC1D,EAAAA,SACG,QAAQ,MAAM,EACd,YAAY,8CAA8C,EAC1D,OAAO,MAAM;AACZ,YAAQ,IAAI,qCAAgC;AAAA,EAC9C,CAAC;AACL;;;A9BKA,IAAM,UAAU,IAAI,QAAQ;AAE5B,QACG,KAAK,KAAK,EACV,YAAY,oDAA+C,EAC3D,QAAQ,OAAO,EACf,OAAO,aAAa,6BAA6B;AAEpD,oBAAoB,OAAO;AAC3B,qBAAqB,OAAO;AAC5B,mBAAmB,OAAO;AAC1B,oBAAoB,OAAO;AAC3B,sBAAsB,OAAO;AAC7B,qBAAqB,OAAO;AAC5B,sBAAsB,OAAO;AAC7B,uBAAuB,OAAO;AAC9B,oBAAoB,OAAO;AAC3B,qBAAqB,OAAO;AAC5B,sBAAsB,OAAO;AAC7B,oBAAoB,OAAO;AAE3B,QAAQ,MAAM;","names":["fs","path","stat","fs","fs","path","require","getLanguage","retType","createHash","path","fs","path","fs","path","fs","configPath","program","fs","path","results","CTX_DIR","DB_FILENAME","path","fs","program","fs","path","estimateTokens","CTX_DIR","DB_FILENAME","SNIPPET_MAX_LENGTH","truncateSnippet","toOutputResult","formatTextOutput","executeStrategy","loadEmbedder","embedderInstance","path","fs","program","program","program","fs","path","path","path","CTX_DIR","DB_FILENAME","path","formatDuration","createHash","fs","existingFile","loadEmbedder","embedderInstance","program","fs","path","CTX_DIR","DB_FILENAME","CONFIG_FILENAME","formatBytes","configPath","path","fs","program","program","program","program","fs","path","CTX_DIR","CONFIG_FILENAME","path","fs","readConfig","program","program"]}