kontext-engine 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,3034 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/cli/index.ts
4
+ import { Command } from "commander";
5
+
6
+ // src/cli/commands/init.ts
7
+ import fs5 from "fs";
8
+ import path4 from "path";
9
+
10
+ // src/indexer/discovery.ts
11
+ import fs from "fs/promises";
12
+ import path from "path";
13
+ import ignore from "ignore";
14
+ var LANGUAGE_MAP = {
15
+ ".ts": "typescript",
16
+ ".tsx": "typescript",
17
+ ".js": "javascript",
18
+ ".jsx": "javascript",
19
+ ".mjs": "javascript",
20
+ ".cjs": "javascript",
21
+ ".py": "python",
22
+ ".go": "go",
23
+ ".rs": "rust",
24
+ ".java": "java",
25
+ ".rb": "ruby",
26
+ ".php": "php",
27
+ ".swift": "swift",
28
+ ".kt": "kotlin",
29
+ ".c": "c",
30
+ ".h": "c",
31
+ ".cpp": "cpp",
32
+ ".hpp": "cpp",
33
+ ".cc": "cpp",
34
+ ".cxx": "cpp",
35
+ ".json": "json",
36
+ ".yaml": "yaml",
37
+ ".yml": "yaml",
38
+ ".toml": "toml",
39
+ ".md": "markdown",
40
+ ".mdx": "markdown",
41
+ ".env": "env"
42
+ };
43
+ var BUILTIN_IGNORE = [
44
+ "node_modules",
45
+ ".git",
46
+ "dist",
47
+ "build",
48
+ "*.lock",
49
+ "package-lock.json",
50
+ "*.png",
51
+ "*.jpg",
52
+ "*.jpeg",
53
+ "*.gif",
54
+ "*.webp",
55
+ "*.ico",
56
+ "*.bmp",
57
+ "*.svg",
58
+ "*.woff",
59
+ "*.woff2",
60
+ "*.ttf",
61
+ "*.eot",
62
+ "*.mp3",
63
+ "*.mp4",
64
+ "*.wav",
65
+ "*.avi",
66
+ "*.mov",
67
+ "*.zip",
68
+ "*.tar",
69
+ "*.gz",
70
+ "*.rar",
71
+ "*.7z",
72
+ "*.pdf",
73
+ "*.exe",
74
+ "*.dll",
75
+ "*.so",
76
+ "*.dylib",
77
+ "*.o",
78
+ "*.a",
79
+ "*.wasm",
80
+ "*.pyc",
81
+ "*.class"
82
+ ];
83
+ function getLanguage(filePath) {
84
+ const basename = path.basename(filePath);
85
+ if (basename.startsWith(".") && !basename.includes(".", 1)) {
86
+ const dotExt = basename;
87
+ return LANGUAGE_MAP[dotExt] ?? null;
88
+ }
89
+ const ext = path.extname(filePath).toLowerCase();
90
+ return LANGUAGE_MAP[ext] ?? null;
91
+ }
92
+ async function readIgnoreFile(filePath) {
93
+ try {
94
+ const content = await fs.readFile(filePath, "utf-8");
95
+ return content.split("\n").map((line) => line.trim()).filter((line) => line.length > 0 && !line.startsWith("#"));
96
+ } catch {
97
+ return [];
98
+ }
99
+ }
100
+ async function statSafe(filePath, followSymlinks) {
101
+ try {
102
+ return followSymlinks ? await fs.stat(filePath) : await fs.lstat(filePath);
103
+ } catch {
104
+ return null;
105
+ }
106
+ }
107
+ async function discoverFiles(options) {
108
+ const { root, extraIgnore = [], followSymlinks = true } = options;
109
+ const absoluteRoot = path.resolve(root);
110
+ const ig = ignore();
111
+ ig.add(BUILTIN_IGNORE);
112
+ const gitignoreRules = await readIgnoreFile(
113
+ path.join(absoluteRoot, ".gitignore")
114
+ );
115
+ ig.add(gitignoreRules);
116
+ const ctxignoreRules = await readIgnoreFile(
117
+ path.join(absoluteRoot, ".ctxignore")
118
+ );
119
+ ig.add(ctxignoreRules);
120
+ ig.add(extraIgnore);
121
+ const results = [];
122
+ await walkDirectory(absoluteRoot, absoluteRoot, ig, followSymlinks, results);
123
+ return results.sort((a, b) => a.path.localeCompare(b.path));
124
+ }
125
+ async function walkDirectory(dir, root, ig, followSymlinks, results) {
126
+ let entries;
127
+ try {
128
+ entries = await fs.readdir(dir, { withFileTypes: true });
129
+ } catch {
130
+ return;
131
+ }
132
+ for (const entry of entries) {
133
+ const absolutePath = path.join(dir, entry.name);
134
+ const relativePath = path.relative(root, absolutePath);
135
+ const normalizedRelative = relativePath.split(path.sep).join("/");
136
+ if (entry.isDirectory() || entry.isSymbolicLink()) {
137
+ const stat2 = await statSafe(absolutePath, followSymlinks);
138
+ if (!stat2) continue;
139
+ if (stat2.isDirectory()) {
140
+ if (ig.ignores(normalizedRelative + "/") || ig.ignores(normalizedRelative)) {
141
+ continue;
142
+ }
143
+ await walkDirectory(absolutePath, root, ig, followSymlinks, results);
144
+ continue;
145
+ }
146
+ if (!stat2.isFile()) continue;
147
+ }
148
+ if (!entry.isFile() && !entry.isSymbolicLink()) continue;
149
+ if (ig.ignores(normalizedRelative)) continue;
150
+ const language = getLanguage(relativePath);
151
+ if (language === null) continue;
152
+ const stat = await statSafe(absolutePath, followSymlinks);
153
+ if (!stat || !stat.isFile()) continue;
154
+ results.push({
155
+ path: normalizedRelative,
156
+ absolutePath,
157
+ language,
158
+ size: stat.size,
159
+ lastModified: stat.mtimeMs
160
+ });
161
+ }
162
+ }
163
+
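The discovery module's single entry point is `discoverFiles`. A minimal usage sketch, grounded in the options and result shape above (the `root` path is illustrative):

```js
// Ignore rules accumulate in order: BUILTIN_IGNORE, .gitignore,
// .ctxignore, then extraIgnore.
const files = await discoverFiles({
  root: "./my-project",     // resolved via path.resolve
  extraIgnore: [".ctx/"],   // the same extra rule runInit passes below
  followSymlinks: true,     // fs.stat instead of fs.lstat
});
// Each entry: { path, absolutePath, language, size, lastModified },
// sorted by POSIX-style relative path; files whose extension has no
// LANGUAGE_MAP entry are dropped.
for (const f of files) console.log(f.language, f.path);
```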
164
+ // src/indexer/incremental.ts
165
+ import { createHash } from "crypto";
166
+ import fs2 from "fs/promises";
167
+ async function hashFileContent(absolutePath) {
168
+ const content = await fs2.readFile(absolutePath);
169
+ return createHash("sha256").update(content).digest("hex");
170
+ }
171
+ async function computeChanges(discovered, db) {
172
+ const start = performance.now();
173
+ const added = [];
174
+ const modified = [];
175
+ const unchanged = [];
176
+ const hashes = /* @__PURE__ */ new Map();
177
+ const discoveredPaths = new Set(discovered.map((f) => f.path));
178
+ await Promise.all(
179
+ discovered.map(async (file) => {
180
+ const contentHash = await hashFileContent(file.absolutePath);
181
+ const existing = db.getFile(file.path);
182
+ if (!existing) {
183
+ added.push(file.path);
184
+ hashes.set(file.path, contentHash);
185
+ } else if (existing.hash !== contentHash) {
186
+ modified.push(file.path);
187
+ hashes.set(file.path, contentHash);
188
+ } else {
189
+ unchanged.push(file.path);
190
+ }
191
+ })
192
+ );
193
+ const dbPaths = db.getAllFilePaths();
194
+ const deleted = dbPaths.filter((p) => !discoveredPaths.has(p));
195
+ added.sort();
196
+ modified.sort();
197
+ deleted.sort();
198
+ unchanged.sort();
199
+ return {
200
+ added,
201
+ modified,
202
+ deleted,
203
+ unchanged,
204
+ hashes,
205
+ duration: performance.now() - start
206
+ };
207
+ }
208
+
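`computeChanges` hashes every discovered file with SHA-256 in parallel and classifies it against the stored `files` rows. A sketch of the resulting shape, assuming `db` provides `getFile` and `getAllFilePaths` as in `src/storage/db.ts` further down:

```js
const changes = await computeChanges(files, db);
// changes.added     -> paths with no row in the database
// changes.modified  -> paths whose stored hash differs from disk
// changes.deleted   -> database paths missing from the discovery set
// changes.unchanged -> everything else
// changes.hashes    -> Map of path -> fresh hash (added/modified only)
// changes.duration  -> elapsed milliseconds (performance.now delta)
```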
209
+ // src/indexer/parser.ts
210
+ import fs3 from "fs/promises";
211
+ import path2 from "path";
212
+ import { createRequire } from "module";
213
+ import Parser from "web-tree-sitter";
214
+ var GRAMMAR_FILES = {
215
+ typescript: "tree-sitter-typescript.wasm",
216
+ javascript: "tree-sitter-javascript.wasm",
217
+ python: "tree-sitter-python.wasm"
218
+ };
219
+ var require2 = createRequire(import.meta.url);
220
+ var initialized = false;
221
+ var languageCache = /* @__PURE__ */ new Map();
222
+ function resolveWasmPath(filename) {
223
+ if (filename === "tree-sitter.wasm") {
224
+ return path2.join(path2.dirname(require2.resolve("web-tree-sitter")), filename);
225
+ }
226
+ return path2.join(path2.dirname(require2.resolve("tree-sitter-wasms/package.json")), "out", filename);
227
+ }
228
+ async function initParser() {
229
+ if (initialized) return;
230
+ await Parser.init({
231
+ locateFile: (scriptName) => resolveWasmPath(scriptName)
232
+ });
233
+ initialized = true;
234
+ }
235
+ async function getLanguage2(language) {
236
+ const grammarFile = GRAMMAR_FILES[language];
237
+ if (!grammarFile) return null;
238
+ const cached = languageCache.get(language);
239
+ if (cached) return cached;
240
+ const wasmPath = resolveWasmPath(grammarFile);
241
+ const lang = await Parser.Language.load(wasmPath);
242
+ languageCache.set(language, lang);
243
+ return lang;
244
+ }
245
+ function extractDocstring(node, language) {
246
+ if (language === "python") {
247
+ const body = node.childForFieldName("body");
248
+ if (body) {
249
+ const firstStmt = body.namedChildren[0];
250
+ if (firstStmt?.type === "expression_statement") {
251
+ const strNode = firstStmt.namedChildren[0];
252
+ if (strNode?.type === "string") {
253
+ const raw = strNode.text;
254
+ return raw.replace(/^["']{1,3}|["']{1,3}$/g, "").trim();
255
+ }
256
+ }
257
+ }
258
+ return void 0;
259
+ }
260
+ const prev = findPrecedingComment(node);
261
+ if (prev) return cleanJSDocComment(prev.text);
262
+ return void 0;
263
+ }
264
+ function findPrecedingComment(node) {
265
+ let candidate = node.previousNamedSibling;
266
+ if (node.parent?.type === "export_statement") {
267
+ candidate = node.parent.previousNamedSibling;
268
+ }
269
+ if (candidate?.type === "comment") return candidate;
270
+ return null;
271
+ }
272
+ function cleanJSDocComment(text) {
273
+ return text.replace(/^\/\*\*?\s*/, "").replace(/\s*\*\/$/, "").replace(/^\s*\* ?/gm, "").trim();
274
+ }
275
+ function extractParams(node, language) {
276
+ const paramsNode = node.childForFieldName("parameters") ?? node.childForFieldName("formal_parameters");
277
+ if (!paramsNode) return void 0;
278
+ if (language === "python") {
279
+ return paramsNode.namedChildren.filter((c) => c.type !== "comment").map((c) => c.text).filter((t) => t !== "self" && t !== "cls");
280
+ }
281
+ return paramsNode.namedChildren.filter((c) => c.type !== "comment").map((c) => c.text);
282
+ }
283
+ function extractReturnType(node, language) {
284
+ if (language === "python") {
285
+ const retType2 = node.childForFieldName("return_type");
286
+ return retType2?.text;
287
+ }
288
+ const retType = node.childForFieldName("return_type");
289
+ if (retType) {
290
+ const text = retType.text;
291
+ return text.startsWith(":") ? text.slice(1).trim() : text;
292
+ }
293
+ return void 0;
294
+ }
295
+ function isExported(node) {
296
+ return node.parent?.type === "export_statement";
297
+ }
298
+ function extractTopLevelNode(node) {
299
+ if (node.parent?.type === "export_statement") return node.parent;
300
+ return node;
301
+ }
302
+ function extractTypeScript(rootNode, source, language) {
303
+ const nodes = [];
304
+ function walk(node, parentClassName) {
305
+ for (const child of node.namedChildren) {
306
+ const inner = child.type === "export_statement" ? child.namedChildren.find(
307
+ (c) => c.type === "function_declaration" || c.type === "class_declaration" || c.type === "lexical_declaration" || c.type === "interface_declaration" || c.type === "type_alias_declaration" || c.type === "abstract_class_declaration"
308
+ ) ?? child : child;
309
+ switch (inner.type) {
310
+ case "import_statement": {
311
+ nodes.push({
312
+ type: "import",
313
+ name: null,
314
+ lineStart: inner.startPosition.row + 1,
315
+ lineEnd: inner.endPosition.row + 1,
316
+ language,
317
+ parent: null,
318
+ text: inner.text
319
+ });
320
+ break;
321
+ }
322
+ case "function_declaration": {
323
+ const topNode = extractTopLevelNode(inner);
324
+ const name = inner.childForFieldName("name")?.text ?? null;
325
+ nodes.push({
326
+ type: parentClassName ? "method" : "function",
327
+ name,
328
+ lineStart: topNode.startPosition.row + 1,
329
+ lineEnd: topNode.endPosition.row + 1,
330
+ language,
331
+ parent: parentClassName,
332
+ params: extractParams(inner, language),
333
+ returnType: extractReturnType(inner, language),
334
+ docstring: extractDocstring(inner, language),
335
+ exports: isExported(inner),
336
+ text: topNode.text
337
+ });
338
+ break;
339
+ }
340
+ case "class_declaration":
341
+ case "abstract_class_declaration": {
342
+ const topNode = extractTopLevelNode(inner);
343
+ const className = inner.childForFieldName("name")?.text ?? null;
344
+ nodes.push({
345
+ type: "class",
346
+ name: className,
347
+ lineStart: topNode.startPosition.row + 1,
348
+ lineEnd: topNode.endPosition.row + 1,
349
+ language,
350
+ parent: null,
351
+ docstring: extractDocstring(inner, language),
352
+ exports: isExported(inner),
353
+ text: topNode.text
354
+ });
355
+ const classBody = inner.childForFieldName("body");
356
+ if (classBody) {
357
+ for (const member of classBody.namedChildren) {
358
+ if (member.type === "method_definition") {
359
+ const methodName = member.childForFieldName("name")?.text ?? null;
360
+ nodes.push({
361
+ type: "method",
362
+ name: methodName,
363
+ lineStart: member.startPosition.row + 1,
364
+ lineEnd: member.endPosition.row + 1,
365
+ language,
366
+ parent: className,
367
+ params: extractParams(member, language),
368
+ returnType: extractReturnType(member, language),
369
+ docstring: extractDocstring(member, language),
370
+ exports: isExported(inner),
371
+ text: member.text
372
+ });
373
+ }
374
+ }
375
+ }
376
+ break;
377
+ }
378
+ case "interface_declaration": {
379
+ const topNode = extractTopLevelNode(inner);
380
+ const name = inner.childForFieldName("name")?.text ?? null;
381
+ nodes.push({
382
+ type: "type",
383
+ name,
384
+ lineStart: topNode.startPosition.row + 1,
385
+ lineEnd: topNode.endPosition.row + 1,
386
+ language,
387
+ parent: null,
388
+ docstring: extractDocstring(inner, language),
389
+ exports: isExported(inner),
390
+ text: topNode.text
391
+ });
392
+ break;
393
+ }
394
+ case "type_alias_declaration": {
395
+ const topNode = extractTopLevelNode(inner);
396
+ const name = inner.childForFieldName("name")?.text ?? null;
397
+ nodes.push({
398
+ type: "type",
399
+ name,
400
+ lineStart: topNode.startPosition.row + 1,
401
+ lineEnd: topNode.endPosition.row + 1,
402
+ language,
403
+ parent: null,
404
+ docstring: extractDocstring(inner, language),
405
+ exports: isExported(inner),
406
+ text: topNode.text
407
+ });
408
+ break;
409
+ }
410
+ case "lexical_declaration": {
411
+ const topNode = extractTopLevelNode(inner);
412
+ const declarator = inner.namedChildren.find(
413
+ (c) => c.type === "variable_declarator"
414
+ );
415
+ const name = declarator?.childForFieldName("name")?.text ?? null;
416
+ nodes.push({
417
+ type: "constant",
418
+ name,
419
+ lineStart: topNode.startPosition.row + 1,
420
+ lineEnd: topNode.endPosition.row + 1,
421
+ language,
422
+ parent: parentClassName,
423
+ docstring: extractDocstring(inner, language),
424
+ exports: isExported(inner),
425
+ text: topNode.text
426
+ });
427
+ break;
428
+ }
429
+ default:
430
+ // other top-level node types are intentionally ignored
431
+ break;
433
+ }
434
+ }
435
+ }
436
+ walk(rootNode, null);
437
+ return nodes;
438
+ }
439
+ function extractPython(rootNode, _source, language) {
440
+ const nodes = [];
441
+ function walk(node, parentClassName) {
442
+ for (const child of node.namedChildren) {
443
+ switch (child.type) {
444
+ case "import_statement":
445
+ case "import_from_statement": {
446
+ nodes.push({
447
+ type: "import",
448
+ name: null,
449
+ lineStart: child.startPosition.row + 1,
450
+ lineEnd: child.endPosition.row + 1,
451
+ language,
452
+ parent: null,
453
+ text: child.text
454
+ });
455
+ break;
456
+ }
457
+ case "function_definition": {
458
+ const name = child.childForFieldName("name")?.text ?? null;
459
+ nodes.push({
460
+ type: parentClassName ? "method" : "function",
461
+ name,
462
+ lineStart: child.startPosition.row + 1,
463
+ lineEnd: child.endPosition.row + 1,
464
+ language,
465
+ parent: parentClassName,
466
+ params: extractParams(child, language),
467
+ returnType: extractReturnType(child, language),
468
+ docstring: extractDocstring(child, language),
469
+ text: child.text
470
+ });
471
+ break;
472
+ }
473
+ case "decorated_definition": {
474
+ const innerDef = child.namedChildren.find(
475
+ (c) => c.type === "function_definition" || c.type === "class_definition"
476
+ );
477
+ if (innerDef) {
478
+ const name = innerDef.childForFieldName("name")?.text ?? null;
479
+ if (innerDef.type === "function_definition") {
480
+ nodes.push({
481
+ type: parentClassName ? "method" : "function",
482
+ name,
483
+ lineStart: child.startPosition.row + 1,
484
+ lineEnd: child.endPosition.row + 1,
485
+ language,
486
+ parent: parentClassName,
487
+ params: extractParams(innerDef, language),
488
+ returnType: extractReturnType(innerDef, language),
489
+ docstring: extractDocstring(innerDef, language),
490
+ text: child.text
491
+ });
492
+ } else if (innerDef.type === "class_definition") {
493
+ nodes.push({
494
+ type: "class",
495
+ name,
496
+ lineStart: child.startPosition.row + 1,
497
+ lineEnd: child.endPosition.row + 1,
498
+ language,
499
+ parent: null,
500
+ docstring: extractDocstring(innerDef, language),
501
+ text: child.text
502
+ });
503
+ const body = innerDef.childForFieldName("body");
504
+ if (body) walk(body, name);
505
+ }
506
+ }
507
+ break;
508
+ }
509
+ case "class_definition": {
510
+ const name = child.childForFieldName("name")?.text ?? null;
511
+ nodes.push({
512
+ type: "class",
513
+ name,
514
+ lineStart: child.startPosition.row + 1,
515
+ lineEnd: child.endPosition.row + 1,
516
+ language,
517
+ parent: null,
518
+ docstring: extractDocstring(child, language),
519
+ text: child.text
520
+ });
521
+ const body = child.childForFieldName("body");
522
+ if (body) walk(body, name);
523
+ break;
524
+ }
525
+ case "expression_statement": {
526
+ const assignment = child.namedChildren.find(
527
+ (c) => c.type === "assignment"
528
+ );
529
+ if (assignment && parentClassName === null) {
530
+ const left = assignment.childForFieldName("left");
531
+ if (left?.type === "identifier") {
532
+ nodes.push({
533
+ type: "constant",
534
+ name: left.text,
535
+ lineStart: child.startPosition.row + 1,
536
+ lineEnd: child.endPosition.row + 1,
537
+ language,
538
+ parent: null,
539
+ text: child.text
540
+ });
541
+ }
542
+ }
543
+ break;
544
+ }
545
+ default:
546
+ break;
547
+ }
548
+ }
549
+ }
550
+ walk(rootNode, null);
551
+ return nodes;
552
+ }
553
+ async function parseFile(filePath, language) {
554
+ await initParser();
555
+ const lang = await getLanguage2(language);
556
+ if (!lang) return [];
557
+ let source;
558
+ try {
559
+ source = await fs3.readFile(filePath, "utf-8");
560
+ } catch {
561
+ return [];
562
+ }
563
+ const parser = new Parser();
564
+ parser.setLanguage(lang);
565
+ const tree = parser.parse(source);
566
+ if (!tree) return [];
567
+ try {
568
+ if (language === "python") {
569
+ return extractPython(tree.rootNode, source, language);
570
+ }
571
+ return extractTypeScript(tree.rootNode, source, language);
572
+ } finally {
573
+ tree.delete();
574
+ parser.delete();
575
+ }
576
+ }
577
+
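Only the three languages in `GRAMMAR_FILES` are parseable; `parseFile` returns an empty array for anything else, or when the file cannot be read. A usage sketch (the path is illustrative):

```js
const nodes = await parseFile("src/example.ts", "typescript");
// Each node: { type, name, lineStart, lineEnd, language, parent, text, ... }
// with type one of "import" | "function" | "method" | "class" | "type" | "constant".
const classes = nodes.filter((n) => n.type === "class");
```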
578
+ // src/indexer/chunker.ts
579
+ import { createHash as createHash2 } from "crypto";
580
+ var DEFAULT_MAX_TOKENS = 500;
581
+ var MERGE_THRESHOLD = 50;
582
+ var TOKEN_MULTIPLIER = 1.3;
583
+ function estimateTokens(text) {
584
+ const wordCount = text.split(/\s+/).filter((w) => w.length > 0).length;
585
+ return Math.ceil(wordCount * TOKEN_MULTIPLIER);
586
+ }
587
+ function makeChunkId(filePath, lineStart, lineEnd) {
588
+ const input = `${filePath}:${lineStart}:${lineEnd}`;
589
+ return createHash2("sha256").update(input).digest("hex").slice(0, 16);
590
+ }
591
+ function makeContentHash(text) {
592
+ return createHash2("sha256").update(text).digest("hex").slice(0, 16);
593
+ }
594
+ function splitLargeNode(node, maxTokens) {
595
+ const lines = node.text.split("\n");
596
+ const chunks = [];
597
+ let currentLines = [];
598
+ let currentStartOffset = 0;
599
+ for (let i = 0; i < lines.length; i++) {
600
+ currentLines.push(lines[i]);
601
+ const currentText = currentLines.join("\n");
602
+ const tokens = estimateTokens(currentText);
603
+ if (tokens >= maxTokens && currentLines.length > 1) {
604
+ currentLines.pop();
605
+ chunks.push({
606
+ lineStart: node.lineStart + currentStartOffset,
607
+ lineEnd: node.lineStart + currentStartOffset + currentLines.length - 1,
608
+ text: currentLines.join("\n")
609
+ });
610
+ currentStartOffset = i;
611
+ currentLines = [lines[i]];
612
+ }
613
+ }
614
+ if (currentLines.length > 0) {
615
+ chunks.push({
616
+ lineStart: node.lineStart + currentStartOffset,
617
+ lineEnd: node.lineStart + currentStartOffset + currentLines.length - 1,
618
+ text: currentLines.join("\n")
619
+ });
620
+ }
621
+ return chunks;
622
+ }
623
+ function groupImports(imports) {
624
+ if (imports.length === 0) return null;
625
+ const sorted = [...imports].sort((a, b) => a.lineStart - b.lineStart);
626
+ return {
627
+ type: "import",
628
+ name: null,
629
+ lineStart: sorted[0].lineStart,
630
+ lineEnd: sorted[sorted.length - 1].lineEnd,
631
+ language: sorted[0].language,
632
+ parent: null,
633
+ text: sorted.map((n) => n.text).join("\n")
634
+ };
635
+ }
636
+ var UNMERGEABLE_TYPES = /* @__PURE__ */ new Set([
637
+ "function",
638
+ "method",
639
+ "class",
640
+ "type",
641
+ "import"
642
+ ]);
643
+ function canMerge(a, b) {
644
+ if (UNMERGEABLE_TYPES.has(a.type) || UNMERGEABLE_TYPES.has(b.type)) return false;
645
+ if (a.type !== b.type) return false;
646
+ return true;
647
+ }
648
+ function mergeSmallChunks(chunks, maxTokens) {
649
+ if (chunks.length <= 1) return chunks;
650
+ const merged = [];
651
+ let accumulator = null;
652
+ for (const chunk of chunks) {
653
+ const chunkTokens = estimateTokens(chunk.text);
654
+ if (accumulator === null) {
655
+ if (chunkTokens < MERGE_THRESHOLD && !UNMERGEABLE_TYPES.has(chunk.type)) {
656
+ accumulator = { ...chunk };
657
+ } else {
658
+ merged.push(chunk);
659
+ }
660
+ continue;
661
+ }
662
+ const accTokens = estimateTokens(accumulator.text);
663
+ const combinedTokens = accTokens + chunkTokens;
664
+ if (chunkTokens < MERGE_THRESHOLD && combinedTokens <= maxTokens && canMerge(accumulator, chunk)) {
665
+ const combinedText = accumulator.text + "\n" + chunk.text;
666
+ accumulator = {
667
+ ...accumulator,
668
+ lineEnd: chunk.lineEnd,
669
+ text: combinedText,
670
+ name: accumulator.name ?? chunk.name,
671
+ id: makeChunkId(accumulator.filePath, accumulator.lineStart, chunk.lineEnd),
672
+ hash: makeContentHash(combinedText)
673
+ };
674
+ } else {
675
+ merged.push(accumulator);
676
+ accumulator = chunkTokens < MERGE_THRESHOLD && !UNMERGEABLE_TYPES.has(chunk.type) ? { ...chunk } : null;
677
+ if (accumulator === null) {
678
+ merged.push(chunk);
679
+ }
680
+ }
681
+ }
682
+ if (accumulator) {
683
+ merged.push(accumulator);
684
+ }
685
+ return merged;
686
+ }
687
+ function collectImportTexts(nodes) {
688
+ return nodes.filter((n) => n.type === "import").map((n) => n.text);
689
+ }
690
+ function chunkFile(nodes, filePath, options) {
691
+ if (nodes.length === 0) return [];
692
+ const maxTokens = options?.maxTokens ?? DEFAULT_MAX_TOKENS;
693
+ const language = nodes[0].language;
694
+ const importTexts = collectImportTexts(nodes);
695
+ const sorted = [...nodes].sort((a, b) => a.lineStart - b.lineStart);
696
+ const importNodes = sorted.filter((n) => n.type === "import");
697
+ const nonImportNodes = sorted.filter((n) => n.type !== "import");
698
+ const classesWithMethods = /* @__PURE__ */ new Set();
699
+ for (const node of nonImportNodes) {
700
+ if (node.type === "method" && node.parent) {
701
+ classesWithMethods.add(node.parent);
702
+ }
703
+ }
704
+ const rawChunks = [];
705
+ const groupedImport = groupImports(importNodes);
706
+ if (groupedImport) {
707
+ rawChunks.push({
708
+ id: makeChunkId(filePath, groupedImport.lineStart, groupedImport.lineEnd),
709
+ filePath,
710
+ lineStart: groupedImport.lineStart,
711
+ lineEnd: groupedImport.lineEnd,
712
+ language,
713
+ type: "import",
714
+ name: null,
715
+ parent: null,
716
+ text: groupedImport.text,
717
+ imports: [],
718
+ exports: false,
719
+ hash: makeContentHash(groupedImport.text)
720
+ });
721
+ }
722
+ for (const node of nonImportNodes) {
723
+ if (node.type === "class" && node.name && classesWithMethods.has(node.name)) {
724
+ continue;
725
+ }
726
+ const tokenCount = estimateTokens(node.text);
727
+ const nodeExports = node.exports ?? false;
728
+ if (tokenCount <= maxTokens) {
729
+ rawChunks.push({
730
+ id: makeChunkId(filePath, node.lineStart, node.lineEnd),
731
+ filePath,
732
+ lineStart: node.lineStart,
733
+ lineEnd: node.lineEnd,
734
+ language,
735
+ type: node.type === "export" ? "constant" : node.type,
736
+ name: node.name,
737
+ parent: node.parent,
738
+ text: node.text,
739
+ imports: node.type !== "import" ? importTexts : [],
740
+ exports: nodeExports,
741
+ hash: makeContentHash(node.text)
742
+ });
743
+ } else {
744
+ const subChunks = splitLargeNode(node, maxTokens);
745
+ for (const sub of subChunks) {
746
+ rawChunks.push({
747
+ id: makeChunkId(filePath, sub.lineStart, sub.lineEnd),
748
+ filePath,
749
+ lineStart: sub.lineStart,
750
+ lineEnd: sub.lineEnd,
751
+ language,
752
+ type: node.type === "export" ? "constant" : node.type,
753
+ name: node.name,
754
+ parent: node.parent,
755
+ text: sub.text,
756
+ imports: importTexts,
757
+ exports: nodeExports,
758
+ hash: makeContentHash(sub.text)
759
+ });
760
+ }
761
+ }
762
+ }
763
+ rawChunks.sort((a, b) => a.lineStart - b.lineStart);
764
+ return mergeSmallChunks(rawChunks, maxTokens);
765
+ }
766
+
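The token budget is a heuristic rather than a real tokenizer: a chunk's size is estimated as word count × 1.3, so a 400-word function counts as 520 "tokens" and is split at the default 500-token ceiling. A sketch of the step this module implements:

```js
const chunks = chunkFile(nodes, "src/example.ts", { maxTokens: 500 });
// - all imports collapse into one leading chunk
// - a class chunk is dropped when its methods are chunked individually
// - oversized nodes are split on line boundaries
// - only "constant" chunks under ~50 estimated tokens can merge
//   (UNMERGEABLE_TYPES blocks functions, methods, classes, types, imports)
estimateTokens("a b c"); // Math.ceil(3 * 1.3) === 4
```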
767
+ // src/indexer/embedder.ts
768
+ function normalizeVector(vec) {
769
+ let sumSq = 0;
770
+ for (const v of vec) sumSq += v * v;
771
+ const norm = Math.sqrt(sumSq);
772
+ if (norm === 0) return vec;
773
+ return vec.map((v) => v / norm);
774
+ }
775
+ function prepareChunkText(filePath, parent, text) {
776
+ const parts = [filePath];
777
+ if (parent) parts.push(parent);
778
+ parts.push(text);
779
+ return parts.join("\n");
780
+ }
781
+ var LOCAL_MODEL_ID = "Xenova/all-MiniLM-L6-v2";
782
+ var LOCAL_DIMENSIONS = 384;
783
+ var LOCAL_BATCH_SIZE = 32;
784
+ var pipelineInstance = null;
785
+ async function getLocalPipeline() {
786
+ if (pipelineInstance) return pipelineInstance;
787
+ const { pipeline, env } = await import("@huggingface/transformers");
788
+ env.cacheDir = getCacheDir();
789
+ pipelineInstance = await pipeline("feature-extraction", LOCAL_MODEL_ID, {
790
+ dtype: "fp32"
791
+ });
792
+ return pipelineInstance;
793
+ }
794
+ function getCacheDir() {
795
+ const home = process.env["HOME"] ?? process.env["USERPROFILE"] ?? "/tmp";
796
+ return `${home}/.cache/kontext/models`;
797
+ }
798
+ async function createLocalEmbedder() {
799
+ const pipe = await getLocalPipeline();
800
+ return {
801
+ name: "all-MiniLM-L6-v2",
802
+ dimensions: LOCAL_DIMENSIONS,
803
+ async embed(texts, onProgress) {
804
+ const results = [];
805
+ for (let i = 0; i < texts.length; i += LOCAL_BATCH_SIZE) {
806
+ const batch = texts.slice(i, i + LOCAL_BATCH_SIZE);
807
+ const output = await pipe(batch, {
808
+ pooling: "mean",
809
+ normalize: true
810
+ });
811
+ for (let j = 0; j < batch.length; j++) {
812
+ const offset = j * LOCAL_DIMENSIONS;
813
+ const vec = new Float32Array(
814
+ output.data.buffer,
815
+ output.data.byteOffset + offset * 4,
816
+ LOCAL_DIMENSIONS
817
+ );
818
+ results.push(normalizeVector(vec));
819
+ }
820
+ onProgress?.(Math.min(i + batch.length, texts.length), texts.length);
821
+ }
822
+ return results;
823
+ },
824
+ async embedSingle(text) {
825
+ const output = await pipe(text, {
826
+ pooling: "mean",
827
+ normalize: true
828
+ });
829
+ const vec = new Float32Array(
830
+ output.data.buffer,
831
+ output.data.byteOffset,
832
+ LOCAL_DIMENSIONS
833
+ );
834
+ return normalizeVector(vec);
835
+ }
836
+ };
837
+ }
838
+
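A sketch of the embedder contract: model weights are fetched once into `~/.cache/kontext/models` (per `getCacheDir`), and every vector comes back unit-length at 384 dimensions:

```js
const embedder = await createLocalEmbedder();
embedder.dimensions; // 384
const [vec] = await embedder.embed(
  ["function add(a, b) { return a + b; }"],
  (done, total) => console.log(`${done}/${total}`)
);
const queryVec = await embedder.embedSingle("add two numbers");
```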
839
+ // src/utils/errors.ts
840
+ var ErrorCode = {
841
+ NOT_INITIALIZED: "NOT_INITIALIZED",
842
+ INDEX_FAILED: "INDEX_FAILED",
843
+ PARSE_FAILED: "PARSE_FAILED",
844
+ CHUNK_FAILED: "CHUNK_FAILED",
845
+ EMBEDDER_FAILED: "EMBEDDER_FAILED",
846
+ SEARCH_FAILED: "SEARCH_FAILED",
847
+ CONFIG_INVALID: "CONFIG_INVALID",
848
+ DB_CORRUPTED: "DB_CORRUPTED",
849
+ DB_WRITE_FAILED: "DB_WRITE_FAILED",
850
+ WATCHER_FAILED: "WATCHER_FAILED",
851
+ LLM_FAILED: "LLM_FAILED"
852
+ };
853
+ var KontextError = class extends Error {
854
+ code;
855
+ constructor(message, code, cause) {
856
+ super(message, { cause });
857
+ this.name = "KontextError";
858
+ this.code = code;
859
+ }
860
+ };
861
+ var IndexError = class extends KontextError {
862
+ constructor(message, code, cause) {
863
+ super(message, code, cause);
864
+ this.name = "IndexError";
865
+ }
866
+ };
867
+ var SearchError = class extends KontextError {
868
+ constructor(message, code, cause) {
869
+ super(message, code, cause);
870
+ this.name = "SearchError";
871
+ }
872
+ };
873
+ var ConfigError = class extends KontextError {
874
+ constructor(message, code, cause) {
875
+ super(message, code, cause);
876
+ this.name = "ConfigError";
877
+ }
878
+ };
879
+
880
+ // src/utils/error-boundary.ts
881
+ function handleCommandError(err, logger, verbose) {
882
+ if (err instanceof KontextError) {
883
+ logger.error(`${err.message} [${err.code}]`);
884
+ if (verbose && err.cause) {
885
+ logger.debug("Cause:", String(err.cause));
886
+ }
887
+ return 1;
888
+ }
889
+ if (err instanceof Error) {
890
+ logger.error(`Unexpected error: ${err.message}`);
891
+ if (verbose && err.stack) {
892
+ logger.debug(err.stack);
893
+ }
894
+ } else {
895
+ logger.error(`Unexpected error: ${String(err)}`);
896
+ }
897
+ return 2;
898
+ }
899
+
900
+ // src/utils/logger.ts
901
+ var LogLevel = {
902
+ DEBUG: 0,
903
+ INFO: 1,
904
+ WARN: 2,
905
+ ERROR: 3,
906
+ SILENT: 4
907
+ };
908
+ function resolveLevel(options) {
909
+ if (options?.level !== void 0) return options.level;
910
+ if (process.env["CTX_DEBUG"] === "1") return LogLevel.DEBUG;
911
+ return LogLevel.INFO;
912
+ }
913
+ function formatArgs(args) {
914
+ return args.map((a) => typeof a === "string" ? a : String(a)).join(" ");
915
+ }
916
+ function write(level, msg, args) {
917
+ const extra = args.length > 0 ? ` ${formatArgs(args)}` : "";
918
+ process.stderr.write(`[${level}] ${msg}${extra}
919
+ `);
920
+ }
921
+ function createLogger(options) {
922
+ const minLevel = resolveLevel(options);
923
+ return {
924
+ debug(msg, ...args) {
925
+ if (minLevel <= LogLevel.DEBUG) write("debug", msg, args);
926
+ },
927
+ info(msg, ...args) {
928
+ if (minLevel <= LogLevel.INFO) write("info", msg, args);
929
+ },
930
+ warn(msg, ...args) {
931
+ if (minLevel <= LogLevel.WARN) write("warn", msg, args);
932
+ },
933
+ error(msg, ...args) {
934
+ if (minLevel <= LogLevel.ERROR) write("error", msg, args);
935
+ }
936
+ };
937
+ }
938
+
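All log output is written to stderr, keeping stdout free for command results; setting `CTX_DEBUG=1` lowers the threshold to DEBUG. For example:

```js
const log = createLogger();       // LogLevel.INFO unless CTX_DEBUG=1
log.debug("hidden at INFO");
log.info("indexed", 42, "files"); // stderr: "[info] indexed 42 files"
```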
939
+ // src/storage/db.ts
940
+ import path3 from "path";
941
+ import fs4 from "fs";
942
+ import BetterSqlite3 from "better-sqlite3";
943
+ import * as sqliteVec from "sqlite-vec";
944
+
945
+ // src/storage/schema.ts
946
+ var SCHEMA_VERSION = 1;
947
+ var SCHEMA_SQL = `
948
+ CREATE TABLE IF NOT EXISTS meta (
949
+ key TEXT PRIMARY KEY,
950
+ value TEXT
951
+ );
952
+
953
+ CREATE TABLE IF NOT EXISTS files (
954
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
955
+ path TEXT UNIQUE NOT NULL,
956
+ language TEXT NOT NULL,
957
+ hash TEXT NOT NULL,
958
+ last_indexed INTEGER NOT NULL,
959
+ size INTEGER NOT NULL
960
+ );
961
+
962
+ CREATE TABLE IF NOT EXISTS chunks (
963
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
964
+ file_id INTEGER NOT NULL REFERENCES files(id) ON DELETE CASCADE,
965
+ line_start INTEGER NOT NULL,
966
+ line_end INTEGER NOT NULL,
967
+ type TEXT NOT NULL,
968
+ name TEXT,
969
+ parent TEXT,
970
+ text TEXT NOT NULL,
971
+ imports JSON,
972
+ exports INTEGER DEFAULT 0,
973
+ hash TEXT NOT NULL
974
+ );
975
+
976
+ CREATE TABLE IF NOT EXISTS dependencies (
977
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
978
+ source_chunk_id INTEGER NOT NULL REFERENCES chunks(id) ON DELETE CASCADE,
979
+ target_chunk_id INTEGER NOT NULL REFERENCES chunks(id) ON DELETE CASCADE,
980
+ type TEXT NOT NULL
981
+ );
982
+
983
+ CREATE INDEX IF NOT EXISTS idx_chunks_file ON chunks(file_id);
984
+ CREATE INDEX IF NOT EXISTS idx_chunks_name ON chunks(name);
985
+ CREATE INDEX IF NOT EXISTS idx_deps_source ON dependencies(source_chunk_id);
986
+ CREATE INDEX IF NOT EXISTS idx_deps_target ON dependencies(target_chunk_id);
987
+ `;
988
+ var FTS_SQL = `
989
+ CREATE VIRTUAL TABLE IF NOT EXISTS chunks_fts USING fts5(
990
+ name, text, parent,
991
+ content=chunks,
992
+ content_rowid=id
993
+ );
994
+ `;
995
+ var FTS_TRIGGERS_SQL = `
996
+ CREATE TRIGGER IF NOT EXISTS chunks_fts_ai AFTER INSERT ON chunks BEGIN
997
+ INSERT INTO chunks_fts(rowid, name, text, parent)
998
+ VALUES (new.id, new.name, new.text, new.parent);
999
+ END;
1000
+
1001
+ CREATE TRIGGER IF NOT EXISTS chunks_fts_ad AFTER DELETE ON chunks BEGIN
1002
+ INSERT INTO chunks_fts(chunks_fts, rowid, name, text, parent)
1003
+ VALUES ('delete', old.id, old.name, old.text, old.parent);
1004
+ END;
1005
+
1006
+ CREATE TRIGGER IF NOT EXISTS chunks_fts_au AFTER UPDATE ON chunks BEGIN
1007
+ INSERT INTO chunks_fts(chunks_fts, rowid, name, text, parent)
1008
+ VALUES ('delete', old.id, old.name, old.text, old.parent);
1009
+ INSERT INTO chunks_fts(rowid, name, text, parent)
1010
+ VALUES (new.id, new.name, new.text, new.parent);
1011
+ END;
1012
+ `;
1013
+ var VECTOR_TABLE_SQL = (dimensions) => `CREATE VIRTUAL TABLE IF NOT EXISTS chunk_vectors USING vec0(
1014
+ embedding float[${dimensions}]
1015
+ );`;
1016
+
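`chunks_fts` is an external-content FTS5 table (`content=chunks`), so it stores no copy of the text; the triggers keep the index in sync, and the `INSERT ... VALUES ('delete', old.id, ...)` form is FTS5's required idiom for removing a row from such a table. A quick consistency check one could run against the resulting database (sketch):

```js
// Row counts should agree if the sync triggers have fired correctly.
const chunkCount = db.prepare("SELECT COUNT(*) AS c FROM chunks").get().c;
const ftsCount = db.prepare("SELECT COUNT(*) AS c FROM chunks_fts").get().c;
console.assert(chunkCount === ftsCount);
```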
1017
+ // src/storage/vectors.ts
1018
+ function vecToBuffer(vec) {
1019
+ return Buffer.from(vec.buffer, vec.byteOffset, vec.byteLength);
1020
+ }
1021
+ function insertVector(db, chunkId, vector) {
1022
+ db.prepare(
1023
+ `INSERT INTO chunk_vectors(rowid, embedding) VALUES (${chunkId}, ?)`
1024
+ ).run(vecToBuffer(vector));
1025
+ }
1026
+ function deleteVectorsByChunkIds(db, chunkIds) {
1027
+ if (chunkIds.length === 0) return;
1028
+ const placeholders = chunkIds.map(() => "?").join(",");
1029
+ db.prepare(
1030
+ `DELETE FROM chunk_vectors WHERE rowid IN (${placeholders})`
1031
+ ).run(...chunkIds);
1032
+ }
1033
+ function getVectorCount(db) {
1034
+ const row = db.prepare("SELECT COUNT(*) as count FROM chunk_vectors").get();
1035
+ return row.count;
1036
+ }
1037
+ function searchVectors(db, query, limit) {
1038
+ const rows = db.prepare(
1039
+ `SELECT rowid, distance
1040
+ FROM chunk_vectors
1041
+ WHERE embedding MATCH ?
1042
+ AND k = ${limit}
1043
+ ORDER BY distance`
1044
+ ).all(vecToBuffer(query));
1045
+ return rows.map((r) => ({
1046
+ chunkId: r.rowid,
1047
+ distance: r.distance
1048
+ }));
1049
+ }
1050
+
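`searchVectors` uses sqlite-vec's KNN query form: `embedding MATCH ?` with a `k = N` constraint returns the `N` nearest rows by vector distance. Sketch, assuming a 384-dimensional query:

```js
const queryVec = new Float32Array(384); // e.g. from embedder.embedSingle(...)
const hits = searchVectors(db, queryVec, 10);
// [{ chunkId, distance }, ...] ordered by ascending distance
```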
1051
+ // src/storage/db.ts
1052
+ var DEFAULT_DIMENSIONS = 384;
1053
+ function createDatabase(dbPath, dimensions = DEFAULT_DIMENSIONS) {
1054
+ const dir = path3.dirname(dbPath);
1055
+ if (!fs4.existsSync(dir)) {
1056
+ fs4.mkdirSync(dir, { recursive: true });
1057
+ }
1058
+ const db = new BetterSqlite3(dbPath);
1059
+ db.pragma("journal_mode = WAL");
1060
+ db.pragma("foreign_keys = ON");
1061
+ sqliteVec.load(db);
1062
+ initializeSchema(db, dimensions);
1063
+ const stmtUpsertFile = db.prepare(`
1064
+ INSERT INTO files (path, language, hash, last_indexed, size)
1065
+ VALUES (@path, @language, @hash, @lastIndexed, @size)
1066
+ ON CONFLICT(path) DO UPDATE SET
1067
+ language = excluded.language,
1068
+ hash = excluded.hash,
1069
+ last_indexed = excluded.last_indexed,
1070
+ size = excluded.size
1071
+ `);
1072
+ const stmtGetFile = db.prepare(
1073
+ "SELECT id, path, language, hash, last_indexed as lastIndexed, size FROM files WHERE path = ?"
1074
+ );
1075
+ const stmtDeleteFile = db.prepare("DELETE FROM files WHERE path = ?");
1076
+ const stmtInsertChunk = db.prepare(`
1077
+ INSERT INTO chunks (file_id, line_start, line_end, type, name, parent, text, imports, exports, hash)
1078
+ VALUES (@fileId, @lineStart, @lineEnd, @type, @name, @parent, @text, @imports, @exports, @hash)
1079
+ `);
1080
+ const stmtGetChunksByFile = db.prepare(
1081
+ "SELECT id, file_id as fileId, line_start as lineStart, line_end as lineEnd, type, name, parent, text, imports, exports, hash FROM chunks WHERE file_id = ? ORDER BY line_start"
1082
+ );
1083
+ const stmtGetChunkIdsByFile = db.prepare(
1084
+ "SELECT id FROM chunks WHERE file_id = ?"
1085
+ );
1086
+ const stmtDeleteChunksByFile = db.prepare(
1087
+ "DELETE FROM chunks WHERE file_id = ?"
1088
+ );
1089
+ const stmtSearchFTS = db.prepare(
1090
+ "SELECT rowid as chunkId, name, rank FROM chunks_fts WHERE chunks_fts MATCH ? ORDER BY rank LIMIT ?"
1091
+ );
1092
+ const stmtGetAllFiles = db.prepare(
1093
+ "SELECT id, path, language, hash, last_indexed as lastIndexed, size FROM files"
1094
+ );
1095
+ const stmtInsertDep = db.prepare(
1096
+ "INSERT INTO dependencies (source_chunk_id, target_chunk_id, type) VALUES (?, ?, ?)"
1097
+ );
1098
+ const stmtGetDeps = db.prepare(
1099
+ "SELECT target_chunk_id as targetChunkId, type FROM dependencies WHERE source_chunk_id = ?"
1100
+ );
1101
+ const stmtGetReverseDeps = db.prepare(
1102
+ "SELECT source_chunk_id as sourceChunkId, type FROM dependencies WHERE target_chunk_id = ?"
1103
+ );
1104
+ const stmtFileCount = db.prepare("SELECT COUNT(*) as count FROM files");
1105
+ const stmtChunkCount = db.prepare("SELECT COUNT(*) as count FROM chunks");
1106
+ const stmtLanguageBreakdown = db.prepare(
1107
+ "SELECT language, COUNT(*) as count FROM files GROUP BY language ORDER BY count DESC"
1108
+ );
1109
+ const stmtLastIndexed = db.prepare(
1110
+ "SELECT MAX(last_indexed) as lastIndexed FROM files"
1111
+ );
1112
+ return {
1113
+ upsertFile(file) {
1114
+ stmtUpsertFile.run({
1115
+ path: file.path,
1116
+ language: file.language,
1117
+ hash: file.hash,
1118
+ lastIndexed: Date.now(),
1119
+ size: file.size
1120
+ });
1121
+ // lastInsertRowid is only set by the INSERT arm of this upsert; after the
1122
+ // ON CONFLICT UPDATE arm it can hold a stale row id, so resolve by path.
1124
+ const existing = stmtGetFile.get(file.path);
1125
+ return existing?.id ?? 0;
1126
+ },
1127
+ getFile(filePath) {
1128
+ const row = stmtGetFile.get(filePath);
1129
+ return row ?? null;
1130
+ },
1131
+ getFilesByHash(hashes) {
1132
+ const result = /* @__PURE__ */ new Map();
1133
+ const allFiles = stmtGetAllFiles.all();
1134
+ for (const file of allFiles) {
1135
+ const expectedHash = hashes.get(file.path);
1136
+ if (expectedHash !== void 0 && expectedHash === file.hash) {
1137
+ result.set(file.path, file);
1138
+ }
1139
+ }
1140
+ return result;
1141
+ },
1142
+ getAllFilePaths() {
1143
+ const rows = stmtGetAllFiles.all();
1144
+ return rows.map((r) => r.path);
1145
+ },
1146
+ getFileCount() {
1147
+ return stmtFileCount.get().count;
1148
+ },
1149
+ getChunkCount() {
1150
+ return stmtChunkCount.get().count;
1151
+ },
1152
+ getVectorCount() {
1153
+ return getVectorCount(db);
1154
+ },
1155
+ getLanguageBreakdown() {
1156
+ const rows = stmtLanguageBreakdown.all();
1157
+ const map = /* @__PURE__ */ new Map();
1158
+ for (const row of rows) {
1159
+ map.set(row.language, row.count);
1160
+ }
1161
+ return map;
1162
+ },
1163
+ getLastIndexed() {
1164
+ const row = stmtLastIndexed.get();
1165
+ return row.lastIndexed;
1166
+ },
1167
+ deleteFile(filePath) {
1168
+ const file = stmtGetFile.get(filePath);
1169
+ if (file) {
1170
+ const chunkRows = stmtGetChunkIdsByFile.all(file.id);
1171
+ const chunkIds = chunkRows.map((r) => r.id);
1172
+ if (chunkIds.length > 0) {
1173
+ deleteVectorsByChunkIds(db, chunkIds);
1174
+ }
1175
+ }
1176
+ stmtDeleteFile.run(filePath);
1177
+ },
1178
+ insertChunks(fileId, chunks) {
1179
+ const ids = [];
1180
+ for (const chunk of chunks) {
1181
+ const result = stmtInsertChunk.run({
1182
+ fileId,
1183
+ lineStart: chunk.lineStart,
1184
+ lineEnd: chunk.lineEnd,
1185
+ type: chunk.type,
1186
+ name: chunk.name,
1187
+ parent: chunk.parent,
1188
+ text: chunk.text,
1189
+ imports: JSON.stringify(chunk.imports),
1190
+ exports: chunk.exports ? 1 : 0,
1191
+ hash: chunk.hash
1192
+ });
1193
+ ids.push(Number(result.lastInsertRowid));
1194
+ }
1195
+ return ids;
1196
+ },
1197
+ getChunksByFile(fileId) {
1198
+ const rows = stmtGetChunksByFile.all(fileId);
1199
+ return rows.map((r) => ({
1200
+ ...r,
1201
+ imports: JSON.parse(r.imports),
1202
+ exports: r.exports === 1
1203
+ }));
1204
+ },
1205
+ getChunksByIds(ids) {
1206
+ if (ids.length === 0) return [];
1207
+ const placeholders = ids.map(() => "?").join(",");
1208
+ const rows = db.prepare(
1209
+ `SELECT c.id, c.file_id as fileId, f.path as filePath, f.language,
1210
+ c.line_start as lineStart, c.line_end as lineEnd,
1211
+ c.type, c.name, c.parent, c.text
1212
+ FROM chunks c
1213
+ JOIN files f ON f.id = c.file_id
1214
+ WHERE c.id IN (${placeholders})`
1215
+ ).all(...ids);
1216
+ return rows;
1217
+ },
1218
+ searchChunks(filters, limit) {
1219
+ const conditions = [];
1220
+ const params = [];
1221
+ if (filters.name) {
1222
+ switch (filters.nameMode ?? "contains") {
1223
+ case "exact":
1224
+ conditions.push("c.name = ?");
1225
+ params.push(filters.name);
1226
+ break;
1227
+ case "prefix":
1228
+ conditions.push("c.name LIKE ? || '%'");
1229
+ params.push(filters.name);
1230
+ break;
1231
+ case "contains":
1232
+ conditions.push("c.name LIKE '%' || ? || '%'");
1233
+ params.push(filters.name);
1234
+ break;
1235
+ }
1236
+ }
1237
+ if (filters.type) {
1238
+ conditions.push("c.type = ?");
1239
+ params.push(filters.type);
1240
+ }
1241
+ if (filters.parent) {
1242
+ conditions.push("c.parent = ?");
1243
+ params.push(filters.parent);
1244
+ }
1245
+ if (filters.language) {
1246
+ conditions.push("f.language = ?");
1247
+ params.push(filters.language);
1248
+ }
1249
+ const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1250
+ const sql = `
1251
+ SELECT c.id, c.file_id as fileId, f.path as filePath, f.language,
1252
+ c.line_start as lineStart, c.line_end as lineEnd,
1253
+ c.type, c.name, c.parent, c.text
1254
+ FROM chunks c
1255
+ JOIN files f ON f.id = c.file_id
1256
+ ${where}
1257
+ ORDER BY c.name, c.line_start
1258
+ LIMIT ?
1259
+ `;
1260
+ params.push(limit);
1261
+ return db.prepare(sql).all(...params);
1262
+ },
1263
+ deleteChunksByFile(fileId) {
1264
+ const chunkRows = stmtGetChunkIdsByFile.all(fileId);
1265
+ const chunkIds = chunkRows.map((r) => r.id);
1266
+ if (chunkIds.length > 0) {
1267
+ deleteVectorsByChunkIds(db, chunkIds);
1268
+ }
1269
+ stmtDeleteChunksByFile.run(fileId);
1270
+ },
1271
+ insertDependency(sourceChunkId, targetChunkId, type) {
1272
+ stmtInsertDep.run(sourceChunkId, targetChunkId, type);
1273
+ },
1274
+ getDependencies(chunkId) {
1275
+ return stmtGetDeps.all(chunkId);
1276
+ },
1277
+ getReverseDependencies(chunkId) {
1278
+ return stmtGetReverseDeps.all(chunkId);
1279
+ },
1280
+ insertVector(chunkId, vector) {
1281
+ insertVector(db, chunkId, vector);
1282
+ },
1283
+ searchVectors(query, limit) {
1284
+ return searchVectors(db, query, limit);
1285
+ },
1286
+ searchFTS(query, limit) {
1287
+ const rows = stmtSearchFTS.all(query, limit);
1288
+ return rows;
1289
+ },
1290
+ transaction(fn) {
1291
+ return db.transaction(fn)();
1292
+ },
1293
+ vacuum() {
1294
+ db.exec("VACUUM");
1295
+ },
1296
+ close() {
1297
+ db.close();
1298
+ },
1299
+ getSchemaVersion() {
1300
+ const row = db.prepare("SELECT value FROM meta WHERE key = 'schema_version'").get();
1301
+ return row ? parseInt(row.value, 10) : 0;
1302
+ },
1303
+ pragma(key) {
1304
+ const result = db.pragma(key);
1305
+ if (Array.isArray(result) && result.length > 0) {
1306
+ return Object.values(result[0])[0];
1307
+ }
1308
+ return String(result);
1309
+ }
1310
+ };
1311
+ }
1312
+ function initializeSchema(db, dimensions) {
1313
+ const currentVersion = getMetaVersion(db);
1314
+ if (currentVersion >= SCHEMA_VERSION) return;
1315
+ db.exec(SCHEMA_SQL);
1316
+ db.exec(VECTOR_TABLE_SQL(dimensions));
1317
+ db.exec(FTS_SQL);
1318
+ db.exec(FTS_TRIGGERS_SQL);
1319
+ db.prepare(
1320
+ "INSERT OR REPLACE INTO meta (key, value) VALUES ('schema_version', ?)"
1321
+ ).run(String(SCHEMA_VERSION));
1322
+ }
1323
+ function getMetaVersion(db) {
1324
+ try {
1325
+ const row = db.prepare("SELECT value FROM meta WHERE key = 'schema_version'").get();
1326
+ return row ? parseInt(row.value, 10) : 0;
1327
+ } catch {
1328
+ return 0;
1329
+ }
1330
+ }
1331
+
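A sketch tying the storage API together (paths and hashes illustrative):

```js
const db = createDatabase(".ctx/index.db", 384);
const fileId = db.upsertFile({
  path: "src/a.ts", language: "typescript", hash: "abc123", size: 120,
});
db.transaction(() => {
  const [chunkId] = db.insertChunks(fileId, [{
    lineStart: 1, lineEnd: 1, type: "function", name: "add", parent: null,
    text: "export function add(a, b) { return a + b; }",
    imports: [], exports: true, hash: "def456",
  }]);
  db.insertVector(chunkId, new Float32Array(384));
});
db.close();
```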
1332
+ // src/cli/commands/init.ts
1333
+ var CTX_DIR = ".ctx";
1334
+ var DB_FILENAME = "index.db";
1335
+ var CONFIG_FILENAME = "config.json";
1336
+ var GITIGNORE_ENTRY = ".ctx/";
1337
+ function ensureGitignore(projectRoot) {
1338
+ const gitignorePath = path4.join(projectRoot, ".gitignore");
1339
+ if (fs5.existsSync(gitignorePath)) {
1340
+ const content = fs5.readFileSync(gitignorePath, "utf-8");
1341
+ if (content.includes(GITIGNORE_ENTRY)) return;
1342
+ const suffix = content.endsWith("\n") ? "" : "\n";
1343
+ fs5.writeFileSync(gitignorePath, `${content}${suffix}${GITIGNORE_ENTRY}
1344
+ `);
1345
+ } else {
1346
+ fs5.writeFileSync(gitignorePath, `${GITIGNORE_ENTRY}
1347
+ `);
1348
+ }
1349
+ }
1350
+ function ensureConfig(ctxDir) {
1351
+ const configPath2 = path4.join(ctxDir, CONFIG_FILENAME);
1352
+ if (fs5.existsSync(configPath2)) return;
1353
+ const config = {
1354
+ version: 1,
1355
+ dimensions: 384,
1356
+ model: "all-MiniLM-L6-v2"
1357
+ };
1358
+ fs5.writeFileSync(configPath2, JSON.stringify(config, null, 2) + "\n");
1359
+ }
1360
+ function formatDuration(ms) {
1361
+ if (ms < 1e3) return `${Math.round(ms)}ms`;
1362
+ return `${(ms / 1e3).toFixed(1)}s`;
1363
+ }
1364
+ function formatBytes(bytes) {
1365
+ if (bytes < 1024) return `${bytes} B`;
1366
+ if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
1367
+ return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
1368
+ }
1369
+ function formatLanguageSummary(counts) {
1370
+ const entries = [...counts.entries()].sort((a, b) => b[1] - a[1]).map(([lang, count]) => `${lang}: ${count}`);
1371
+ return entries.join(", ");
1372
+ }
1373
+ async function runInit(projectPath, options = {}) {
1374
+ const log = options.log ?? console.log;
1375
+ const absoluteRoot = path4.resolve(projectPath);
1376
+ const start = performance.now();
1377
+ log(`Indexing ${absoluteRoot}...`);
1378
+ const ctxDir = path4.join(absoluteRoot, CTX_DIR);
1379
+ if (!fs5.existsSync(ctxDir)) fs5.mkdirSync(ctxDir, { recursive: true });
1380
+ ensureGitignore(absoluteRoot);
1381
+ ensureConfig(ctxDir);
1382
+ const dbPath = path4.join(ctxDir, DB_FILENAME);
1383
+ const db = createDatabase(dbPath);
1384
+ try {
1385
+ const discovered = await discoverFiles({
1386
+ root: absoluteRoot,
1387
+ extraIgnore: [".ctx/"]
1388
+ });
1389
+ const languageCounts = /* @__PURE__ */ new Map();
1390
+ for (const file of discovered) {
1391
+ languageCounts.set(
1392
+ file.language,
1393
+ (languageCounts.get(file.language) ?? 0) + 1
1394
+ );
1395
+ }
1396
+ log(
1397
+ ` Discovered ${discovered.length} files` + (discovered.length > 0 ? ` (${formatLanguageSummary(languageCounts)})` : "")
1398
+ );
1399
+ const changes = await computeChanges(discovered, db);
1400
+ const filesToProcess = [
1401
+ ...changes.added.map((p) => ({ path: p, reason: "added" })),
1402
+ ...changes.modified.map((p) => ({ path: p, reason: "modified" }))
1403
+ ];
1404
+ if (changes.unchanged.length > 0) {
1405
+ log(` ${changes.unchanged.length} unchanged files skipped`);
1406
+ }
1407
+ if (changes.deleted.length > 0) {
1408
+ log(` ${changes.deleted.length} deleted files removed`);
1409
+ }
1410
+ if (changes.added.length > 0) {
1411
+ log(` ${changes.added.length} new files to index`);
1412
+ }
1413
+ if (changes.modified.length > 0) {
1414
+ log(` ${changes.modified.length} modified files to re-index`);
1415
+ }
1416
+ for (const deletedPath of changes.deleted) {
1417
+ db.deleteFile(deletedPath);
1418
+ }
1419
+ await initParser();
1420
+ const allChunksWithMeta = [];
1421
+ let filesProcessed = 0;
1422
+ for (const { path: relPath } of filesToProcess) {
1423
+ const discovered_file = discovered.find((f) => f.path === relPath);
1424
+ if (!discovered_file) continue;
1425
+ const existingFile = db.getFile(relPath);
1426
+ if (existingFile) {
1427
+ db.deleteChunksByFile(existingFile.id);
1428
+ }
1429
+ let nodes;
1430
+ try {
1431
+ nodes = await parseFile(discovered_file.absolutePath, discovered_file.language);
1432
+ } catch {
1433
+ log(` \u26A0 Skipping ${relPath} (parse error)`);
1434
+ continue;
1435
+ }
1436
+ const chunks = chunkFile(nodes, relPath);
1437
+ const fileId = db.upsertFile({
1438
+ path: relPath,
1439
+ language: discovered_file.language,
1440
+ hash: changes.hashes.get(relPath) ?? "",
1441
+ size: discovered_file.size
1442
+ });
1443
+ const chunkIds = db.insertChunks(
1444
+ fileId,
1445
+ chunks.map((c) => ({
1446
+ lineStart: c.lineStart,
1447
+ lineEnd: c.lineEnd,
1448
+ type: c.type,
1449
+ name: c.name,
1450
+ parent: c.parent,
1451
+ text: c.text,
1452
+ imports: c.imports,
1453
+ exports: c.exports,
1454
+ hash: c.hash
1455
+ }))
1456
+ );
1457
+ for (let i = 0; i < chunks.length; i++) {
1458
+ allChunksWithMeta.push({
1459
+ fileRelPath: relPath,
1460
+ chunk: { ...chunks[i], id: String(chunkIds[i]) }
1461
+ });
1462
+ }
1463
+ filesProcessed++;
1464
+ if (filesProcessed % 50 === 0 || filesProcessed === filesToProcess.length) {
1465
+ log(` Parsing... ${filesProcessed}/${filesToProcess.length}`);
1466
+ }
1467
+ }
1468
+ log(` ${allChunksWithMeta.length} chunks created`);
1469
+ let vectorsCreated = 0;
1470
+ if (!options.skipEmbedding && allChunksWithMeta.length > 0) {
1471
+ const embedder = await createEmbedder();
1472
+ const texts = allChunksWithMeta.map(
1473
+ (cm) => prepareChunkText(cm.fileRelPath, cm.chunk.parent, cm.chunk.text)
1474
+ );
1475
+ const vectors = await embedder.embed(texts, (done, total) => {
1476
+ log(` Embedding... ${done}/${total}`);
1477
+ });
1478
+ db.transaction(() => {
1479
+ for (let i = 0; i < allChunksWithMeta.length; i++) {
1480
+ const chunkDbId = parseInt(allChunksWithMeta[i].chunk.id, 10);
1481
+ db.insertVector(chunkDbId, vectors[i]);
1482
+ }
1483
+ });
1484
+ vectorsCreated = vectors.length;
1485
+ }
1486
+ const durationMs = performance.now() - start;
1487
+ const dbSize = fs5.existsSync(dbPath) ? fs5.statSync(dbPath).size : 0;
1488
+ log("");
1489
+ log(`\u2713 Indexed in ${formatDuration(durationMs)}`);
1490
+ log(
1491
+ ` ${discovered.length} files \u2192 ${allChunksWithMeta.length} chunks` + (vectorsCreated > 0 ? ` \u2192 ${vectorsCreated} vectors` : "")
1492
+ );
1493
+ log(` Database: ${CTX_DIR}/${DB_FILENAME} (${formatBytes(dbSize)})`);
1494
+ return {
1495
+ filesDiscovered: discovered.length,
1496
+ filesAdded: changes.added.length,
1497
+ filesModified: changes.modified.length,
1498
+ filesDeleted: changes.deleted.length,
1499
+ filesUnchanged: changes.unchanged.length,
1500
+ chunksCreated: allChunksWithMeta.length,
1501
+ vectorsCreated,
1502
+ durationMs,
1503
+ languageCounts
1504
+ };
1505
+ } finally {
1506
+ db.close();
1507
+ }
1508
+ }
1509
+ async function createEmbedder() {
1510
+ return createLocalEmbedder();
1511
+ }
1512
+ function registerInitCommand(program2) {
1513
+ program2.command("init [path]").description("Index current directory or specified path").action(async (inputPath) => {
1514
+ const projectPath = inputPath ?? process.cwd();
1515
+ const verbose = program2.opts()["verbose"] === true;
1516
+ const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });
1517
+ try {
1518
+ await runInit(projectPath);
1519
+ } catch (err) {
1520
+ const wrapped = err instanceof IndexError ? err : new IndexError(
1521
+ err instanceof Error ? err.message : String(err),
1522
+ ErrorCode.INDEX_FAILED,
1523
+ err instanceof Error ? err : void 0
1524
+ );
1525
+ process.exitCode = handleCommandError(wrapped, logger, verbose);
1526
+ }
1527
+ });
1528
+ }
1529
+
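`runInit` is callable programmatically as well; the Commander wrapper above only adds error mapping and exit codes. A sketch:

```js
const summary = await runInit(process.cwd(), {
  skipEmbedding: true, // parse and chunk, but skip the embedding pass
  log: () => {},       // silence progress output
});
// { filesDiscovered, filesAdded, filesModified, filesDeleted, filesUnchanged,
//   chunksCreated, vectorsCreated, durationMs, languageCounts }
```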
1530
+ // src/cli/commands/query.ts
1531
+ import fs6 from "fs";
1532
+ import path5 from "path";
1533
+
1534
+ // src/search/vector.ts
1535
+ function distanceToScore(distance) {
1536
+ return 1 / (1 + distance);
1537
+ }
1538
+ async function vectorSearch(db, embedder, query, limit, filters) {
1539
+ const queryVec = await embedder.embedSingle(query);
1540
+ const fetchLimit = filters?.language ? limit * 3 : limit;
1541
+ const vectorResults = db.searchVectors(queryVec, fetchLimit);
1542
+ if (vectorResults.length === 0) return [];
1543
+ const chunkIds = vectorResults.map((r) => r.chunkId);
1544
+ const chunks = db.getChunksByIds(chunkIds);
1545
+ const chunkMap = /* @__PURE__ */ new Map();
1546
+ for (const chunk of chunks) {
1547
+ chunkMap.set(chunk.id, chunk);
1548
+ }
1549
+ const results = [];
1550
+ for (const vr of vectorResults) {
1551
+ const chunk = chunkMap.get(vr.chunkId);
1552
+ if (!chunk) continue;
1553
+ if (filters?.language && chunk.language !== filters.language) continue;
1554
+ results.push({
1555
+ chunkId: vr.chunkId,
1556
+ filePath: chunk.filePath,
1557
+ lineStart: chunk.lineStart,
1558
+ lineEnd: chunk.lineEnd,
1559
+ name: chunk.name,
1560
+ type: chunk.type,
1561
+ text: chunk.text,
1562
+ score: distanceToScore(vr.distance),
1563
+ language: chunk.language
1564
+ });
1565
+ }
1566
+ results.sort((a, b) => b.score - a.score);
1567
+ return results.slice(0, limit);
1568
+ }
1569
+
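`distanceToScore` maps distance 0 to score 1 and decays toward 0; the FTS strategy below applies the same shape to BM25 ranks. Worked values:

```js
distanceToScore(0); // 1
distanceToScore(1); // 0.5
distanceToScore(3); // 0.25
```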
1570
+ // src/search/fts.ts
1571
+ function bm25ToScore(rank) {
1572
+ return 1 / (1 + Math.abs(rank));
1573
+ }
1574
+ function ftsSearch(db, query, limit, filters) {
1575
+ const fetchLimit = filters?.language ? limit * 3 : limit;
1576
+ const ftsResults = db.searchFTS(query, fetchLimit);
1577
+ if (ftsResults.length === 0) return [];
1578
+ const chunkIds = ftsResults.map((r) => r.chunkId);
1579
+ const chunks = db.getChunksByIds(chunkIds);
1580
+ const chunkMap = /* @__PURE__ */ new Map();
1581
+ for (const chunk of chunks) {
1582
+ chunkMap.set(chunk.id, chunk);
1583
+ }
1584
+ const results = [];
1585
+ for (const fts of ftsResults) {
1586
+ const chunk = chunkMap.get(fts.chunkId);
1587
+ if (!chunk) continue;
1588
+ if (filters?.language && chunk.language !== filters.language) continue;
1589
+ results.push({
1590
+ chunkId: fts.chunkId,
1591
+ filePath: chunk.filePath,
1592
+ lineStart: chunk.lineStart,
1593
+ lineEnd: chunk.lineEnd,
1594
+ name: chunk.name,
1595
+ type: chunk.type,
1596
+ text: chunk.text,
1597
+ score: bm25ToScore(fts.rank),
1598
+ language: chunk.language
1599
+ });
1600
+ }
1601
+ results.sort((a, b) => b.score - a.score);
1602
+ return results.slice(0, limit);
1603
+ }
1604
+
1605
+ // src/search/ast.ts
1606
+ var SCORE_EXACT = 1;
1607
+ var SCORE_PREFIX = 0.8;
1608
+ var SCORE_FUZZY = 0.5;
1609
+ function astSearch(db, filters, limit) {
1610
+ const matchMode = filters.matchMode ?? "fuzzy";
1611
+ const nameMode = matchMode === "exact" ? "exact" : matchMode === "prefix" ? "prefix" : "contains";
1612
+ const score = matchMode === "exact" ? SCORE_EXACT : matchMode === "prefix" ? SCORE_PREFIX : SCORE_FUZZY;
1613
+ const chunks = db.searchChunks(
1614
+ {
1615
+ name: filters.name,
1616
+ nameMode,
1617
+ type: filters.type,
1618
+ parent: filters.parent,
1619
+ language: filters.language
1620
+ },
1621
+ limit
1622
+ );
1623
+ return chunks.map((chunk) => ({
1624
+ chunkId: chunk.id,
1625
+ filePath: chunk.filePath,
1626
+ lineStart: chunk.lineStart,
1627
+ lineEnd: chunk.lineEnd,
1628
+ name: chunk.name,
1629
+ type: chunk.type,
1630
+ text: chunk.text,
1631
+ score,
1632
+ language: chunk.language
1633
+ }));
1634
+ }
1635
+
1636
+ // src/search/path.ts
1637
+ function globToRegExp(pattern) {
1638
+ let re = "";
1639
+ let i = 0;
1640
+ while (i < pattern.length) {
1641
+ const ch = pattern[i];
1642
+ if (ch === "*" && pattern[i + 1] === "*") {
1643
+ re += ".*";
1644
+ i += 2;
1645
+ if (pattern[i] === "/") i++;
1646
+ } else if (ch === "*") {
1647
+ re += "[^/]*";
1648
+ i++;
1649
+ } else if (ch === "?") {
1650
+ re += "[^/]";
1651
+ i++;
1652
+ } else if (".+^${}()|[]\\".includes(ch)) {
1653
+ re += "\\" + ch;
1654
+ i++;
1655
+ } else {
1656
+ re += ch;
1657
+ i++;
1658
+ }
1659
+ }
1660
+ return new RegExp(`^${re}$`);
1661
+ }
1662
+ function pathSearch(db, pattern, limit) {
1663
+ const allPaths = db.getAllFilePaths();
1664
+ const regex = globToRegExp(pattern);
1665
+ const matchingPaths = allPaths.filter((p) => regex.test(p));
1666
+ if (matchingPaths.length === 0) return [];
1667
+ const results = [];
1668
+ for (const filePath of matchingPaths) {
1669
+ if (results.length >= limit) break;
1670
+ const file = db.getFile(filePath);
1671
+ if (!file) continue;
1672
+ const chunks = db.getChunksByFile(file.id);
1673
+ for (const chunk of chunks) {
1674
+ if (results.length >= limit) break;
1675
+ results.push({
1676
+ chunkId: chunk.id,
1677
+ filePath: file.path,
1678
+ lineStart: chunk.lineStart,
1679
+ lineEnd: chunk.lineEnd,
1680
+ name: chunk.name,
1681
+ type: chunk.type,
1682
+ text: chunk.text,
1683
+ score: 1,
1684
+ language: file.language
1685
+ });
1686
+ }
1687
+ }
1688
+ return results;
1689
+ }
1690
+
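`globToRegExp` translates `**` to "any depth, possibly none", `*` to "within one path segment", `?` to a single non-slash character, and escapes everything regex-special. For example:

```js
globToRegExp("src/**/*.ts").test("src/a/b.ts"); // true
globToRegExp("src/**/*.ts").test("src/b.ts");   // true  ("**/" can match nothing)
globToRegExp("src/*.ts").test("src/a/b.ts");    // false ("*" stops at "/")
```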
1691
+ // src/search/fusion.ts
1692
+ var K = 60;
1693
+ function fusionMerge(strategyResults, limit) {
1694
+ const scoreMap = /* @__PURE__ */ new Map();
1695
+ const resultMap = /* @__PURE__ */ new Map();
1696
+ for (const { weight, results: results2 } of strategyResults) {
1697
+ for (let rank = 0; rank < results2.length; rank++) {
1698
+ const result = results2[rank];
1699
+ const rrfScore = weight * (1 / (K + rank + 1));
1700
+ const existing = scoreMap.get(result.chunkId) ?? 0;
1701
+ scoreMap.set(result.chunkId, existing + rrfScore);
1702
+ if (!resultMap.has(result.chunkId)) {
1703
+ resultMap.set(result.chunkId, result);
1704
+ }
1705
+ }
1706
+ }
1707
+ if (scoreMap.size === 0) return [];
1708
+ const entries = [...scoreMap.entries()].sort((a, b) => b[1] - a[1]);
1709
+ const maxScore = entries[0][1];
1710
+ const results = [];
1711
+ for (const [chunkId, rawScore] of entries.slice(0, limit)) {
1712
+ const base = resultMap.get(chunkId);
1713
+ if (!base) continue;
1714
+ results.push({
1715
+ ...base,
1716
+ score: maxScore > 0 ? rawScore / maxScore : 0
1717
+ });
1718
+ }
1719
+ return results;
1720
+ }
1721
+
1722
+ // src/cli/commands/query.ts
1723
+ var CTX_DIR2 = ".ctx";
1724
+ var DB_FILENAME2 = "index.db";
1725
+ var SNIPPET_MAX_LENGTH = 200;
1726
+ var STRATEGY_WEIGHTS = {
1727
+ vector: 1,
1728
+ fts: 0.8,
1729
+ ast: 0.9,
1730
+ path: 0.7,
1731
+ dependency: 0.6
1732
+ };
1733
+ function truncateSnippet(text) {
1734
+ const oneLine = text.replace(/\n/g, " ").replace(/\s+/g, " ").trim();
1735
+ if (oneLine.length <= SNIPPET_MAX_LENGTH) return oneLine;
1736
+ return oneLine.slice(0, SNIPPET_MAX_LENGTH) + "...";
1737
+ }
1738
+ function toOutputResult(r) {
1739
+ return {
1740
+ file: r.filePath,
1741
+ lines: [r.lineStart, r.lineEnd],
1742
+ name: r.name,
1743
+ type: r.type,
1744
+ score: Math.round(r.score * 100) / 100,
1745
+ snippet: truncateSnippet(r.text),
1746
+ language: r.language
1747
+ };
1748
+ }
1749
+ function formatTextOutput(query, results) {
1750
+ if (results.length === 0) {
1751
+ return `No results for "${query}"`;
1752
+ }
1753
+ const lines = [`Results for "${query}":
1754
+ `];
1755
+ for (let i = 0; i < results.length; i++) {
1756
+ const r = results[i];
1757
+ const nameLabel = r.name ? `${r.name} [${r.type}]` : `[${r.type}]`;
1758
+ lines.push(`${i + 1}. ${r.file}:${r.lines[0]}-${r.lines[1]} (score: ${r.score})`);
1759
+ lines.push(` ${nameLabel}`);
1760
+ lines.push(` ${r.snippet}`);
1761
+ lines.push("");
1762
+ }
1763
+ return lines.join("\n");
1764
+ }
1765
+ function extractSymbolNames(query) {
1766
+ // snake_case branch must come first: JS alternation is ordered, so the camelCase branch would otherwise split "get_token" into "get" + "token"
+ const matches = query.match(/[a-z]+(?:_[a-z]+)+|[A-Z]?[a-z]+(?:[A-Z][a-z]+)*|[A-Z][a-zA-Z]+/g);
1767
+ return matches ?? [];
1768
+ }
1769
+ function isPathLike(query) {
1770
+ return query.includes("/") || query.includes("*") || query.includes(".");
1771
+ }
1772
+ async function runQuery(projectPath, query, options) {
1773
+ const absoluteRoot = path5.resolve(projectPath);
1774
+ const dbPath = path5.join(absoluteRoot, CTX_DIR2, DB_FILENAME2);
1775
+ if (!fs6.existsSync(dbPath)) {
1776
+ throw new KontextError(
1777
+ `Project not initialized. Run "ctx init" first. (${CTX_DIR2}/${DB_FILENAME2} not found)`,
1778
+ ErrorCode.NOT_INITIALIZED
1779
+ );
1780
+ }
1781
+ const start = performance.now();
1782
+ const db = createDatabase(dbPath);
1783
+ try {
1784
+ const strategyResults = await runStrategies(db, query, options);
1785
+ const fused = fusionMerge(strategyResults, options.limit);
1786
+ const outputResults = fused.map(toOutputResult);
1787
+ const searchTimeMs = Math.round(performance.now() - start);
1788
+ const text = options.format === "text" ? formatTextOutput(query, outputResults) : void 0;
1789
+ return {
1790
+ query,
1791
+ results: outputResults,
1792
+ stats: {
1793
+ strategies: strategyResults.map((s) => s.strategy),
1794
+ totalResults: outputResults.length,
1795
+ searchTimeMs
1796
+ },
1797
+ text
1798
+ };
1799
+ } finally {
1800
+ db.close();
1801
+ }
1802
+ }
1803
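+ // Each strategy over-fetches 3x the requested limit so the fusion step has
+ // enough overlapping candidates to re-rank before trimming back to the limit.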
+ async function runStrategies(db, query, options) {
1804
+ const results = [];
1805
+ const filters = options.language ? { language: options.language } : void 0;
1806
+ const limit = options.limit * 3;
1807
+ for (const strategy of options.strategies) {
1808
+ const weight = STRATEGY_WEIGHTS[strategy];
+ if (weight === void 0) continue; // skip unknown strategy names from user input
1809
+ const searchResults = await executeStrategy(
1810
+ db,
1811
+ strategy,
1812
+ query,
1813
+ limit,
1814
+ filters
1815
+ );
1816
+ if (searchResults.length > 0) {
1817
+ results.push({ strategy, weight, results: searchResults });
1818
+ }
1819
+ }
1820
+ return results;
1821
+ }
1822
+ async function executeStrategy(db, strategy, query, limit, filters) {
1823
+ switch (strategy) {
1824
+ case "vector": {
1825
+ const embedder = await loadEmbedder();
1826
+ return vectorSearch(db, embedder, query, limit, filters);
1827
+ }
1828
+ case "fts":
1829
+ return ftsSearch(db, query, limit, filters);
1830
+ case "ast": {
1831
+ const symbols = extractSymbolNames(query);
1832
+ if (symbols.length === 0) return [];
1833
+ const allResults = [];
1834
+ for (const name of symbols) {
1835
+ const results = astSearch(
1836
+ db,
1837
+ { name, language: filters?.language },
1838
+ limit
1839
+ );
1840
+ allResults.push(...results);
1841
+ }
1842
+ const seen = /* @__PURE__ */ new Set();
1843
+ return allResults.filter((r) => {
1844
+ if (seen.has(r.chunkId)) return false;
1845
+ seen.add(r.chunkId);
1846
+ return true;
1847
+ });
1848
+ }
1849
+ case "path": {
1850
+ if (!isPathLike(query)) return [];
1851
+ return pathSearch(db, query, limit);
1852
+ }
1853
+ case "dependency":
1854
+ return [];
1855
+ }
1856
+ }
1857
+ var embedderInstance = null;
1858
+ async function loadEmbedder() {
1859
+ if (embedderInstance) return embedderInstance;
1860
+ embedderInstance = await createLocalEmbedder();
1861
+ return embedderInstance;
1862
+ }
1863
+ function registerQueryCommand(program2) {
1864
+ program2.command("query <query>").description("Multi-strategy code search").option("-l, --limit <n>", "Max results", "10").option(
1865
+ "-s, --strategy <list>",
1866
+ "Comma-separated strategies: vector,fts,ast,path",
1867
+ "fts,ast"
1868
+ ).option("--language <lang>", "Filter by language").option("-f, --format <fmt>", "Output format: json|text", "json").option("--no-vectors", "Skip vector search").action(async (query, opts) => {
1869
+ const projectPath = process.cwd();
1870
+ const verbose = program2.opts()["verbose"] === true;
1871
+ const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });
1872
+ let strategies = (opts["strategy"] ?? "fts,ast").split(",").map((s) => s.trim());
+ if (opts["vectors"] === false) strategies = strategies.filter((s) => s !== "vector"); // honor --no-vectors
1873
+ try {
1874
+ const output = await runQuery(projectPath, query, {
1875
+ limit: parseInt(opts["limit"] ?? "10", 10),
1876
+ strategies,
1877
+ language: opts["language"],
1878
+ format: opts["format"] ?? "json"
1879
+ });
1880
+ if (output.text) {
1881
+ console.log(output.text);
1882
+ } else {
1883
+ console.log(JSON.stringify(output, null, 2));
1884
+ }
1885
+ } catch (err) {
1886
+ const wrapped = err instanceof KontextError ? err : new SearchError(
1887
+ err instanceof Error ? err.message : String(err),
1888
+ ErrorCode.SEARCH_FAILED,
1889
+ err instanceof Error ? err : void 0
1890
+ );
1891
+ process.exitCode = handleCommandError(wrapped, logger, verbose);
1892
+ }
1893
+ });
1894
+ }
1895
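+ // Example invocation (flags as registered above):
+ // ctx query "fusionMerge" -s fts,ast -f text -l 5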
+
1896
+ // src/cli/commands/ask.ts
1897
+ import fs7 from "fs";
1898
+ import path6 from "path";
1899
+
1900
+ // src/steering/llm.ts
1901
+ var GEMINI_URL = "https://generativelanguage.googleapis.com/v1beta/models/gemini-3-flash-preview:generateContent";
1902
+ var OPENAI_URL = "https://api.openai.com/v1/responses";
1903
+ var ANTHROPIC_URL = "https://api.anthropic.com/v1/messages";
1904
+ var PLAN_SYSTEM_PROMPT = `You are a code search strategy planner. Given a user query about code, output a JSON object with:
1905
+ - "interpretation": a one-line summary of what the user is looking for
1906
+ - "strategies": an array of search strategy objects, each with:
1907
+ - "strategy": one of "vector", "fts", "ast", "path", "dependency"
1908
+ - "query": the optimized query string for that strategy
1909
+ - "weight": a number 0-1 indicating importance
1910
+ - "reason": brief explanation of why this strategy is used
1911
+
1912
+ Choose strategies based on query type:
1913
+ - Conceptual/natural language \u2192 vector (semantic search)
1914
+ - Keywords/identifiers \u2192 fts (full-text search)
1915
+ - Symbol names (functions, classes) \u2192 ast (structural search)
1916
+ - File paths or patterns \u2192 path (path glob search)
1917
+ - Import/dependency chains \u2192 dependency
1918
+
1919
+ Output ONLY valid JSON, no markdown.`;
1920
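+ // Illustrative shape of the plan the prompt above asks the model to return
+ // (example values only, not captured output):
+ // {
+ //   "interpretation": "Find where auth tokens are validated",
+ //   "strategies": [
+ //     { "strategy": "fts", "query": "validate token", "weight": 0.8, "reason": "keyword match" },
+ //     { "strategy": "ast", "query": "validateToken", "weight": 0.9, "reason": "symbol lookup" }
+ //   ]
+ // }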
+ var SYNTHESIZE_SYSTEM_PROMPT = `You are a code search assistant. Given search results, write a brief, helpful explanation of what was found. Be concise (2-4 sentences). Reference specific files and function names. Do not use markdown.`;
1921
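+ // Each factory below adapts one vendor API to the same minimal interface:
+ // { name, chat(messages) => Promise<string> }, where messages carry
+ // { role: "system" | "user" | "assistant", content }. System messages are
+ // remapped per vendor (systemInstruction, instructions, or the "system" field).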
+ function createGeminiProvider(apiKey) {
1922
+ return {
1923
+ name: "gemini",
1924
+ async chat(messages) {
1925
+ const contents = messages.map((m) => ({
1926
+ role: m.role === "assistant" ? "model" : "user",
1927
+ parts: [{ text: m.content }]
1928
+ }));
1929
+ const systemInstruction = messages.find((m) => m.role === "system");
1930
+ const nonSystemContents = contents.filter(
1931
+ (_, i) => messages[i].role !== "system"
1932
+ );
1933
+ const body = {
1934
+ contents: nonSystemContents,
1935
+ generationConfig: {
1936
+ temperature: 0.1,
1937
+ maxOutputTokens: 6e3
1938
+ }
1939
+ };
1940
+ if (systemInstruction) {
1941
+ body["systemInstruction"] = {
1942
+ parts: [{ text: systemInstruction.content }]
1943
+ };
1944
+ }
1945
+ const response = await fetch(`${GEMINI_URL}?key=${apiKey}`, {
1946
+ method: "POST",
1947
+ headers: { "Content-Type": "application/json" },
1948
+ body: JSON.stringify(body)
1949
+ });
1950
+ if (!response.ok) {
1951
+ const errorText = await response.text();
1952
+ throw new Error(`Gemini API error (${response.status}): ${errorText}`);
1953
+ }
1954
+ const data = await response.json();
+ const text = data.candidates?.[0]?.content?.parts?.[0]?.text;
+ if (typeof text !== "string") throw new Error("Gemini API returned no text candidate");
+ return text;
1956
+ }
1957
+ };
1958
+ }
1959
+ function createOpenAIProvider(apiKey) {
1960
+ return {
1961
+ name: "openai",
1962
+ async chat(messages) {
1963
+ const systemMessage = messages.find((m) => m.role === "system");
1964
+ const userMessages = messages.filter((m) => m.role !== "system");
1965
+ const userInput = userMessages.map((m) => m.content).join("\n\n");
1966
+ const body = {
1967
+ model: "gpt-5-mini",
1968
+ input: userInput,
1969
+ max_output_tokens: 6e3,
1970
+ reasoning: { effort: "low" }
1971
+ };
1972
+ if (systemMessage) {
1973
+ body["instructions"] = systemMessage.content;
1974
+ }
1975
+ const response = await fetch(OPENAI_URL, {
1976
+ method: "POST",
1977
+ headers: {
1978
+ "Content-Type": "application/json",
1979
+ Authorization: `Bearer ${apiKey}`
1980
+ },
1981
+ body: JSON.stringify(body)
1982
+ });
1983
+ if (!response.ok) {
1984
+ const errorText = await response.text();
1985
+ throw new Error(`OpenAI API error (${response.status}): ${errorText}`);
1986
+ }
1987
+ const data = await response.json();
1988
+ return data.output_text;
1989
+ }
1990
+ };
1991
+ }
1992
+ function createAnthropicProvider(apiKey) {
1993
+ return {
1994
+ name: "anthropic",
1995
+ async chat(messages) {
1996
+ const systemMessage = messages.find((m) => m.role === "system");
1997
+ const nonSystemMessages = messages.filter((m) => m.role !== "system").map((m) => ({ role: m.role, content: m.content }));
1998
+ const body = {
1999
+ model: "claude-3-5-haiku-20241022",
2000
+ max_tokens: 6e3,
2001
+ temperature: 0.1,
2002
+ messages: nonSystemMessages
2003
+ };
2004
+ if (systemMessage) {
2005
+ body["system"] = systemMessage.content;
2006
+ }
2007
+ const response = await fetch(ANTHROPIC_URL, {
2008
+ method: "POST",
2009
+ headers: {
2010
+ "Content-Type": "application/json",
2011
+ "x-api-key": apiKey,
2012
+ "anthropic-version": "2023-06-01"
2013
+ },
2014
+ body: JSON.stringify(body)
2015
+ });
2016
+ if (!response.ok) {
2017
+ const errorText = await response.text();
2018
+ throw new Error(
2019
+ `Anthropic API error (${response.status}): ${errorText}`
2020
+ );
2021
+ }
2022
+ const data = await response.json();
2023
+ return data.content[0].text;
2024
+ }
2025
+ };
2026
+ }
2027
+ var VALID_STRATEGIES = /* @__PURE__ */ new Set([
2028
+ "vector",
2029
+ "fts",
2030
+ "ast",
2031
+ "path",
2032
+ "dependency"
2033
+ ]);
2034
+ function buildFallbackPlan(query) {
2035
+ const strategies = [
2036
+ { strategy: "fts", query, weight: 0.8, reason: "Full-text keyword search" },
2037
+ { strategy: "ast", query, weight: 0.9, reason: "Structural symbol search" }
2038
+ ];
2039
+ return {
2040
+ interpretation: `Searching for: ${query}`,
2041
+ strategies
2042
+ };
2043
+ }
2044
+ function parseSearchPlan(raw, query) {
2045
+ const jsonMatch = raw.match(/\{[\s\S]*\}/);
2046
+ if (!jsonMatch) return buildFallbackPlan(query);
2047
+ const parsed = JSON.parse(jsonMatch[0]);
2048
+ if (!parsed.interpretation || !Array.isArray(parsed.strategies) || parsed.strategies.length === 0) {
2049
+ return buildFallbackPlan(query);
2050
+ }
2051
+ const validStrategies = parsed.strategies.filter(
2052
+ (s) => VALID_STRATEGIES.has(s.strategy)
2053
+ );
2054
+ if (validStrategies.length === 0) return buildFallbackPlan(query);
2055
+ return {
2056
+ interpretation: parsed.interpretation,
2057
+ strategies: validStrategies
2058
+ };
2059
+ }
2060
+ async function planSearch(provider, query) {
2061
+ try {
2062
+ const response = await provider.chat([
2063
+ { role: "system", content: PLAN_SYSTEM_PROMPT },
2064
+ { role: "user", content: query }
2065
+ ]);
2066
+ return parseSearchPlan(response, query);
2067
+ } catch {
2068
+ return buildFallbackPlan(query);
2069
+ }
2070
+ }
2071
+ function formatResultsForLLM(results) {
2072
+ return results.slice(0, 10).map(
2073
+ (r, i) => `${i + 1}. ${r.filePath}:${r.lineStart}-${r.lineEnd} ${r.name ?? "(unnamed)"} [${r.type}] (score: ${r.score.toFixed(2)})
2074
+ ${r.text.slice(0, 150)}`
2075
+ ).join("\n\n");
2076
+ }
2077
+ async function synthesizeExplanation(provider, query, results) {
2078
+ if (results.length === 0) {
2079
+ return `No results found for "${query}".`;
2080
+ }
2081
+ const formattedResults = formatResultsForLLM(results);
2082
+ const response = await provider.chat([
2083
+ { role: "system", content: SYNTHESIZE_SYSTEM_PROMPT },
2084
+ {
2085
+ role: "user",
2086
+ content: `Query: "${query}"
2087
+
2088
+ Search results:
2089
+ ${formattedResults}`
2090
+ }
2091
+ ]);
2092
+ return response;
2093
+ }
2094
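+ // Rough accounting only: ~4 characters per token, and the cost estimate
+ // below assumes a flat $0.15 per million tokens regardless of provider.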
+ function estimateTokens2(text) {
2095
+ return Math.ceil(text.length / 4);
2096
+ }
2097
+ async function steer(provider, query, limit, searchExecutor) {
2098
+ let totalTokens = 0;
2099
+ const plan = await planSearch(provider, query);
2100
+ totalTokens += estimateTokens2(PLAN_SYSTEM_PROMPT + query);
2101
+ totalTokens += estimateTokens2(JSON.stringify(plan));
2102
+ const results = await searchExecutor(plan.strategies, limit);
2103
+ let explanation;
2104
+ try {
2105
+ explanation = await synthesizeExplanation(provider, query, results);
2106
+ totalTokens += estimateTokens2(SYNTHESIZE_SYSTEM_PROMPT + query);
2107
+ totalTokens += estimateTokens2(explanation);
2108
+ } catch {
2109
+ explanation = results.length > 0 ? `Found ${results.length} result(s) for "${query}".` : `No results found for "${query}".`;
2110
+ }
2111
+ const costEstimate = totalTokens / 1e6 * 0.15;
2112
+ return {
2113
+ interpretation: plan.interpretation,
2114
+ strategies: plan.strategies,
2115
+ results,
2116
+ explanation,
2117
+ tokensUsed: totalTokens,
2118
+ costEstimate
2119
+ };
2120
+ }
2121
+
2122
+ // src/cli/commands/ask.ts
2123
+ var CTX_DIR3 = ".ctx";
2124
+ var DB_FILENAME3 = "index.db";
2125
+ var SNIPPET_MAX_LENGTH2 = 200;
2126
+ var FALLBACK_NOTICE = "No LLM provider configured. Set CTX_GEMINI_KEY, CTX_OPENAI_KEY, or CTX_ANTHROPIC_KEY. Running basic search instead.";
2127
+ var PROVIDER_ENV_MAP = {
2128
+ gemini: "CTX_GEMINI_KEY",
2129
+ openai: "CTX_OPENAI_KEY",
2130
+ anthropic: "CTX_ANTHROPIC_KEY"
2131
+ };
2132
+ var PROVIDER_FACTORIES = {
2133
+ gemini: createGeminiProvider,
2134
+ openai: createOpenAIProvider,
2135
+ anthropic: createAnthropicProvider
2136
+ };
2137
+ var DETECTION_ORDER = ["gemini", "openai", "anthropic"];
2138
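+ // Provider selection: an explicit --provider must have its env key set or
+ // detection returns null; otherwise the first key found in DETECTION_ORDER
+ // wins, e.g.: CTX_ANTHROPIC_KEY=... ctx ask "where are tokens validated"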
+ function detectProvider(explicit) {
2139
+ if (explicit) {
2140
+ const envVar = PROVIDER_ENV_MAP[explicit];
2141
+ const apiKey = envVar ? process.env[envVar] : void 0;
2142
+ if (!apiKey) return null;
2143
+ const factory = PROVIDER_FACTORIES[explicit];
2144
+ return factory ? factory(apiKey) : null;
2145
+ }
2146
+ for (const name of DETECTION_ORDER) {
2147
+ const envVar = PROVIDER_ENV_MAP[name];
2148
+ const apiKey = envVar ? process.env[envVar] : void 0;
2149
+ if (apiKey) {
2150
+ const factory = PROVIDER_FACTORIES[name];
2151
+ if (factory) return factory(apiKey);
2152
+ }
2153
+ }
2154
+ return null;
2155
+ }
2156
+ function truncateSnippet2(text) {
2157
+ const oneLine = text.replace(/\n/g, " ").replace(/\s+/g, " ").trim();
2158
+ if (oneLine.length <= SNIPPET_MAX_LENGTH2) return oneLine;
2159
+ return oneLine.slice(0, SNIPPET_MAX_LENGTH2) + "...";
2160
+ }
2161
+ function toOutputResult2(r) {
2162
+ return {
2163
+ file: r.filePath,
2164
+ lines: [r.lineStart, r.lineEnd],
2165
+ name: r.name,
2166
+ type: r.type,
2167
+ score: Math.round(r.score * 100) / 100,
2168
+ snippet: truncateSnippet2(r.text),
2169
+ language: r.language
2170
+ };
2171
+ }
2172
+ function formatTextOutput2(output) {
2173
+ const lines = [];
2174
+ if (output.fallback) {
2175
+ lines.push(FALLBACK_NOTICE);
2176
+ lines.push("");
2177
+ }
2178
+ if (output.interpretation) {
2179
+ lines.push(`Understanding: ${output.interpretation}`);
2180
+ lines.push("");
2181
+ }
2182
+ if (output.results.length === 0) {
2183
+ lines.push(`No results found for "${output.query}"`);
2184
+ } else {
2185
+ lines.push(`Found ${output.results.length} relevant location(s):`);
2186
+ lines.push("");
2187
+ for (let i = 0; i < output.results.length; i++) {
2188
+ const r = output.results[i];
2189
+ const nameLabel = r.name ? `${r.name} [${r.type}]` : `[${r.type}]`;
2190
+ lines.push(`${i + 1}. ${r.file}:${r.lines[0]}-${r.lines[1]} (score: ${r.score})`);
2191
+ lines.push(` ${nameLabel}`);
2192
+ lines.push(` ${r.snippet}`);
2193
+ lines.push("");
2194
+ }
2195
+ }
2196
+ if (output.explanation) {
2197
+ lines.push("Explanation:");
2198
+ lines.push(output.explanation);
2199
+ lines.push("");
2200
+ }
2201
+ lines.push("\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500");
2202
+ const cost = output.stats.costEstimate.toFixed(4);
2203
+ lines.push(
2204
+ `Tokens: ${output.stats.tokensUsed.toLocaleString()} | Cost: ~$${cost} | Strategies: ${output.stats.strategies.join(", ")}`
2205
+ );
2206
+ return lines.join("\n");
2207
+ }
2208
+ function createSearchExecutor(db) {
2209
+ return async (strategies, limit) => {
2210
+ const strategyResults = [];
2211
+ const fetchLimit = limit * 3;
2212
+ for (const plan of strategies) {
2213
+ const results = await executeStrategy2(db, plan, fetchLimit);
2214
+ if (results.length > 0) {
2215
+ strategyResults.push({
2216
+ strategy: plan.strategy,
2217
+ weight: plan.weight,
2218
+ results
2219
+ });
2220
+ }
2221
+ }
2222
+ return fusionMerge(strategyResults, limit);
2223
+ };
2224
+ }
2225
+ async function executeStrategy2(db, plan, limit) {
2226
+ switch (plan.strategy) {
2227
+ case "vector": {
2228
+ const embedder = await loadEmbedder2();
2229
+ return vectorSearch(db, embedder, plan.query, limit);
2230
+ }
2231
+ case "fts":
2232
+ return ftsSearch(db, plan.query, limit);
2233
+ case "ast":
2234
+ return astSearch(db, { name: plan.query }, limit);
2235
+ case "path":
2236
+ return pathSearch(db, plan.query, limit);
2237
+ case "dependency":
2238
+ return [];
2239
+ }
2240
+ }
2241
+ var embedderInstance2 = null;
2242
+ async function loadEmbedder2() {
2243
+ if (embedderInstance2) return embedderInstance2;
2244
+ embedderInstance2 = await createLocalEmbedder();
2245
+ return embedderInstance2;
2246
+ }
2247
+ async function fallbackSearch(db, query, limit) {
2248
+ const executor = createSearchExecutor(db);
2249
+ const fallbackStrategies = [
2250
+ { strategy: "fts", query, weight: 0.8, reason: "fallback keyword search" },
2251
+ { strategy: "ast", query, weight: 0.9, reason: "fallback structural search" }
2252
+ ];
2253
+ const results = await executor(fallbackStrategies, limit);
2254
+ return {
2255
+ query,
2256
+ interpretation: "",
2257
+ results: results.map(toOutputResult2),
2258
+ explanation: "",
2259
+ stats: {
2260
+ strategies: fallbackStrategies.map((s) => s.strategy),
2261
+ tokensUsed: 0,
2262
+ costEstimate: 0,
2263
+ totalResults: results.length
2264
+ },
2265
+ fallback: true
2266
+ };
2267
+ }
2268
+ async function runAsk(projectPath, query, options) {
2269
+ const absoluteRoot = path6.resolve(projectPath);
2270
+ const dbPath = path6.join(absoluteRoot, CTX_DIR3, DB_FILENAME3);
2271
+ if (!fs7.existsSync(dbPath)) {
2272
+ throw new KontextError(
2273
+ `Project not initialized. Run "ctx init" first. (${CTX_DIR3}/${DB_FILENAME3} not found)`,
2274
+ ErrorCode.NOT_INITIALIZED
2275
+ );
2276
+ }
2277
+ const db = createDatabase(dbPath);
2278
+ try {
2279
+ const provider = options.provider ?? null;
2280
+ if (!provider) {
2281
+ const output = await fallbackSearch(db, query, options.limit);
2282
+ if (options.format === "text") {
2283
+ output.text = formatTextOutput2(output);
2284
+ }
2285
+ return output;
2286
+ }
2287
+ const executor = createSearchExecutor(db);
2288
+ if (options.noExplain) {
2289
+ return await runNoExplain(provider, query, options, executor);
2290
+ }
2291
+ return await runWithSteering(provider, query, options, executor);
2292
+ } finally {
2293
+ db.close();
2294
+ }
2295
+ }
2296
+ async function runNoExplain(provider, query, options, executor) {
2297
+ const plan = await planSearch(provider, query);
2298
+ const results = await executor(plan.strategies, options.limit);
2299
+ const output = {
2300
+ query,
2301
+ interpretation: plan.interpretation,
2302
+ results: results.map(toOutputResult2),
2303
+ explanation: "",
2304
+ stats: {
2305
+ strategies: plan.strategies.map((s) => s.strategy),
2306
+ tokensUsed: 0,
2307
+ costEstimate: 0,
2308
+ totalResults: results.length
2309
+ }
2310
+ };
2311
+ if (options.format === "text") {
2312
+ output.text = formatTextOutput2(output);
2313
+ }
2314
+ return output;
2315
+ }
2316
+ async function runWithSteering(provider, query, options, executor) {
2317
+ const result = await steer(provider, query, options.limit, executor);
2318
+ const output = {
2319
+ query,
2320
+ interpretation: result.interpretation,
2321
+ results: result.results.map(toOutputResult2),
2322
+ explanation: result.explanation,
2323
+ stats: {
2324
+ strategies: result.strategies.map((s) => s.strategy),
2325
+ tokensUsed: result.tokensUsed,
2326
+ costEstimate: result.costEstimate,
2327
+ totalResults: result.results.length
2328
+ }
2329
+ };
2330
+ if (options.format === "text") {
2331
+ output.text = formatTextOutput2(output);
2332
+ }
2333
+ return output;
2334
+ }
2335
+ function registerAskCommand(program2) {
2336
+ program2.command("ask <query>").description("LLM-steered natural language code search").option("-l, --limit <n>", "Max results", "10").option("-p, --provider <name>", "LLM provider: gemini|openai|anthropic").option("-f, --format <fmt>", "Output format: json|text", "text").option("--no-explain", "Skip explanation, just return search results").action(async (query, opts) => {
2337
+ const projectPath = process.cwd();
2338
+ const verbose = program2.opts()["verbose"] === true;
2339
+ const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });
2340
+ const providerName = opts["provider"];
2341
+ const provider = detectProvider(providerName);
2342
+ try {
2343
+ const output = await runAsk(projectPath, query, {
2344
+ limit: parseInt(String(opts["limit"] ?? "10"), 10),
2345
+ format: opts["format"] ?? "text",
2346
+ provider: provider ?? void 0,
2347
+ noExplain: opts["explain"] === false
2348
+ });
2349
+ if (output.text) {
2350
+ console.log(output.text);
2351
+ } else {
2352
+ console.log(JSON.stringify(output, null, 2));
2353
+ }
2354
+ } catch (err) {
2355
+ const wrapped = err instanceof KontextError ? err : new SearchError(
2356
+ err instanceof Error ? err.message : String(err),
2357
+ ErrorCode.SEARCH_FAILED,
2358
+ err instanceof Error ? err : void 0
2359
+ );
2360
+ process.exitCode = handleCommandError(wrapped, logger, verbose);
2361
+ }
2362
+ });
2363
+ }
2364
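+ // Example (falls back to plain fts+ast search when no API key is configured):
+ // ctx ask "how does file watching debounce changes" -p gemini -f text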
+
2365
+ // src/cli/commands/find.ts
2366
+ function registerFindCommand(program2) {
2367
+ program2.command("find <query>").description("Natural language code search").option("--full", "Include source code in output").option("--json", "Machine-readable JSON output").option("--no-llm", "Skip steering LLM, raw vector search only").option("-l, --limit <n>", "Max results", "5").option("--language <lang>", "Filter by language").action((_query, _options) => {
2368
+ console.log("ctx find \u2014 not yet implemented");
2369
+ });
2370
+ }
2371
+
2372
+ // src/cli/commands/update.ts
2373
+ function registerUpdateCommand(program2) {
2374
+ program2.command("update").description("Incremental re-index of changed files").action(() => {
2375
+ console.log("ctx update \u2014 not yet implemented");
2376
+ });
2377
+ }
2378
+
2379
+ // src/cli/commands/watch.ts
2380
+ import fs8 from "fs";
2381
+ import path8 from "path";
2382
+
2383
+ // src/watcher/watcher.ts
2384
+ import { watch } from "chokidar";
2385
+ import path7 from "path";
2386
+ var DEFAULT_DEBOUNCE_MS = 500;
2387
+ var ALWAYS_IGNORED_DIRS = /* @__PURE__ */ new Set([
2388
+ "node_modules",
2389
+ ".git",
2390
+ ".ctx",
2391
+ "dist",
2392
+ "build",
2393
+ "__pycache__"
2394
+ ]);
2395
+ var WATCHED_EXTENSIONS = new Set(Object.keys(LANGUAGE_MAP));
2396
+ function isWatchedFile(filePath) {
+ const base = path7.basename(filePath);
+ // dotfiles like ".env" have no extname; match the basename itself (mirrors getLanguage in discovery.ts)
+ if (base.startsWith(".") && !base.includes(".", 1)) return WATCHED_EXTENSIONS.has(base);
+ const ext = path7.extname(filePath).toLowerCase();
+ return WATCHED_EXTENSIONS.has(ext);
+ }
2400
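+ // Change events are coalesced in a Map keyed by path (latest event wins) and
+ // flushed as one batch after debounceMs of quiet, so a burst of saves
+ // triggers a single re-index pass.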
+ function createWatcher(options, events) {
2401
+ const debounceMs = options.debounceMs ?? DEFAULT_DEBOUNCE_MS;
2402
+ const projectPath = path7.resolve(options.projectPath);
2403
+ const extraIgnored = new Set(options.ignored ?? []);
2404
+ function isIgnored(filePath) {
2405
+ const segments = filePath.split(path7.sep);
2406
+ for (const seg of segments) {
2407
+ if (ALWAYS_IGNORED_DIRS.has(seg)) return true;
2408
+ if (extraIgnored.has(seg)) return true;
2409
+ }
2410
+ return false;
2411
+ }
2412
+ let watcher = null;
2413
+ let pendingChanges = /* @__PURE__ */ new Map();
2414
+ let debounceTimer = null;
2415
+ function flush() {
2416
+ if (pendingChanges.size === 0) return;
2417
+ const batch = [...pendingChanges.values()];
2418
+ pendingChanges = /* @__PURE__ */ new Map();
2419
+ events.onChange(batch);
2420
+ }
2421
+ function scheduleFlush() {
2422
+ if (debounceTimer) clearTimeout(debounceTimer);
2423
+ debounceTimer = setTimeout(flush, debounceMs);
2424
+ }
2425
+ function handleEvent(type, filePath) {
2426
+ if (!isWatchedFile(filePath)) return;
2427
+ pendingChanges.set(filePath, { type, path: filePath });
2428
+ scheduleFlush();
2429
+ }
2430
+ return {
2431
+ start() {
2432
+ return new Promise((resolve) => {
2433
+ watcher = watch(".", {
2434
+ cwd: projectPath,
2435
+ ignored: (fp) => isIgnored(fp),
2436
+ ignoreInitial: true,
2437
+ persistent: true
2438
+ });
2439
+ watcher.on("add", (fp) => handleEvent("add", fp));
2440
+ watcher.on("change", (fp) => handleEvent("change", fp));
2441
+ watcher.on("unlink", (fp) => handleEvent("unlink", fp));
2442
+ watcher.on("error", (err) => {
2443
+ events.onError(err instanceof Error ? err : new Error(String(err)));
2444
+ });
2445
+ watcher.on("ready", () => resolve());
2446
+ });
2447
+ },
2448
+ async stop() {
2449
+ if (debounceTimer) {
2450
+ clearTimeout(debounceTimer);
2451
+ debounceTimer = null;
2452
+ }
2453
+ pendingChanges.clear();
2454
+ if (watcher) {
2455
+ await watcher.close();
2456
+ watcher = null;
2457
+ }
2458
+ }
2459
+ };
2460
+ }
2461
+
2462
+ // src/cli/commands/watch.ts
2463
+ var CTX_DIR4 = ".ctx";
2464
+ var DB_FILENAME4 = "index.db";
2465
+ function timestamp() {
2466
+ return (/* @__PURE__ */ new Date()).toLocaleTimeString("en-GB", { hour12: false });
2467
+ }
2468
+ function detectLanguage(filePath) {
+ const base = path8.basename(filePath);
+ // dotfiles like ".env" have no extname; resolve via basename (mirrors getLanguage in discovery.ts)
+ if (base.startsWith(".") && !base.includes(".", 1)) return LANGUAGE_MAP[base] ?? null;
+ const ext = path8.extname(filePath).toLowerCase();
+ return LANGUAGE_MAP[ext] ?? null;
+ }
2472
+ function formatDuration2(ms) {
2473
+ if (ms < 1e3) return `${Math.round(ms)}ms`;
2474
+ return `${(ms / 1e3).toFixed(1)}s`;
2475
+ }
2476
+ async function hashFile(absolutePath) {
2477
+ const { createHash: createHash3 } = await import("crypto");
2478
+ const content = fs8.readFileSync(absolutePath);
2479
+ return createHash3("sha256").update(content).digest("hex");
2480
+ }
2481
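+ // Incremental re-index: deleted files are dropped from the DB; added/changed
+ // files are re-parsed and re-chunked (old chunks removed first); then all new
+ // chunks are embedded in one batch and their vectors written in a single
+ // transaction (skipped entirely when skipEmbedding is set).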
+ async function reindexChanges(db, changes, projectPath, options) {
2482
+ const start = performance.now();
2483
+ const log = options.log;
2484
+ let filesProcessed = 0;
2485
+ let chunksUpdated = 0;
2486
+ const allChunksWithMeta = [];
2487
+ for (const change of changes) {
2488
+ const absolutePath = path8.join(projectPath, change.path);
2489
+ const language = detectLanguage(change.path);
2490
+ if (change.type === "unlink") {
2491
+ log(`[${timestamp()}] Deleted: ${change.path}`);
2492
+ const existingFile2 = db.getFile(change.path);
2493
+ if (existingFile2) {
2494
+ db.deleteFile(change.path);
2495
+ }
2496
+ filesProcessed++;
2497
+ continue;
2498
+ }
2499
+ if (!language) continue;
2500
+ if (!fs8.existsSync(absolutePath)) continue;
2501
+ const label = change.type === "add" ? "Added" : "Changed";
2502
+ log(`[${timestamp()}] ${label}: ${change.path}`);
2503
+ const existingFile = db.getFile(change.path);
2504
+ if (existingFile) {
2505
+ db.deleteChunksByFile(existingFile.id);
2506
+ }
2507
+ let nodes;
2508
+ try {
2509
+ nodes = await parseFile(absolutePath, language);
2510
+ } catch {
2511
+ log(`[${timestamp()}] \u26A0 Skipping ${change.path} (parse error)`);
2512
+ continue;
2513
+ }
2514
+ const chunks = chunkFile(nodes, change.path);
2515
+ const hash = await hashFile(absolutePath);
2516
+ const size = fs8.statSync(absolutePath).size;
2517
+ const fileId = db.upsertFile({
2518
+ path: change.path,
2519
+ language,
2520
+ hash,
2521
+ size
2522
+ });
2523
+ const chunkIds = db.insertChunks(
2524
+ fileId,
2525
+ chunks.map((c) => ({
2526
+ lineStart: c.lineStart,
2527
+ lineEnd: c.lineEnd,
2528
+ type: c.type,
2529
+ name: c.name,
2530
+ parent: c.parent,
2531
+ text: c.text,
2532
+ imports: c.imports,
2533
+ exports: c.exports,
2534
+ hash: c.hash
2535
+ }))
2536
+ );
2537
+ for (let i = 0; i < chunks.length; i++) {
2538
+ allChunksWithMeta.push({
2539
+ fileRelPath: change.path,
2540
+ chunk: { ...chunks[i], id: String(chunkIds[i]) }
2541
+ });
2542
+ }
2543
+ chunksUpdated += chunks.length;
2544
+ filesProcessed++;
2545
+ }
2546
+ if (!options.skipEmbedding && allChunksWithMeta.length > 0) {
2547
+ const embedder = await loadEmbedder3();
2548
+ const texts = allChunksWithMeta.map(
2549
+ (cm) => prepareChunkText(cm.fileRelPath, cm.chunk.parent, cm.chunk.text)
2550
+ );
2551
+ const vectors = await embedder.embed(texts);
2552
+ db.transaction(() => {
2553
+ for (let i = 0; i < allChunksWithMeta.length; i++) {
2554
+ const chunkDbId = parseInt(allChunksWithMeta[i].chunk.id, 10);
2555
+ db.insertVector(chunkDbId, vectors[i]);
2556
+ }
2557
+ });
2558
+ }
2559
+ const durationMs = performance.now() - start;
2560
+ return { filesProcessed, chunksUpdated, durationMs };
2561
+ }
2562
+ var embedderInstance3 = null;
2563
+ async function loadEmbedder3() {
2564
+ if (embedderInstance3) return embedderInstance3;
2565
+ embedderInstance3 = await createLocalEmbedder();
2566
+ return embedderInstance3;
2567
+ }
2568
+ async function runWatch(projectPath, options = {}) {
2569
+ const absoluteRoot = path8.resolve(projectPath);
2570
+ const dbPath = path8.join(absoluteRoot, CTX_DIR4, DB_FILENAME4);
2571
+ const log = options.log ?? console.log;
2572
+ if (options.init) {
2573
+ await runInit(absoluteRoot, { log, skipEmbedding: options.skipEmbedding });
2574
+ }
2575
+ if (!fs8.existsSync(dbPath)) {
2576
+ throw new KontextError(
2577
+ `Project not initialized. Run "ctx init" first or use --init flag. (${CTX_DIR4}/${DB_FILENAME4} not found)`,
2578
+ ErrorCode.NOT_INITIALIZED
2579
+ );
2580
+ }
2581
+ await initParser();
2582
+ const db = createDatabase(dbPath);
2583
+ let watcherHandle = null;
2584
+ const watcher = createWatcher(
2585
+ {
2586
+ projectPath: absoluteRoot,
2587
+ dbPath,
2588
+ debounceMs: options.debounceMs
2589
+ },
2590
+ {
2591
+ onChange: (changes) => {
2592
+ void (async () => {
2593
+ try {
2594
+ const result = await reindexChanges(db, changes, absoluteRoot, {
2595
+ skipEmbedding: options.skipEmbedding,
2596
+ log
2597
+ });
2598
+ if (result.filesProcessed > 0) {
2599
+ log(
2600
+ `[${timestamp()}] Re-indexed: ${result.filesProcessed} file(s), ${result.chunksUpdated} chunks updated (${formatDuration2(result.durationMs)})`
2601
+ );
2602
+ }
2603
+ } catch (err) {
2604
+ log(
2605
+ `[${timestamp()}] Error: ${err instanceof Error ? err.message : String(err)}`
2606
+ );
2607
+ }
2608
+ })();
2609
+ },
2610
+ onError: (err) => {
2611
+ log(`[${timestamp()}] Watcher error: ${err.message}`);
2612
+ }
2613
+ }
2614
+ );
2615
+ await watcher.start();
2616
+ watcherHandle = watcher;
2617
+ log(`Watching ${absoluteRoot} for changes...`);
2618
+ return {
2619
+ async stop() {
2620
+ if (watcherHandle) {
2621
+ await watcherHandle.stop();
2622
+ watcherHandle = null;
2623
+ }
2624
+ db.close();
2625
+ log("Stopped watching. Database saved.");
2626
+ }
2627
+ };
2628
+ }
2629
+ function registerWatchCommand(program2) {
2630
+ program2.command("watch [path]").description("Watch mode \u2014 re-index on file changes").option("--init", "Run init before starting watch").option("--debounce <ms>", "Debounce interval in ms", "500").option("--embed", "Enable embedding during watch (slower)").action(async (inputPath, opts) => {
2631
+ const projectPath = inputPath ?? process.cwd();
2632
+ const verbose = program2.opts()["verbose"] === true;
2633
+ const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });
2634
+ const skipEmbedding = opts["embed"] !== true;
2635
+ try {
2636
+ const handle = await runWatch(projectPath, {
2637
+ init: opts["init"] === true,
2638
+ debounceMs: parseInt(String(opts["debounce"] ?? "500"), 10),
2639
+ skipEmbedding
2640
+ });
2641
+ const shutdown = () => {
2642
+ void handle.stop().then(() => process.exit(0));
2643
+ };
2644
+ process.on("SIGINT", shutdown);
2645
+ process.on("SIGTERM", shutdown);
2646
+ } catch (err) {
2647
+ const wrapped = err instanceof KontextError ? err : new IndexError(
2648
+ err instanceof Error ? err.message : String(err),
2649
+ ErrorCode.WATCHER_FAILED,
2650
+ err instanceof Error ? err : void 0
2651
+ );
2652
+ process.exitCode = handleCommandError(wrapped, logger, verbose);
2653
+ }
2654
+ });
2655
+ }
2656
+
2657
+ // src/cli/commands/status.ts
2658
+ import fs9 from "fs";
2659
+ import path9 from "path";
2660
+ var CTX_DIR5 = ".ctx";
2661
+ var DB_FILENAME5 = "index.db";
2662
+ var CONFIG_FILENAME2 = "config.json";
2663
+ function formatBytes2(bytes) {
2664
+ if (bytes < 1024) return `${bytes} B`;
2665
+ if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
2666
+ return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
2667
+ }
2668
+ function formatTimestamp(raw) {
2669
+ const num = Number(raw);
2670
+ if (Number.isNaN(num)) return raw;
2671
+ const date = new Date(num);
2672
+ return date.toISOString().replace("T", " ").replace(/\.\d+Z$/, "");
2673
+ }
2674
+ function capitalize(s) {
2675
+ return s.charAt(0).toUpperCase() + s.slice(1);
2676
+ }
2677
+ function readConfig(ctxDir) {
2678
+ const configPath2 = path9.join(ctxDir, CONFIG_FILENAME2);
2679
+ if (!fs9.existsSync(configPath2)) return null;
2680
+ try {
2681
+ const raw = fs9.readFileSync(configPath2, "utf-8");
2682
+ const parsed = JSON.parse(raw);
2683
+ return {
2684
+ model: parsed.model ?? "unknown",
2685
+ dimensions: parsed.dimensions ?? 0
2686
+ };
2687
+ } catch {
2688
+ return null;
2689
+ }
2690
+ }
2691
+ function formatNotInitialized(projectPath) {
2692
+ return [
2693
+ `Kontext Status \u2014 ${projectPath}`,
2694
+ "",
2695
+ ' Not initialized. Run "ctx init" first.',
2696
+ ""
2697
+ ].join("\n");
2698
+ }
2699
+ function formatStatus(projectPath, output) {
2700
+ const lines = [
2701
+ `Kontext Status \u2014 ${projectPath}`,
2702
+ "",
2703
+ ` Initialized: Yes`,
2704
+ ` Database: ${CTX_DIR5}/${DB_FILENAME5} (${formatBytes2(output.dbSizeBytes)})`
2705
+ ];
2706
+ if (output.lastIndexed) {
2707
+ lines.push(` Last indexed: ${formatTimestamp(output.lastIndexed)}`);
2708
+ }
2709
+ lines.push("");
2710
+ lines.push(` Files: ${output.fileCount.toLocaleString()}`);
2711
+ lines.push(` Chunks: ${output.chunkCount.toLocaleString()}`);
2712
+ lines.push(` Vectors: ${output.vectorCount.toLocaleString()}`);
2713
+ if (output.languages.size > 0) {
2714
+ lines.push("");
2715
+ lines.push(" Languages:");
2716
+ const maxLangLen = Math.max(
2717
+ ...[...output.languages.keys()].map((k) => capitalize(k).length)
2718
+ );
2719
+ for (const [lang, count] of output.languages) {
2720
+ const label = capitalize(lang).padEnd(maxLangLen + 2);
2721
+ lines.push(` ${label}${count} file${count !== 1 ? "s" : ""}`);
2722
+ }
2723
+ }
2724
+ if (output.config) {
2725
+ lines.push("");
2726
+ lines.push(
2727
+ ` Embedder: local (${output.config.model}, ${output.config.dimensions} dims)`
2728
+ );
2729
+ }
2730
+ lines.push("");
2731
+ return lines.join("\n");
2732
+ }
2733
+ async function runStatus(projectPath) {
2734
+ const absoluteRoot = path9.resolve(projectPath);
2735
+ const ctxDir = path9.join(absoluteRoot, CTX_DIR5);
2736
+ const dbPath = path9.join(ctxDir, DB_FILENAME5);
2737
+ if (!fs9.existsSync(dbPath)) {
2738
+ const output = {
2739
+ initialized: false,
2740
+ fileCount: 0,
2741
+ chunkCount: 0,
2742
+ vectorCount: 0,
2743
+ dbSizeBytes: 0,
2744
+ lastIndexed: null,
2745
+ languages: /* @__PURE__ */ new Map(),
2746
+ config: null,
2747
+ text: formatNotInitialized(absoluteRoot)
2748
+ };
2749
+ return output;
2750
+ }
2751
+ const db = createDatabase(dbPath);
2752
+ try {
2753
+ const fileCount = db.getFileCount();
2754
+ const chunkCount = db.getChunkCount();
2755
+ const vectorCount = db.getVectorCount();
2756
+ const languages = db.getLanguageBreakdown();
2757
+ const lastIndexed = db.getLastIndexed();
2758
+ const config = readConfig(ctxDir);
2759
+ const dbSizeBytes = fs9.statSync(dbPath).size;
2760
+ const output = {
2761
+ initialized: true,
2762
+ fileCount,
2763
+ chunkCount,
2764
+ vectorCount,
2765
+ dbSizeBytes,
2766
+ lastIndexed,
2767
+ languages,
2768
+ config,
2769
+ text: ""
2770
+ };
2771
+ output.text = formatStatus(absoluteRoot, output);
2772
+ return output;
2773
+ } finally {
2774
+ db.close();
2775
+ }
2776
+ }
2777
+ function registerStatusCommand(program2) {
2778
+ program2.command("status [path]").description("Show index statistics").action(async (inputPath) => {
2779
+ const projectPath = inputPath ?? process.cwd();
2780
+ const verbose = program2.opts()["verbose"] === true;
2781
+ const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });
2782
+ try {
2783
+ const output = await runStatus(projectPath);
2784
+ console.log(output.text);
2785
+ } catch (err) {
2786
+ process.exitCode = handleCommandError(err, logger, verbose);
2787
+ }
2788
+ });
2789
+ }
2790
+
2791
+ // src/cli/commands/symbols.ts
2792
+ function registerSymbolsCommand(program2) {
2793
+ program2.command("symbols").description("List all indexed symbols").option("--type <type>", "Filter by symbol type (function, class, etc.)").action((_options) => {
2794
+ console.log("ctx symbols \u2014 not yet implemented");
2795
+ });
2796
+ }
2797
+
2798
+ // src/cli/commands/deps.ts
2799
+ function registerDepsCommand(program2) {
2800
+ program2.command("deps <file>").description("Show dependency graph for a file").action((_file) => {
2801
+ console.log("ctx deps \u2014 not yet implemented");
2802
+ });
2803
+ }
2804
+
2805
+ // src/cli/commands/chunk.ts
2806
+ function registerChunkCommand(program2) {
2807
+ program2.command("chunk <location>").description("Show the chunk containing a file:line location").action((_location) => {
2808
+ console.log("ctx chunk \u2014 not yet implemented");
2809
+ });
2810
+ }
2811
+
2812
+ // src/cli/commands/config.ts
2813
+ import fs10 from "fs";
2814
+ import path10 from "path";
2815
+ var CTX_DIR6 = ".ctx";
2816
+ var CONFIG_FILENAME3 = "config.json";
2817
+ var DEFAULT_CONFIG = {
2818
+ embedder: {
2819
+ provider: "local",
2820
+ model: "Xenova/all-MiniLM-L6-v2",
2821
+ dimensions: 384
2822
+ },
2823
+ search: {
2824
+ defaultLimit: 10,
2825
+ strategies: ["vector", "fts", "ast", "path"],
2826
+ weights: { vector: 1, fts: 0.8, ast: 0.9, path: 0.7, dependency: 0.6 }
2827
+ },
2828
+ watch: {
2829
+ debounceMs: 500,
2830
+ ignored: []
2831
+ },
2832
+ llm: {
2833
+ provider: null,
2834
+ model: null
2835
+ }
2836
+ };
2837
+ var VALID_EMBEDDER_PROVIDERS = /* @__PURE__ */ new Set(["local", "voyage", "openai"]);
2838
+ var VALID_LLM_PROVIDERS = /* @__PURE__ */ new Set(["gemini", "openai", "anthropic"]);
2839
+ var VALIDATION_RULES = {
2840
+ "embedder.provider": {
2841
+ validate: (v) => typeof v === "string" && VALID_EMBEDDER_PROVIDERS.has(v),
2842
+ message: `Must be one of: ${[...VALID_EMBEDDER_PROVIDERS].join(", ")}`
2843
+ },
2844
+ "embedder.dimensions": {
2845
+ validate: (v) => typeof v === "number" && v > 0 && Number.isInteger(v),
2846
+ message: "Must be a positive integer"
2847
+ },
2848
+ "search.defaultLimit": {
2849
+ validate: (v) => typeof v === "number" && v > 0 && Number.isInteger(v),
2850
+ message: "Must be a positive integer"
2851
+ },
2852
+ "watch.debounceMs": {
2853
+ validate: (v) => typeof v === "number" && v >= 0 && Number.isInteger(v),
2854
+ message: "Must be a non-negative integer"
2855
+ },
2856
+ "llm.provider": {
2857
+ validate: (v) => v === null || typeof v === "string" && VALID_LLM_PROVIDERS.has(v),
2858
+ message: `Must be null or one of: ${[...VALID_LLM_PROVIDERS].join(", ")}`
2859
+ }
2860
+ };
2861
+ function resolveCtxDir(projectPath) {
2862
+ const absoluteRoot = path10.resolve(projectPath);
2863
+ const ctxDir = path10.join(absoluteRoot, CTX_DIR6);
2864
+ if (!fs10.existsSync(ctxDir)) {
2865
+ throw new ConfigError(
2866
+ `Project not initialized. Run "ctx init" first. (${CTX_DIR6}/ not found)`,
2867
+ ErrorCode.NOT_INITIALIZED
2868
+ );
2869
+ }
2870
+ return ctxDir;
2871
+ }
2872
+ function configPath(ctxDir) {
2873
+ return path10.join(ctxDir, CONFIG_FILENAME3);
2874
+ }
2875
+ function readConfig2(ctxDir) {
2876
+ const filePath = configPath(ctxDir);
2877
+ if (!fs10.existsSync(filePath)) {
2878
+ writeConfig(ctxDir, DEFAULT_CONFIG);
2879
+ return structuredClone(DEFAULT_CONFIG);
2880
+ }
2881
+ const raw = fs10.readFileSync(filePath, "utf-8");
2882
+ const parsed = JSON.parse(raw);
2883
+ return mergeWithDefaults(parsed);
2884
+ }
2885
+ function writeConfig(ctxDir, config) {
2886
+ fs10.writeFileSync(
2887
+ configPath(ctxDir),
2888
+ JSON.stringify(config, null, 2) + "\n"
2889
+ );
2890
+ }
2891
+ function mergeWithDefaults(partial) {
2892
+ return {
2893
+ embedder: { ...DEFAULT_CONFIG.embedder, ...partial.embedder },
2894
+ search: {
2895
+ ...DEFAULT_CONFIG.search,
2896
+ ...partial.search,
2897
+ weights: { ...DEFAULT_CONFIG.search.weights, ...partial.search?.weights }
2898
+ },
2899
+ watch: { ...DEFAULT_CONFIG.watch, ...partial.watch },
2900
+ llm: { ...DEFAULT_CONFIG.llm, ...partial.llm }
2901
+ };
2902
+ }
2903
+ function getNestedValue(obj, key) {
2904
+ const parts = key.split(".");
2905
+ let current = obj;
2906
+ for (const part of parts) {
2907
+ if (current === null || current === void 0 || typeof current !== "object") {
2908
+ return void 0;
2909
+ }
2910
+ current = current[part];
2911
+ }
2912
+ return current;
2913
+ }
2914
+ function setNestedValue(obj, key, value) {
2915
+ const parts = key.split(".");
2916
+ let current = obj;
2917
+ for (let i = 0; i < parts.length - 1; i++) {
2918
+ const part = parts[i];
2919
+ if (typeof current[part] !== "object" || current[part] === null) {
2920
+ current[part] = {};
2921
+ }
2922
+ current = current[part];
2923
+ }
2924
+ current[parts[parts.length - 1]] = value;
2925
+ }
2926
+ function parseValue(rawValue) {
2927
+ if (rawValue === "null") return null;
2928
+ if (rawValue === "true") return true;
2929
+ if (rawValue === "false") return false;
2930
+ const num = Number(rawValue);
2931
+ if (!Number.isNaN(num) && rawValue.trim() !== "") return num;
2932
+ if (rawValue.startsWith("[") || rawValue.startsWith("{")) {
2933
+ try {
2934
+ return JSON.parse(rawValue);
2935
+ } catch {
2936
+ }
2937
+ }
2938
+ return rawValue;
2939
+ }
2940
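+ // Dot-notation keys address nested config fields, e.g.:
+ //   ctx config set watch.debounceMs 250
+ //   ctx config get search.strategies
+ // Raw values are parsed as null/boolean/number/JSON before validation runs.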
+ function runConfigShow(projectPath) {
2941
+ const ctxDir = resolveCtxDir(projectPath);
2942
+ const config = readConfig2(ctxDir);
2943
+ return {
2944
+ config,
2945
+ text: JSON.stringify(config, null, 2)
2946
+ };
2947
+ }
2948
+ function runConfigGet(projectPath, key) {
2949
+ const ctxDir = resolveCtxDir(projectPath);
2950
+ const config = readConfig2(ctxDir);
2951
+ return getNestedValue(config, key);
2952
+ }
2953
+ function runConfigSet(projectPath, key, rawValue) {
2954
+ const ctxDir = resolveCtxDir(projectPath);
2955
+ const config = readConfig2(ctxDir);
2956
+ const value = parseValue(rawValue);
2957
+ const rule = VALIDATION_RULES[key];
2958
+ if (rule && !rule.validate(value)) {
2959
+ throw new ConfigError(`Invalid value for "${key}": ${rule.message}`, ErrorCode.CONFIG_INVALID);
2960
+ }
2961
+ setNestedValue(config, key, value);
2962
+ writeConfig(ctxDir, config);
2963
+ }
2964
+ function runConfigReset(projectPath) {
2965
+ const ctxDir = resolveCtxDir(projectPath);
2966
+ writeConfig(ctxDir, structuredClone(DEFAULT_CONFIG));
2967
+ }
2968
+ function registerConfigCommand(program2) {
2969
+ const cmd = program2.command("config").description("Show or modify configuration");
2970
+ function configErrorHandler(err) {
2971
+ const verbose = program2.opts()["verbose"] === true;
2972
+ const logger = createLogger({ level: verbose ? LogLevel.DEBUG : LogLevel.INFO });
2973
+ process.exitCode = handleCommandError(err, logger, verbose);
2974
+ }
2975
+ cmd.command("show").description("Show current configuration").action(() => {
2976
+ try {
2977
+ const output = runConfigShow(process.cwd());
2978
+ console.log(output.text);
2979
+ } catch (err) {
2980
+ configErrorHandler(err);
2981
+ }
2982
+ });
2983
+ cmd.command("get <key>").description("Get a configuration value (dot notation)").action((key) => {
2984
+ try {
2985
+ const value = runConfigGet(process.cwd(), key);
2986
+ console.log(
2987
+ typeof value === "object" ? JSON.stringify(value, null, 2) : String(value)
2988
+ );
2989
+ } catch (err) {
2990
+ configErrorHandler(err);
2991
+ }
2992
+ });
2993
+ cmd.command("set <key> <value>").description("Set a configuration value (dot notation)").action((key, value) => {
2994
+ try {
2995
+ runConfigSet(process.cwd(), key, value);
2996
+ console.log(`Set ${key} = ${value}`);
2997
+ } catch (err) {
2998
+ configErrorHandler(err);
2999
+ }
3000
+ });
3001
+ cmd.command("reset").description("Reset configuration to defaults").action(() => {
3002
+ try {
3003
+ runConfigReset(process.cwd());
3004
+ console.log("Configuration reset to defaults.");
3005
+ } catch (err) {
3006
+ configErrorHandler(err);
3007
+ }
3008
+ });
3009
+ }
3010
+
3011
+ // src/cli/commands/auth.ts
3012
+ function registerAuthCommand(program2) {
3013
+ program2.command("auth").description("Set API keys for LLM and embedding providers").action(() => {
3014
+ console.log("ctx auth \u2014 not yet implemented");
3015
+ });
3016
+ }
3017
+
3018
+ // src/cli/index.ts
3019
+ var program = new Command();
3020
+ program.name("ctx").description("Kontext \u2014 Context engine for AI coding agents").version("0.1.0").option("--verbose", "Enable verbose/debug output");
3021
+ registerInitCommand(program);
3022
+ registerQueryCommand(program);
3023
+ registerAskCommand(program);
3024
+ registerFindCommand(program);
3025
+ registerUpdateCommand(program);
3026
+ registerWatchCommand(program);
3027
+ registerStatusCommand(program);
3028
+ registerSymbolsCommand(program);
3029
+ registerDepsCommand(program);
3030
+ registerChunkCommand(program);
3031
+ registerConfigCommand(program);
3032
+ registerAuthCommand(program);
3033
+ program.parse();
3034
+ //# sourceMappingURL=index.js.map