@liendev/lien 0.13.0 → 0.15.1

This diff shows the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
package/dist/index.js CHANGED
@@ -106,59 +106,9 @@ var init_schema = __esm({
  }
  });
 
- // src/config/merge.ts
- function deepMergeConfig(defaults, user) {
- return {
- version: user.version ?? defaults.version,
- core: {
- ...defaults.core,
- ...user.core
- },
- chunking: {
- ...defaults.chunking,
- ...user.chunking
- },
- mcp: {
- ...defaults.mcp,
- ...user.mcp
- },
- gitDetection: {
- ...defaults.gitDetection,
- ...user.gitDetection
- },
- fileWatching: {
- ...defaults.fileWatching,
- ...user.fileWatching
- },
- frameworks: user.frameworks ?? defaults.frameworks
- };
- }
- function detectNewFields(before, after) {
- const newFields = [];
- for (const key of Object.keys(after)) {
- if (!(key in before)) {
- newFields.push(key);
- continue;
- }
- if (typeof after[key] === "object" && after[key] !== null && !Array.isArray(after[key])) {
- const beforeSection = before[key] || {};
- const afterSection = after[key];
- for (const nestedKey of Object.keys(afterSection)) {
- if (!(nestedKey in beforeSection)) {
- newFields.push(`${key}.${nestedKey}`);
- }
- }
- }
- }
- return newFields;
- }
- var init_merge = __esm({
- "src/config/merge.ts"() {
- "use strict";
- }
- });
-
  // src/config/migration.ts
+ import fs from "fs/promises";
+ import path from "path";
  function needsMigration(config) {
  if (!config) {
  return false;
@@ -211,7 +161,7 @@ function migrateConfig(oldConfig) {
  path: ".",
  enabled: true,
  config: {
- include: oldConfig.indexing.include ?? ["**/*.{ts,tsx,js,jsx,py,go,rs,java,c,cpp,cs}"],
+ include: oldConfig.indexing.include ?? ["**/*.{ts,tsx,js,jsx,py,php,go,rs,java,c,cpp,cs}"],
  exclude: oldConfig.indexing.exclude ?? [
  "**/node_modules/**",
  "**/dist/**",
@@ -231,7 +181,7 @@ function migrateConfig(oldConfig) {
  path: ".",
  enabled: true,
  config: {
- include: ["**/*.{ts,tsx,js,jsx,py,go,rs,java,c,cpp,cs}"],
+ include: ["**/*.{ts,tsx,js,jsx,py,php,go,rs,java,c,cpp,cs}"],
  exclude: [
  "**/node_modules/**",
  "**/dist/**",
@@ -248,6 +198,36 @@ function migrateConfig(oldConfig) {
  }
  return newConfig;
  }
+ async function migrateConfigFile(rootDir = process.cwd()) {
+ const configPath = path.join(rootDir, ".lien.config.json");
+ try {
+ const configContent = await fs.readFile(configPath, "utf-8");
+ const oldConfig = JSON.parse(configContent);
+ if (!needsMigration(oldConfig)) {
+ return {
+ migrated: false,
+ config: oldConfig
+ };
+ }
+ const newConfig = migrateConfig(oldConfig);
+ const backupPath = `${configPath}.v0.2.0.backup`;
+ await fs.copyFile(configPath, backupPath);
+ await fs.writeFile(configPath, JSON.stringify(newConfig, null, 2) + "\n", "utf-8");
+ return {
+ migrated: true,
+ backupPath,
+ config: newConfig
+ };
+ } catch (error) {
+ if (error.code === "ENOENT") {
+ return {
+ migrated: false,
+ config: defaultConfig
+ };
+ }
+ throw error;
+ }
+ }
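The hunk above adds a standalone `migrateConfigFile` helper that reads `.lien.config.json`, backs it up, and rewrites it in the new format. A hedged usage sketch follows; the calling context is assumed, but the result shape mirrors the code in the diff.

```js
// Sketch of how the new helper could be called (assumed caller, not from the package).
// It mirrors the return shape visible in the diff: { migrated, backupPath?, config }.
const result = await migrateConfigFile(process.cwd());

if (result.migrated) {
  // The old file was copied to .lien.config.json.v0.2.0.backup before being rewritten.
  console.log(`Config migrated, backup at ${result.backupPath}`);
} else {
  // Either the config was already in the new format, or no config file existed
  // (ENOENT falls back to defaultConfig).
  console.log("No migration needed");
}
```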
  var init_migration = __esm({
  "src/config/migration.ts"() {
  "use strict";
@@ -256,6 +236,58 @@ var init_migration = __esm({
  }
  });
 
+ // src/config/merge.ts
+ function deepMergeConfig(defaults, user) {
+ return {
+ version: user.version ?? defaults.version,
+ core: {
+ ...defaults.core,
+ ...user.core
+ },
+ chunking: {
+ ...defaults.chunking,
+ ...user.chunking
+ },
+ mcp: {
+ ...defaults.mcp,
+ ...user.mcp
+ },
+ gitDetection: {
+ ...defaults.gitDetection,
+ ...user.gitDetection
+ },
+ fileWatching: {
+ ...defaults.fileWatching,
+ ...user.fileWatching
+ },
+ frameworks: user.frameworks ?? defaults.frameworks
+ };
+ }
+ function detectNewFields(before, after) {
+ const newFields = [];
+ for (const key of Object.keys(after)) {
+ if (!(key in before)) {
+ newFields.push(key);
+ continue;
+ }
+ if (typeof after[key] === "object" && after[key] !== null && !Array.isArray(after[key])) {
+ const beforeSection = before[key] || {};
+ const afterSection = after[key];
+ for (const nestedKey of Object.keys(afterSection)) {
+ if (!(nestedKey in beforeSection)) {
+ newFields.push(`${key}.${nestedKey}`);
+ }
+ }
+ }
+ }
+ return newFields;
+ }
+ var init_merge = __esm({
+ "src/config/merge.ts"() {
+ "use strict";
+ }
+ });
+
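`deepMergeConfig` performs a one-level-deep merge per config section (user values win, defaults fill the gaps), and `detectNewFields` reports top-level and one-level-nested keys that exist only in the newer object. A small self-contained sketch of that behaviour, using invented config values:

```js
// Self-contained illustration of the merge semantics shown above (values are invented).
const defaults = { version: "0.3.0", core: { concurrency: 4, telemetry: false }, frameworks: [] };
const user = { core: { concurrency: 8 } };

const merged = {
  version: user.version ?? defaults.version,          // "0.3.0"
  core: { ...defaults.core, ...user.core },            // { concurrency: 8, telemetry: false }
  frameworks: user.frameworks ?? defaults.frameworks,  // []
};

// detectNewFields(user, merged) would report "version", "frameworks" and "core.telemetry"
// as fields the user's file does not have yet.
console.log(merged);
```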
  // src/errors/codes.ts
  var init_codes = __esm({
  "src/errors/codes.ts"() {
@@ -346,8 +378,8 @@ var init_errors = __esm({
346
378
  });
347
379
 
348
380
  // src/config/service.ts
349
- import fs6 from "fs/promises";
350
- import path6 from "path";
381
+ import fs8 from "fs/promises";
382
+ import path8 from "path";
351
383
  var ConfigService, configService;
352
384
  var init_service = __esm({
353
385
  "src/config/service.ts"() {
@@ -369,13 +401,13 @@ var init_service = __esm({
369
401
  async load(rootDir = process.cwd()) {
370
402
  const configPath = this.getConfigPath(rootDir);
371
403
  try {
372
- const configContent = await fs6.readFile(configPath, "utf-8");
404
+ const configContent = await fs8.readFile(configPath, "utf-8");
373
405
  const userConfig = JSON.parse(configContent);
374
406
  if (this.needsMigration(userConfig)) {
375
407
  console.log("\u{1F504} Migrating config from v0.2.0 to v0.3.0...");
376
408
  const result = await this.migrate(rootDir);
377
409
  if (result.migrated && result.backupPath) {
378
- const backupFilename = path6.basename(result.backupPath);
410
+ const backupFilename = path8.basename(result.backupPath);
379
411
  console.log(`\u2705 Migration complete! Backup saved as ${backupFilename}`);
380
412
  console.log("\u{1F4DD} Your config now uses the framework-based structure.");
381
413
  }
@@ -431,7 +463,7 @@ ${validation.errors.join("\n")}`,
431
463
  }
432
464
  try {
433
465
  const configJson = JSON.stringify(config, null, 2) + "\n";
434
- await fs6.writeFile(configPath, configJson, "utf-8");
466
+ await fs8.writeFile(configPath, configJson, "utf-8");
435
467
  } catch (error) {
436
468
  throw wrapError(error, "Failed to save configuration", { path: configPath });
437
469
  }
@@ -445,7 +477,7 @@ ${validation.errors.join("\n")}`,
445
477
  async exists(rootDir = process.cwd()) {
446
478
  const configPath = this.getConfigPath(rootDir);
447
479
  try {
448
- await fs6.access(configPath);
480
+ await fs8.access(configPath);
449
481
  return true;
450
482
  } catch {
451
483
  return false;
@@ -462,7 +494,7 @@ ${validation.errors.join("\n")}`,
462
494
  async migrate(rootDir = process.cwd()) {
463
495
  const configPath = this.getConfigPath(rootDir);
464
496
  try {
465
- const configContent = await fs6.readFile(configPath, "utf-8");
497
+ const configContent = await fs8.readFile(configPath, "utf-8");
466
498
  const oldConfig = JSON.parse(configContent);
467
499
  if (!this.needsMigration(oldConfig)) {
468
500
  return {
@@ -480,7 +512,7 @@ ${validation.errors.join("\n")}`,
480
512
  );
481
513
  }
482
514
  const backupPath = `${configPath}.v0.2.0.backup`;
483
- await fs6.copyFile(configPath, backupPath);
515
+ await fs8.copyFile(configPath, backupPath);
484
516
  await this.save(rootDir, newConfig);
485
517
  return {
486
518
  migrated: true,
@@ -578,7 +610,7 @@ ${validation.errors.join("\n")}`,
578
610
  * Get the full path to the config file
579
611
  */
580
612
  getConfigPath(rootDir) {
581
- return path6.join(rootDir, _ConfigService.CONFIG_FILENAME);
613
+ return path8.join(rootDir, _ConfigService.CONFIG_FILENAME);
582
614
  }
583
615
  /**
584
616
  * Validate modern (v0.3.0+) configuration
@@ -742,7 +774,7 @@ ${validation.errors.join("\n")}`,
742
774
  errors.push(`frameworks[${index}] missing required field: path`);
743
775
  } else if (typeof fw.path !== "string") {
744
776
  errors.push(`frameworks[${index}].path must be a string`);
745
- } else if (path6.isAbsolute(fw.path)) {
777
+ } else if (path8.isAbsolute(fw.path)) {
746
778
  errors.push(`frameworks[${index}].path must be relative, got: ${fw.path}`);
747
779
  }
748
780
  if (fw.enabled === void 0) {
@@ -802,12 +834,12 @@ __export(utils_exports, {
802
834
  });
803
835
  import { exec } from "child_process";
804
836
  import { promisify } from "util";
805
- import fs7 from "fs/promises";
806
- import path7 from "path";
837
+ import fs9 from "fs/promises";
838
+ import path9 from "path";
807
839
  async function isGitRepo(rootDir) {
808
840
  try {
809
- const gitDir = path7.join(rootDir, ".git");
810
- await fs7.access(gitDir);
841
+ const gitDir = path9.join(rootDir, ".git");
842
+ await fs9.access(gitDir);
811
843
  return true;
812
844
  } catch {
813
845
  return false;
@@ -846,7 +878,7 @@ async function getChangedFiles(rootDir, fromRef, toRef) {
846
878
  // 10 second timeout for diffs
847
879
  }
848
880
  );
849
- const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
881
+ const files = stdout.trim().split("\n").filter(Boolean).map((file) => path9.join(rootDir, file));
850
882
  return files;
851
883
  } catch (error) {
852
884
  throw new Error(`Failed to get changed files: ${error}`);
@@ -861,7 +893,7 @@ async function getChangedFilesInCommit(rootDir, commitSha) {
861
893
  timeout: 1e4
862
894
  }
863
895
  );
864
- const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
896
+ const files = stdout.trim().split("\n").filter(Boolean).map((file) => path9.join(rootDir, file));
865
897
  return files;
866
898
  } catch (error) {
867
899
  throw new Error(`Failed to get changed files in commit: ${error}`);
@@ -876,7 +908,7 @@ async function getChangedFilesBetweenCommits(rootDir, fromCommit, toCommit) {
876
908
  timeout: 1e4
877
909
  }
878
910
  );
879
- const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
911
+ const files = stdout.trim().split("\n").filter(Boolean).map((file) => path9.join(rootDir, file));
880
912
  return files;
881
913
  } catch (error) {
882
914
  throw new Error(`Failed to get changed files between commits: ${error}`);
@@ -899,21 +931,21 @@ var init_utils = __esm({
899
931
  });
900
932
 
901
933
  // src/vectordb/version.ts
902
- import fs8 from "fs/promises";
903
- import path8 from "path";
934
+ import fs10 from "fs/promises";
935
+ import path10 from "path";
904
936
  async function writeVersionFile(indexPath) {
905
937
  try {
906
- const versionFilePath = path8.join(indexPath, VERSION_FILE);
938
+ const versionFilePath = path10.join(indexPath, VERSION_FILE);
907
939
  const timestamp = Date.now().toString();
908
- await fs8.writeFile(versionFilePath, timestamp, "utf-8");
940
+ await fs10.writeFile(versionFilePath, timestamp, "utf-8");
909
941
  } catch (error) {
910
942
  console.error(`Warning: Failed to write version file: ${error}`);
911
943
  }
912
944
  }
913
945
  async function readVersionFile(indexPath) {
914
946
  try {
915
- const versionFilePath = path8.join(indexPath, VERSION_FILE);
916
- const content = await fs8.readFile(versionFilePath, "utf-8");
947
+ const versionFilePath = path10.join(indexPath, VERSION_FILE);
948
+ const content = await fs10.readFile(versionFilePath, "utf-8");
917
949
  const timestamp = parseInt(content.trim(), 10);
918
950
  return isNaN(timestamp) ? 0 : timestamp;
919
951
  } catch (error) {
@@ -931,8 +963,8 @@ var init_version2 = __esm({
931
963
  // src/indexer/scanner.ts
932
964
  import { glob } from "glob";
933
965
  import ignore from "ignore";
934
- import fs10 from "fs/promises";
935
- import path10 from "path";
966
+ import fs12 from "fs/promises";
967
+ import path12 from "path";
936
968
  async function scanCodebaseWithFrameworks(rootDir, config) {
937
969
  const allFiles = [];
938
970
  for (const framework of config.frameworks) {
@@ -945,16 +977,16 @@ async function scanCodebaseWithFrameworks(rootDir, config) {
945
977
  return allFiles;
946
978
  }
947
979
  async function scanFramework(rootDir, framework) {
948
- const frameworkPath = path10.join(rootDir, framework.path);
949
- const gitignorePath = path10.join(frameworkPath, ".gitignore");
980
+ const frameworkPath = path12.join(rootDir, framework.path);
981
+ const gitignorePath = path12.join(frameworkPath, ".gitignore");
950
982
  let ig = ignore();
951
983
  try {
952
- const gitignoreContent = await fs10.readFile(gitignorePath, "utf-8");
984
+ const gitignoreContent = await fs12.readFile(gitignorePath, "utf-8");
953
985
  ig = ignore().add(gitignoreContent);
954
986
  } catch (e) {
955
- const rootGitignorePath = path10.join(rootDir, ".gitignore");
987
+ const rootGitignorePath = path12.join(rootDir, ".gitignore");
956
988
  try {
957
- const gitignoreContent = await fs10.readFile(rootGitignorePath, "utf-8");
989
+ const gitignoreContent = await fs12.readFile(rootGitignorePath, "utf-8");
958
990
  ig = ignore().add(gitignoreContent);
959
991
  } catch (e2) {
960
992
  }
@@ -976,15 +1008,15 @@ async function scanFramework(rootDir, framework) {
976
1008
  }
977
1009
  const uniqueFiles = Array.from(new Set(allFiles));
978
1010
  return uniqueFiles.filter((file) => !ig.ignores(file)).map((file) => {
979
- return framework.path === "." ? file : path10.join(framework.path, file);
1011
+ return framework.path === "." ? file : path12.join(framework.path, file);
980
1012
  });
981
1013
  }
982
1014
  async function scanCodebase(options) {
983
1015
  const { rootDir, includePatterns = [], excludePatterns = [] } = options;
984
- const gitignorePath = path10.join(rootDir, ".gitignore");
1016
+ const gitignorePath = path12.join(rootDir, ".gitignore");
985
1017
  let ig = ignore();
986
1018
  try {
987
- const gitignoreContent = await fs10.readFile(gitignorePath, "utf-8");
1019
+ const gitignoreContent = await fs12.readFile(gitignorePath, "utf-8");
988
1020
  ig = ignore().add(gitignoreContent);
989
1021
  } catch (e) {
990
1022
  }
@@ -998,7 +1030,7 @@ async function scanCodebase(options) {
998
1030
  ".lien/**",
999
1031
  ...excludePatterns
1000
1032
  ]);
1001
- const patterns = includePatterns.length > 0 ? includePatterns : ["**/*.{ts,tsx,js,jsx,py,go,rs,java,cpp,c,h,md,mdx}"];
1033
+ const patterns = includePatterns.length > 0 ? includePatterns : ["**/*.{ts,tsx,js,jsx,py,php,go,rs,java,cpp,c,cs,h,md,mdx}"];
1002
1034
  const allFiles = [];
1003
1035
  for (const pattern of patterns) {
1004
1036
  const files = await glob(pattern, {
@@ -1011,12 +1043,12 @@ async function scanCodebase(options) {
1011
1043
  }
1012
1044
  const uniqueFiles = Array.from(new Set(allFiles));
1013
1045
  return uniqueFiles.filter((file) => {
1014
- const relativePath = path10.relative(rootDir, file);
1046
+ const relativePath = path12.relative(rootDir, file);
1015
1047
  return !ig.ignores(relativePath);
1016
1048
  });
1017
1049
  }
1018
1050
  function detectLanguage(filepath) {
1019
- const ext = path10.extname(filepath).toLowerCase();
1051
+ const ext = path12.extname(filepath).toLowerCase();
1020
1052
  const languageMap = {
1021
1053
  ".ts": "typescript",
1022
1054
  ".tsx": "typescript",
@@ -1355,6 +1387,8 @@ var init_symbol_extractor = __esm({
  import Parser from "tree-sitter";
  import TypeScript from "tree-sitter-typescript";
  import JavaScript from "tree-sitter-javascript";
+ import PHPParser from "tree-sitter-php";
+ import Python from "tree-sitter-python";
  import { extname } from "path";
  function getParser(language) {
  if (!parserCache.has(language)) {
@@ -1379,6 +1413,10 @@ function detectLanguage2(filePath) {
  case "mjs":
  case "cjs":
  return "javascript";
+ case "php":
+ return "php";
+ case "py":
+ return "python";
  default:
  return null;
  }
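The new `case "php"` and `case "py"` branches extend the extension-based language detection. A minimal standalone sketch of the same mapping (the helper name here is invented, not the package's):

```js
// Simplified stand-in for the extension switch shown above (helper name is invented).
function languageForExtension(ext) {
  switch (ext) {
    case "ts":
    case "tsx":
      return "typescript";
    case "js":
    case "jsx":
    case "mjs":
    case "cjs":
      return "javascript";
    case "php":
      return "php";    // new in this release
    case "py":
      return "python"; // new in this release
    default:
      return null;     // extension not AST-supported
  }
}

console.log(languageForExtension("py")); // "python"
```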
@@ -1411,86 +1449,116 @@ var init_parser = __esm({
  parserCache = /* @__PURE__ */ new Map();
  languageConfig = {
  typescript: TypeScript.typescript,
- javascript: JavaScript
- // Use proper JavaScript parser
+ javascript: JavaScript,
+ php: PHPParser.php,
+ // Note: tree-sitter-php exports both 'php' (mixed HTML/PHP) and 'php_only'
+ python: Python
  };
  }
  });
 
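`languageConfig` maps Lien's language ids to tree-sitter grammar objects, and `getParser` (partially visible above) caches one parser per language. A hedged sketch of that pattern using the standard node tree-sitter API; the cache shape is an assumption based on the diff:

```js
// Sketch of the per-language parser cache implied by getParser/languageConfig above.
import Parser from "tree-sitter";
import TypeScript from "tree-sitter-typescript";
import PHPParser from "tree-sitter-php";
import Python from "tree-sitter-python";

const languageConfig = {
  typescript: TypeScript.typescript, // tree-sitter-typescript exports { typescript, tsx }
  php: PHPParser.php,                // the 'php' grammar handles mixed HTML/PHP
  python: Python,
};

const parserCache = new Map();

function getParser(language) {
  if (!parserCache.has(language)) {
    const parser = new Parser();
    parser.setLanguage(languageConfig[language]);
    parserCache.set(language, parser); // one cached parser per language id
  }
  return parserCache.get(language);
}

const tree = getParser("python").parse("def hello():\n    return 1\n");
console.log(tree.rootNode.type); // "module"
```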
1420
1460
  // src/indexer/ast/symbols.ts
1421
- function extractSymbolInfo(node, content, parentClass) {
1422
- const type = node.type;
1423
- if (type === "function_declaration" || type === "function") {
1424
- const nameNode = node.childForFieldName("name");
1425
- if (!nameNode) return null;
1426
- return {
1427
- name: nameNode.text,
1428
- type: parentClass ? "method" : "function",
1429
- startLine: node.startPosition.row + 1,
1430
- endLine: node.endPosition.row + 1,
1431
- parentClass,
1432
- signature: extractSignature(node, content),
1433
- parameters: extractParameters(node, content),
1434
- returnType: extractReturnType(node, content),
1435
- complexity: calculateComplexity(node)
1436
- };
1437
- }
1438
- if (type === "arrow_function" || type === "function_expression") {
1439
- const parent = node.parent;
1440
- let name = "anonymous";
1441
- if (parent?.type === "variable_declarator") {
1442
- const nameNode = parent.childForFieldName("name");
1443
- name = nameNode?.text || "anonymous";
1444
- }
1445
- return {
1446
- name,
1447
- type: parentClass ? "method" : "function",
1448
- startLine: node.startPosition.row + 1,
1449
- endLine: node.endPosition.row + 1,
1450
- parentClass,
1451
- signature: extractSignature(node, content),
1452
- parameters: extractParameters(node, content),
1453
- complexity: calculateComplexity(node)
1454
- };
1455
- }
1456
- if (type === "method_definition") {
1457
- const nameNode = node.childForFieldName("name");
1458
- if (!nameNode) return null;
1459
- return {
1460
- name: nameNode.text,
1461
- type: "method",
1462
- startLine: node.startPosition.row + 1,
1463
- endLine: node.endPosition.row + 1,
1464
- parentClass,
1465
- signature: extractSignature(node, content),
1466
- parameters: extractParameters(node, content),
1467
- returnType: extractReturnType(node, content),
1468
- complexity: calculateComplexity(node)
1469
- };
1470
- }
1471
- if (type === "class_declaration") {
1472
- const nameNode = node.childForFieldName("name");
1473
- if (!nameNode) return null;
1474
- return {
1475
- name: nameNode.text,
1476
- type: "class",
1477
- startLine: node.startPosition.row + 1,
1478
- endLine: node.endPosition.row + 1,
1479
- signature: `class ${nameNode.text}`
1480
- };
1461
+ function extractFunctionInfo(node, content, parentClass) {
1462
+ const nameNode = node.childForFieldName("name");
1463
+ if (!nameNode) return null;
1464
+ return {
1465
+ name: nameNode.text,
1466
+ type: parentClass ? "method" : "function",
1467
+ startLine: node.startPosition.row + 1,
1468
+ endLine: node.endPosition.row + 1,
1469
+ parentClass,
1470
+ signature: extractSignature(node, content),
1471
+ parameters: extractParameters(node, content),
1472
+ returnType: extractReturnType(node, content),
1473
+ complexity: calculateComplexity(node)
1474
+ };
1475
+ }
1476
+ function extractArrowFunctionInfo(node, content, parentClass) {
1477
+ const parent = node.parent;
1478
+ let name = "anonymous";
1479
+ if (parent?.type === "variable_declarator") {
1480
+ const nameNode = parent.childForFieldName("name");
1481
+ name = nameNode?.text || "anonymous";
1481
1482
  }
1482
- if (type === "interface_declaration") {
1483
- const nameNode = node.childForFieldName("name");
1484
- if (!nameNode) return null;
1485
- return {
1486
- name: nameNode.text,
1487
- type: "interface",
1488
- startLine: node.startPosition.row + 1,
1489
- endLine: node.endPosition.row + 1,
1490
- signature: `interface ${nameNode.text}`
1491
- };
1483
+ return {
1484
+ name,
1485
+ type: parentClass ? "method" : "function",
1486
+ startLine: node.startPosition.row + 1,
1487
+ endLine: node.endPosition.row + 1,
1488
+ parentClass,
1489
+ signature: extractSignature(node, content),
1490
+ parameters: extractParameters(node, content),
1491
+ complexity: calculateComplexity(node)
1492
+ };
1493
+ }
1494
+ function extractMethodInfo(node, content, parentClass) {
1495
+ const nameNode = node.childForFieldName("name");
1496
+ if (!nameNode) return null;
1497
+ return {
1498
+ name: nameNode.text,
1499
+ type: "method",
1500
+ startLine: node.startPosition.row + 1,
1501
+ endLine: node.endPosition.row + 1,
1502
+ parentClass,
1503
+ signature: extractSignature(node, content),
1504
+ parameters: extractParameters(node, content),
1505
+ returnType: extractReturnType(node, content),
1506
+ complexity: calculateComplexity(node)
1507
+ };
1508
+ }
1509
+ function extractClassInfo(node, _content, _parentClass) {
1510
+ const nameNode = node.childForFieldName("name");
1511
+ if (!nameNode) return null;
1512
+ return {
1513
+ name: nameNode.text,
1514
+ type: "class",
1515
+ startLine: node.startPosition.row + 1,
1516
+ endLine: node.endPosition.row + 1,
1517
+ signature: `class ${nameNode.text}`
1518
+ };
1519
+ }
1520
+ function extractInterfaceInfo(node, _content, _parentClass) {
1521
+ const nameNode = node.childForFieldName("name");
1522
+ if (!nameNode) return null;
1523
+ return {
1524
+ name: nameNode.text,
1525
+ type: "interface",
1526
+ startLine: node.startPosition.row + 1,
1527
+ endLine: node.endPosition.row + 1,
1528
+ signature: `interface ${nameNode.text}`
1529
+ };
1530
+ }
1531
+ function extractPythonFunctionInfo(node, content, parentClass) {
1532
+ const nameNode = node.childForFieldName("name");
1533
+ if (!nameNode) return null;
1534
+ return {
1535
+ name: nameNode.text,
1536
+ type: parentClass ? "method" : "function",
1537
+ startLine: node.startPosition.row + 1,
1538
+ endLine: node.endPosition.row + 1,
1539
+ parentClass,
1540
+ signature: extractSignature(node, content),
1541
+ parameters: extractParameters(node, content),
1542
+ complexity: calculateComplexity(node)
1543
+ };
1544
+ }
1545
+ function extractPythonClassInfo(node, _content, _parentClass) {
1546
+ const nameNode = node.childForFieldName("name");
1547
+ if (!nameNode) return null;
1548
+ return {
1549
+ name: nameNode.text,
1550
+ type: "class",
1551
+ startLine: node.startPosition.row + 1,
1552
+ endLine: node.endPosition.row + 1,
1553
+ signature: `class ${nameNode.text}`
1554
+ };
1555
+ }
1556
+ function extractSymbolInfo(node, content, parentClass, language) {
1557
+ if (node.type === "function_definition" && language === "python") {
1558
+ return extractPythonFunctionInfo(node, content, parentClass);
1492
1559
  }
1493
- return null;
1560
+ const extractor = symbolExtractors[node.type];
1561
+ return extractor ? extractor(node, content, parentClass) : null;
1494
1562
  }
1495
1563
  function extractSignature(node, content) {
1496
1564
  const startLine = node.startPosition.row;
@@ -1527,19 +1595,39 @@ function extractReturnType(node, _content) {
  function calculateComplexity(node) {
  let complexity = 1;
  const decisionPoints = [
+ // Common across languages (TypeScript/JavaScript/Python/PHP)
  "if_statement",
+ // if conditions
  "while_statement",
+ // while loops
+ "for_statement",
+ // for loops
+ "switch_case",
+ // switch/case statements
+ "catch_clause",
+ // try/catch error handling
+ "ternary_expression",
+ // Ternary operator (a ? b : c)
+ "binary_expression",
+ // For && and || logical operators
+ // TypeScript/JavaScript specific
  "do_statement",
  // do...while loops
- "for_statement",
  "for_in_statement",
+ // for...in loops
  "for_of_statement",
  // for...of loops
- "switch_case",
- "catch_clause",
- "ternary_expression",
- "binary_expression"
- // For && and ||
+ // PHP specific
+ "foreach_statement",
+ // PHP foreach loops
+ // Python specific
+ "elif_clause",
+ // Python elif (adds decision point)
+ // Note: 'else_clause' is NOT a decision point (it's the default path)
+ "except_clause",
+ // Python except (try/except)
+ "conditional_expression"
+ // Python ternary (x if cond else y)
  ];
  function traverse(n) {
  if (decisionPoints.includes(n.type)) {
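The expanded `decisionPoints` list feeds a simple cyclomatic-complexity estimate: start at 1 and add 1 for every decision node found while walking the syntax tree. A self-contained toy sketch over a hand-built node tree (the tree shape mimics tree-sitter's `type`/children fields, but the data is invented):

```js
// Toy illustration of the counting rule used by calculateComplexity above.
// Nodes are plain objects with { type, children }; the real code walks tree-sitter nodes.
const decisionPoints = new Set([
  "if_statement", "while_statement", "for_statement", "switch_case",
  "catch_clause", "ternary_expression", "binary_expression",
  "foreach_statement", "elif_clause", "except_clause", "conditional_expression",
]);

function complexityOf(node) {
  let complexity = 1; // a straight-line function has complexity 1
  (function walk(n) {
    if (decisionPoints.has(n.type)) complexity += 1;
    for (const child of n.children ?? []) walk(child);
  })(node);
  return complexity;
}

// def f(x): return 1 if x else 2  ->  one conditional_expression  ->  complexity 2
const fakePythonFn = {
  type: "function_definition",
  children: [{ type: "block", children: [{ type: "conditional_expression", children: [] }] }],
};
console.log(complexityOf(fakePythonFn)); // 2
```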
@@ -1568,7 +1656,13 @@ function extractImports(rootNode) {
  if (sourceNode) {
  const importPath = sourceNode.text.replace(/['"]/g, "");
  imports.push(importPath);
+ } else {
+ const importText = node.text.split("\n")[0];
+ imports.push(importText);
  }
+ } else if (node.type === "import_from_statement") {
+ const importText = node.text.split("\n")[0];
+ imports.push(importText);
  }
  if (node === rootNode) {
  for (let i = 0; i < node.namedChildCount; i++) {
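For JS/TS imports the module path is taken from the `source` field and stripped of quotes; the new `else` branches keep only the first line of the raw statement so Python's `import x` / `from x import y` (which have no string source node) still contribute an entry. A tiny hedged sketch of both steps, with invented input strings:

```js
// Sketch of the two paths added above (inputs are invented examples).
const tsSource = '"./config/service"';
console.log(tsSource.replace(/['"]/g, "")); // ./config/service  (import with a source node)

// Multi-line Python imports explain why only the first line of the statement text is kept.
const pyStatement = "from collections import (\n    OrderedDict,\n    defaultdict,\n)";
console.log(pyStatement.split("\n")[0]);    // from collections import (
```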
@@ -1580,9 +1674,272 @@ function extractImports(rootNode) {
  traverse(rootNode);
  return imports;
  }
+ var symbolExtractors;
  var init_symbols = __esm({
  "src/indexer/ast/symbols.ts"() {
  "use strict";
+ symbolExtractors = {
+ // TypeScript/JavaScript
+ "function_declaration": extractFunctionInfo,
+ "function": extractFunctionInfo,
+ "arrow_function": extractArrowFunctionInfo,
+ "function_expression": extractArrowFunctionInfo,
+ "method_definition": extractMethodInfo,
+ "class_declaration": extractClassInfo,
+ "interface_declaration": extractInterfaceInfo,
+ // PHP
+ "function_definition": extractFunctionInfo,
+ // PHP functions (Python handled via language check in extractSymbolInfo)
+ "method_declaration": extractMethodInfo,
+ // PHP methods
+ // Python
+ "async_function_definition": extractPythonFunctionInfo,
+ // Python async functions
+ "class_definition": extractPythonClassInfo
+ // Python classes
+ // Note: Python regular functions use 'function_definition' (same as PHP)
+ // They are dispatched to extractPythonFunctionInfo via language check in extractSymbolInfo()
+ };
+ }
+ });
+
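Symbol extraction is now a lookup table keyed by AST node type, with one case handled before the lookup: `function_definition` exists in both the PHP and Python grammars, so the language decides which extractor runs. A hedged, stubbed sketch of that dispatch shape (the real extractors return full symbol info: name, lines, signature, complexity, and so on):

```js
// Stubbed illustration of the dispatch pattern above; names and shapes are simplified.
const extractors = {
  function_declaration: (node) => ({ kind: "function", name: node.name }),
  method_declaration: (node) => ({ kind: "method", name: node.name }),    // PHP
  function_definition: (node) => ({ kind: "function", name: node.name }), // PHP (and Python, see below)
  class_definition: (node) => ({ kind: "class", name: node.name }),       // Python
};

function extractSymbol(node, language) {
  // Python's def/async def shares the "function_definition" type with PHP,
  // so the language check runs before the table lookup, as in the diff.
  if (node.type === "function_definition" && language === "python") {
    return { kind: "function", name: node.name, python: true };
  }
  const extractor = extractors[node.type];
  return extractor ? extractor(node) : null;
}

console.log(extractSymbol({ type: "function_definition", name: "index" }, "php"));
console.log(extractSymbol({ type: "function_definition", name: "main" }, "python"));
```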
1706
+ // src/indexer/ast/traversers/typescript.ts
1707
+ var TypeScriptTraverser, JavaScriptTraverser;
1708
+ var init_typescript = __esm({
1709
+ "src/indexer/ast/traversers/typescript.ts"() {
1710
+ "use strict";
1711
+ TypeScriptTraverser = class {
1712
+ targetNodeTypes = [
1713
+ "function_declaration",
1714
+ "function",
1715
+ "interface_declaration",
1716
+ "method_definition",
1717
+ "lexical_declaration",
1718
+ // For const/let with arrow functions
1719
+ "variable_declaration"
1720
+ // For var with functions
1721
+ ];
1722
+ containerTypes = [
1723
+ "class_declaration"
1724
+ // We extract methods, not the class itself
1725
+ ];
1726
+ declarationTypes = [
1727
+ "lexical_declaration",
1728
+ // const/let
1729
+ "variable_declaration"
1730
+ // var
1731
+ ];
1732
+ functionTypes = [
1733
+ "arrow_function",
1734
+ "function_expression",
1735
+ "function"
1736
+ ];
1737
+ shouldExtractChildren(node) {
1738
+ return this.containerTypes.includes(node.type);
1739
+ }
1740
+ isDeclarationWithFunction(node) {
1741
+ return this.declarationTypes.includes(node.type);
1742
+ }
1743
+ getContainerBody(node) {
1744
+ if (node.type === "class_declaration") {
1745
+ return node.childForFieldName("body");
1746
+ }
1747
+ return null;
1748
+ }
1749
+ shouldTraverseChildren(node) {
1750
+ return node.type === "program" || node.type === "export_statement" || node.type === "class_body";
1751
+ }
1752
+ findParentContainerName(node) {
1753
+ let current = node.parent;
1754
+ while (current) {
1755
+ if (current.type === "class_declaration") {
1756
+ const nameNode = current.childForFieldName("name");
1757
+ return nameNode?.text;
1758
+ }
1759
+ current = current.parent;
1760
+ }
1761
+ return void 0;
1762
+ }
1763
+ /**
1764
+ * Check if a declaration node contains a function (arrow, function expression, etc.)
1765
+ */
1766
+ findFunctionInDeclaration(node) {
1767
+ const search2 = (n, depth) => {
1768
+ if (depth > 3) return null;
1769
+ if (this.functionTypes.includes(n.type)) {
1770
+ return n;
1771
+ }
1772
+ for (let i = 0; i < n.childCount; i++) {
1773
+ const child = n.child(i);
1774
+ if (child) {
1775
+ const result = search2(child, depth + 1);
1776
+ if (result) return result;
1777
+ }
1778
+ }
1779
+ return null;
1780
+ };
1781
+ const functionNode = search2(node, 0);
1782
+ return {
1783
+ hasFunction: functionNode !== null,
1784
+ functionNode
1785
+ };
1786
+ }
1787
+ };
1788
+ JavaScriptTraverser = class extends TypeScriptTraverser {
1789
+ };
1790
+ }
1791
+ });
1792
+
1793
+ // src/indexer/ast/traversers/php.ts
1794
+ var PHPTraverser;
1795
+ var init_php = __esm({
1796
+ "src/indexer/ast/traversers/php.ts"() {
1797
+ "use strict";
1798
+ PHPTraverser = class {
1799
+ targetNodeTypes = [
1800
+ "function_definition",
1801
+ // function foo() {}
1802
+ "method_declaration"
1803
+ // public function bar() {}
1804
+ ];
1805
+ containerTypes = [
1806
+ "class_declaration",
1807
+ // We extract methods, not the class itself
1808
+ "trait_declaration",
1809
+ // PHP traits
1810
+ "interface_declaration"
1811
+ // PHP interfaces (for interface methods)
1812
+ ];
1813
+ declarationTypes = [
1814
+ // PHP doesn't have arrow functions or const/let like JS
1815
+ // Functions are always defined with 'function' keyword
1816
+ ];
1817
+ functionTypes = [
1818
+ "function_definition",
1819
+ "method_declaration"
1820
+ ];
1821
+ shouldExtractChildren(node) {
1822
+ return this.containerTypes.includes(node.type);
1823
+ }
1824
+ isDeclarationWithFunction(_node) {
1825
+ return false;
1826
+ }
1827
+ getContainerBody(node) {
1828
+ if (node.type === "class_declaration" || node.type === "trait_declaration" || node.type === "interface_declaration") {
1829
+ return node.childForFieldName("body");
1830
+ }
1831
+ return null;
1832
+ }
1833
+ shouldTraverseChildren(node) {
1834
+ return node.type === "program" || // Top-level PHP file
1835
+ node.type === "php" || // PHP block
1836
+ node.type === "declaration_list";
1837
+ }
1838
+ findParentContainerName(node) {
1839
+ let current = node.parent;
1840
+ while (current) {
1841
+ if (current.type === "class_declaration" || current.type === "trait_declaration") {
1842
+ const nameNode = current.childForFieldName("name");
1843
+ return nameNode?.text;
1844
+ }
1845
+ current = current.parent;
1846
+ }
1847
+ return void 0;
1848
+ }
1849
+ findFunctionInDeclaration(_node) {
1850
+ return {
1851
+ hasFunction: false,
1852
+ functionNode: null
1853
+ };
1854
+ }
1855
+ };
1856
+ }
1857
+ });
1858
+
1859
+ // src/indexer/ast/traversers/python.ts
1860
+ var PythonTraverser;
1861
+ var init_python = __esm({
1862
+ "src/indexer/ast/traversers/python.ts"() {
1863
+ "use strict";
1864
+ PythonTraverser = class {
1865
+ targetNodeTypes = [
1866
+ "function_definition",
1867
+ "async_function_definition"
1868
+ ];
1869
+ containerTypes = [
1870
+ "class_definition"
1871
+ // We extract methods, not the class itself
1872
+ ];
1873
+ declarationTypes = [
1874
+ // Python doesn't have const/let/var declarations like JS/TS
1875
+ // Functions are always defined with 'def' or 'async def'
1876
+ ];
1877
+ functionTypes = [
1878
+ "function_definition",
1879
+ "async_function_definition"
1880
+ ];
1881
+ shouldExtractChildren(node) {
1882
+ return this.containerTypes.includes(node.type);
1883
+ }
1884
+ isDeclarationWithFunction(_node) {
1885
+ return false;
1886
+ }
1887
+ getContainerBody(node) {
1888
+ if (node.type === "class_definition") {
1889
+ return node.childForFieldName("body");
1890
+ }
1891
+ return null;
1892
+ }
1893
+ shouldTraverseChildren(node) {
1894
+ return node.type === "module" || // Top-level Python file
1895
+ node.type === "block";
1896
+ }
1897
+ findParentContainerName(node) {
1898
+ let current = node.parent;
1899
+ while (current) {
1900
+ if (current.type === "class_definition") {
1901
+ const nameNode = current.childForFieldName("name");
1902
+ return nameNode?.text;
1903
+ }
1904
+ current = current.parent;
1905
+ }
1906
+ return void 0;
1907
+ }
1908
+ /**
1909
+ * Python doesn't have this pattern (const x = () => {})
1910
+ * Functions are always defined with 'def' or 'async def'
1911
+ */
1912
+ findFunctionInDeclaration(_node) {
1913
+ return {
1914
+ hasFunction: false,
1915
+ functionNode: null
1916
+ };
1917
+ }
1918
+ };
1919
+ }
1920
+ });
1921
+
1922
+ // src/indexer/ast/traversers/index.ts
1923
+ function getTraverser(language) {
1924
+ const traverser = traverserRegistry[language];
1925
+ if (!traverser) {
1926
+ throw new Error(`No traverser available for language: ${language}`);
1927
+ }
1928
+ return traverser;
1929
+ }
1930
+ var traverserRegistry;
1931
+ var init_traversers = __esm({
1932
+ "src/indexer/ast/traversers/index.ts"() {
1933
+ "use strict";
1934
+ init_typescript();
1935
+ init_php();
1936
+ init_python();
1937
+ traverserRegistry = {
1938
+ typescript: new TypeScriptTraverser(),
1939
+ javascript: new JavaScriptTraverser(),
1940
+ php: new PHPTraverser(),
1941
+ python: new PythonTraverser()
1942
+ };
1586
1943
  }
1587
1944
  });
1588
1945
 
@@ -1600,21 +1957,19 @@ function chunkByAST(filepath, content, options = {}) {
1600
1957
  const chunks = [];
1601
1958
  const lines = content.split("\n");
1602
1959
  const rootNode = parseResult.tree.rootNode;
1960
+ const traverser = getTraverser(language);
1603
1961
  const fileImports = extractImports(rootNode);
1604
- const topLevelNodes = findTopLevelNodes(rootNode);
1962
+ const topLevelNodes = findTopLevelNodes(rootNode, traverser);
1605
1963
  for (const node of topLevelNodes) {
1606
1964
  let actualNode = node;
1607
- if (node.type === "lexical_declaration" || node.type === "variable_declaration") {
1608
- const funcNode = findActualFunctionNode(node);
1609
- if (funcNode) {
1610
- actualNode = funcNode;
1965
+ if (traverser.isDeclarationWithFunction(node)) {
1966
+ const declInfo = traverser.findFunctionInDeclaration(node);
1967
+ if (declInfo.functionNode) {
1968
+ actualNode = declInfo.functionNode;
1611
1969
  }
1612
1970
  }
1613
- let parentClassName;
1614
- if (actualNode.type === "method_definition") {
1615
- parentClassName = findParentClassName(actualNode);
1616
- }
1617
- const symbolInfo = extractSymbolInfo(actualNode, content, parentClassName);
1971
+ const parentClassName = traverser.findParentContainerName(actualNode);
1972
+ const symbolInfo = extractSymbolInfo(actualNode, content, parentClassName, language);
1618
1973
  const nodeContent = getNodeContent(node, lines);
1619
1974
  chunks.push(createChunk(filepath, node, nodeContent, symbolInfo, fileImports, language));
1620
1975
  }
@@ -1634,57 +1989,28 @@ function chunkByAST(filepath, content, options = {}) {
1634
1989
  chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);
1635
1990
  return chunks;
1636
1991
  }
1637
- function findParentClassName(methodNode) {
1638
- let current = methodNode.parent;
1639
- while (current) {
1640
- if (current.type === "class_declaration") {
1641
- const nameNode = current.childForFieldName("name");
1642
- return nameNode?.text;
1643
- }
1644
- current = current.parent;
1645
- }
1646
- return void 0;
1647
- }
1648
- function findTopLevelNodes(rootNode) {
1992
+ function findTopLevelNodes(rootNode, traverser) {
1649
1993
  const nodes = [];
1650
- const targetTypes = [
1651
- "function_declaration",
1652
- "function",
1653
- // Note: 'class_declaration' is NOT included here - we extract methods individually
1654
- "interface_declaration",
1655
- "method_definition",
1656
- "lexical_declaration",
1657
- // For const/let with arrow functions
1658
- "variable_declaration"
1659
- // For var with functions
1660
- ];
1661
1994
  function traverse(node, depth) {
1662
- if ((node.type === "lexical_declaration" || node.type === "variable_declaration") && depth === 0) {
1663
- const hasFunction = findFunctionInDeclaration(node);
1664
- if (hasFunction) {
1995
+ if (traverser.isDeclarationWithFunction(node) && depth === 0) {
1996
+ const declInfo = traverser.findFunctionInDeclaration(node);
1997
+ if (declInfo.hasFunction) {
1665
1998
  nodes.push(node);
1666
1999
  return;
1667
2000
  }
1668
2001
  }
1669
- if (depth <= 1 && targetTypes.includes(node.type)) {
2002
+ if (depth <= 1 && traverser.targetNodeTypes.includes(node.type)) {
1670
2003
  nodes.push(node);
1671
2004
  return;
1672
2005
  }
1673
- if (node.type === "class_body") {
1674
- for (let i = 0; i < node.namedChildCount; i++) {
1675
- const child = node.namedChild(i);
1676
- if (child) traverse(child, depth);
1677
- }
1678
- return;
1679
- }
1680
- if (node.type === "class_declaration") {
1681
- const body = node.childForFieldName("body");
2006
+ if (traverser.shouldExtractChildren(node)) {
2007
+ const body = traverser.getContainerBody(node);
1682
2008
  if (body) {
1683
2009
  traverse(body, depth + 1);
1684
2010
  }
1685
2011
  return;
1686
2012
  }
1687
- if (node.type === "program" || node.type === "export_statement") {
2013
+ if (traverser.shouldTraverseChildren(node)) {
1688
2014
  for (let i = 0; i < node.namedChildCount; i++) {
1689
2015
  const child = node.namedChild(i);
1690
2016
  if (child) traverse(child, depth);
@@ -1694,41 +2020,6 @@ function findTopLevelNodes(rootNode) {
1694
2020
  traverse(rootNode, 0);
1695
2021
  return nodes;
1696
2022
  }
1697
- function findFunctionInDeclaration(node) {
1698
- const functionTypes = ["arrow_function", "function_expression", "function"];
1699
- function search(n, depth) {
1700
- if (depth > 3) return false;
1701
- if (functionTypes.includes(n.type)) {
1702
- return true;
1703
- }
1704
- for (let i = 0; i < n.childCount; i++) {
1705
- const child = n.child(i);
1706
- if (child && search(child, depth + 1)) {
1707
- return true;
1708
- }
1709
- }
1710
- return false;
1711
- }
1712
- return search(node, 0);
1713
- }
1714
- function findActualFunctionNode(node) {
1715
- const functionTypes = ["arrow_function", "function_expression", "function"];
1716
- function search(n, depth) {
1717
- if (depth > 3) return null;
1718
- if (functionTypes.includes(n.type)) {
1719
- return n;
1720
- }
1721
- for (let i = 0; i < n.childCount; i++) {
1722
- const child = n.child(i);
1723
- if (child) {
1724
- const result = search(child, depth + 1);
1725
- if (result) return result;
1726
- }
1727
- }
1728
- return null;
1729
- }
1730
- return search(node, 0);
1731
- }
1732
2023
  function getNodeContent(node, lines) {
1733
2024
  const startLine = node.startPosition.row;
1734
2025
  const endLine = node.endPosition.row;
@@ -1770,67 +2061,323 @@ function createChunk(filepath, node, content, symbolInfo, imports, language) {
1770
2061
  }
1771
2062
  };
1772
2063
  }
1773
- function extractUncoveredCode(lines, coveredRanges, filepath, minChunkSize, imports, language) {
1774
- const chunks = [];
2064
+ function findUncoveredRanges(coveredRanges, totalLines) {
2065
+ const uncoveredRanges = [];
1775
2066
  let currentStart = 0;
1776
- coveredRanges.sort((a, b) => a.start - b.start);
1777
- for (const range of coveredRanges) {
2067
+ const sortedRanges = [...coveredRanges].sort((a, b) => a.start - b.start);
2068
+ for (const range of sortedRanges) {
1778
2069
  if (currentStart < range.start) {
1779
- const uncoveredLines = lines.slice(currentStart, range.start);
1780
- const content = uncoveredLines.join("\n").trim();
1781
- if (content.length > 0 && uncoveredLines.length >= minChunkSize) {
1782
- chunks.push({
1783
- content,
1784
- metadata: {
1785
- file: filepath,
1786
- startLine: currentStart + 1,
1787
- endLine: range.start,
1788
- type: "block",
1789
- language,
1790
- // Empty symbols for uncovered code (imports, exports, etc.)
1791
- symbols: { functions: [], classes: [], interfaces: [] },
1792
- imports
1793
- }
1794
- });
1795
- }
2070
+ uncoveredRanges.push({
2071
+ start: currentStart,
2072
+ end: range.start - 1
2073
+ });
1796
2074
  }
1797
2075
  currentStart = range.end + 1;
1798
2076
  }
1799
- if (currentStart < lines.length) {
1800
- const uncoveredLines = lines.slice(currentStart);
1801
- const content = uncoveredLines.join("\n").trim();
1802
- if (content.length > 0 && uncoveredLines.length >= minChunkSize) {
2077
+ if (currentStart < totalLines) {
2078
+ uncoveredRanges.push({
2079
+ start: currentStart,
2080
+ end: totalLines - 1
2081
+ });
2082
+ }
2083
+ return uncoveredRanges;
2084
+ }
2085
+ function createChunkFromRange(range, lines, filepath, language, imports) {
2086
+ const uncoveredLines = lines.slice(range.start, range.end + 1);
2087
+ const content = uncoveredLines.join("\n").trim();
2088
+ return {
2089
+ content,
2090
+ metadata: {
2091
+ file: filepath,
2092
+ startLine: range.start + 1,
2093
+ endLine: range.end + 1,
2094
+ type: "block",
2095
+ language,
2096
+ // Empty symbols for uncovered code (imports, exports, etc.)
2097
+ symbols: { functions: [], classes: [], interfaces: [] },
2098
+ imports
2099
+ }
2100
+ };
2101
+ }
2102
+ function isValidChunk(chunk, minChunkSize) {
2103
+ const lineCount = chunk.metadata.endLine - chunk.metadata.startLine + 1;
2104
+ return chunk.content.length > 0 && lineCount >= minChunkSize;
2105
+ }
2106
+ function extractUncoveredCode(lines, coveredRanges, filepath, minChunkSize, imports, language) {
2107
+ const uncoveredRanges = findUncoveredRanges(coveredRanges, lines.length);
2108
+ return uncoveredRanges.map((range) => createChunkFromRange(range, lines, filepath, language, imports)).filter((chunk) => isValidChunk(chunk, minChunkSize));
2109
+ }
2110
+ function shouldUseAST(filepath) {
2111
+ return isASTSupported(filepath);
2112
+ }
2113
+ var init_chunker = __esm({
2114
+ "src/indexer/ast/chunker.ts"() {
2115
+ "use strict";
2116
+ init_parser();
2117
+ init_symbols();
2118
+ init_traversers();
2119
+ }
2120
+ });
2121
+
2122
+ // src/indexer/liquid-chunker.ts
2123
+ function extractSchemaName(schemaContent) {
2124
+ try {
2125
+ let jsonContent = schemaContent.replace(/\{%-?\s*schema\s*-?%\}/g, "").replace(/\{%-?\s*endschema\s*-?%\}/g, "").trim();
2126
+ const schema = JSON.parse(jsonContent);
2127
+ return typeof schema.name === "string" ? schema.name : void 0;
2128
+ } catch (error) {
2129
+ }
2130
+ return void 0;
2131
+ }
2132
+ function removeComments(content) {
2133
+ return content.replace(/\{%-?\s*comment\s*-?%\}[\s\S]*?\{%-?\s*endcomment\s*-?%\}/g, "");
2134
+ }
2135
+ function extractRenderTags(contentWithoutComments) {
2136
+ const dependencies = /* @__PURE__ */ new Set();
2137
+ const renderPattern = /\{%-?\s*render\s+['"]([^'"]+)['"]/g;
2138
+ let match;
2139
+ while ((match = renderPattern.exec(contentWithoutComments)) !== null) {
2140
+ dependencies.add(match[1]);
2141
+ }
2142
+ const includePattern = /\{%-?\s*include\s+['"]([^'"]+)['"]/g;
2143
+ while ((match = includePattern.exec(contentWithoutComments)) !== null) {
2144
+ dependencies.add(match[1]);
2145
+ }
2146
+ const sectionPattern = /\{%-?\s*section\s+['"]([^'"]+)['"]/g;
2147
+ while ((match = sectionPattern.exec(contentWithoutComments)) !== null) {
2148
+ dependencies.add(match[1]);
2149
+ }
2150
+ return Array.from(dependencies);
2151
+ }
2152
+ function findLiquidBlocks(content) {
2153
+ const lines = content.split("\n");
2154
+ const blocks = [];
2155
+ const blockPatterns = [
2156
+ { type: "schema", start: /\{%-?\s*schema\s*-?%\}/, end: /\{%-?\s*endschema\s*-?%\}/ },
2157
+ { type: "style", start: /\{%-?\s*style\s*-?%\}/, end: /\{%-?\s*endstyle\s*-?%\}/ },
2158
+ { type: "javascript", start: /\{%-?\s*javascript\s*-?%\}/, end: /\{%-?\s*endjavascript\s*-?%\}/ }
2159
+ ];
2160
+ for (const pattern of blockPatterns) {
2161
+ let searchStart = 0;
2162
+ while (searchStart < lines.length) {
2163
+ const startIdx = lines.findIndex(
2164
+ (line, idx) => idx >= searchStart && pattern.start.test(line)
2165
+ );
2166
+ if (startIdx === -1) break;
2167
+ const endIdx = lines.findIndex(
2168
+ (line, idx) => idx >= startIdx && pattern.end.test(line)
2169
+ );
2170
+ if (endIdx === -1) {
2171
+ break;
2172
+ }
2173
+ const blockContent = lines.slice(startIdx, endIdx + 1).join("\n");
2174
+ blocks.push({
2175
+ type: pattern.type,
2176
+ startLine: startIdx,
2177
+ endLine: endIdx,
2178
+ content: blockContent
2179
+ });
2180
+ searchStart = endIdx + 1;
2181
+ }
2182
+ }
2183
+ return blocks.sort((a, b) => a.startLine - b.startLine);
2184
+ }
2185
+ function chunkLiquidFile(filepath, content, chunkSize = 75, chunkOverlap = 10) {
2186
+ const lines = content.split("\n");
2187
+ const blocks = findLiquidBlocks(content);
2188
+ const chunks = [];
2189
+ const contentWithoutComments = removeComments(content);
2190
+ const linesWithoutComments = contentWithoutComments.split("\n");
2191
+ const coveredLines = /* @__PURE__ */ new Set();
2192
+ for (const block of blocks) {
2193
+ for (let i = block.startLine; i <= block.endLine; i++) {
2194
+ coveredLines.add(i);
2195
+ }
2196
+ let symbolName;
2197
+ if (block.type === "schema") {
2198
+ symbolName = extractSchemaName(block.content);
2199
+ }
2200
+ const blockContentWithoutComments = linesWithoutComments.slice(block.startLine, block.endLine + 1).join("\n");
2201
+ const imports = extractRenderTags(blockContentWithoutComments);
2202
+ const blockLineCount = block.endLine - block.startLine + 1;
2203
+ const maxBlockSize = chunkSize * 3;
2204
+ if (blockLineCount <= maxBlockSize) {
1803
2205
  chunks.push({
1804
- content,
2206
+ content: block.content,
1805
2207
  metadata: {
1806
2208
  file: filepath,
1807
- startLine: currentStart + 1,
1808
- endLine: lines.length,
2209
+ startLine: block.startLine + 1,
2210
+ // 1-indexed
2211
+ endLine: block.endLine + 1,
2212
+ language: "liquid",
1809
2213
  type: "block",
1810
- language,
1811
- // Empty symbols for uncovered code (imports, exports, etc.)
1812
- symbols: { functions: [], classes: [], interfaces: [] },
1813
- imports
2214
+ symbolName,
2215
+ symbolType: block.type,
2216
+ imports: imports.length > 0 ? imports : void 0
1814
2217
  }
1815
2218
  });
2219
+ } else {
2220
+ const blockLines = block.content.split("\n");
2221
+ for (let offset = 0; offset < blockLines.length; offset += chunkSize - chunkOverlap) {
2222
+ const endOffset = Math.min(offset + chunkSize, blockLines.length);
2223
+ const chunkContent = blockLines.slice(offset, endOffset).join("\n");
2224
+ if (chunkContent.trim().length > 0) {
2225
+ chunks.push({
2226
+ content: chunkContent,
2227
+ metadata: {
2228
+ file: filepath,
2229
+ startLine: block.startLine + offset + 1,
2230
+ // 1-indexed
2231
+ endLine: block.startLine + endOffset,
2232
+ // 1-indexed (endOffset already accounts for exclusivity)
2233
+ language: "liquid",
2234
+ type: "block",
2235
+ symbolName,
2236
+ // Preserve symbol name for all split chunks
2237
+ symbolType: block.type,
2238
+ imports: imports.length > 0 ? imports : void 0
2239
+ }
2240
+ });
2241
+ }
2242
+ if (endOffset >= blockLines.length) break;
2243
+ }
1816
2244
  }
1817
2245
  }
1818
- return chunks;
2246
+ let currentChunk = [];
2247
+ let chunkStartLine = 0;
2248
+ for (let i = 0; i < lines.length; i++) {
2249
+ if (coveredLines.has(i)) {
2250
+ if (currentChunk.length > 0) {
2251
+ const chunkContent = currentChunk.join("\n");
2252
+ if (chunkContent.trim().length > 0) {
2253
+ const cleanedChunk = linesWithoutComments.slice(chunkStartLine, i).join("\n");
2254
+ const imports = extractRenderTags(cleanedChunk);
2255
+ chunks.push({
2256
+ content: chunkContent,
2257
+ metadata: {
2258
+ file: filepath,
2259
+ startLine: chunkStartLine + 1,
2260
+ endLine: i,
2261
+ language: "liquid",
2262
+ type: "template",
2263
+ imports: imports.length > 0 ? imports : void 0
2264
+ }
2265
+ });
2266
+ }
2267
+ currentChunk = [];
2268
+ }
2269
+ continue;
2270
+ }
2271
+ if (currentChunk.length === 0) {
2272
+ chunkStartLine = i;
2273
+ }
2274
+ currentChunk.push(lines[i]);
2275
+ if (currentChunk.length >= chunkSize) {
2276
+ const chunkContent = currentChunk.join("\n");
2277
+ if (chunkContent.trim().length > 0) {
2278
+ const cleanedChunk = linesWithoutComments.slice(chunkStartLine, i + 1).join("\n");
2279
+ const imports = extractRenderTags(cleanedChunk);
2280
+ chunks.push({
2281
+ content: chunkContent,
2282
+ metadata: {
2283
+ file: filepath,
2284
+ startLine: chunkStartLine + 1,
2285
+ endLine: i + 1,
2286
+ language: "liquid",
2287
+ type: "template",
2288
+ imports: imports.length > 0 ? imports : void 0
2289
+ }
2290
+ });
2291
+ }
2292
+ currentChunk = currentChunk.slice(-chunkOverlap);
2293
+ chunkStartLine = Math.max(0, i + 1 - chunkOverlap);
2294
+ }
2295
+ }
2296
+ if (currentChunk.length > 0) {
2297
+ const chunkContent = currentChunk.join("\n");
2298
+ if (chunkContent.trim().length === 0) {
2299
+ return chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);
2300
+ }
2301
+ const cleanedChunk = linesWithoutComments.slice(chunkStartLine, lines.length).join("\n");
2302
+ const imports = extractRenderTags(cleanedChunk);
2303
+ chunks.push({
2304
+ content: chunkContent,
2305
+ metadata: {
2306
+ file: filepath,
2307
+ startLine: chunkStartLine + 1,
2308
+ endLine: lines.length,
2309
+ language: "liquid",
2310
+ type: "template",
2311
+ imports: imports.length > 0 ? imports : void 0
2312
+ }
2313
+ });
2314
+ }
2315
+ return chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);
1819
2316
  }
1820
- function shouldUseAST(filepath) {
1821
- return isASTSupported(filepath);
2317
+ var init_liquid_chunker = __esm({
2318
+ "src/indexer/liquid-chunker.ts"() {
2319
+ "use strict";
2320
+ }
2321
+ });
2322
+
2323
+ // src/indexer/json-template-chunker.ts
2324
+ function extractSectionReferences(jsonContent) {
2325
+ try {
2326
+ const template = JSON.parse(jsonContent);
2327
+ const sectionTypes = /* @__PURE__ */ new Set();
2328
+ if (template.sections && typeof template.sections === "object") {
2329
+ for (const section of Object.values(template.sections)) {
2330
+ if (typeof section === "object" && section !== null && "type" in section && typeof section.type === "string") {
2331
+ sectionTypes.add(section.type);
2332
+ }
2333
+ }
2334
+ }
2335
+ return Array.from(sectionTypes);
2336
+ } catch (error) {
2337
+ console.warn(`[Lien] Failed to parse JSON template: ${error instanceof Error ? error.message : String(error)}`);
2338
+ return [];
2339
+ }
1822
2340
  }
1823
- var init_chunker = __esm({
1824
- "src/indexer/ast/chunker.ts"() {
2341
+ function extractTemplateName(filepath) {
2342
+ const match = filepath.match(/templates\/(.+)\.json$/);
2343
+ return match ? match[1] : void 0;
2344
+ }
2345
+ function chunkJSONTemplate(filepath, content) {
2346
+ if (content.trim().length === 0) {
2347
+ return [];
2348
+ }
2349
+ const lines = content.split("\n");
2350
+ const templateName = extractTemplateName(filepath);
2351
+ const sectionReferences = extractSectionReferences(content);
2352
+ return [{
2353
+ content,
2354
+ metadata: {
2355
+ file: filepath,
2356
+ startLine: 1,
2357
+ endLine: lines.length,
2358
+ language: "json",
2359
+ type: "template",
2360
+ symbolName: templateName,
2361
+ symbolType: "template",
2362
+ imports: sectionReferences.length > 0 ? sectionReferences : void 0
2363
+ }
2364
+ }];
2365
+ }
2366
+ var init_json_template_chunker = __esm({
2367
+ "src/indexer/json-template-chunker.ts"() {
1825
2368
  "use strict";
1826
- init_parser();
1827
- init_symbols();
1828
2369
  }
1829
2370
  });
1830
2371
 
1831
2372
  // src/indexer/chunker.ts
1832
2373
  function chunkFile(filepath, content, options = {}) {
1833
2374
  const { chunkSize = 75, chunkOverlap = 10, useAST = true, astFallback = "line-based" } = options;
2375
+ if (filepath.endsWith(".liquid")) {
2376
+ return chunkLiquidFile(filepath, content, chunkSize, chunkOverlap);
2377
+ }
2378
+ if (filepath.endsWith(".json") && /(?:^|\/)templates\//.test(filepath)) {
2379
+ return chunkJSONTemplate(filepath, content);
2380
+ }
1834
2381
  if (useAST && shouldUseAST(filepath)) {
1835
2382
  try {
1836
2383
  return chunkByAST(filepath, content, {
@@ -1884,6 +2431,8 @@ var init_chunker2 = __esm({
1884
2431
  init_scanner();
1885
2432
  init_symbol_extractor();
1886
2433
  init_chunker();
2434
+ init_liquid_chunker();
2435
+ init_json_template_chunker();
1887
2436
  }
1888
2437
  });
1889
2438
 
@@ -1994,18 +2543,11 @@ var init_intent_classifier = __esm({
1994
2543
  }
1995
2544
  });
1996
2545
 
1997
- // src/vectordb/lancedb.ts
1998
- var lancedb_exports = {};
1999
- __export(lancedb_exports, {
2000
- VectorDB: () => VectorDB
2001
- });
2002
- import * as lancedb from "vectordb";
2003
- import path11 from "path";
2004
- import os2 from "os";
2005
- import crypto2 from "crypto";
2546
+ // src/vectordb/query.ts
2547
+ import path13 from "path";
2006
2548
  function isDocumentationFile(filepath) {
2007
2549
  const lower = filepath.toLowerCase();
2008
- const filename = path11.basename(filepath).toLowerCase();
2550
+ const filename = path13.basename(filepath).toLowerCase();
2009
2551
  if (filename.startsWith("readme")) return true;
2010
2552
  if (filename.startsWith("changelog")) return true;
2011
2553
  if (filename.endsWith(".md") || filename.endsWith(".mdx") || filename.endsWith(".markdown")) {
@@ -2052,7 +2594,7 @@ function boostPathRelevance(query, filepath, baseScore) {
2052
2594
  return baseScore * boostFactor;
2053
2595
  }
2054
2596
  function boostFilenameRelevance(query, filepath, baseScore) {
2055
- const filename = path11.basename(filepath, path11.extname(filepath)).toLowerCase();
2597
+ const filename = path13.basename(filepath, path13.extname(filepath)).toLowerCase();
2056
2598
  const queryTokens = query.toLowerCase().split(/\s+/);
2057
2599
  let boostFactor = 1;
2058
2600
  for (const token of queryTokens) {
@@ -2067,7 +2609,7 @@ function boostFilenameRelevance(query, filepath, baseScore) {
2067
2609
  }
2068
2610
  function boostForLocationIntent(query, filepath, baseScore) {
2069
2611
  let score = baseScore;
2070
- const filename = path11.basename(filepath, path11.extname(filepath)).toLowerCase();
2612
+ const filename = path13.basename(filepath, path13.extname(filepath)).toLowerCase();
2071
2613
  const queryTokens = query.toLowerCase().split(/\s+/);
2072
2614
  for (const token of queryTokens) {
2073
2615
  if (token.length <= 2) continue;
@@ -2095,7 +2637,7 @@ function boostForConceptualIntent(query, filepath, baseScore) {
2095
2637
  if (isUtilityFile(filepath)) {
2096
2638
  score *= 1.05;
2097
2639
  }
2098
- const filename = path11.basename(filepath, path11.extname(filepath)).toLowerCase();
2640
+ const filename = path13.basename(filepath, path13.extname(filepath)).toLowerCase();
2099
2641
  const queryTokens = query.toLowerCase().split(/\s+/);
2100
2642
  for (const token of queryTokens) {
2101
2643
  if (token.length <= 2) continue;
@@ -2103,43 +2645,400 @@ function boostForConceptualIntent(query, filepath, baseScore) {
2103
2645
  score *= 0.9;
2104
2646
  }
2105
2647
  }
2106
- const pathSegments = filepath.toLowerCase().split(path11.sep);
2107
- for (const token of queryTokens) {
2108
- if (token.length <= 2) continue;
2109
- for (const segment of pathSegments) {
2110
- if (segment.includes(token)) {
2111
- score *= 0.95;
2112
- break;
2113
- }
2648
+ const pathSegments = filepath.toLowerCase().split(path13.sep);
2649
+ for (const token of queryTokens) {
2650
+ if (token.length <= 2) continue;
2651
+ for (const segment of pathSegments) {
2652
+ if (segment.includes(token)) {
2653
+ score *= 0.95;
2654
+ break;
2655
+ }
2656
+ }
2657
+ }
2658
+ return score;
2659
+ }
2660
+ function boostForImplementationIntent(query, filepath, baseScore) {
2661
+ let score = baseScore;
2662
+ score = boostFilenameRelevance(query, filepath, score);
2663
+ score = boostPathRelevance(query, filepath, score);
2664
+ if (isTestFile(filepath)) {
2665
+ score *= 0.9;
2666
+ }
2667
+ return score;
2668
+ }
2669
+ function applyRelevanceBoosting(query, filepath, baseScore) {
2670
+ if (!query) {
2671
+ return baseScore;
2672
+ }
2673
+ const intent = classifyQueryIntent(query);
2674
+ switch (intent) {
2675
+ case "location" /* LOCATION */:
2676
+ return boostForLocationIntent(query, filepath, baseScore);
2677
+ case "conceptual" /* CONCEPTUAL */:
2678
+ return boostForConceptualIntent(query, filepath, baseScore);
2679
+ case "implementation" /* IMPLEMENTATION */:
2680
+ return boostForImplementationIntent(query, filepath, baseScore);
2681
+ default:
2682
+ return boostForImplementationIntent(query, filepath, baseScore);
2683
+ }
2684
+ }
2685
+ function dbRecordToSearchResult(r, query) {
2686
+ const baseScore = r._distance ?? 0;
2687
+ const boostedScore = applyRelevanceBoosting(query, r.file, baseScore);
2688
+ return {
2689
+ content: r.content,
2690
+ metadata: {
2691
+ file: r.file,
2692
+ startLine: r.startLine,
2693
+ endLine: r.endLine,
2694
+ type: r.type,
2695
+ language: r.language,
2696
+ // AST-derived metadata (v0.13.0)
2697
+ symbolName: r.symbolName || void 0,
2698
+ symbolType: r.symbolType,
2699
+ parentClass: r.parentClass || void 0,
2700
+ complexity: r.complexity || void 0,
2701
+ parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
2702
+ signature: r.signature || void 0,
2703
+ imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
2704
+ },
2705
+ score: boostedScore,
2706
+ relevance: calculateRelevance(boostedScore)
2707
+ };
2708
+ }
2709
+ async function search(table, queryVector, limit = 5, query) {
2710
+ if (!table) {
2711
+ throw new DatabaseError("Vector database not initialized");
2712
+ }
2713
+ try {
2714
+ const results = await table.search(Array.from(queryVector)).limit(limit + 20).execute();
2715
+ const filtered = results.filter(
2716
+ (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
2717
+ ).map((r) => dbRecordToSearchResult(r, query)).sort((a, b) => a.score - b.score).slice(0, limit);
2718
+ return filtered;
2719
+ } catch (error) {
2720
+ const errorMsg = String(error);
2721
+ if (errorMsg.includes("Not found:") || errorMsg.includes(".lance")) {
2722
+ throw new DatabaseError(
2723
+ `Index appears corrupted or outdated. Please restart the MCP server or run 'lien reindex' in the project directory.`,
2724
+ { originalError: error }
2725
+ );
2726
+ }
2727
+ throw wrapError(error, "Failed to search vector database");
2728
+ }
2729
+ }
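The extracted search helper above over-fetches (limit + 20), drops empty rows, applies the intent-based boosting via dbRecordToSearchResult, then sorts ascending by score and trims to the limit. A minimal sketch of calling it directly, assuming an open LanceDB table handle and an embedding vector are already available (the query string and variable names here are illustrative, not part of the package):

  // Illustrative only; `search` is the helper defined above in src/vectordb/query.ts.
  async function exampleSearch(table, queryVector) {
    const hits = await search(table, queryVector, 5, "where is auth handled?");
    for (const hit of hits) {
      console.log(hit.metadata.file, `${hit.metadata.startLine}-${hit.metadata.endLine}`, hit.relevance);
    }
  }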
2730
+ async function scanWithFilter(table, options) {
2731
+ if (!table) {
2732
+ throw new DatabaseError("Vector database not initialized");
2733
+ }
2734
+ const { language, pattern, limit = 100 } = options;
2735
+ try {
2736
+ const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
2737
+ const query = table.search(zeroVector).where('file != ""').limit(Math.max(limit * 5, 200));
2738
+ const results = await query.execute();
2739
+ let filtered = results.filter(
2740
+ (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
2741
+ );
2742
+ if (language) {
2743
+ filtered = filtered.filter(
2744
+ (r) => r.language && r.language.toLowerCase() === language.toLowerCase()
2745
+ );
2746
+ }
2747
+ if (pattern) {
2748
+ const regex = new RegExp(pattern, "i");
2749
+ filtered = filtered.filter(
2750
+ (r) => regex.test(r.content) || regex.test(r.file)
2751
+ );
2752
+ }
2753
+ return filtered.slice(0, limit).map((r) => {
2754
+ const score = 0;
2755
+ return {
2756
+ content: r.content,
2757
+ metadata: {
2758
+ file: r.file,
2759
+ startLine: r.startLine,
2760
+ endLine: r.endLine,
2761
+ type: r.type,
2762
+ language: r.language,
2763
+ // AST-derived metadata (v0.13.0)
2764
+ symbolName: r.symbolName || void 0,
2765
+ symbolType: r.symbolType,
2766
+ parentClass: r.parentClass || void 0,
2767
+ complexity: r.complexity || void 0,
2768
+ parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
2769
+ signature: r.signature || void 0,
2770
+ imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
2771
+ },
2772
+ score,
2773
+ relevance: calculateRelevance(score)
2774
+ };
2775
+ });
2776
+ } catch (error) {
2777
+ throw wrapError(error, "Failed to scan with filter");
2778
+ }
2779
+ }
2780
+ function matchesSymbolType(record, symbolType, symbols) {
2781
+ if (record.symbolType) {
2782
+ if (symbolType === "function") {
2783
+ return record.symbolType === "function" || record.symbolType === "method";
2784
+ } else if (symbolType === "class") {
2785
+ return record.symbolType === "class";
2786
+ } else if (symbolType === "interface") {
2787
+ return record.symbolType === "interface";
2788
+ }
2789
+ return false;
2790
+ }
2791
+ return symbols.length > 0 && symbols.some((s) => s.length > 0 && s !== "");
2792
+ }
2793
+ async function querySymbols(table, options) {
2794
+ if (!table) {
2795
+ throw new DatabaseError("Vector database not initialized");
2796
+ }
2797
+ const { language, pattern, symbolType, limit = 50 } = options;
2798
+ try {
2799
+ const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
2800
+ const query = table.search(zeroVector).where('file != ""').limit(Math.max(limit * 10, 500));
2801
+ const results = await query.execute();
2802
+ let filtered = results.filter((r) => {
2803
+ if (!r.content || r.content.trim().length === 0) {
2804
+ return false;
2805
+ }
2806
+ if (!r.file || r.file.length === 0) {
2807
+ return false;
2808
+ }
2809
+ if (language && (!r.language || r.language.toLowerCase() !== language.toLowerCase())) {
2810
+ return false;
2811
+ }
2812
+ const symbols = symbolType === "function" ? r.functionNames || [] : symbolType === "class" ? r.classNames || [] : symbolType === "interface" ? r.interfaceNames || [] : [...r.functionNames || [], ...r.classNames || [], ...r.interfaceNames || []];
2813
+ const astSymbolName = r.symbolName || "";
2814
+ if (symbols.length === 0 && !astSymbolName) {
2815
+ return false;
2816
+ }
2817
+ if (pattern) {
2818
+ const regex = new RegExp(pattern, "i");
2819
+ const matchesOldSymbols = symbols.some((s) => regex.test(s));
2820
+ const matchesASTSymbol = regex.test(astSymbolName);
2821
+ const nameMatches = matchesOldSymbols || matchesASTSymbol;
2822
+ if (!nameMatches) return false;
2823
+ if (symbolType) {
2824
+ return matchesSymbolType(r, symbolType, symbols);
2825
+ }
2826
+ return nameMatches;
2827
+ }
2828
+ if (symbolType) {
2829
+ return matchesSymbolType(r, symbolType, symbols);
2830
+ }
2831
+ return true;
2832
+ });
2833
+ return filtered.slice(0, limit).map((r) => {
2834
+ const score = 0;
2835
+ return {
2836
+ content: r.content,
2837
+ metadata: {
2838
+ file: r.file,
2839
+ startLine: r.startLine,
2840
+ endLine: r.endLine,
2841
+ type: r.type,
2842
+ language: r.language,
2843
+ symbols: {
2844
+ functions: r.functionNames && r.functionNames.length > 0 && r.functionNames[0] !== "" ? r.functionNames : [],
2845
+ classes: r.classNames && r.classNames.length > 0 && r.classNames[0] !== "" ? r.classNames : [],
2846
+ interfaces: r.interfaceNames && r.interfaceNames.length > 0 && r.interfaceNames[0] !== "" ? r.interfaceNames : []
2847
+ },
2848
+ // AST-derived metadata (v0.13.0)
2849
+ symbolName: r.symbolName || void 0,
2850
+ symbolType: r.symbolType,
2851
+ parentClass: r.parentClass || void 0,
2852
+ complexity: r.complexity || void 0,
2853
+ parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
2854
+ signature: r.signature || void 0,
2855
+ imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
2856
+ },
2857
+ score,
2858
+ relevance: calculateRelevance(score)
2859
+ };
2860
+ });
2861
+ } catch (error) {
2862
+ throw wrapError(error, "Failed to query symbols");
2863
+ }
2864
+ }
2865
+ var init_query = __esm({
2866
+ "src/vectordb/query.ts"() {
2867
+ "use strict";
2868
+ init_types();
2869
+ init_errors();
2870
+ init_relevance();
2871
+ init_intent_classifier();
2872
+ }
2873
+ });
2874
+
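Unlike search, the scanWithFilter and querySymbols helpers above issue a zero-vector query and filter purely on metadata (language, regex pattern, symbol type). A hedged sketch of the option shapes they accept, taken from the destructuring in the code above; the concrete values are placeholders:

  // Illustrative only; option keys come from the helpers defined above.
  async function exampleScans(table) {
    const todoChunks = await scanWithFilter(table, { language: "typescript", pattern: "TODO", limit: 20 });
    const classes = await querySymbols(table, { symbolType: "class", pattern: "^Vector", limit: 10 });
    return { todoChunks, classes };
  }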
2875
+ // src/vectordb/batch-insert.ts
2876
+ async function insertBatch(db, table, tableName, vectors, metadatas, contents) {
2877
+ if (!db) {
2878
+ throw new DatabaseError("Vector database not initialized");
2879
+ }
2880
+ if (vectors.length !== metadatas.length || vectors.length !== contents.length) {
2881
+ throw new DatabaseError("Vectors, metadatas, and contents arrays must have the same length", {
2882
+ vectorsLength: vectors.length,
2883
+ metadatasLength: metadatas.length,
2884
+ contentsLength: contents.length
2885
+ });
2886
+ }
2887
+ if (vectors.length === 0) {
2888
+ return table;
2889
+ }
2890
+ if (vectors.length > VECTOR_DB_MAX_BATCH_SIZE) {
2891
+ let currentTable = table;
2892
+ for (let i = 0; i < vectors.length; i += VECTOR_DB_MAX_BATCH_SIZE) {
2893
+ const batchVectors = vectors.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
2894
+ const batchMetadata = metadatas.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
2895
+ const batchContents = contents.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
2896
+ currentTable = await insertBatchInternal(db, currentTable, tableName, batchVectors, batchMetadata, batchContents);
2897
+ }
2898
+ if (!currentTable) {
2899
+ throw new DatabaseError("Failed to create table during batch insert");
2900
+ }
2901
+ return currentTable;
2902
+ } else {
2903
+ return insertBatchInternal(db, table, tableName, vectors, metadatas, contents);
2904
+ }
2905
+ }
2906
+ async function insertBatchInternal(db, table, tableName, vectors, metadatas, contents) {
2907
+ const queue = [{ vectors, metadatas, contents }];
2908
+ const failedRecords = [];
2909
+ let currentTable = table;
2910
+ while (queue.length > 0) {
2911
+ const batch = queue.shift();
2912
+ if (!batch) break;
2913
+ try {
2914
+ const records = batch.vectors.map((vector, i) => ({
2915
+ vector: Array.from(vector),
2916
+ content: batch.contents[i],
2917
+ file: batch.metadatas[i].file,
2918
+ startLine: batch.metadatas[i].startLine,
2919
+ endLine: batch.metadatas[i].endLine,
2920
+ type: batch.metadatas[i].type,
2921
+ language: batch.metadatas[i].language,
2922
+ // Ensure arrays have at least empty string for Arrow type inference
2923
+ functionNames: batch.metadatas[i].symbols?.functions && batch.metadatas[i].symbols.functions.length > 0 ? batch.metadatas[i].symbols.functions : [""],
2924
+ classNames: batch.metadatas[i].symbols?.classes && batch.metadatas[i].symbols.classes.length > 0 ? batch.metadatas[i].symbols.classes : [""],
2925
+ interfaceNames: batch.metadatas[i].symbols?.interfaces && batch.metadatas[i].symbols.interfaces.length > 0 ? batch.metadatas[i].symbols.interfaces : [""],
2926
+ // AST-derived metadata (v0.13.0)
2927
+ symbolName: batch.metadatas[i].symbolName || "",
2928
+ symbolType: batch.metadatas[i].symbolType || "",
2929
+ parentClass: batch.metadatas[i].parentClass || "",
2930
+ complexity: batch.metadatas[i].complexity || 0,
2931
+ parameters: batch.metadatas[i].parameters && batch.metadatas[i].parameters.length > 0 ? batch.metadatas[i].parameters : [""],
2932
+ signature: batch.metadatas[i].signature || "",
2933
+ imports: batch.metadatas[i].imports && batch.metadatas[i].imports.length > 0 ? batch.metadatas[i].imports : [""]
2934
+ }));
2935
+ if (!currentTable) {
2936
+ currentTable = await db.createTable(tableName, records);
2937
+ } else {
2938
+ await currentTable.add(records);
2939
+ }
2940
+ } catch (error) {
2941
+ if (batch.vectors.length > VECTOR_DB_MIN_BATCH_SIZE) {
2942
+ const half = Math.floor(batch.vectors.length / 2);
2943
+ queue.push({
2944
+ vectors: batch.vectors.slice(0, half),
2945
+ metadatas: batch.metadatas.slice(0, half),
2946
+ contents: batch.contents.slice(0, half)
2947
+ });
2948
+ queue.push({
2949
+ vectors: batch.vectors.slice(half),
2950
+ metadatas: batch.metadatas.slice(half),
2951
+ contents: batch.contents.slice(half)
2952
+ });
2953
+ } else {
2954
+ failedRecords.push(batch);
2955
+ }
2956
+ }
2957
+ }
2958
+ if (failedRecords.length > 0) {
2959
+ const totalFailed = failedRecords.reduce((sum, batch) => sum + batch.vectors.length, 0);
2960
+ throw new DatabaseError(
2961
+ `Failed to insert ${totalFailed} record(s) after retry attempts`,
2962
+ {
2963
+ failedBatches: failedRecords.length,
2964
+ totalRecords: totalFailed,
2965
+ sampleFile: failedRecords[0].metadatas[0].file
2966
+ }
2967
+ );
2968
+ }
2969
+ if (!currentTable) {
2970
+ throw new DatabaseError("Failed to create table during batch insert");
2971
+ }
2972
+ return currentTable;
2973
+ }
2974
+ var init_batch_insert = __esm({
2975
+ "src/vectordb/batch-insert.ts"() {
2976
+ "use strict";
2977
+ init_errors();
2978
+ init_constants();
2979
+ }
2980
+ });
2981
+
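insertBatch above slices oversized inputs into VECTOR_DB_MAX_BATCH_SIZE chunks, and insertBatchInternal retries failing slices by halving them until VECTOR_DB_MIN_BATCH_SIZE is reached, collecting anything that still fails. A stripped-down sketch of that queue-based halving pattern in isolation (the `write` callback stands in for db.createTable / table.add and is an assumption, not the real API):

  // Illustrative sketch of the halving-retry pattern used above.
  async function insertWithSplit(rows, write, minSize = 1) {
    const queue = [rows];
    const failed = [];
    while (queue.length > 0) {
      const batch = queue.shift();
      try {
        await write(batch);
      } catch {
        if (batch.length > minSize) {
          const half = Math.floor(batch.length / 2);
          queue.push(batch.slice(0, half), batch.slice(half));
        } else {
          failed.push(...batch);
        }
      }
    }
    return failed; // caller decides whether to throw, as insertBatchInternal does
  }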
2982
+ // src/vectordb/maintenance.ts
2983
+ async function clear(db, table, tableName) {
2984
+ if (!db) {
2985
+ throw new DatabaseError("Vector database not initialized");
2986
+ }
2987
+ try {
2988
+ if (table) {
2989
+ await db.dropTable(tableName);
2114
2990
  }
2991
+ } catch (error) {
2992
+ throw wrapError(error, "Failed to clear vector database");
2115
2993
  }
2116
- return score;
2117
2994
  }
2118
- function boostForImplementationIntent(query, filepath, baseScore) {
2119
- let score = baseScore;
2120
- score = boostFilenameRelevance(query, filepath, score);
2121
- score = boostPathRelevance(query, filepath, score);
2122
- if (isTestFile(filepath)) {
2123
- score *= 0.9;
2995
+ async function deleteByFile(table, filepath) {
2996
+ if (!table) {
2997
+ throw new DatabaseError("Vector database not initialized");
2998
+ }
2999
+ try {
3000
+ await table.delete(`file = "${filepath}"`);
3001
+ } catch (error) {
3002
+ throw wrapError(error, "Failed to delete file from vector database");
2124
3003
  }
2125
- return score;
2126
3004
  }
2127
- function applyRelevanceBoosting(query, filepath, baseScore) {
2128
- if (!query) {
2129
- return baseScore;
3005
+ async function updateFile(db, table, tableName, dbPath, filepath, vectors, metadatas, contents) {
3006
+ if (!table) {
3007
+ throw new DatabaseError("Vector database not initialized");
2130
3008
  }
2131
- const intent = classifyQueryIntent(query);
2132
- switch (intent) {
2133
- case "location" /* LOCATION */:
2134
- return boostForLocationIntent(query, filepath, baseScore);
2135
- case "conceptual" /* CONCEPTUAL */:
2136
- return boostForConceptualIntent(query, filepath, baseScore);
2137
- case "implementation" /* IMPLEMENTATION */:
2138
- return boostForImplementationIntent(query, filepath, baseScore);
2139
- default:
2140
- return boostForImplementationIntent(query, filepath, baseScore);
3009
+ try {
3010
+ await deleteByFile(table, filepath);
3011
+ let updatedTable = table;
3012
+ if (vectors.length > 0) {
3013
+ updatedTable = await insertBatch(db, table, tableName, vectors, metadatas, contents);
3014
+ if (!updatedTable) {
3015
+ throw new DatabaseError("insertBatch unexpectedly returned null");
3016
+ }
3017
+ }
3018
+ await writeVersionFile(dbPath);
3019
+ return updatedTable;
3020
+ } catch (error) {
3021
+ throw wrapError(error, "Failed to update file in vector database");
2141
3022
  }
2142
3023
  }
3024
+ var init_maintenance = __esm({
3025
+ "src/vectordb/maintenance.ts"() {
3026
+ "use strict";
3027
+ init_errors();
3028
+ init_version2();
3029
+ init_batch_insert();
3030
+ }
3031
+ });
3032
+
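The maintenance helpers above take the connection, table, and table name explicitly; updateFile deletes a file's existing chunks, re-inserts the new ones through insertBatch, and writes the version file. A hedged sketch of the call shape, matching the signatures shown above (the table name and file paths are placeholders):

  // Illustrative only; "chunks" and the file paths are placeholders.
  async function exampleReindexFile(db, table, dbPath, vectors, metadatas, contents) {
    const updated = await updateFile(db, table, "chunks", dbPath, "src/example.ts", vectors, metadatas, contents);
    await deleteByFile(updated, "src/removed-file.ts"); // drop chunks for a deleted file
    return updated;
  }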
3033
+ // src/vectordb/lancedb.ts
3034
+ var lancedb_exports = {};
3035
+ __export(lancedb_exports, {
3036
+ VectorDB: () => VectorDB
3037
+ });
3038
+ import * as lancedb from "vectordb";
3039
+ import path14 from "path";
3040
+ import os2 from "os";
3041
+ import crypto2 from "crypto";
2143
3042
  var VectorDB;
2144
3043
  var init_lancedb = __esm({
2145
3044
  "src/vectordb/lancedb.ts"() {
@@ -2147,9 +3046,9 @@ var init_lancedb = __esm({
2147
3046
  init_types();
2148
3047
  init_version2();
2149
3048
  init_errors();
2150
- init_relevance();
2151
- init_intent_classifier();
2152
- init_constants();
3049
+ init_query();
3050
+ init_batch_insert();
3051
+ init_maintenance();
2153
3052
  VectorDB = class _VectorDB {
2154
3053
  db = null;
2155
3054
  table = null;
@@ -2158,9 +3057,9 @@ var init_lancedb = __esm({
2158
3057
  lastVersionCheck = 0;
2159
3058
  currentVersion = 0;
2160
3059
  constructor(projectRoot) {
2161
- const projectName = path11.basename(projectRoot);
3060
+ const projectName = path14.basename(projectRoot);
2162
3061
  const pathHash = crypto2.createHash("md5").update(projectRoot).digest("hex").substring(0, 8);
2163
- this.dbPath = path11.join(
3062
+ this.dbPath = path14.join(
2164
3063
  os2.homedir(),
2165
3064
  ".lien",
2166
3065
  "indices",
@@ -2188,150 +3087,30 @@ var init_lancedb = __esm({
2188
3087
  if (!this.db) {
2189
3088
  throw new DatabaseError("Vector database not initialized");
2190
3089
  }
2191
- if (vectors.length !== metadatas.length || vectors.length !== contents.length) {
2192
- throw new DatabaseError("Vectors, metadatas, and contents arrays must have the same length", {
2193
- vectorsLength: vectors.length,
2194
- metadatasLength: metadatas.length,
2195
- contentsLength: contents.length
2196
- });
2197
- }
2198
- if (vectors.length === 0) {
2199
- return;
2200
- }
2201
- if (vectors.length > VECTOR_DB_MAX_BATCH_SIZE) {
2202
- for (let i = 0; i < vectors.length; i += VECTOR_DB_MAX_BATCH_SIZE) {
2203
- const batchVectors = vectors.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
2204
- const batchMetadata = metadatas.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
2205
- const batchContents = contents.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
2206
- await this._insertBatchInternal(batchVectors, batchMetadata, batchContents);
2207
- }
2208
- } else {
2209
- await this._insertBatchInternal(vectors, metadatas, contents);
2210
- }
2211
- }
2212
- /**
2213
- * Internal method to insert a single batch with iterative retry logic.
2214
- * Uses a queue-based approach to avoid deep recursion on large batch failures.
2215
- */
2216
- async _insertBatchInternal(vectors, metadatas, contents) {
2217
- const queue = [{ vectors, metadatas, contents }];
2218
- const failedRecords = [];
2219
- while (queue.length > 0) {
2220
- const batch = queue.shift();
2221
- try {
2222
- const records = batch.vectors.map((vector, i) => ({
2223
- vector: Array.from(vector),
2224
- content: batch.contents[i],
2225
- file: batch.metadatas[i].file,
2226
- startLine: batch.metadatas[i].startLine,
2227
- endLine: batch.metadatas[i].endLine,
2228
- type: batch.metadatas[i].type,
2229
- language: batch.metadatas[i].language,
2230
- // Ensure arrays have at least empty string for Arrow type inference
2231
- functionNames: batch.metadatas[i].symbols?.functions && batch.metadatas[i].symbols.functions.length > 0 ? batch.metadatas[i].symbols.functions : [""],
2232
- classNames: batch.metadatas[i].symbols?.classes && batch.metadatas[i].symbols.classes.length > 0 ? batch.metadatas[i].symbols.classes : [""],
2233
- interfaceNames: batch.metadatas[i].symbols?.interfaces && batch.metadatas[i].symbols.interfaces.length > 0 ? batch.metadatas[i].symbols.interfaces : [""],
2234
- // AST-derived metadata (v0.13.0)
2235
- symbolName: batch.metadatas[i].symbolName || "",
2236
- symbolType: batch.metadatas[i].symbolType || "",
2237
- parentClass: batch.metadatas[i].parentClass || "",
2238
- complexity: batch.metadatas[i].complexity || 0,
2239
- parameters: batch.metadatas[i].parameters && batch.metadatas[i].parameters.length > 0 ? batch.metadatas[i].parameters : [""],
2240
- signature: batch.metadatas[i].signature || "",
2241
- imports: batch.metadatas[i].imports && batch.metadatas[i].imports.length > 0 ? batch.metadatas[i].imports : [""]
2242
- }));
2243
- if (!this.table) {
2244
- this.table = await this.db.createTable(this.tableName, records);
2245
- } else {
2246
- await this.table.add(records);
2247
- }
2248
- } catch (error) {
2249
- if (batch.vectors.length > VECTOR_DB_MIN_BATCH_SIZE) {
2250
- const half = Math.floor(batch.vectors.length / 2);
2251
- queue.push({
2252
- vectors: batch.vectors.slice(0, half),
2253
- metadatas: batch.metadatas.slice(0, half),
2254
- contents: batch.contents.slice(0, half)
2255
- });
2256
- queue.push({
2257
- vectors: batch.vectors.slice(half),
2258
- metadatas: batch.metadatas.slice(half),
2259
- contents: batch.contents.slice(half)
2260
- });
2261
- } else {
2262
- failedRecords.push(batch);
2263
- }
2264
- }
2265
- }
2266
- if (failedRecords.length > 0) {
2267
- const totalFailed = failedRecords.reduce((sum, batch) => sum + batch.vectors.length, 0);
2268
- throw new DatabaseError(
2269
- `Failed to insert ${totalFailed} record(s) after retry attempts`,
2270
- {
2271
- failedBatches: failedRecords.length,
2272
- totalRecords: totalFailed,
2273
- sampleFile: failedRecords[0].metadatas[0].file
2274
- }
2275
- );
2276
- }
3090
+ this.table = await insertBatch(
3091
+ this.db,
3092
+ this.table,
3093
+ this.tableName,
3094
+ vectors,
3095
+ metadatas,
3096
+ contents
3097
+ );
2277
3098
  }
2278
3099
  async search(queryVector, limit = 5, query) {
2279
3100
  if (!this.table) {
2280
3101
  throw new DatabaseError("Vector database not initialized");
2281
3102
  }
2282
3103
  try {
2283
- const results = await this.table.search(Array.from(queryVector)).limit(limit + 20).execute();
2284
- const filtered = results.filter(
2285
- (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
2286
- ).map((r) => {
2287
- const baseScore = r._distance ?? 0;
2288
- const boostedScore = applyRelevanceBoosting(query, r.file, baseScore);
2289
- return {
2290
- content: r.content,
2291
- metadata: {
2292
- file: r.file,
2293
- startLine: r.startLine,
2294
- endLine: r.endLine,
2295
- type: r.type,
2296
- language: r.language,
2297
- // AST-derived metadata (v0.13.0)
2298
- symbolName: r.symbolName || void 0,
2299
- symbolType: r.symbolType,
2300
- parentClass: r.parentClass || void 0,
2301
- complexity: r.complexity || void 0,
2302
- parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
2303
- signature: r.signature || void 0,
2304
- imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
2305
- },
2306
- score: boostedScore,
2307
- relevance: calculateRelevance(boostedScore)
2308
- };
2309
- }).sort((a, b) => a.score - b.score).slice(0, limit);
2310
- return filtered;
3104
+ return await search(this.table, queryVector, limit, query);
2311
3105
  } catch (error) {
2312
3106
  const errorMsg = String(error);
2313
3107
  if (errorMsg.includes("Not found:") || errorMsg.includes(".lance")) {
2314
3108
  try {
2315
3109
  await this.initialize();
2316
- const results = await this.table.search(Array.from(queryVector)).limit(limit + 20).execute();
2317
- return results.filter(
2318
- (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
2319
- ).map((r) => {
2320
- const baseScore = r._distance ?? 0;
2321
- const boostedScore = applyRelevanceBoosting(query, r.file, baseScore);
2322
- return {
2323
- content: r.content,
2324
- metadata: {
2325
- file: r.file,
2326
- startLine: r.startLine,
2327
- endLine: r.endLine,
2328
- type: r.type,
2329
- language: r.language
2330
- },
2331
- score: boostedScore,
2332
- relevance: calculateRelevance(boostedScore)
2333
- };
2334
- }).sort((a, b) => a.score - b.score).slice(0, limit);
3110
+ if (!this.table) {
3111
+ throw new DatabaseError("Vector database not initialized after reconnection");
3112
+ }
3113
+ return await search(this.table, queryVector, limit, query);
2335
3114
  } catch (retryError) {
2336
3115
  throw new DatabaseError(
2337
3116
  `Index appears corrupted or outdated. Please restart the MCP server or run 'lien reindex' in the project directory.`,
@@ -2339,209 +3118,52 @@ var init_lancedb = __esm({
2339
3118
  );
2340
3119
  }
2341
3120
  }
2342
- throw wrapError(error, "Failed to search vector database");
3121
+ throw error;
2343
3122
  }
2344
3123
  }
2345
3124
  async scanWithFilter(options) {
2346
3125
  if (!this.table) {
2347
3126
  throw new DatabaseError("Vector database not initialized");
2348
3127
  }
2349
- const { language, pattern, limit = 100 } = options;
2350
- try {
2351
- const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
2352
- const query = this.table.search(zeroVector).where('file != ""').limit(Math.max(limit * 5, 200));
2353
- const results = await query.execute();
2354
- let filtered = results.filter(
2355
- (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
2356
- );
2357
- if (language) {
2358
- filtered = filtered.filter(
2359
- (r) => r.language && r.language.toLowerCase() === language.toLowerCase()
2360
- );
2361
- }
2362
- if (pattern) {
2363
- const regex = new RegExp(pattern, "i");
2364
- filtered = filtered.filter(
2365
- (r) => regex.test(r.content) || regex.test(r.file)
2366
- );
2367
- }
2368
- return filtered.slice(0, limit).map((r) => {
2369
- const score = 0;
2370
- return {
2371
- content: r.content,
2372
- metadata: {
2373
- file: r.file,
2374
- startLine: r.startLine,
2375
- endLine: r.endLine,
2376
- type: r.type,
2377
- language: r.language,
2378
- // AST-derived metadata (v0.13.0)
2379
- symbolName: r.symbolName || void 0,
2380
- symbolType: r.symbolType,
2381
- parentClass: r.parentClass || void 0,
2382
- complexity: r.complexity || void 0,
2383
- parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
2384
- signature: r.signature || void 0,
2385
- imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
2386
- },
2387
- score,
2388
- relevance: calculateRelevance(score)
2389
- };
2390
- });
2391
- } catch (error) {
2392
- throw wrapError(error, "Failed to scan with filter");
2393
- }
3128
+ return scanWithFilter(this.table, options);
2394
3129
  }
2395
3130
  async querySymbols(options) {
2396
3131
  if (!this.table) {
2397
3132
  throw new DatabaseError("Vector database not initialized");
2398
3133
  }
2399
- const { language, pattern, symbolType, limit = 50 } = options;
2400
- try {
2401
- const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
2402
- const query = this.table.search(zeroVector).where('file != ""').limit(Math.max(limit * 10, 500));
2403
- const results = await query.execute();
2404
- let filtered = results.filter((r) => {
2405
- if (!r.content || r.content.trim().length === 0) {
2406
- return false;
2407
- }
2408
- if (!r.file || r.file.length === 0) {
2409
- return false;
2410
- }
2411
- if (language && (!r.language || r.language.toLowerCase() !== language.toLowerCase())) {
2412
- return false;
2413
- }
2414
- const symbols = symbolType === "function" ? r.functionNames || [] : symbolType === "class" ? r.classNames || [] : symbolType === "interface" ? r.interfaceNames || [] : [...r.functionNames || [], ...r.classNames || [], ...r.interfaceNames || []];
2415
- const astSymbolName = r.symbolName || "";
2416
- if (symbols.length === 0 && !astSymbolName) {
2417
- return false;
2418
- }
2419
- if (pattern) {
2420
- const regex = new RegExp(pattern, "i");
2421
- const matchesOldSymbols = symbols.some((s) => regex.test(s));
2422
- const matchesASTSymbol = regex.test(astSymbolName);
2423
- const nameMatches = matchesOldSymbols || matchesASTSymbol;
2424
- if (!nameMatches) return false;
2425
- if (symbolType) {
2426
- if (r.symbolType) {
2427
- if (symbolType === "function") {
2428
- return r.symbolType === "function" || r.symbolType === "method";
2429
- } else if (symbolType === "class") {
2430
- return r.symbolType === "class";
2431
- } else if (symbolType === "interface") {
2432
- return r.symbolType === "interface";
2433
- }
2434
- return false;
2435
- }
2436
- return nameMatches;
2437
- }
2438
- return nameMatches;
2439
- }
2440
- if (symbolType) {
2441
- if (r.symbolType) {
2442
- if (symbolType === "function") {
2443
- return r.symbolType === "function" || r.symbolType === "method";
2444
- } else if (symbolType === "class") {
2445
- return r.symbolType === "class";
2446
- } else if (symbolType === "interface") {
2447
- return r.symbolType === "interface";
2448
- }
2449
- return false;
2450
- }
2451
- return symbols.length > 0 && symbols.some((s) => s.length > 0 && s !== "");
2452
- }
2453
- return true;
2454
- });
2455
- return filtered.slice(0, limit).map((r) => {
2456
- const score = 0;
2457
- return {
2458
- content: r.content,
2459
- metadata: {
2460
- file: r.file,
2461
- startLine: r.startLine,
2462
- endLine: r.endLine,
2463
- type: r.type,
2464
- language: r.language,
2465
- symbols: {
2466
- functions: r.functionNames || [],
2467
- classes: r.classNames || [],
2468
- interfaces: r.interfaceNames || []
2469
- },
2470
- // AST-derived metadata (v0.13.0)
2471
- symbolName: r.symbolName || void 0,
2472
- symbolType: r.symbolType,
2473
- parentClass: r.parentClass || void 0,
2474
- complexity: r.complexity || void 0,
2475
- parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
2476
- signature: r.signature || void 0,
2477
- imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
2478
- },
2479
- score,
2480
- relevance: calculateRelevance(score)
2481
- };
2482
- });
2483
- } catch (error) {
2484
- throw wrapError(error, "Failed to query symbols");
2485
- }
3134
+ return querySymbols(this.table, options);
2486
3135
  }
2487
3136
  async clear() {
2488
3137
  if (!this.db) {
2489
3138
  throw new DatabaseError("Vector database not initialized");
2490
3139
  }
2491
- try {
2492
- if (this.table) {
2493
- await this.db.dropTable(this.tableName);
2494
- }
2495
- this.table = null;
2496
- } catch (error) {
2497
- throw wrapError(error, "Failed to clear vector database");
2498
- }
3140
+ await clear(this.db, this.table, this.tableName);
3141
+ this.table = null;
2499
3142
  }
2500
- /**
2501
- * Deletes all chunks from a specific file.
2502
- * Used for incremental reindexing when a file is deleted or needs to be re-indexed.
2503
- *
2504
- * @param filepath - Path to the file whose chunks should be deleted
2505
- */
2506
3143
  async deleteByFile(filepath) {
2507
3144
  if (!this.table) {
2508
3145
  throw new DatabaseError("Vector database not initialized");
2509
3146
  }
2510
- try {
2511
- await this.table.delete(`file = "${filepath}"`);
2512
- } catch (error) {
2513
- throw wrapError(error, "Failed to delete file from vector database");
2514
- }
3147
+ await deleteByFile(this.table, filepath);
2515
3148
  }
2516
- /**
2517
- * Updates a file in the index by atomically deleting old chunks and inserting new ones.
2518
- * This is the primary method for incremental reindexing.
2519
- *
2520
- * @param filepath - Path to the file being updated
2521
- * @param vectors - New embedding vectors
2522
- * @param metadatas - New chunk metadata
2523
- * @param contents - New chunk contents
2524
- */
2525
3149
  async updateFile(filepath, vectors, metadatas, contents) {
2526
- if (!this.table) {
2527
- throw new DatabaseError("Vector database not initialized");
3150
+ if (!this.db) {
3151
+ throw new DatabaseError("Vector database connection not initialized");
2528
3152
  }
2529
- try {
2530
- await this.deleteByFile(filepath);
2531
- if (vectors.length > 0) {
2532
- await this.insertBatch(vectors, metadatas, contents);
2533
- }
2534
- await writeVersionFile(this.dbPath);
2535
- } catch (error) {
2536
- throw wrapError(error, "Failed to update file in vector database");
3153
+ if (!this.table) {
3154
+ throw new DatabaseError("Vector database table not initialized");
2537
3155
  }
3156
+ this.table = await updateFile(
3157
+ this.db,
3158
+ this.table,
3159
+ this.tableName,
3160
+ this.dbPath,
3161
+ filepath,
3162
+ vectors,
3163
+ metadatas,
3164
+ contents
3165
+ );
2538
3166
  }
2539
- /**
2540
- * Checks if the index version has changed since last check.
2541
- * Uses caching to minimize I/O overhead (checks at most once per second).
2542
- *
2543
- * @returns true if version has changed, false otherwise
2544
- */
2545
3167
  async checkVersion() {
2546
3168
  const now = Date.now();
2547
3169
  if (now - this.lastVersionCheck < 1e3) {
@@ -2559,11 +3181,6 @@ var init_lancedb = __esm({
2559
3181
  return false;
2560
3182
  }
2561
3183
  }
2562
- /**
2563
- * Reconnects to the database by reinitializing the connection.
2564
- * Used when the index has been rebuilt/reindexed.
2565
- * Forces a complete reload from disk by closing existing connections first.
2566
- */
2567
3184
  async reconnect() {
2568
3185
  try {
2569
3186
  this.table = null;
@@ -2573,31 +3190,15 @@ var init_lancedb = __esm({
2573
3190
  throw wrapError(error, "Failed to reconnect to vector database");
2574
3191
  }
2575
3192
  }
2576
- /**
2577
- * Gets the current index version (timestamp of last reindex).
2578
- *
2579
- * @returns Version timestamp, or 0 if unknown
2580
- */
2581
3193
  getCurrentVersion() {
2582
3194
  return this.currentVersion;
2583
3195
  }
2584
- /**
2585
- * Gets the current index version as a human-readable date string.
2586
- *
2587
- * @returns Formatted date string, or 'Unknown' if no version
2588
- */
2589
3196
  getVersionDate() {
2590
3197
  if (this.currentVersion === 0) {
2591
3198
  return "Unknown";
2592
3199
  }
2593
3200
  return new Date(this.currentVersion).toLocaleString();
2594
3201
  }
2595
- /**
2596
- * Checks if the database contains real indexed data.
2597
- * Used to detect first run and trigger auto-indexing.
2598
- *
2599
- * @returns true if database has real code chunks, false if empty or only schema rows
2600
- */
2601
3202
  async hasData() {
2602
3203
  if (!this.table) {
2603
3204
  return false;
@@ -2630,8 +3231,8 @@ var manifest_exports = {};
2630
3231
  __export(manifest_exports, {
2631
3232
  ManifestManager: () => ManifestManager
2632
3233
  });
2633
- import fs11 from "fs/promises";
2634
- import path12 from "path";
3234
+ import fs13 from "fs/promises";
3235
+ import path15 from "path";
2635
3236
  var MANIFEST_FILE, ManifestManager;
2636
3237
  var init_manifest = __esm({
2637
3238
  "src/indexer/manifest.ts"() {
@@ -2653,7 +3254,7 @@ var init_manifest = __esm({
2653
3254
  */
2654
3255
  constructor(indexPath) {
2655
3256
  this.indexPath = indexPath;
2656
- this.manifestPath = path12.join(indexPath, MANIFEST_FILE);
3257
+ this.manifestPath = path15.join(indexPath, MANIFEST_FILE);
2657
3258
  }
2658
3259
  /**
2659
3260
  * Loads the manifest from disk.
@@ -2666,7 +3267,7 @@ var init_manifest = __esm({
2666
3267
  */
2667
3268
  async load() {
2668
3269
  try {
2669
- const content = await fs11.readFile(this.manifestPath, "utf-8");
3270
+ const content = await fs13.readFile(this.manifestPath, "utf-8");
2670
3271
  const manifest = JSON.parse(content);
2671
3272
  if (manifest.formatVersion !== INDEX_FORMAT_VERSION) {
2672
3273
  console.error(
@@ -2693,7 +3294,7 @@ var init_manifest = __esm({
2693
3294
  */
2694
3295
  async save(manifest) {
2695
3296
  try {
2696
- await fs11.mkdir(this.indexPath, { recursive: true });
3297
+ await fs13.mkdir(this.indexPath, { recursive: true });
2697
3298
  const manifestToSave = {
2698
3299
  ...manifest,
2699
3300
  formatVersion: INDEX_FORMAT_VERSION,
@@ -2701,7 +3302,7 @@ var init_manifest = __esm({
2701
3302
  lastIndexed: Date.now()
2702
3303
  };
2703
3304
  const content = JSON.stringify(manifestToSave, null, 2);
2704
- await fs11.writeFile(this.manifestPath, content, "utf-8");
3305
+ await fs13.writeFile(this.manifestPath, content, "utf-8");
2705
3306
  } catch (error) {
2706
3307
  console.error(`[Lien] Warning: Failed to save manifest: ${error}`);
2707
3308
  }
@@ -2838,7 +3439,7 @@ var init_manifest = __esm({
2838
3439
  */
2839
3440
  async clear() {
2840
3441
  try {
2841
- await fs11.unlink(this.manifestPath);
3442
+ await fs13.unlink(this.manifestPath);
2842
3443
  } catch (error) {
2843
3444
  if (error.code !== "ENOENT") {
2844
3445
  console.error(`[Lien] Warning: Failed to clear manifest: ${error}`);
@@ -2867,8 +3468,8 @@ var tracker_exports = {};
2867
3468
  __export(tracker_exports, {
2868
3469
  GitStateTracker: () => GitStateTracker
2869
3470
  });
2870
- import fs12 from "fs/promises";
2871
- import path13 from "path";
3471
+ import fs14 from "fs/promises";
3472
+ import path16 from "path";
2872
3473
  var GitStateTracker;
2873
3474
  var init_tracker = __esm({
2874
3475
  "src/git/tracker.ts"() {
@@ -2880,7 +3481,7 @@ var init_tracker = __esm({
2880
3481
  currentState = null;
2881
3482
  constructor(rootDir, indexPath) {
2882
3483
  this.rootDir = rootDir;
2883
- this.stateFile = path13.join(indexPath, ".git-state.json");
3484
+ this.stateFile = path16.join(indexPath, ".git-state.json");
2884
3485
  }
2885
3486
  /**
2886
3487
  * Loads the last known git state from disk.
@@ -2888,7 +3489,7 @@ var init_tracker = __esm({
2888
3489
  */
2889
3490
  async loadState() {
2890
3491
  try {
2891
- const content = await fs12.readFile(this.stateFile, "utf-8");
3492
+ const content = await fs14.readFile(this.stateFile, "utf-8");
2892
3493
  return JSON.parse(content);
2893
3494
  } catch {
2894
3495
  return null;
@@ -2900,7 +3501,7 @@ var init_tracker = __esm({
2900
3501
  async saveState(state) {
2901
3502
  try {
2902
3503
  const content = JSON.stringify(state, null, 2);
2903
- await fs12.writeFile(this.stateFile, content, "utf-8");
3504
+ await fs14.writeFile(this.stateFile, content, "utf-8");
2904
3505
  } catch (error) {
2905
3506
  console.error(`[Lien] Warning: Failed to save git state: ${error}`);
2906
3507
  }
@@ -3051,7 +3652,7 @@ var init_tracker = __esm({
3051
3652
  });
3052
3653
 
3053
3654
  // src/indexer/change-detector.ts
3054
- import fs13 from "fs/promises";
3655
+ import fs15 from "fs/promises";
3055
3656
  async function detectChanges(rootDir, vectorDB, config) {
3056
3657
  const manifest = new ManifestManager(vectorDB.dbPath);
3057
3658
  const savedManifest = await manifest.load();
@@ -3155,7 +3756,7 @@ async function mtimeBasedDetection(rootDir, savedManifest, config) {
3155
3756
  const fileStats = /* @__PURE__ */ new Map();
3156
3757
  for (const filepath of currentFiles) {
3157
3758
  try {
3158
- const stats = await fs13.stat(filepath);
3759
+ const stats = await fs15.stat(filepath);
3159
3760
  fileStats.set(filepath, stats.mtimeMs);
3160
3761
  } catch {
3161
3762
  continue;
@@ -3193,7 +3794,7 @@ var init_change_detector = __esm({
3193
3794
  });
3194
3795
 
3195
3796
  // src/indexer/incremental.ts
3196
- import fs14 from "fs/promises";
3797
+ import fs16 from "fs/promises";
3197
3798
  async function processFileContent(filepath, content, embeddings, config, verbose) {
3198
3799
  const chunkSize = isModernConfig(config) ? config.core.chunkSize : isLegacyConfig(config) ? config.indexing.chunkSize : 75;
3199
3800
  const chunkOverlap = isModernConfig(config) ? config.core.chunkOverlap : isLegacyConfig(config) ? config.indexing.chunkOverlap : 10;
@@ -3232,7 +3833,7 @@ async function indexSingleFile(filepath, vectorDB, embeddings, config, options =
3232
3833
  const { verbose } = options;
3233
3834
  try {
3234
3835
  try {
3235
- await fs14.access(filepath);
3836
+ await fs16.access(filepath);
3236
3837
  } catch {
3237
3838
  if (verbose) {
3238
3839
  console.error(`[Lien] File deleted: ${filepath}`);
@@ -3242,9 +3843,9 @@ async function indexSingleFile(filepath, vectorDB, embeddings, config, options =
3242
3843
  await manifest2.removeFile(filepath);
3243
3844
  return;
3244
3845
  }
3245
- const content = await fs14.readFile(filepath, "utf-8");
3846
+ const content = await fs16.readFile(filepath, "utf-8");
3246
3847
  const result = await processFileContent(filepath, content, embeddings, config, verbose || false);
3247
- const stats = await fs14.stat(filepath);
3848
+ const stats = await fs16.stat(filepath);
3248
3849
  const manifest = new ManifestManager(vectorDB.dbPath);
3249
3850
  if (result === null) {
3250
3851
  await vectorDB.deleteByFile(filepath);
@@ -3281,9 +3882,9 @@ async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, optio
3281
3882
  let content;
3282
3883
  let fileMtime;
3283
3884
  try {
3284
- const stats = await fs14.stat(filepath);
3885
+ const stats = await fs16.stat(filepath);
3285
3886
  fileMtime = stats.mtimeMs;
3286
- content = await fs14.readFile(filepath, "utf-8");
3887
+ content = await fs16.readFile(filepath, "utf-8");
3287
3888
  } catch (error) {
3288
3889
  if (verbose) {
3289
3890
  console.error(`[Lien] File not readable: ${filepath}`);
@@ -3448,9 +4049,9 @@ var indexer_exports = {};
3448
4049
  __export(indexer_exports, {
3449
4050
  indexCodebase: () => indexCodebase
3450
4051
  });
3451
- import fs15 from "fs/promises";
4052
+ import fs17 from "fs/promises";
3452
4053
  import ora from "ora";
3453
- import chalk4 from "chalk";
4054
+ import chalk5 from "chalk";
3454
4055
  import pLimit from "p-limit";
3455
4056
  async function indexCodebase(options = {}) {
3456
4057
  const rootDir = options.rootDir ?? process.cwd();
@@ -3523,7 +4124,7 @@ async function indexCodebase(options = {}) {
3523
4124
  await manifest2.updateGitState(gitState);
3524
4125
  }
3525
4126
  }
3526
- console.log(chalk4.dim("\nNext step: Run"), chalk4.bold("lien serve"), chalk4.dim("to start the MCP server"));
4127
+ console.log(chalk5.dim("\nNext step: Run"), chalk5.bold("lien serve"), chalk5.dim("to start the MCP server"));
3527
4128
  return;
3528
4129
  }
3529
4130
  spinner.text = "Full reindex required...";
@@ -3612,8 +4213,8 @@ async function indexCodebase(options = {}) {
3612
4213
  const filePromises = files.map(
3613
4214
  (file) => limit(async () => {
3614
4215
  try {
3615
- const stats = await fs15.stat(file);
3616
- const content = await fs15.readFile(file, "utf-8");
4216
+ const stats = await fs17.stat(file);
4217
+ const content = await fs17.readFile(file, "utf-8");
3617
4218
  const chunkSize = isModernConfig(config) ? config.core.chunkSize : 75;
3618
4219
  const chunkOverlap = isModernConfig(config) ? config.core.chunkOverlap : 10;
3619
4220
  const useAST = isModernConfig(config) ? config.chunking.useAST : true;
@@ -3647,7 +4248,7 @@ async function indexCodebase(options = {}) {
3647
4248
  }
3648
4249
  } catch (error) {
3649
4250
  if (options.verbose) {
3650
- console.error(chalk4.yellow(`
4251
+ console.error(chalk5.yellow(`
3651
4252
  \u26A0\uFE0F Skipping ${file}: ${error}`));
3652
4253
  }
3653
4254
  processedFiles++;
@@ -3687,7 +4288,7 @@ async function indexCodebase(options = {}) {
3687
4288
  spinner.succeed(
3688
4289
  `Indexed ${processedFiles} files (${processedChunks} chunks) in ${totalTime}s using ${concurrency}x concurrency`
3689
4290
  );
3690
- console.log(chalk4.dim("\nNext step: Run"), chalk4.bold("lien serve"), chalk4.dim("to start the MCP server"));
4291
+ console.log(chalk5.dim("\nNext step: Run"), chalk5.bold("lien serve"), chalk5.dim("to start the MCP server"));
3691
4292
  } catch (error) {
3692
4293
  if (updateInterval) {
3693
4294
  clearInterval(updateInterval);
@@ -3722,11 +4323,10 @@ import { dirname as dirname4, join as join4 } from "path";
3722
4323
 
3723
4324
  // src/cli/init.ts
3724
4325
  init_schema();
3725
- init_merge();
3726
- import fs5 from "fs/promises";
3727
- import path5 from "path";
4326
+ import fs7 from "fs/promises";
4327
+ import path7 from "path";
3728
4328
  import { fileURLToPath as fileURLToPath3 } from "url";
3729
- import chalk2 from "chalk";
4329
+ import chalk3 from "chalk";
3730
4330
  import inquirer from "inquirer";
3731
4331
 
3732
4332
  // src/utils/banner.ts
@@ -3788,12 +4388,111 @@ function showCompactBanner() {
3788
4388
  console.log();
3789
4389
  }
3790
4390
 
3791
- // src/cli/init.ts
4391
+ // src/config/migration-manager.ts
4392
+ init_schema();
3792
4393
  init_migration();
4394
+ init_merge();
4395
+ init_constants();
4396
+ import fs2 from "fs/promises";
4397
+ import path2 from "path";
4398
+ import chalk2 from "chalk";
4399
+ var MigrationManager = class {
4400
+ constructor(rootDir = process.cwd()) {
4401
+ this.rootDir = rootDir;
4402
+ }
4403
+ /**
4404
+ * Get the config file path
4405
+ */
4406
+ getConfigPath() {
4407
+ return path2.join(this.rootDir, ".lien.config.json");
4408
+ }
4409
+ /**
4410
+ * Check if the current config needs migration
4411
+ */
4412
+ async needsMigration() {
4413
+ try {
4414
+ const configPath = this.getConfigPath();
4415
+ const content = await fs2.readFile(configPath, "utf-8");
4416
+ const config = JSON.parse(content);
4417
+ return needsMigration(config);
4418
+ } catch (error) {
4419
+ return false;
4420
+ }
4421
+ }
4422
+ /**
4423
+ * Perform silent migration (for auto-migration during load)
4424
+ * Returns the migrated config without user interaction
4425
+ */
4426
+ async autoMigrate() {
4427
+ const result = await migrateConfigFile(this.rootDir);
4428
+ if (result.migrated && result.backupPath) {
4429
+ const backupFilename = path2.basename(result.backupPath);
4430
+ console.log(`\u2705 Migration complete! Backup saved as ${backupFilename}`);
4431
+ console.log("\u{1F4DD} Your config now uses the framework-based structure.");
4432
+ }
4433
+ return result.config;
4434
+ }
4435
+ /**
4436
+ * Perform interactive upgrade (for CLI upgrade command)
4437
+ * Provides detailed feedback and handles edge cases
4438
+ */
4439
+ async upgradeInteractive() {
4440
+ const configPath = this.getConfigPath();
4441
+ try {
4442
+ const existingContent = await fs2.readFile(configPath, "utf-8");
4443
+ const existingConfig = JSON.parse(existingContent);
4444
+ const migrationNeeded = needsMigration(existingConfig);
4445
+ const newFields = migrationNeeded ? [] : detectNewFields(existingConfig, defaultConfig);
4446
+ const hasChanges = migrationNeeded || newFields.length > 0;
4447
+ if (!hasChanges) {
4448
+ console.log(chalk2.green("\u2713 Config is already up to date"));
4449
+ console.log(chalk2.dim("No changes needed"));
4450
+ return;
4451
+ }
4452
+ const backupPath = `${configPath}.backup`;
4453
+ await fs2.copyFile(configPath, backupPath);
4454
+ let upgradedConfig;
4455
+ let migrated = false;
4456
+ if (migrationNeeded) {
4457
+ console.log(chalk2.blue(`\u{1F504} Migrating config from v0.2.0 to v${CURRENT_CONFIG_VERSION}...`));
4458
+ upgradedConfig = migrateConfig(existingConfig);
4459
+ migrated = true;
4460
+ } else {
4461
+ upgradedConfig = deepMergeConfig(defaultConfig, existingConfig);
4462
+ console.log(chalk2.dim("\nNew options added:"));
4463
+ newFields.forEach((field) => console.log(chalk2.dim(" \u2022"), chalk2.bold(field)));
4464
+ }
4465
+ await fs2.writeFile(
4466
+ configPath,
4467
+ JSON.stringify(upgradedConfig, null, 2) + "\n",
4468
+ "utf-8"
4469
+ );
4470
+ console.log(chalk2.green("\u2713 Config upgraded successfully"));
4471
+ console.log(chalk2.dim("Backup saved to:"), backupPath);
4472
+ if (migrated) {
4473
+ console.log(chalk2.dim("\n\u{1F4DD} Your config now uses the framework-based structure."));
4474
+ }
4475
+ } catch (error) {
4476
+ if (error.code === "ENOENT") {
4477
+ console.log(chalk2.red("Error: No config file found"));
4478
+ console.log(chalk2.dim("Run"), chalk2.bold("lien init"), chalk2.dim("to create a config file"));
4479
+ return;
4480
+ }
4481
+ throw error;
4482
+ }
4483
+ }
4484
+ /**
4485
+ * Perform migration and return result
4486
+ * Used when programmatic access to migration result is needed
4487
+ */
4488
+ async migrate() {
4489
+ return migrateConfigFile(this.rootDir);
4490
+ }
4491
+ };
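MigrationManager consolidates the config-upgrade path previously driven from src/cli/init.ts (the old upgradeConfig call is removed further down): needsMigration() checks the on-disk config, autoMigrate() is the silent path used during config load, upgradeInteractive() backs the `lien init --upgrade` flow, and migrate() exposes the raw result. A hedged sketch of how it appears to be driven; the surrounding wiring is an assumption:

  // Illustrative only; method names come from the class above.
  async function exampleMigration(rootDir) {
    const manager = new MigrationManager(rootDir);
    if (await manager.needsMigration()) {
      return manager.autoMigrate(); // silent path, prints a backup notice
    }
    const result = await manager.migrate(); // result.migrated / result.backupPath / result.config
    return result.config;
  }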
3793
4492
 
3794
4493
  // src/frameworks/detector-service.ts
3795
- import fs4 from "fs/promises";
3796
- import path4 from "path";
4494
+ import fs6 from "fs/promises";
4495
+ import path6 from "path";
3797
4496
 
3798
4497
  // src/frameworks/types.ts
3799
4498
  var defaultDetectionOptions = {
@@ -3815,8 +4514,8 @@ var defaultDetectionOptions = {
3815
4514
  };
3816
4515
 
3817
4516
  // src/frameworks/nodejs/detector.ts
3818
- import fs from "fs/promises";
3819
- import path from "path";
4517
+ import fs3 from "fs/promises";
4518
+ import path3 from "path";
3820
4519
 
3821
4520
  // src/frameworks/nodejs/config.ts
3822
4521
  async function generateNodeJsConfig(_rootDir, _relativePath) {
@@ -3867,7 +4566,7 @@ var nodejsDetector = {
3867
4566
  priority: 50,
3868
4567
  // Generic, yields to specific frameworks like Laravel
3869
4568
  async detect(rootDir, relativePath) {
3870
- const fullPath = path.join(rootDir, relativePath);
4569
+ const fullPath = path3.join(rootDir, relativePath);
3871
4570
  const result = {
3872
4571
  detected: false,
3873
4572
  name: "nodejs",
@@ -3875,10 +4574,10 @@ var nodejsDetector = {
3875
4574
  confidence: "low",
3876
4575
  evidence: []
3877
4576
  };
3878
- const packageJsonPath = path.join(fullPath, "package.json");
4577
+ const packageJsonPath = path3.join(fullPath, "package.json");
3879
4578
  let packageJson5 = null;
3880
4579
  try {
3881
- const content = await fs.readFile(packageJsonPath, "utf-8");
4580
+ const content = await fs3.readFile(packageJsonPath, "utf-8");
3882
4581
  packageJson5 = JSON.parse(content);
3883
4582
  result.evidence.push("Found package.json");
3884
4583
  } catch {
@@ -3926,8 +4625,8 @@ var nodejsDetector = {
3926
4625
  };
3927
4626
 
3928
4627
  // src/frameworks/laravel/detector.ts
3929
- import fs2 from "fs/promises";
3930
- import path2 from "path";
4628
+ import fs4 from "fs/promises";
4629
+ import path4 from "path";
3931
4630
 
3932
4631
  // src/frameworks/laravel/config.ts
3933
4632
  async function generateLaravelConfig(_rootDir, _relativePath) {
@@ -3984,7 +4683,7 @@ var laravelDetector = {
3984
4683
  priority: 100,
3985
4684
  // Laravel takes precedence over Node.js
3986
4685
  async detect(rootDir, relativePath) {
3987
- const fullPath = path2.join(rootDir, relativePath);
4686
+ const fullPath = path4.join(rootDir, relativePath);
3988
4687
  const result = {
3989
4688
  detected: false,
3990
4689
  name: "laravel",
@@ -3992,10 +4691,10 @@ var laravelDetector = {
3992
4691
  confidence: "low",
3993
4692
  evidence: []
3994
4693
  };
3995
- const composerJsonPath = path2.join(fullPath, "composer.json");
4694
+ const composerJsonPath = path4.join(fullPath, "composer.json");
3996
4695
  let composerJson = null;
3997
4696
  try {
3998
- const content = await fs2.readFile(composerJsonPath, "utf-8");
4697
+ const content = await fs4.readFile(composerJsonPath, "utf-8");
3999
4698
  composerJson = JSON.parse(content);
4000
4699
  result.evidence.push("Found composer.json");
4001
4700
  } catch {
@@ -4006,9 +4705,9 @@ var laravelDetector = {
4006
4705
  return result;
4007
4706
  }
4008
4707
  result.evidence.push("Laravel framework detected in composer.json");
4009
- const artisanPath = path2.join(fullPath, "artisan");
4708
+ const artisanPath = path4.join(fullPath, "artisan");
4010
4709
  try {
4011
- await fs2.access(artisanPath);
4710
+ await fs4.access(artisanPath);
4012
4711
  result.evidence.push("Found artisan file");
4013
4712
  result.confidence = "high";
4014
4713
  } catch {
@@ -4018,8 +4717,8 @@ var laravelDetector = {
4018
4717
  let foundDirs = 0;
4019
4718
  for (const dir of laravelDirs) {
4020
4719
  try {
4021
- const dirPath = path2.join(fullPath, dir);
4022
- const stats = await fs2.stat(dirPath);
4720
+ const dirPath = path4.join(fullPath, dir);
4721
+ const stats = await fs4.stat(dirPath);
4023
4722
  if (stats.isDirectory()) {
4024
4723
  foundDirs++;
4025
4724
  }
@@ -4031,14 +4730,14 @@ var laravelDetector = {
4031
4730
  result.confidence = "high";
4032
4731
  }
4033
4732
  const testDirsToCheck = [
4034
- path2.join(fullPath, "tests", "Feature"),
4035
- path2.join(fullPath, "tests", "Unit")
4733
+ path4.join(fullPath, "tests", "Feature"),
4734
+ path4.join(fullPath, "tests", "Unit")
4036
4735
  ];
4037
4736
  for (const testDir of testDirsToCheck) {
4038
4737
  try {
4039
- const stats = await fs2.stat(testDir);
4738
+ const stats = await fs4.stat(testDir);
4040
4739
  if (stats.isDirectory()) {
4041
- const dirName = path2.basename(path2.dirname(testDir)) + "/" + path2.basename(testDir);
4740
+ const dirName = path4.basename(path4.dirname(testDir)) + "/" + path4.basename(testDir);
4042
4741
  result.evidence.push(`Found ${dirName} test directory`);
4043
4742
  }
4044
4743
  } catch {
@@ -4056,8 +4755,8 @@ var laravelDetector = {
4056
4755
  };
4057
4756
 
4058
4757
  // src/frameworks/shopify/detector.ts
4059
- import fs3 from "fs/promises";
4060
- import path3 from "path";
4758
+ import fs5 from "fs/promises";
4759
+ import path5 from "path";
4061
4760
 
4062
4761
  // src/frameworks/shopify/config.ts
4063
4762
  async function generateShopifyConfig(_rootDir, _relativePath) {
@@ -4069,6 +4768,8 @@ async function generateShopifyConfig(_rootDir, _relativePath) {
4069
4768
  "snippets/**/*.liquid",
4070
4769
  "templates/**/*.liquid",
4071
4770
  // Matches any nesting level (e.g., templates/customers/account.liquid)
4771
+ "templates/**/*.json",
4772
+ // JSON template definitions (Shopify 2.0+)
4072
4773
  // Theme editor blocks (Online Store 2.0)
4073
4774
  "blocks/**/*.liquid",
4074
4775
  // Assets (CSS, JS with optional Liquid templating)
@@ -4113,7 +4814,7 @@ var shopifyDetector = {
4113
4814
  priority: 100,
4114
4815
  // High priority (same as Laravel)
4115
4816
  async detect(rootDir, relativePath) {
4116
- const fullPath = path3.join(rootDir, relativePath);
4817
+ const fullPath = path5.join(rootDir, relativePath);
4117
4818
  const result = {
4118
4819
  detected: false,
4119
4820
  name: "shopify",
@@ -4121,18 +4822,18 @@ var shopifyDetector = {
4121
4822
  confidence: "low",
4122
4823
  evidence: []
4123
4824
  };
4124
- const settingsSchemaPath = path3.join(fullPath, "config", "settings_schema.json");
4825
+ const settingsSchemaPath = path5.join(fullPath, "config", "settings_schema.json");
4125
4826
  let hasSettingsSchema = false;
4126
4827
  try {
4127
- await fs3.access(settingsSchemaPath);
4828
+ await fs5.access(settingsSchemaPath);
4128
4829
  hasSettingsSchema = true;
4129
4830
  result.evidence.push("Found config/settings_schema.json");
4130
4831
  } catch {
4131
4832
  }
4132
- const themeLayoutPath = path3.join(fullPath, "layout", "theme.liquid");
4833
+ const themeLayoutPath = path5.join(fullPath, "layout", "theme.liquid");
4133
4834
  let hasThemeLayout = false;
4134
4835
  try {
4135
- await fs3.access(themeLayoutPath);
4836
+ await fs5.access(themeLayoutPath);
4136
4837
  hasThemeLayout = true;
4137
4838
  result.evidence.push("Found layout/theme.liquid");
4138
4839
  } catch {
@@ -4141,8 +4842,8 @@ var shopifyDetector = {
4141
4842
  let foundDirs = 0;
4142
4843
  for (const dir of shopifyDirs) {
4143
4844
  try {
4144
- const dirPath = path3.join(fullPath, dir);
4145
- const stats = await fs3.stat(dirPath);
4845
+ const dirPath = path5.join(fullPath, dir);
4846
+ const stats = await fs5.stat(dirPath);
4146
4847
  if (stats.isDirectory()) {
4147
4848
  foundDirs++;
4148
4849
  }
@@ -4153,14 +4854,14 @@ var shopifyDetector = {
4153
4854
  result.evidence.push(`Shopify directory structure detected (${foundDirs}/${shopifyDirs.length} dirs)`);
4154
4855
  }
4155
4856
  try {
4156
- const tomlPath = path3.join(fullPath, "shopify.theme.toml");
4157
- await fs3.access(tomlPath);
4857
+ const tomlPath = path5.join(fullPath, "shopify.theme.toml");
4858
+ await fs5.access(tomlPath);
4158
4859
  result.evidence.push("Found shopify.theme.toml");
4159
4860
  } catch {
4160
4861
  }
4161
4862
  try {
4162
- const ignorePath = path3.join(fullPath, ".shopifyignore");
4163
- await fs3.access(ignorePath);
4863
+ const ignorePath = path5.join(fullPath, ".shopifyignore");
4864
+ await fs5.access(ignorePath);
4164
4865
  result.evidence.push("Found .shopifyignore");
4165
4866
  } catch {
4166
4867
  }
@@ -4206,7 +4907,7 @@ async function detectAllFrameworks(rootDir, options = {}) {
4206
4907
  return results;
4207
4908
  }
4208
4909
  async function detectAtPath(rootDir, relativePath, results, visited) {
4209
- const fullPath = path4.join(rootDir, relativePath);
4910
+ const fullPath = path6.join(rootDir, relativePath);
4210
4911
  if (visited.has(fullPath)) {
4211
4912
  return;
4212
4913
  }
@@ -4273,9 +4974,9 @@ async function scanSubdirectories(rootDir, relativePath, results, visited, depth
4273
4974
  if (depth >= options.maxDepth) {
4274
4975
  return;
4275
4976
  }
4276
- const fullPath = path4.join(rootDir, relativePath);
4977
+ const fullPath = path6.join(rootDir, relativePath);
4277
4978
  try {
4278
- const entries = await fs4.readdir(fullPath, { withFileTypes: true });
4979
+ const entries = await fs6.readdir(fullPath, { withFileTypes: true });
4279
4980
  const dirs = entries.filter((e) => e.isDirectory());
4280
4981
  for (const dir of dirs) {
4281
4982
  if (options.skipDirs.includes(dir.name)) {
@@ -4284,7 +4985,7 @@ async function scanSubdirectories(rootDir, relativePath, results, visited, depth
4284
4985
  if (dir.name.startsWith(".")) {
4285
4986
  continue;
4286
4987
  }
4287
- const subPath = relativePath === "." ? dir.name : path4.join(relativePath, dir.name);
4988
+ const subPath = relativePath === "." ? dir.name : path6.join(relativePath, dir.name);
4288
4989
  await detectAtPath(rootDir, subPath, results, visited);
4289
4990
  await scanSubdirectories(rootDir, subPath, results, visited, depth + 1, options);
4290
4991
  }
@@ -4294,44 +4995,44 @@ async function scanSubdirectories(rootDir, relativePath, results, visited, depth
4294
4995
  }
4295
4996
 
4296
4997
  // src/cli/init.ts
4297
- init_constants();
4298
4998
  var __filename3 = fileURLToPath3(import.meta.url);
4299
- var __dirname3 = path5.dirname(__filename3);
4999
+ var __dirname3 = path7.dirname(__filename3);
4300
5000
  async function initCommand(options = {}) {
4301
5001
  const rootDir = options.path || process.cwd();
4302
- const configPath = path5.join(rootDir, ".lien.config.json");
5002
+ const configPath = path7.join(rootDir, ".lien.config.json");
4303
5003
  try {
4304
5004
  let configExists = false;
4305
5005
  try {
4306
- await fs5.access(configPath);
5006
+ await fs7.access(configPath);
4307
5007
  configExists = true;
4308
5008
  } catch {
4309
5009
  }
4310
5010
  if (configExists && options.upgrade) {
4311
- await upgradeConfig(configPath);
5011
+ const migrationManager = new MigrationManager(rootDir);
5012
+ await migrationManager.upgradeInteractive();
4312
5013
  return;
4313
5014
  }
4314
5015
  if (configExists && !options.upgrade) {
4315
- console.log(chalk2.yellow("\u26A0\uFE0F .lien.config.json already exists"));
4316
- console.log(chalk2.dim("Run"), chalk2.bold("lien init --upgrade"), chalk2.dim("to merge new config options"));
5016
+ console.log(chalk3.yellow("\u26A0\uFE0F .lien.config.json already exists"));
5017
+ console.log(chalk3.dim("Run"), chalk3.bold("lien init --upgrade"), chalk3.dim("to merge new config options"));
4317
5018
  return;
4318
5019
  }
4319
5020
  if (!configExists) {
4320
5021
  await createNewConfig(rootDir, options);
4321
5022
  }
4322
5023
  } catch (error) {
4323
- console.error(chalk2.red("Error creating config file:"), error);
5024
+ console.error(chalk3.red("Error creating config file:"), error);
4324
5025
  process.exit(1);
4325
5026
  }
4326
5027
  }
4327
5028
  async function createNewConfig(rootDir, options) {
4328
5029
  showCompactBanner();
4329
- console.log(chalk2.bold("Initializing Lien...\n"));
4330
- console.log(chalk2.dim("\u{1F50D} Detecting frameworks in"), chalk2.bold(rootDir));
5030
+ console.log(chalk3.bold("Initializing Lien...\n"));
5031
+ console.log(chalk3.dim("\u{1F50D} Detecting frameworks in"), chalk3.bold(rootDir));
4331
5032
  const detections = await detectAllFrameworks(rootDir);
4332
5033
  let frameworks = [];
4333
5034
  if (detections.length === 0) {
4334
- console.log(chalk2.yellow("\n\u26A0\uFE0F No frameworks detected"));
5035
+ console.log(chalk3.yellow("\n\u26A0\uFE0F No frameworks detected"));
4335
5036
  if (!options.yes) {
4336
5037
  const { useGeneric } = await inquirer.prompt([
4337
5038
  {
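
One behavioural change in the initCommand hunk above, beyond the chalk2 → chalk3 renumbering: lien init --upgrade no longer runs the inline upgradeConfig helper (removed further down in this diff) and instead hands off to a MigrationManager. A minimal sketch of that path, assuming only what the hunk shows; the MigrationManager class is taken as a parameter because its import location is not visible here:

// Sketch of the new upgrade path; MigrationManager is accepted as a parameter
// because its import location is not visible in this hunk.
import fs from "fs/promises";
import path from "path";

async function runUpgradeIfRequested(rootDir, options, MigrationManager) {
  const configPath = path.join(rootDir, ".lien.config.json");
  try {
    await fs.access(configPath); // only upgrade when a config already exists
  } catch {
    return false;
  }
  if (!options.upgrade) return false;
  const migrationManager = new MigrationManager(rootDir);
  await migrationManager.upgradeInteractive(); // interactive flow replaces the old in-place merge
  return true;
}
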
@@ -4342,7 +5043,7 @@ async function createNewConfig(rootDir, options) {
4342
5043
  }
4343
5044
  ]);
4344
5045
  if (!useGeneric) {
4345
- console.log(chalk2.dim("Aborted."));
5046
+ console.log(chalk3.dim("Aborted."));
4346
5047
  return;
4347
5048
  }
4348
5049
  }
@@ -4351,7 +5052,7 @@ async function createNewConfig(rootDir, options) {
4351
5052
  path: ".",
4352
5053
  enabled: true,
4353
5054
  config: {
4354
- include: ["**/*.{ts,tsx,js,jsx,py,go,rs,java,c,cpp,cs}"],
5055
+ include: ["**/*.{ts,tsx,js,jsx,py,php,go,rs,java,c,cpp,cs}"],
4355
5056
  exclude: [
4356
5057
  "**/node_modules/**",
4357
5058
  "**/dist/**",
@@ -4365,16 +5066,16 @@ async function createNewConfig(rootDir, options) {
4365
5066
  }
4366
5067
  });
4367
5068
  } else {
4368
- console.log(chalk2.green(`
5069
+ console.log(chalk3.green(`
4369
5070
  \u2713 Found ${detections.length} framework(s):
4370
5071
  `));
4371
5072
  for (const det of detections) {
4372
5073
  const pathDisplay = det.path === "." ? "root" : det.path;
4373
- console.log(chalk2.bold(` ${det.name}`), chalk2.dim(`(${det.confidence} confidence)`));
4374
- console.log(chalk2.dim(` Location: ${pathDisplay}`));
5074
+ console.log(chalk3.bold(` ${det.name}`), chalk3.dim(`(${det.confidence} confidence)`));
5075
+ console.log(chalk3.dim(` Location: ${pathDisplay}`));
4375
5076
  if (det.evidence.length > 0) {
4376
5077
  det.evidence.forEach((e) => {
4377
- console.log(chalk2.dim(` \u2022 ${e}`));
5078
+ console.log(chalk3.dim(` \u2022 ${e}`));
4378
5079
  });
4379
5080
  }
4380
5081
  console.log();
@@ -4389,14 +5090,14 @@ async function createNewConfig(rootDir, options) {
4389
5090
  }
4390
5091
  ]);
4391
5092
  if (!confirm) {
4392
- console.log(chalk2.dim("Aborted."));
5093
+ console.log(chalk3.dim("Aborted."));
4393
5094
  return;
4394
5095
  }
4395
5096
  }
4396
5097
  for (const det of detections) {
4397
5098
  const detector = getFrameworkDetector(det.name);
4398
5099
  if (!detector) {
4399
- console.warn(chalk2.yellow(`\u26A0\uFE0F No detector found for ${det.name}, skipping`));
5100
+ console.warn(chalk3.yellow(`\u26A0\uFE0F No detector found for ${det.name}, skipping`));
4400
5101
  continue;
4401
5102
  }
4402
5103
  const frameworkConfig = await detector.generateConfig(rootDir, det.path);
@@ -4418,7 +5119,7 @@ async function createNewConfig(rootDir, options) {
4418
5119
  finalConfig = { ...frameworkConfig, ...customized };
4419
5120
  } else {
4420
5121
  const pathDisplay = det.path === "." ? "root" : det.path;
4421
- console.log(chalk2.dim(` \u2192 Using defaults for ${det.name} at ${pathDisplay}`));
5122
+ console.log(chalk3.dim(` \u2192 Using defaults for ${det.name} at ${pathDisplay}`));
4422
5123
  }
4423
5124
  frameworks.push({
4424
5125
  name: det.name,
@@ -4439,23 +5140,23 @@ async function createNewConfig(rootDir, options) {
4439
5140
  ]);
4440
5141
  if (installCursorRules) {
4441
5142
  try {
4442
- const cursorRulesDir = path5.join(rootDir, ".cursor");
4443
- await fs5.mkdir(cursorRulesDir, { recursive: true });
4444
- const templatePath = path5.join(__dirname3, "../CURSOR_RULES_TEMPLATE.md");
4445
- const rulesPath = path5.join(cursorRulesDir, "rules");
5143
+ const cursorRulesDir = path7.join(rootDir, ".cursor");
5144
+ await fs7.mkdir(cursorRulesDir, { recursive: true });
5145
+ const templatePath = path7.join(__dirname3, "../CURSOR_RULES_TEMPLATE.md");
5146
+ const rulesPath = path7.join(cursorRulesDir, "rules");
4446
5147
  let targetPath;
4447
5148
  let isDirectory = false;
4448
5149
  let isFile = false;
4449
5150
  try {
4450
- const stats = await fs5.stat(rulesPath);
5151
+ const stats = await fs7.stat(rulesPath);
4451
5152
  isDirectory = stats.isDirectory();
4452
5153
  isFile = stats.isFile();
4453
5154
  } catch {
4454
5155
  }
4455
5156
  if (isDirectory) {
4456
- targetPath = path5.join(rulesPath, "lien.mdc");
4457
- await fs5.copyFile(templatePath, targetPath);
4458
- console.log(chalk2.green("\u2713 Installed Cursor rules as .cursor/rules/lien.mdc"));
5157
+ targetPath = path7.join(rulesPath, "lien.mdc");
5158
+ await fs7.copyFile(templatePath, targetPath);
5159
+ console.log(chalk3.green("\u2713 Installed Cursor rules as .cursor/rules/lien.mdc"));
4459
5160
  } else if (isFile) {
4460
5161
  const { convertToDir } = await inquirer.prompt([
4461
5162
  {
@@ -4466,27 +5167,27 @@ async function createNewConfig(rootDir, options) {
4466
5167
  }
4467
5168
  ]);
4468
5169
  if (convertToDir) {
4469
- const existingRules = await fs5.readFile(rulesPath, "utf-8");
4470
- await fs5.unlink(rulesPath);
4471
- await fs5.mkdir(rulesPath);
4472
- await fs5.writeFile(path5.join(rulesPath, "project.mdc"), existingRules);
4473
- await fs5.copyFile(templatePath, path5.join(rulesPath, "lien.mdc"));
4474
- console.log(chalk2.green("\u2713 Converted .cursor/rules to directory"));
4475
- console.log(chalk2.green(" - Your project rules: .cursor/rules/project.mdc"));
4476
- console.log(chalk2.green(" - Lien rules: .cursor/rules/lien.mdc"));
5170
+ const existingRules = await fs7.readFile(rulesPath, "utf-8");
5171
+ await fs7.unlink(rulesPath);
5172
+ await fs7.mkdir(rulesPath);
5173
+ await fs7.writeFile(path7.join(rulesPath, "project.mdc"), existingRules);
5174
+ await fs7.copyFile(templatePath, path7.join(rulesPath, "lien.mdc"));
5175
+ console.log(chalk3.green("\u2713 Converted .cursor/rules to directory"));
5176
+ console.log(chalk3.green(" - Your project rules: .cursor/rules/project.mdc"));
5177
+ console.log(chalk3.green(" - Lien rules: .cursor/rules/lien.mdc"));
4477
5178
  } else {
4478
- console.log(chalk2.dim("Skipped Cursor rules installation (preserving existing file)"));
5179
+ console.log(chalk3.dim("Skipped Cursor rules installation (preserving existing file)"));
4479
5180
  }
4480
5181
  } else {
4481
- await fs5.mkdir(rulesPath, { recursive: true });
4482
- targetPath = path5.join(rulesPath, "lien.mdc");
4483
- await fs5.copyFile(templatePath, targetPath);
4484
- console.log(chalk2.green("\u2713 Installed Cursor rules as .cursor/rules/lien.mdc"));
5182
+ await fs7.mkdir(rulesPath, { recursive: true });
5183
+ targetPath = path7.join(rulesPath, "lien.mdc");
5184
+ await fs7.copyFile(templatePath, targetPath);
5185
+ console.log(chalk3.green("\u2713 Installed Cursor rules as .cursor/rules/lien.mdc"));
4485
5186
  }
4486
5187
  } catch (error) {
4487
- console.log(chalk2.yellow("\u26A0\uFE0F Could not install Cursor rules"));
4488
- console.log(chalk2.dim(`Error: ${error instanceof Error ? error.message : "Unknown error"}`));
4489
- console.log(chalk2.dim("You can manually copy CURSOR_RULES_TEMPLATE.md to .cursor/rules/lien.mdc"));
5188
+ console.log(chalk3.yellow("\u26A0\uFE0F Could not install Cursor rules"));
5189
+ console.log(chalk3.dim(`Error: ${error instanceof Error ? error.message : "Unknown error"}`));
5190
+ console.log(chalk3.dim("You can manually copy CURSOR_RULES_TEMPLATE.md to .cursor/rules/lien.mdc"));
4490
5191
  }
4491
5192
  }
4492
5193
  }
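
The Cursor-rules installation above is unchanged apart from the fs5/path5/chalk2 → fs7/path7/chalk3 renumbering, but its branching is easy to lose in the diff noise. A condensed, self-contained restatement of what those hunks appear to do; promptConvert is a hypothetical stand-in for the inquirer confirmation, and the return values are illustrative only:

// Condensed restatement of the .cursor/rules handling in the hunks above.
// promptConvert stands in for the inquirer confirmation; it is not a package API.
import fs from "fs/promises";
import path from "path";

async function installCursorRules(rootDir, templatePath, promptConvert) {
  const rulesPath = path.join(rootDir, ".cursor", "rules");
  let kind = "missing";
  try {
    const stats = await fs.stat(rulesPath);
    kind = stats.isDirectory() ? "directory" : "file";
  } catch {
    // .cursor/rules does not exist yet
  }
  if (kind === "missing" || kind === "directory") {
    // fresh install or already a directory: drop lien.mdc inside .cursor/rules/
    await fs.mkdir(rulesPath, { recursive: true });
    await fs.copyFile(templatePath, path.join(rulesPath, "lien.mdc"));
    return "installed";
  }
  if (await promptConvert()) {
    // legacy single-file rules: keep the user's rules as project.mdc, then add lien.mdc
    const existingRules = await fs.readFile(rulesPath, "utf-8");
    await fs.unlink(rulesPath);
    await fs.mkdir(rulesPath);
    await fs.writeFile(path.join(rulesPath, "project.mdc"), existingRules);
    await fs.copyFile(templatePath, path.join(rulesPath, "lien.mdc"));
    return "converted";
  }
  return "skipped"; // preserve the existing file untouched
}
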
@@ -4494,17 +5195,17 @@ async function createNewConfig(rootDir, options) {
4494
5195
  ...defaultConfig,
4495
5196
  frameworks
4496
5197
  };
4497
- const configPath = path5.join(rootDir, ".lien.config.json");
4498
- await fs5.writeFile(configPath, JSON.stringify(config, null, 2) + "\n", "utf-8");
4499
- console.log(chalk2.green("\n\u2713 Created .lien.config.json"));
4500
- console.log(chalk2.green(`\u2713 Configured ${frameworks.length} framework(s)`));
4501
- console.log(chalk2.dim("\nNext steps:"));
4502
- console.log(chalk2.dim(" 1. Run"), chalk2.bold("lien index"), chalk2.dim("to index your codebase"));
4503
- console.log(chalk2.dim(" 2. Run"), chalk2.bold("lien serve"), chalk2.dim("to start the MCP server"));
4504
- console.log(chalk2.dim(" 3. Configure Cursor to use the MCP server (see README.md)"));
5198
+ const configPath = path7.join(rootDir, ".lien.config.json");
5199
+ await fs7.writeFile(configPath, JSON.stringify(config, null, 2) + "\n", "utf-8");
5200
+ console.log(chalk3.green("\n\u2713 Created .lien.config.json"));
5201
+ console.log(chalk3.green(`\u2713 Configured ${frameworks.length} framework(s)`));
5202
+ console.log(chalk3.dim("\nNext steps:"));
5203
+ console.log(chalk3.dim(" 1. Run"), chalk3.bold("lien index"), chalk3.dim("to index your codebase"));
5204
+ console.log(chalk3.dim(" 2. Run"), chalk3.bold("lien serve"), chalk3.dim("to start the MCP server"));
5205
+ console.log(chalk3.dim(" 3. Configure Cursor to use the MCP server (see README.md)"));
4505
5206
  }
4506
5207
  async function promptForCustomization(frameworkName, config) {
4507
- console.log(chalk2.bold(`
5208
+ console.log(chalk3.bold(`
4508
5209
  Customizing ${frameworkName} settings:`));
4509
5210
  const answers = await inquirer.prompt([
4510
5211
  {
@@ -4527,155 +5228,115 @@ Customizing ${frameworkName} settings:`));
4527
5228
  exclude: answers.exclude
4528
5229
  };
4529
5230
  }
4530
- async function upgradeConfig(configPath) {
4531
- try {
4532
- const existingContent = await fs5.readFile(configPath, "utf-8");
4533
- const existingConfig = JSON.parse(existingContent);
4534
- const migrationNeeded = needsMigration(existingConfig);
4535
- const newFields = migrationNeeded ? [] : detectNewFields(existingConfig, defaultConfig);
4536
- const hasChanges = migrationNeeded || newFields.length > 0;
4537
- if (!hasChanges) {
4538
- console.log(chalk2.green("\u2713 Config is already up to date"));
4539
- console.log(chalk2.dim("No changes needed"));
4540
- return;
4541
- }
4542
- const backupPath = `${configPath}.backup`;
4543
- await fs5.copyFile(configPath, backupPath);
4544
- let upgradedConfig;
4545
- let migrated = false;
4546
- if (migrationNeeded) {
4547
- console.log(chalk2.blue(`\u{1F504} Migrating config from v0.2.0 to v${CURRENT_CONFIG_VERSION}...`));
4548
- upgradedConfig = migrateConfig(existingConfig);
4549
- migrated = true;
4550
- } else {
4551
- upgradedConfig = deepMergeConfig(defaultConfig, existingConfig);
4552
- console.log(chalk2.dim("\nNew options added:"));
4553
- newFields.forEach((field) => console.log(chalk2.dim(" \u2022"), chalk2.bold(field)));
4554
- }
4555
- await fs5.writeFile(
4556
- configPath,
4557
- JSON.stringify(upgradedConfig, null, 2) + "\n",
4558
- "utf-8"
4559
- );
4560
- console.log(chalk2.green("\u2713 Config upgraded successfully"));
4561
- console.log(chalk2.dim("Backup saved to:"), backupPath);
4562
- if (migrated) {
4563
- console.log(chalk2.dim("\n\u{1F4DD} Your config now uses the framework-based structure."));
4564
- }
4565
- } catch (error) {
4566
- console.error(chalk2.red("Error upgrading config:"), error);
4567
- throw error;
4568
- }
4569
- }
4570
5231
 
4571
5232
  // src/cli/status.ts
4572
5233
  init_service();
4573
5234
  init_utils();
4574
5235
  init_version2();
4575
- import chalk3 from "chalk";
4576
- import fs9 from "fs/promises";
4577
- import path9 from "path";
5236
+ import chalk4 from "chalk";
5237
+ import fs11 from "fs/promises";
5238
+ import path11 from "path";
4578
5239
  import os from "os";
4579
5240
  import crypto from "crypto";
4580
5241
  init_schema();
4581
5242
  async function statusCommand() {
4582
5243
  const rootDir = process.cwd();
4583
- const projectName = path9.basename(rootDir);
5244
+ const projectName = path11.basename(rootDir);
4584
5245
  const pathHash = crypto.createHash("md5").update(rootDir).digest("hex").substring(0, 8);
4585
- const indexPath = path9.join(os.homedir(), ".lien", "indices", `${projectName}-${pathHash}`);
5246
+ const indexPath = path11.join(os.homedir(), ".lien", "indices", `${projectName}-${pathHash}`);
4586
5247
  showCompactBanner();
4587
- console.log(chalk3.bold("Status\n"));
5248
+ console.log(chalk4.bold("Status\n"));
4588
5249
  const hasConfig = await configService.exists(rootDir);
4589
- console.log(chalk3.dim("Configuration:"), hasConfig ? chalk3.green("\u2713 Found") : chalk3.red("\u2717 Not initialized"));
5250
+ console.log(chalk4.dim("Configuration:"), hasConfig ? chalk4.green("\u2713 Found") : chalk4.red("\u2717 Not initialized"));
4590
5251
  if (!hasConfig) {
4591
- console.log(chalk3.yellow("\nRun"), chalk3.bold("lien init"), chalk3.yellow("to initialize"));
5252
+ console.log(chalk4.yellow("\nRun"), chalk4.bold("lien init"), chalk4.yellow("to initialize"));
4592
5253
  return;
4593
5254
  }
4594
5255
  try {
4595
- const stats = await fs9.stat(indexPath);
4596
- console.log(chalk3.dim("Index location:"), indexPath);
4597
- console.log(chalk3.dim("Index status:"), chalk3.green("\u2713 Exists"));
5256
+ const stats = await fs11.stat(indexPath);
5257
+ console.log(chalk4.dim("Index location:"), indexPath);
5258
+ console.log(chalk4.dim("Index status:"), chalk4.green("\u2713 Exists"));
4598
5259
  try {
4599
- const files = await fs9.readdir(indexPath, { recursive: true });
4600
- console.log(chalk3.dim("Index files:"), files.length);
5260
+ const files = await fs11.readdir(indexPath, { recursive: true });
5261
+ console.log(chalk4.dim("Index files:"), files.length);
4601
5262
  } catch (e) {
4602
5263
  }
4603
- console.log(chalk3.dim("Last modified:"), stats.mtime.toLocaleString());
5264
+ console.log(chalk4.dim("Last modified:"), stats.mtime.toLocaleString());
4604
5265
  try {
4605
5266
  const version = await readVersionFile(indexPath);
4606
5267
  if (version > 0) {
4607
5268
  const versionDate = new Date(version);
4608
- console.log(chalk3.dim("Last reindex:"), versionDate.toLocaleString());
5269
+ console.log(chalk4.dim("Last reindex:"), versionDate.toLocaleString());
4609
5270
  }
4610
5271
  } catch {
4611
5272
  }
4612
5273
  } catch (error) {
4613
- console.log(chalk3.dim("Index status:"), chalk3.yellow("\u2717 Not indexed"));
4614
- console.log(chalk3.yellow("\nRun"), chalk3.bold("lien index"), chalk3.yellow("to index your codebase"));
5274
+ console.log(chalk4.dim("Index status:"), chalk4.yellow("\u2717 Not indexed"));
5275
+ console.log(chalk4.yellow("\nRun"), chalk4.bold("lien index"), chalk4.yellow("to index your codebase"));
4615
5276
  }
4616
5277
  try {
4617
5278
  const config = await configService.load(rootDir);
4618
- console.log(chalk3.bold("\nFeatures:"));
5279
+ console.log(chalk4.bold("\nFeatures:"));
4619
5280
  const isRepo = await isGitRepo(rootDir);
4620
5281
  if (config.gitDetection.enabled && isRepo) {
4621
- console.log(chalk3.dim("Git detection:"), chalk3.green("\u2713 Enabled"));
4622
- console.log(chalk3.dim(" Poll interval:"), `${config.gitDetection.pollIntervalMs / 1e3}s`);
5282
+ console.log(chalk4.dim("Git detection:"), chalk4.green("\u2713 Enabled"));
5283
+ console.log(chalk4.dim(" Poll interval:"), `${config.gitDetection.pollIntervalMs / 1e3}s`);
4623
5284
  try {
4624
5285
  const branch = await getCurrentBranch(rootDir);
4625
5286
  const commit = await getCurrentCommit(rootDir);
4626
- console.log(chalk3.dim(" Current branch:"), branch);
4627
- console.log(chalk3.dim(" Current commit:"), commit.substring(0, 8));
4628
- const gitStateFile = path9.join(indexPath, ".git-state.json");
5287
+ console.log(chalk4.dim(" Current branch:"), branch);
5288
+ console.log(chalk4.dim(" Current commit:"), commit.substring(0, 8));
5289
+ const gitStateFile = path11.join(indexPath, ".git-state.json");
4629
5290
  try {
4630
- const gitStateContent = await fs9.readFile(gitStateFile, "utf-8");
5291
+ const gitStateContent = await fs11.readFile(gitStateFile, "utf-8");
4631
5292
  const gitState = JSON.parse(gitStateContent);
4632
5293
  if (gitState.branch !== branch || gitState.commit !== commit) {
4633
- console.log(chalk3.yellow(" \u26A0\uFE0F Git state changed - will reindex on next serve"));
5294
+ console.log(chalk4.yellow(" \u26A0\uFE0F Git state changed - will reindex on next serve"));
4634
5295
  }
4635
5296
  } catch {
4636
5297
  }
4637
5298
  } catch {
4638
5299
  }
4639
5300
  } else if (config.gitDetection.enabled && !isRepo) {
4640
- console.log(chalk3.dim("Git detection:"), chalk3.yellow("Enabled (not a git repo)"));
5301
+ console.log(chalk4.dim("Git detection:"), chalk4.yellow("Enabled (not a git repo)"));
4641
5302
  } else {
4642
- console.log(chalk3.dim("Git detection:"), chalk3.gray("Disabled"));
5303
+ console.log(chalk4.dim("Git detection:"), chalk4.gray("Disabled"));
4643
5304
  }
4644
5305
  if (config.fileWatching.enabled) {
4645
- console.log(chalk3.dim("File watching:"), chalk3.green("\u2713 Enabled"));
4646
- console.log(chalk3.dim(" Debounce:"), `${config.fileWatching.debounceMs}ms`);
5306
+ console.log(chalk4.dim("File watching:"), chalk4.green("\u2713 Enabled"));
5307
+ console.log(chalk4.dim(" Debounce:"), `${config.fileWatching.debounceMs}ms`);
4647
5308
  } else {
4648
- console.log(chalk3.dim("File watching:"), chalk3.gray("Disabled"));
4649
- console.log(chalk3.dim(" Enable with:"), chalk3.bold("lien serve --watch"));
5309
+ console.log(chalk4.dim("File watching:"), chalk4.gray("Disabled"));
5310
+ console.log(chalk4.dim(" Enable with:"), chalk4.bold("lien serve --watch"));
4650
5311
  }
4651
- console.log(chalk3.bold("\nIndexing Settings:"));
5312
+ console.log(chalk4.bold("\nIndexing Settings:"));
4652
5313
  if (isModernConfig(config)) {
4653
- console.log(chalk3.dim("Concurrency:"), config.core.concurrency);
4654
- console.log(chalk3.dim("Batch size:"), config.core.embeddingBatchSize);
4655
- console.log(chalk3.dim("Chunk size:"), config.core.chunkSize);
4656
- console.log(chalk3.dim("Chunk overlap:"), config.core.chunkOverlap);
5314
+ console.log(chalk4.dim("Concurrency:"), config.core.concurrency);
5315
+ console.log(chalk4.dim("Batch size:"), config.core.embeddingBatchSize);
5316
+ console.log(chalk4.dim("Chunk size:"), config.core.chunkSize);
5317
+ console.log(chalk4.dim("Chunk overlap:"), config.core.chunkOverlap);
4657
5318
  }
4658
5319
  } catch (error) {
4659
- console.log(chalk3.yellow("\nWarning: Could not load configuration"));
5320
+ console.log(chalk4.yellow("\nWarning: Could not load configuration"));
4660
5321
  }
4661
5322
  }
4662
5323
 
4663
5324
  // src/cli/index-cmd.ts
4664
5325
  init_indexer();
4665
- import chalk5 from "chalk";
5326
+ import chalk6 from "chalk";
4666
5327
  async function indexCommand(options) {
4667
5328
  showCompactBanner();
4668
5329
  try {
4669
5330
  if (options.force) {
4670
5331
  const { VectorDB: VectorDB2 } = await Promise.resolve().then(() => (init_lancedb(), lancedb_exports));
4671
5332
  const { ManifestManager: ManifestManager2 } = await Promise.resolve().then(() => (init_manifest(), manifest_exports));
4672
- console.log(chalk5.yellow("Clearing existing index and manifest..."));
5333
+ console.log(chalk6.yellow("Clearing existing index and manifest..."));
4673
5334
  const vectorDB = new VectorDB2(process.cwd());
4674
5335
  await vectorDB.initialize();
4675
5336
  await vectorDB.clear();
4676
5337
  const manifest = new ManifestManager2(vectorDB.dbPath);
4677
5338
  await manifest.clear();
4678
- console.log(chalk5.green("\u2713 Index and manifest cleared\n"));
5339
+ console.log(chalk6.green("\u2713 Index and manifest cleared\n"));
4679
5340
  }
4680
5341
  await indexCodebase({
4681
5342
  rootDir: process.cwd(),
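
A detail from the status.ts portion of the hunk above: the index location is derived from the project directory name plus the first 8 hex characters of an md5 hash of the absolute root path, so two checkouts of the same project resolve to different index directories. A small self-contained restatement of that derivation (a sketch, not a public API):

// How lien status appears to locate the per-project index (derived from the
// status.ts lines above; not an official API).
import os from "os";
import path from "path";
import crypto from "crypto";

function indexPathFor(rootDir) {
  const projectName = path.basename(rootDir);
  const pathHash = crypto.createHash("md5").update(rootDir).digest("hex").substring(0, 8);
  return path.join(os.homedir(), ".lien", "indices", `${projectName}-${pathHash}`);
}

// e.g. indexPathFor("/home/me/my-app") -> ~/.lien/indices/my-app-<8 hex chars>
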
@@ -4683,18 +5344,18 @@ async function indexCommand(options) {
4683
5344
  force: options.force || false
4684
5345
  });
4685
5346
  if (options.watch) {
4686
- console.log(chalk5.yellow("\n\u26A0\uFE0F Watch mode not yet implemented"));
5347
+ console.log(chalk6.yellow("\n\u26A0\uFE0F Watch mode not yet implemented"));
4687
5348
  }
4688
5349
  } catch (error) {
4689
- console.error(chalk5.red("Error during indexing:"), error);
5350
+ console.error(chalk6.red("Error during indexing:"), error);
4690
5351
  process.exit(1);
4691
5352
  }
4692
5353
  }
4693
5354
 
4694
5355
  // src/cli/serve.ts
4695
- import chalk6 from "chalk";
4696
- import fs16 from "fs/promises";
4697
- import path14 from "path";
5356
+ import chalk7 from "chalk";
5357
+ import fs18 from "fs/promises";
5358
+ import path17 from "path";
4698
5359
 
4699
5360
  // src/mcp/server.ts
4700
5361
  import { Server } from "@modelcontextprotocol/sdk/server/index.js";
@@ -4769,22 +5430,48 @@ var tools = [
4769
5430
  toMCPToolSchema(
4770
5431
  SemanticSearchSchema,
4771
5432
  "semantic_search",
4772
- "Search the codebase semantically for relevant code using natural language. Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) based on semantic similarity."
5433
+ `Search codebase by MEANING, not text. USE THIS INSTEAD OF grep/ripgrep for finding implementations, features, or understanding how code works.
5434
+
5435
+ Examples:
5436
+ - "Where is authentication handled?" \u2192 semantic_search({ query: "handles user authentication" })
5437
+ - "How does payment work?" \u2192 semantic_search({ query: "processes payment transactions" })
5438
+
5439
+ Use natural language describing what the code DOES, not function names. For exact string matching, use grep instead.
5440
+
5441
+ Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) for each match.`
4773
5442
  ),
4774
5443
  toMCPToolSchema(
4775
5444
  FindSimilarSchema,
4776
5445
  "find_similar",
4777
- "Find code similar to a given code snippet. Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) based on semantic similarity."
5446
+ `Find code structurally similar to a given snippet. Use for:
5447
+ - Ensuring consistency when adding new code
5448
+ - Finding duplicate implementations
5449
+ - Refactoring similar patterns together
5450
+
5451
+ Provide at least 10 characters of code to match against. Results include a relevance category for each match.`
4778
5452
  ),
4779
5453
  toMCPToolSchema(
4780
5454
  GetFileContextSchema,
4781
5455
  "get_file_context",
4782
- "Get all chunks and related context for a specific file. Results include a relevance category (highly_relevant, relevant, loosely_related, not_relevant) based on semantic similarity."
5456
+ `Get full context for a file including related code and dependencies.
5457
+
5458
+ IMPORTANT: Call this BEFORE editing any file to understand:
5459
+ - What the file does
5460
+ - What depends on it
5461
+ - Related test files (via testAssociations)
5462
+
5463
+ Results include a relevance category for each related chunk. Typical flow: semantic_search \u2192 find file \u2192 get_file_context \u2192 make changes.`
4783
5464
  ),
4784
5465
  toMCPToolSchema(
4785
5466
  ListFunctionsSchema,
4786
5467
  "list_functions",
4787
- "List functions, classes, and interfaces by name pattern and language"
5468
+ `Fast symbol lookup by naming pattern. Use when searching by NAME, not behavior.
5469
+
5470
+ Examples:
5471
+ - "Show all controllers" \u2192 list_functions({ pattern: ".*Controller.*" })
5472
+ - "Find service classes" \u2192 list_functions({ pattern: ".*Service$" })
5473
+
5474
+ 10x faster than semantic_search for structural/architectural queries. Use semantic_search instead when searching by what code DOES.`
4788
5475
  )
4789
5476
  ];
4790
5477
 
@@ -5324,36 +6011,36 @@ async function startMCPServer(options) {
5324
6011
 
5325
6012
  // src/cli/serve.ts
5326
6013
  async function serveCommand(options) {
5327
- const rootDir = options.root ? path14.resolve(options.root) : process.cwd();
6014
+ const rootDir = options.root ? path17.resolve(options.root) : process.cwd();
5328
6015
  try {
5329
6016
  if (options.root) {
5330
6017
  try {
5331
- const stats = await fs16.stat(rootDir);
6018
+ const stats = await fs18.stat(rootDir);
5332
6019
  if (!stats.isDirectory()) {
5333
- console.error(chalk6.red(`Error: --root path is not a directory: ${rootDir}`));
6020
+ console.error(chalk7.red(`Error: --root path is not a directory: ${rootDir}`));
5334
6021
  process.exit(1);
5335
6022
  }
5336
6023
  } catch (error) {
5337
6024
  if (error.code === "ENOENT") {
5338
- console.error(chalk6.red(`Error: --root directory does not exist: ${rootDir}`));
6025
+ console.error(chalk7.red(`Error: --root directory does not exist: ${rootDir}`));
5339
6026
  } else if (error.code === "EACCES") {
5340
- console.error(chalk6.red(`Error: --root directory is not accessible: ${rootDir}`));
6027
+ console.error(chalk7.red(`Error: --root directory is not accessible: ${rootDir}`));
5341
6028
  } else {
5342
- console.error(chalk6.red(`Error: Failed to access --root directory: ${rootDir}`));
5343
- console.error(chalk6.dim(error.message));
6029
+ console.error(chalk7.red(`Error: Failed to access --root directory: ${rootDir}`));
6030
+ console.error(chalk7.dim(error.message));
5344
6031
  }
5345
6032
  process.exit(1);
5346
6033
  }
5347
6034
  }
5348
6035
  showBanner();
5349
- console.error(chalk6.bold("Starting MCP server...\n"));
6036
+ console.error(chalk7.bold("Starting MCP server...\n"));
5350
6037
  if (options.root) {
5351
- console.error(chalk6.dim(`Serving from: ${rootDir}
6038
+ console.error(chalk7.dim(`Serving from: ${rootDir}
5352
6039
  `));
5353
6040
  }
5354
6041
  if (options.watch) {
5355
- console.error(chalk6.yellow("\u26A0\uFE0F --watch flag is deprecated (file watching is now default)"));
5356
- console.error(chalk6.dim(" Use --no-watch to disable file watching\n"));
6042
+ console.error(chalk7.yellow("\u26A0\uFE0F --watch flag is deprecated (file watching is now default)"));
6043
+ console.error(chalk7.dim(" Use --no-watch to disable file watching\n"));
5357
6044
  }
5358
6045
  const watch = options.noWatch ? false : options.watch ? true : void 0;
5359
6046
  await startMCPServer({
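
The serve hunk above also settles how the deprecated --watch flag interacts with --no-watch: --no-watch forces watching off, --watch forces it on (with a deprecation warning), and passing neither leaves the value undefined so startMCPServer can apply its default of watching enabled. A one-function restatement of that resolution:

// Three-state resolution of the watch flag, matching the ternary in serveCommand above.
// undefined means "no explicit choice"; per the deprecation notice, the server then
// defaults to watching enabled.
function resolveWatch(options) {
  if (options.noWatch) return false; // --no-watch wins
  if (options.watch) return true;    // deprecated --watch still honoured
  return undefined;                  // defer to startMCPServer's default
}
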
@@ -5362,7 +6049,7 @@ async function serveCommand(options) {
5362
6049
  watch
5363
6050
  });
5364
6051
  } catch (error) {
5365
- console.error(chalk6.red("Failed to start MCP server:"), error);
6052
+ console.error(chalk7.red("Failed to start MCP server:"), error);
5366
6053
  process.exit(1);
5367
6054
  }
5368
6055
  }