@liendev/lien 0.12.0 → 0.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -9,11 +9,39 @@ var __export = (target, all) => {
9
9
  __defProp(target, name, { get: all[name], enumerable: true });
10
10
  };
11
11
 
12
+ // src/utils/version.ts
13
+ import { createRequire } from "module";
14
+ import { fileURLToPath } from "url";
15
+ import { dirname, join } from "path";
16
+ function getPackageVersion() {
17
+ return packageJson.version;
18
+ }
19
+ var __filename, __dirname, require2, packageJson;
20
+ var init_version = __esm({
21
+ "src/utils/version.ts"() {
22
+ "use strict";
23
+ __filename = fileURLToPath(import.meta.url);
24
+ __dirname = dirname(__filename);
25
+ require2 = createRequire(import.meta.url);
26
+ try {
27
+ packageJson = require2(join(__dirname, "../package.json"));
28
+ } catch {
29
+ try {
30
+ packageJson = require2(join(__dirname, "../../package.json"));
31
+ } catch {
32
+ console.warn("[Lien] Warning: Could not load package.json, using fallback version");
33
+ packageJson = { version: "0.0.0-unknown" };
34
+ }
35
+ }
36
+ }
37
+ });
38
+
12
39
  // src/constants.ts
13
40
  var DEFAULT_CHUNK_SIZE, DEFAULT_CHUNK_OVERLAP, DEFAULT_CONCURRENCY, DEFAULT_EMBEDDING_BATCH_SIZE, EMBEDDING_MICRO_BATCH_SIZE, VECTOR_DB_MAX_BATCH_SIZE, VECTOR_DB_MIN_BATCH_SIZE, EMBEDDING_DIMENSIONS, DEFAULT_EMBEDDING_MODEL, DEFAULT_PORT, VERSION_CHECK_INTERVAL_MS, DEFAULT_GIT_POLL_INTERVAL_MS, DEFAULT_DEBOUNCE_MS, CURRENT_CONFIG_VERSION, INDEX_FORMAT_VERSION;
14
41
  var init_constants = __esm({
15
42
  "src/constants.ts"() {
16
43
  "use strict";
44
+ init_version();
17
45
  DEFAULT_CHUNK_SIZE = 75;
18
46
  DEFAULT_CHUNK_OVERLAP = 10;
19
47
  DEFAULT_CONCURRENCY = 4;
@@ -27,8 +55,8 @@ var init_constants = __esm({
27
55
  VERSION_CHECK_INTERVAL_MS = 2e3;
28
56
  DEFAULT_GIT_POLL_INTERVAL_MS = 1e4;
29
57
  DEFAULT_DEBOUNCE_MS = 1e3;
30
- CURRENT_CONFIG_VERSION = "0.3.0";
31
- INDEX_FORMAT_VERSION = 1;
58
+ CURRENT_CONFIG_VERSION = getPackageVersion();
59
+ INDEX_FORMAT_VERSION = 2;
32
60
  }
33
61
  });
34
62
 
@@ -52,6 +80,12 @@ var init_schema = __esm({
52
80
  concurrency: DEFAULT_CONCURRENCY,
53
81
  embeddingBatchSize: DEFAULT_EMBEDDING_BATCH_SIZE
54
82
  },
83
+ chunking: {
84
+ useAST: true,
85
+ // AST-based chunking enabled by default (v0.13.0)
86
+ astFallback: "line-based"
87
+ // Fallback to line-based on errors
88
+ },
55
89
  mcp: {
56
90
  port: DEFAULT_PORT,
57
91
  transport: "stdio",
@@ -72,60 +106,17 @@ var init_schema = __esm({
72
106
  }
73
107
  });
74
108
 
75
- // src/config/merge.ts
76
- function deepMergeConfig(defaults, user) {
77
- return {
78
- version: user.version ?? defaults.version,
79
- core: {
80
- ...defaults.core,
81
- ...user.core
82
- },
83
- mcp: {
84
- ...defaults.mcp,
85
- ...user.mcp
86
- },
87
- gitDetection: {
88
- ...defaults.gitDetection,
89
- ...user.gitDetection
90
- },
91
- fileWatching: {
92
- ...defaults.fileWatching,
93
- ...user.fileWatching
94
- },
95
- frameworks: user.frameworks ?? defaults.frameworks
96
- };
97
- }
98
- function detectNewFields(before, after) {
99
- const newFields = [];
100
- for (const key of Object.keys(after)) {
101
- if (!(key in before)) {
102
- newFields.push(key);
103
- continue;
104
- }
105
- if (typeof after[key] === "object" && after[key] !== null && !Array.isArray(after[key])) {
106
- const beforeSection = before[key] || {};
107
- const afterSection = after[key];
108
- for (const nestedKey of Object.keys(afterSection)) {
109
- if (!(nestedKey in beforeSection)) {
110
- newFields.push(`${key}.${nestedKey}`);
111
- }
112
- }
113
- }
114
- }
115
- return newFields;
116
- }
117
- var init_merge = __esm({
118
- "src/config/merge.ts"() {
119
- "use strict";
120
- }
121
- });
122
-
123
109
  // src/config/migration.ts
110
+ import fs from "fs/promises";
111
+ import path from "path";
124
112
  function needsMigration(config) {
125
113
  if (!config) {
126
114
  return false;
127
115
  }
128
- if (config.frameworks !== void 0) {
116
+ if (config.frameworks !== void 0 && !config.chunking) {
117
+ return true;
118
+ }
119
+ if (config.frameworks !== void 0 && config.chunking !== void 0) {
129
120
  return false;
130
121
  }
131
122
  if (config.indexing !== void 0) {
@@ -138,12 +129,16 @@ function needsMigration(config) {
138
129
  }
139
130
  function migrateConfig(oldConfig) {
140
131
  const newConfig = {
141
- version: "0.3.0",
132
+ version: CURRENT_CONFIG_VERSION,
142
133
  core: {
143
- chunkSize: oldConfig.indexing?.chunkSize ?? defaultConfig.core.chunkSize,
144
- chunkOverlap: oldConfig.indexing?.chunkOverlap ?? defaultConfig.core.chunkOverlap,
145
- concurrency: oldConfig.indexing?.concurrency ?? defaultConfig.core.concurrency,
146
- embeddingBatchSize: oldConfig.indexing?.embeddingBatchSize ?? defaultConfig.core.embeddingBatchSize
134
+ chunkSize: oldConfig.indexing?.chunkSize ?? oldConfig.core?.chunkSize ?? defaultConfig.core.chunkSize,
135
+ chunkOverlap: oldConfig.indexing?.chunkOverlap ?? oldConfig.core?.chunkOverlap ?? defaultConfig.core.chunkOverlap,
136
+ concurrency: oldConfig.indexing?.concurrency ?? oldConfig.core?.concurrency ?? defaultConfig.core.concurrency,
137
+ embeddingBatchSize: oldConfig.indexing?.embeddingBatchSize ?? oldConfig.core?.embeddingBatchSize ?? defaultConfig.core.embeddingBatchSize
138
+ },
139
+ chunking: {
140
+ useAST: oldConfig.chunking?.useAST ?? defaultConfig.chunking.useAST,
141
+ astFallback: oldConfig.chunking?.astFallback ?? defaultConfig.chunking.astFallback
147
142
  },
148
143
  mcp: {
149
144
  port: oldConfig.mcp?.port ?? defaultConfig.mcp.port,
@@ -158,9 +153,9 @@ function migrateConfig(oldConfig) {
158
153
  enabled: oldConfig.fileWatching?.enabled ?? defaultConfig.fileWatching.enabled,
159
154
  debounceMs: oldConfig.fileWatching?.debounceMs ?? defaultConfig.fileWatching.debounceMs
160
155
  },
161
- frameworks: []
156
+ frameworks: oldConfig.frameworks ?? []
162
157
  };
163
- if (oldConfig.indexing) {
158
+ if (oldConfig.indexing && newConfig.frameworks.length === 0) {
164
159
  const genericFramework = {
165
160
  name: "generic",
166
161
  path: ".",
@@ -180,7 +175,7 @@ function migrateConfig(oldConfig) {
180
175
  }
181
176
  };
182
177
  newConfig.frameworks.push(genericFramework);
183
- } else {
178
+ } else if (newConfig.frameworks.length === 0) {
184
179
  const genericFramework = {
185
180
  name: "generic",
186
181
  path: ".",
@@ -203,10 +198,93 @@ function migrateConfig(oldConfig) {
203
198
  }
204
199
  return newConfig;
205
200
  }
201
+ async function migrateConfigFile(rootDir = process.cwd()) {
202
+ const configPath = path.join(rootDir, ".lien.config.json");
203
+ try {
204
+ const configContent = await fs.readFile(configPath, "utf-8");
205
+ const oldConfig = JSON.parse(configContent);
206
+ if (!needsMigration(oldConfig)) {
207
+ return {
208
+ migrated: false,
209
+ config: oldConfig
210
+ };
211
+ }
212
+ const newConfig = migrateConfig(oldConfig);
213
+ const backupPath = `${configPath}.v0.2.0.backup`;
214
+ await fs.copyFile(configPath, backupPath);
215
+ await fs.writeFile(configPath, JSON.stringify(newConfig, null, 2) + "\n", "utf-8");
216
+ return {
217
+ migrated: true,
218
+ backupPath,
219
+ config: newConfig
220
+ };
221
+ } catch (error) {
222
+ if (error.code === "ENOENT") {
223
+ return {
224
+ migrated: false,
225
+ config: defaultConfig
226
+ };
227
+ }
228
+ throw error;
229
+ }
230
+ }
206
231
  var init_migration = __esm({
207
232
  "src/config/migration.ts"() {
208
233
  "use strict";
209
234
  init_schema();
235
+ init_constants();
236
+ }
237
+ });
238
+
239
+ // src/config/merge.ts
240
+ function deepMergeConfig(defaults, user) {
241
+ return {
242
+ version: user.version ?? defaults.version,
243
+ core: {
244
+ ...defaults.core,
245
+ ...user.core
246
+ },
247
+ chunking: {
248
+ ...defaults.chunking,
249
+ ...user.chunking
250
+ },
251
+ mcp: {
252
+ ...defaults.mcp,
253
+ ...user.mcp
254
+ },
255
+ gitDetection: {
256
+ ...defaults.gitDetection,
257
+ ...user.gitDetection
258
+ },
259
+ fileWatching: {
260
+ ...defaults.fileWatching,
261
+ ...user.fileWatching
262
+ },
263
+ frameworks: user.frameworks ?? defaults.frameworks
264
+ };
265
+ }
266
+ function detectNewFields(before, after) {
267
+ const newFields = [];
268
+ for (const key of Object.keys(after)) {
269
+ if (!(key in before)) {
270
+ newFields.push(key);
271
+ continue;
272
+ }
273
+ if (typeof after[key] === "object" && after[key] !== null && !Array.isArray(after[key])) {
274
+ const beforeSection = before[key] || {};
275
+ const afterSection = after[key];
276
+ for (const nestedKey of Object.keys(afterSection)) {
277
+ if (!(nestedKey in beforeSection)) {
278
+ newFields.push(`${key}.${nestedKey}`);
279
+ }
280
+ }
281
+ }
282
+ }
283
+ return newFields;
284
+ }
285
+ var init_merge = __esm({
286
+ "src/config/merge.ts"() {
287
+ "use strict";
210
288
  }
211
289
  });
212
290
 
@@ -300,8 +378,8 @@ var init_errors = __esm({
300
378
  });
301
379
 
302
380
  // src/config/service.ts
303
- import fs6 from "fs/promises";
304
- import path6 from "path";
381
+ import fs8 from "fs/promises";
382
+ import path8 from "path";
305
383
  var ConfigService, configService;
306
384
  var init_service = __esm({
307
385
  "src/config/service.ts"() {
@@ -323,13 +401,13 @@ var init_service = __esm({
323
401
  async load(rootDir = process.cwd()) {
324
402
  const configPath = this.getConfigPath(rootDir);
325
403
  try {
326
- const configContent = await fs6.readFile(configPath, "utf-8");
404
+ const configContent = await fs8.readFile(configPath, "utf-8");
327
405
  const userConfig = JSON.parse(configContent);
328
406
  if (this.needsMigration(userConfig)) {
329
407
  console.log("\u{1F504} Migrating config from v0.2.0 to v0.3.0...");
330
408
  const result = await this.migrate(rootDir);
331
409
  if (result.migrated && result.backupPath) {
332
- const backupFilename = path6.basename(result.backupPath);
410
+ const backupFilename = path8.basename(result.backupPath);
333
411
  console.log(`\u2705 Migration complete! Backup saved as ${backupFilename}`);
334
412
  console.log("\u{1F4DD} Your config now uses the framework-based structure.");
335
413
  }
@@ -385,7 +463,7 @@ ${validation.errors.join("\n")}`,
385
463
  }
386
464
  try {
387
465
  const configJson = JSON.stringify(config, null, 2) + "\n";
388
- await fs6.writeFile(configPath, configJson, "utf-8");
466
+ await fs8.writeFile(configPath, configJson, "utf-8");
389
467
  } catch (error) {
390
468
  throw wrapError(error, "Failed to save configuration", { path: configPath });
391
469
  }
@@ -399,7 +477,7 @@ ${validation.errors.join("\n")}`,
399
477
  async exists(rootDir = process.cwd()) {
400
478
  const configPath = this.getConfigPath(rootDir);
401
479
  try {
402
- await fs6.access(configPath);
480
+ await fs8.access(configPath);
403
481
  return true;
404
482
  } catch {
405
483
  return false;
@@ -416,7 +494,7 @@ ${validation.errors.join("\n")}`,
416
494
  async migrate(rootDir = process.cwd()) {
417
495
  const configPath = this.getConfigPath(rootDir);
418
496
  try {
419
- const configContent = await fs6.readFile(configPath, "utf-8");
497
+ const configContent = await fs8.readFile(configPath, "utf-8");
420
498
  const oldConfig = JSON.parse(configContent);
421
499
  if (!this.needsMigration(oldConfig)) {
422
500
  return {
@@ -434,7 +512,7 @@ ${validation.errors.join("\n")}`,
434
512
  );
435
513
  }
436
514
  const backupPath = `${configPath}.v0.2.0.backup`;
437
- await fs6.copyFile(configPath, backupPath);
515
+ await fs8.copyFile(configPath, backupPath);
438
516
  await this.save(rootDir, newConfig);
439
517
  return {
440
518
  migrated: true,
@@ -532,7 +610,7 @@ ${validation.errors.join("\n")}`,
532
610
  * Get the full path to the config file
533
611
  */
534
612
  getConfigPath(rootDir) {
535
- return path6.join(rootDir, _ConfigService.CONFIG_FILENAME);
613
+ return path8.join(rootDir, _ConfigService.CONFIG_FILENAME);
536
614
  }
537
615
  /**
538
616
  * Validate modern (v0.3.0+) configuration
@@ -696,7 +774,7 @@ ${validation.errors.join("\n")}`,
696
774
  errors.push(`frameworks[${index}] missing required field: path`);
697
775
  } else if (typeof fw.path !== "string") {
698
776
  errors.push(`frameworks[${index}].path must be a string`);
699
- } else if (path6.isAbsolute(fw.path)) {
777
+ } else if (path8.isAbsolute(fw.path)) {
700
778
  errors.push(`frameworks[${index}].path must be relative, got: ${fw.path}`);
701
779
  }
702
780
  if (fw.enabled === void 0) {
@@ -756,12 +834,12 @@ __export(utils_exports, {
756
834
  });
757
835
  import { exec } from "child_process";
758
836
  import { promisify } from "util";
759
- import fs7 from "fs/promises";
760
- import path7 from "path";
837
+ import fs9 from "fs/promises";
838
+ import path9 from "path";
761
839
  async function isGitRepo(rootDir) {
762
840
  try {
763
- const gitDir = path7.join(rootDir, ".git");
764
- await fs7.access(gitDir);
841
+ const gitDir = path9.join(rootDir, ".git");
842
+ await fs9.access(gitDir);
765
843
  return true;
766
844
  } catch {
767
845
  return false;
@@ -800,7 +878,7 @@ async function getChangedFiles(rootDir, fromRef, toRef) {
800
878
  // 10 second timeout for diffs
801
879
  }
802
880
  );
803
- const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
881
+ const files = stdout.trim().split("\n").filter(Boolean).map((file) => path9.join(rootDir, file));
804
882
  return files;
805
883
  } catch (error) {
806
884
  throw new Error(`Failed to get changed files: ${error}`);
@@ -815,7 +893,7 @@ async function getChangedFilesInCommit(rootDir, commitSha) {
815
893
  timeout: 1e4
816
894
  }
817
895
  );
818
- const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
896
+ const files = stdout.trim().split("\n").filter(Boolean).map((file) => path9.join(rootDir, file));
819
897
  return files;
820
898
  } catch (error) {
821
899
  throw new Error(`Failed to get changed files in commit: ${error}`);
@@ -830,7 +908,7 @@ async function getChangedFilesBetweenCommits(rootDir, fromCommit, toCommit) {
830
908
  timeout: 1e4
831
909
  }
832
910
  );
833
- const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
911
+ const files = stdout.trim().split("\n").filter(Boolean).map((file) => path9.join(rootDir, file));
834
912
  return files;
835
913
  } catch (error) {
836
914
  throw new Error(`Failed to get changed files between commits: ${error}`);
@@ -853,21 +931,21 @@ var init_utils = __esm({
853
931
  });
854
932
 
855
933
  // src/vectordb/version.ts
856
- import fs8 from "fs/promises";
857
- import path8 from "path";
934
+ import fs10 from "fs/promises";
935
+ import path10 from "path";
858
936
  async function writeVersionFile(indexPath) {
859
937
  try {
860
- const versionFilePath = path8.join(indexPath, VERSION_FILE);
938
+ const versionFilePath = path10.join(indexPath, VERSION_FILE);
861
939
  const timestamp = Date.now().toString();
862
- await fs8.writeFile(versionFilePath, timestamp, "utf-8");
940
+ await fs10.writeFile(versionFilePath, timestamp, "utf-8");
863
941
  } catch (error) {
864
942
  console.error(`Warning: Failed to write version file: ${error}`);
865
943
  }
866
944
  }
867
945
  async function readVersionFile(indexPath) {
868
946
  try {
869
- const versionFilePath = path8.join(indexPath, VERSION_FILE);
870
- const content = await fs8.readFile(versionFilePath, "utf-8");
947
+ const versionFilePath = path10.join(indexPath, VERSION_FILE);
948
+ const content = await fs10.readFile(versionFilePath, "utf-8");
871
949
  const timestamp = parseInt(content.trim(), 10);
872
950
  return isNaN(timestamp) ? 0 : timestamp;
873
951
  } catch (error) {
@@ -875,7 +953,7 @@ async function readVersionFile(indexPath) {
875
953
  }
876
954
  }
877
955
  var VERSION_FILE;
878
- var init_version = __esm({
956
+ var init_version2 = __esm({
879
957
  "src/vectordb/version.ts"() {
880
958
  "use strict";
881
959
  VERSION_FILE = ".lien-index-version";
@@ -885,8 +963,8 @@ var init_version = __esm({
885
963
  // src/indexer/scanner.ts
886
964
  import { glob } from "glob";
887
965
  import ignore from "ignore";
888
- import fs10 from "fs/promises";
889
- import path10 from "path";
966
+ import fs12 from "fs/promises";
967
+ import path12 from "path";
890
968
  async function scanCodebaseWithFrameworks(rootDir, config) {
891
969
  const allFiles = [];
892
970
  for (const framework of config.frameworks) {
@@ -899,16 +977,16 @@ async function scanCodebaseWithFrameworks(rootDir, config) {
899
977
  return allFiles;
900
978
  }
901
979
  async function scanFramework(rootDir, framework) {
902
- const frameworkPath = path10.join(rootDir, framework.path);
903
- const gitignorePath = path10.join(frameworkPath, ".gitignore");
980
+ const frameworkPath = path12.join(rootDir, framework.path);
981
+ const gitignorePath = path12.join(frameworkPath, ".gitignore");
904
982
  let ig = ignore();
905
983
  try {
906
- const gitignoreContent = await fs10.readFile(gitignorePath, "utf-8");
984
+ const gitignoreContent = await fs12.readFile(gitignorePath, "utf-8");
907
985
  ig = ignore().add(gitignoreContent);
908
986
  } catch (e) {
909
- const rootGitignorePath = path10.join(rootDir, ".gitignore");
987
+ const rootGitignorePath = path12.join(rootDir, ".gitignore");
910
988
  try {
911
- const gitignoreContent = await fs10.readFile(rootGitignorePath, "utf-8");
989
+ const gitignoreContent = await fs12.readFile(rootGitignorePath, "utf-8");
912
990
  ig = ignore().add(gitignoreContent);
913
991
  } catch (e2) {
914
992
  }
@@ -930,15 +1008,15 @@ async function scanFramework(rootDir, framework) {
930
1008
  }
931
1009
  const uniqueFiles = Array.from(new Set(allFiles));
932
1010
  return uniqueFiles.filter((file) => !ig.ignores(file)).map((file) => {
933
- return framework.path === "." ? file : path10.join(framework.path, file);
1011
+ return framework.path === "." ? file : path12.join(framework.path, file);
934
1012
  });
935
1013
  }
936
1014
  async function scanCodebase(options) {
937
1015
  const { rootDir, includePatterns = [], excludePatterns = [] } = options;
938
- const gitignorePath = path10.join(rootDir, ".gitignore");
1016
+ const gitignorePath = path12.join(rootDir, ".gitignore");
939
1017
  let ig = ignore();
940
1018
  try {
941
- const gitignoreContent = await fs10.readFile(gitignorePath, "utf-8");
1019
+ const gitignoreContent = await fs12.readFile(gitignorePath, "utf-8");
942
1020
  ig = ignore().add(gitignoreContent);
943
1021
  } catch (e) {
944
1022
  }
@@ -965,12 +1043,12 @@ async function scanCodebase(options) {
965
1043
  }
966
1044
  const uniqueFiles = Array.from(new Set(allFiles));
967
1045
  return uniqueFiles.filter((file) => {
968
- const relativePath = path10.relative(rootDir, file);
1046
+ const relativePath = path12.relative(rootDir, file);
969
1047
  return !ig.ignores(relativePath);
970
1048
  });
971
1049
  }
972
1050
  function detectLanguage(filepath) {
973
- const ext = path10.extname(filepath).toLowerCase();
1051
+ const ext = path12.extname(filepath).toLowerCase();
974
1052
  const languageMap = {
975
1053
  ".ts": "typescript",
976
1054
  ".tsx": "typescript",
@@ -1305,9 +1383,890 @@ var init_symbol_extractor = __esm({
1305
1383
  }
1306
1384
  });
1307
1385
 
1386
+ // src/indexer/ast/parser.ts
1387
+ import Parser from "tree-sitter";
1388
+ import TypeScript from "tree-sitter-typescript";
1389
+ import JavaScript from "tree-sitter-javascript";
1390
+ import PHPParser from "tree-sitter-php";
1391
+ import { extname } from "path";
1392
+ function getParser(language) {
1393
+ if (!parserCache.has(language)) {
1394
+ const parser = new Parser();
1395
+ const grammar = languageConfig[language];
1396
+ if (!grammar) {
1397
+ throw new Error(`No grammar available for language: ${language}`);
1398
+ }
1399
+ parser.setLanguage(grammar);
1400
+ parserCache.set(language, parser);
1401
+ }
1402
+ return parserCache.get(language);
1403
+ }
1404
+ function detectLanguage2(filePath) {
1405
+ const ext = extname(filePath).slice(1).toLowerCase();
1406
+ switch (ext) {
1407
+ case "ts":
1408
+ case "tsx":
1409
+ return "typescript";
1410
+ case "js":
1411
+ case "jsx":
1412
+ case "mjs":
1413
+ case "cjs":
1414
+ return "javascript";
1415
+ case "php":
1416
+ return "php";
1417
+ default:
1418
+ return null;
1419
+ }
1420
+ }
1421
+ function isASTSupported(filePath) {
1422
+ return detectLanguage2(filePath) !== null;
1423
+ }
1424
+ function parseAST(content, language) {
1425
+ try {
1426
+ const parser = getParser(language);
1427
+ const tree = parser.parse(content);
1428
+ if (tree.rootNode.hasError) {
1429
+ return {
1430
+ tree,
1431
+ error: "Parse completed with errors"
1432
+ };
1433
+ }
1434
+ return { tree };
1435
+ } catch (error) {
1436
+ return {
1437
+ tree: null,
1438
+ error: error instanceof Error ? error.message : "Unknown parse error"
1439
+ };
1440
+ }
1441
+ }
1442
+ var parserCache, languageConfig;
1443
+ var init_parser = __esm({
1444
+ "src/indexer/ast/parser.ts"() {
1445
+ "use strict";
1446
+ parserCache = /* @__PURE__ */ new Map();
1447
+ languageConfig = {
1448
+ typescript: TypeScript.typescript,
1449
+ javascript: JavaScript,
1450
+ php: PHPParser.php
1451
+ // Note: tree-sitter-php exports both 'php' (mixed HTML/PHP) and 'php_only'
1452
+ };
1453
+ }
1454
+ });
1455
+
1456
+ // src/indexer/ast/symbols.ts
1457
+ function extractFunctionInfo(node, content, parentClass) {
1458
+ const nameNode = node.childForFieldName("name");
1459
+ if (!nameNode) return null;
1460
+ return {
1461
+ name: nameNode.text,
1462
+ type: parentClass ? "method" : "function",
1463
+ startLine: node.startPosition.row + 1,
1464
+ endLine: node.endPosition.row + 1,
1465
+ parentClass,
1466
+ signature: extractSignature(node, content),
1467
+ parameters: extractParameters(node, content),
1468
+ returnType: extractReturnType(node, content),
1469
+ complexity: calculateComplexity(node)
1470
+ };
1471
+ }
1472
+ function extractArrowFunctionInfo(node, content, parentClass) {
1473
+ const parent = node.parent;
1474
+ let name = "anonymous";
1475
+ if (parent?.type === "variable_declarator") {
1476
+ const nameNode = parent.childForFieldName("name");
1477
+ name = nameNode?.text || "anonymous";
1478
+ }
1479
+ return {
1480
+ name,
1481
+ type: parentClass ? "method" : "function",
1482
+ startLine: node.startPosition.row + 1,
1483
+ endLine: node.endPosition.row + 1,
1484
+ parentClass,
1485
+ signature: extractSignature(node, content),
1486
+ parameters: extractParameters(node, content),
1487
+ complexity: calculateComplexity(node)
1488
+ };
1489
+ }
1490
+ function extractMethodInfo(node, content, parentClass) {
1491
+ const nameNode = node.childForFieldName("name");
1492
+ if (!nameNode) return null;
1493
+ return {
1494
+ name: nameNode.text,
1495
+ type: "method",
1496
+ startLine: node.startPosition.row + 1,
1497
+ endLine: node.endPosition.row + 1,
1498
+ parentClass,
1499
+ signature: extractSignature(node, content),
1500
+ parameters: extractParameters(node, content),
1501
+ returnType: extractReturnType(node, content),
1502
+ complexity: calculateComplexity(node)
1503
+ };
1504
+ }
1505
+ function extractClassInfo(node, _content, _parentClass) {
1506
+ const nameNode = node.childForFieldName("name");
1507
+ if (!nameNode) return null;
1508
+ return {
1509
+ name: nameNode.text,
1510
+ type: "class",
1511
+ startLine: node.startPosition.row + 1,
1512
+ endLine: node.endPosition.row + 1,
1513
+ signature: `class ${nameNode.text}`
1514
+ };
1515
+ }
1516
+ function extractInterfaceInfo(node, _content, _parentClass) {
1517
+ const nameNode = node.childForFieldName("name");
1518
+ if (!nameNode) return null;
1519
+ return {
1520
+ name: nameNode.text,
1521
+ type: "interface",
1522
+ startLine: node.startPosition.row + 1,
1523
+ endLine: node.endPosition.row + 1,
1524
+ signature: `interface ${nameNode.text}`
1525
+ };
1526
+ }
1527
+ function extractSymbolInfo(node, content, parentClass) {
1528
+ const extractor = symbolExtractors[node.type];
1529
+ return extractor ? extractor(node, content, parentClass) : null;
1530
+ }
1531
+ function extractSignature(node, content) {
1532
+ const startLine = node.startPosition.row;
1533
+ const lines = content.split("\n");
1534
+ let signature = lines[startLine] || "";
1535
+ let currentLine = startLine;
1536
+ while (currentLine < node.endPosition.row && !signature.includes("{") && !signature.includes("=>")) {
1537
+ currentLine++;
1538
+ signature += " " + (lines[currentLine] || "");
1539
+ }
1540
+ signature = signature.split("{")[0].split("=>")[0].trim();
1541
+ if (signature.length > 200) {
1542
+ signature = signature.substring(0, 197) + "...";
1543
+ }
1544
+ return signature;
1545
+ }
1546
+ function extractParameters(node, _content) {
1547
+ const parameters = [];
1548
+ const paramsNode = node.childForFieldName("parameters");
1549
+ if (!paramsNode) return parameters;
1550
+ for (let i = 0; i < paramsNode.namedChildCount; i++) {
1551
+ const param = paramsNode.namedChild(i);
1552
+ if (param) {
1553
+ parameters.push(param.text);
1554
+ }
1555
+ }
1556
+ return parameters;
1557
+ }
1558
+ function extractReturnType(node, _content) {
1559
+ const returnTypeNode = node.childForFieldName("return_type");
1560
+ if (!returnTypeNode) return void 0;
1561
+ return returnTypeNode.text;
1562
+ }
1563
+ function calculateComplexity(node) {
1564
+ let complexity = 1;
1565
+ const decisionPoints = [
1566
+ // TypeScript/JavaScript
1567
+ "if_statement",
1568
+ "while_statement",
1569
+ "do_statement",
1570
+ // do...while loops
1571
+ "for_statement",
1572
+ "for_in_statement",
1573
+ "for_of_statement",
1574
+ // for...of loops
1575
+ "switch_case",
1576
+ "catch_clause",
1577
+ "ternary_expression",
1578
+ "binary_expression",
1579
+ // For && and ||
1580
+ // PHP
1581
+ "foreach_statement"
1582
+ // PHP foreach loops
1583
+ ];
1584
+ function traverse(n) {
1585
+ if (decisionPoints.includes(n.type)) {
1586
+ if (n.type === "binary_expression") {
1587
+ const operator = n.childForFieldName("operator");
1588
+ if (operator && (operator.text === "&&" || operator.text === "||")) {
1589
+ complexity++;
1590
+ }
1591
+ } else {
1592
+ complexity++;
1593
+ }
1594
+ }
1595
+ for (let i = 0; i < n.namedChildCount; i++) {
1596
+ const child = n.namedChild(i);
1597
+ if (child) traverse(child);
1598
+ }
1599
+ }
1600
+ traverse(node);
1601
+ return complexity;
1602
+ }
1603
+ function extractImports(rootNode) {
1604
+ const imports = [];
1605
+ function traverse(node) {
1606
+ if (node.type === "import_statement") {
1607
+ const sourceNode = node.childForFieldName("source");
1608
+ if (sourceNode) {
1609
+ const importPath = sourceNode.text.replace(/['"]/g, "");
1610
+ imports.push(importPath);
1611
+ }
1612
+ }
1613
+ if (node === rootNode) {
1614
+ for (let i = 0; i < node.namedChildCount; i++) {
1615
+ const child = node.namedChild(i);
1616
+ if (child) traverse(child);
1617
+ }
1618
+ }
1619
+ }
1620
+ traverse(rootNode);
1621
+ return imports;
1622
+ }
1623
+ var symbolExtractors;
1624
+ var init_symbols = __esm({
1625
+ "src/indexer/ast/symbols.ts"() {
1626
+ "use strict";
1627
+ symbolExtractors = {
1628
+ // TypeScript/JavaScript
1629
+ "function_declaration": extractFunctionInfo,
1630
+ "function": extractFunctionInfo,
1631
+ "arrow_function": extractArrowFunctionInfo,
1632
+ "function_expression": extractArrowFunctionInfo,
1633
+ "method_definition": extractMethodInfo,
1634
+ "class_declaration": extractClassInfo,
1635
+ "interface_declaration": extractInterfaceInfo,
1636
+ // PHP
1637
+ "function_definition": extractFunctionInfo,
1638
+ // PHP functions
1639
+ "method_declaration": extractMethodInfo
1640
+ // PHP methods
1641
+ };
1642
+ }
1643
+ });
1644
+
1645
+ // src/indexer/ast/traversers/typescript.ts
1646
+ var TypeScriptTraverser, JavaScriptTraverser;
1647
+ var init_typescript = __esm({
1648
+ "src/indexer/ast/traversers/typescript.ts"() {
1649
+ "use strict";
1650
+ TypeScriptTraverser = class {
1651
+ targetNodeTypes = [
1652
+ "function_declaration",
1653
+ "function",
1654
+ "interface_declaration",
1655
+ "method_definition",
1656
+ "lexical_declaration",
1657
+ // For const/let with arrow functions
1658
+ "variable_declaration"
1659
+ // For var with functions
1660
+ ];
1661
+ containerTypes = [
1662
+ "class_declaration"
1663
+ // We extract methods, not the class itself
1664
+ ];
1665
+ declarationTypes = [
1666
+ "lexical_declaration",
1667
+ // const/let
1668
+ "variable_declaration"
1669
+ // var
1670
+ ];
1671
+ functionTypes = [
1672
+ "arrow_function",
1673
+ "function_expression",
1674
+ "function"
1675
+ ];
1676
+ shouldExtractChildren(node) {
1677
+ return this.containerTypes.includes(node.type);
1678
+ }
1679
+ isDeclarationWithFunction(node) {
1680
+ return this.declarationTypes.includes(node.type);
1681
+ }
1682
+ getContainerBody(node) {
1683
+ if (node.type === "class_declaration") {
1684
+ return node.childForFieldName("body");
1685
+ }
1686
+ return null;
1687
+ }
1688
+ shouldTraverseChildren(node) {
1689
+ return node.type === "program" || node.type === "export_statement" || node.type === "class_body";
1690
+ }
1691
+ findParentContainerName(node) {
1692
+ let current = node.parent;
1693
+ while (current) {
1694
+ if (current.type === "class_declaration") {
1695
+ const nameNode = current.childForFieldName("name");
1696
+ return nameNode?.text;
1697
+ }
1698
+ current = current.parent;
1699
+ }
1700
+ return void 0;
1701
+ }
1702
+ /**
1703
+ * Check if a declaration node contains a function (arrow, function expression, etc.)
1704
+ */
1705
+ findFunctionInDeclaration(node) {
1706
+ const search2 = (n, depth) => {
1707
+ if (depth > 3) return null;
1708
+ if (this.functionTypes.includes(n.type)) {
1709
+ return n;
1710
+ }
1711
+ for (let i = 0; i < n.childCount; i++) {
1712
+ const child = n.child(i);
1713
+ if (child) {
1714
+ const result = search2(child, depth + 1);
1715
+ if (result) return result;
1716
+ }
1717
+ }
1718
+ return null;
1719
+ };
1720
+ const functionNode = search2(node, 0);
1721
+ return {
1722
+ hasFunction: functionNode !== null,
1723
+ functionNode
1724
+ };
1725
+ }
1726
+ };
1727
+ JavaScriptTraverser = class extends TypeScriptTraverser {
1728
+ };
1729
+ }
1730
+ });
1731
+
1732
// src/indexer/ast/traversers/php.ts
// Traversal rules for walking a tree-sitter PHP AST when extracting
// chunkable symbols. Lazily initialized via the bundler's __esm wrapper.
var PHPTraverser;
var init_php = __esm({
  "src/indexer/ast/traversers/php.ts"() {
    "use strict";
    PHPTraverser = class {
      // Node types that become standalone chunks.
      targetNodeTypes = [
        "function_definition",
        // function foo() {}
        "method_declaration"
        // public function bar() {}
      ];
      // Container node types whose members are extracted individually.
      containerTypes = [
        "class_declaration",
        // We extract methods, not the class itself
        "trait_declaration",
        // PHP traits
        "interface_declaration"
        // PHP interfaces (for interface methods)
      ];
      // Intentionally empty: PHP has no const/let-bound function expressions.
      declarationTypes = [
        // PHP doesn't have arrow functions or const/let like JS
        // Functions are always defined with 'function' keyword
      ];
      functionTypes = [
        "function_definition",
        "method_declaration"
      ];
      // True when the node is a container whose children should be chunked.
      shouldExtractChildren(node) {
        return this.containerTypes.includes(node.type);
      }
      // PHP never wraps a function in a variable declaration.
      isDeclarationWithFunction(_node) {
        return false;
      }
      // Returns the body node of a class/trait/interface, or null.
      getContainerBody(node) {
        if (node.type === "class_declaration" || node.type === "trait_declaration" || node.type === "interface_declaration") {
          return node.childForFieldName("body");
        }
        return null;
      }
      // Only descend through file-level structural nodes.
      shouldTraverseChildren(node) {
        return node.type === "program" || // Top-level PHP file
        node.type === "php" || // PHP block
        node.type === "declaration_list";
      }
      // Walks up the tree to find the enclosing class/trait name, if any.
      // NOTE(review): interface_declaration is not checked here, so interface
      // methods get no parent name — confirm whether that is intended.
      findParentContainerName(node) {
        let current = node.parent;
        while (current) {
          if (current.type === "class_declaration" || current.type === "trait_declaration") {
            const nameNode = current.childForFieldName("name");
            return nameNode?.text;
          }
          current = current.parent;
        }
        return void 0;
      }
      // No declaration-wrapped functions in PHP (see isDeclarationWithFunction).
      findFunctionInDeclaration(_node) {
        return {
          hasFunction: false,
          functionNode: null
        };
      }
    };
  }
});
1797
+
1798
// src/indexer/ast/traversers/index.ts
// Returns the language-specific AST traverser for `language`.
// Throws when no traverser is registered (registry is filled in
// init_traversers below, so callers must run after module init).
function getTraverser(language) {
  const traverser = traverserRegistry[language];
  if (!traverser) {
    throw new Error(`No traverser available for language: ${language}`);
  }
  return traverser;
}
// language-key -> traverser instance; populated lazily below.
var traverserRegistry;
var init_traversers = __esm({
  "src/indexer/ast/traversers/index.ts"() {
    "use strict";
    init_typescript();
    init_php();
    traverserRegistry = {
      typescript: new TypeScriptTraverser(),
      javascript: new JavaScriptTraverser(),
      php: new PHPTraverser()
    };
  }
});
1819
+
1820
// src/indexer/ast/chunker.ts
/**
 * Chunk a source file by its AST: one chunk per top-level symbol, plus
 * line-range chunks for code the symbol pass did not cover.
 *
 * @param filepath used for language detection and chunk metadata
 * @param content  full file text
 * @param options  { minChunkSize } minimum line count for uncovered-code chunks (default 5)
 * @returns chunks sorted ascending by metadata.startLine
 * @throws Error when the language is unsupported or parsing fails
 */
function chunkByAST(filepath, content, options = {}) {
  const { minChunkSize = 5 } = options;
  const language = detectLanguage2(filepath);
  if (!language) {
    throw new Error(`Unsupported language for file: ${filepath}`);
  }
  const parseResult = parseAST(content, language);
  if (!parseResult.tree) {
    throw new Error(`Failed to parse ${filepath}: ${parseResult.error}`);
  }
  const chunks = [];
  const lines = content.split("\n");
  const rootNode = parseResult.tree.rootNode;
  const traverser = getTraverser(language);
  const fileImports = extractImports(rootNode);
  const topLevelNodes = findTopLevelNodes(rootNode, traverser);
  for (const node of topLevelNodes) {
    // For declaration-wrapped functions (e.g. `const f = () => {}`), extract
    // symbol info from the inner function node, not the declaration itself.
    let actualNode = node;
    if (traverser.isDeclarationWithFunction(node)) {
      const declInfo = traverser.findFunctionInDeclaration(node);
      if (declInfo.functionNode) {
        actualNode = declInfo.functionNode;
      }
    }
    const parentClassName = traverser.findParentContainerName(actualNode);
    const symbolInfo = extractSymbolInfo(actualNode, content, parentClassName);
    // Chunk content still spans the full outer node, including the declaration.
    const nodeContent = getNodeContent(node, lines);
    chunks.push(createChunk(filepath, node, nodeContent, symbolInfo, fileImports, language));
  }
  // 0-indexed line ranges already claimed by symbol chunks.
  const coveredRanges = topLevelNodes.map((n) => ({
    start: n.startPosition.row,
    end: n.endPosition.row
  }));
  const uncoveredChunks = extractUncoveredCode(
    lines,
    coveredRanges,
    filepath,
    minChunkSize,
    fileImports,
    language
  );
  chunks.push(...uncoveredChunks);
  chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);
  return chunks;
}
1866
/**
 * Collect the chunkable top-level nodes of an AST, using the traverser's
 * language rules. Depth 0 is file scope; depth 1 is one container deep
 * (e.g. methods inside a class body).
 */
function findTopLevelNodes(rootNode, traverser) {
  const collected = [];
  const visit = (current, depth) => {
    // File-scope declarations that wrap a function count as one chunk.
    if (depth === 0 && traverser.isDeclarationWithFunction(current)) {
      if (traverser.findFunctionInDeclaration(current).hasFunction) {
        collected.push(current);
        return;
      }
    }
    // Direct hits: functions/methods at file scope or one container deep.
    if (traverser.targetNodeTypes.includes(current.type) && depth <= 1) {
      collected.push(current);
      return;
    }
    // Containers (classes, traits, ...): recurse into the body only.
    if (traverser.shouldExtractChildren(current)) {
      const containerBody = traverser.getContainerBody(current);
      if (containerBody) {
        visit(containerBody, depth + 1);
      }
      return;
    }
    // Structural nodes: walk named children at the same depth.
    if (!traverser.shouldTraverseChildren(current)) {
      return;
    }
    for (let idx = 0; idx < current.namedChildCount; idx++) {
      const next = current.namedChild(idx);
      if (next) {
        visit(next, depth);
      }
    }
  };
  visit(rootNode, 0);
  return collected;
}
1897
// Slice the original file lines spanned by `node` (rows are 0-indexed,
// inclusive at both ends) and rejoin them into the chunk text.
function getNodeContent(node, lines) {
  const firstRow = node.startPosition.row;
  const lastRow = node.endPosition.row;
  return lines.slice(firstRow, lastRow + 1).join("\n");
}
1902
/**
 * Build a chunk record for one AST node.
 *
 * The legacy `symbols` buckets are kept for backward compatibility and are
 * filled from the single AST symbol (if any); the flat symbol* fields carry
 * the richer AST-derived metadata.
 */
function createChunk(filepath, node, content, symbolInfo, imports, language) {
  const symbols = { functions: [], classes: [], interfaces: [] };
  const name = symbolInfo?.name;
  if (name) {
    switch (symbolInfo.type) {
      case "function":
      case "method":
        symbols.functions.push(name);
        break;
      case "class":
        symbols.classes.push(name);
        break;
      case "interface":
        symbols.interfaces.push(name);
        break;
    }
  }
  // Chunk type collapses to three values: block (no symbol), class, function.
  let chunkType;
  if (symbolInfo == null) {
    chunkType = "block";
  } else {
    chunkType = symbolInfo.type === "class" ? "class" : "function";
  }
  return {
    content,
    metadata: {
      file: filepath,
      startLine: node.startPosition.row + 1, // rows are 0-indexed; lines are 1-indexed
      endLine: node.endPosition.row + 1,
      type: chunkType,
      language,
      // Legacy symbols field for backward compatibility
      symbols,
      // New AST-derived metadata
      symbolName: symbolInfo?.name,
      symbolType: symbolInfo?.type,
      parentClass: symbolInfo?.parentClass,
      complexity: symbolInfo?.complexity,
      parameters: symbolInfo?.parameters,
      signature: symbolInfo?.signature,
      imports
    }
  };
}
1938
/**
 * Compute the 0-indexed, inclusive line ranges NOT covered by `coveredRanges`
 * within a file of `totalLines` lines.
 *
 * Fix: the cursor now advances monotonically (Math.max). Previously a range
 * nested inside an earlier one (e.g. [{0,10},{2,3}]) pulled the cursor
 * backwards to range.end + 1, re-emitting already-covered lines as uncovered.
 *
 * @param coveredRanges array of { start, end } (inclusive), any order
 * @param totalLines    total number of lines in the file
 * @returns array of { start, end } gaps, in ascending order
 */
function findUncoveredRanges(coveredRanges, totalLines) {
  const uncoveredRanges = [];
  let cursor = 0;
  const sortedRanges = [...coveredRanges].sort((a, b) => a.start - b.start);
  for (const range of sortedRanges) {
    if (cursor < range.start) {
      uncoveredRanges.push({ start: cursor, end: range.start - 1 });
    }
    // Never move backwards: overlapping/nested ranges must not reopen
    // lines that an earlier, larger range already covered.
    cursor = Math.max(cursor, range.end + 1);
  }
  if (cursor < totalLines) {
    uncoveredRanges.push({ start: cursor, end: totalLines - 1 });
  }
  return uncoveredRanges;
}
1959
// Build a plain "block" chunk from an uncovered 0-indexed line range.
// The content is trimmed; symbol buckets stay empty because this code was
// not matched to any AST symbol (imports, exports, stray statements, ...).
function createChunkFromRange(range, lines, filepath, language, imports) {
  const text = lines.slice(range.start, range.end + 1).join("\n").trim();
  return {
    content: text,
    metadata: {
      file: filepath,
      startLine: range.start + 1,
      endLine: range.end + 1,
      type: "block",
      language,
      // Empty symbols for uncovered code (imports, exports, etc.)
      symbols: { functions: [], classes: [], interfaces: [] },
      imports
    }
  };
}
1976
// A chunk is kept only when it has non-empty content and spans at least
// `minChunkSize` lines (inclusive line count).
function isValidChunk(chunk, minChunkSize) {
  const spanned = chunk.metadata.endLine - chunk.metadata.startLine + 1;
  return spanned >= minChunkSize && chunk.content.length > 0;
}
1980
// Turn every gap between symbol chunks into a block chunk, dropping gaps
// that are empty or shorter than minChunkSize.
function extractUncoveredCode(lines, coveredRanges, filepath, minChunkSize, imports, language) {
  const gaps = findUncoveredRanges(coveredRanges, lines.length);
  const candidates = gaps.map((gap) => createChunkFromRange(gap, lines, filepath, language, imports));
  return candidates.filter((candidate) => isValidChunk(candidate, minChunkSize));
}
1984
// Whether AST-based chunking should be attempted for this file.
// Thin alias over isASTSupported so call sites read intent, not mechanism.
function shouldUseAST(filepath) {
  return isASTSupported(filepath);
}
1987
// Module initializer for src/indexer/ast/chunker.ts; pulls in the parser,
// symbol extractor, and traverser registry the functions above rely on.
var init_chunker = __esm({
  "src/indexer/ast/chunker.ts"() {
    "use strict";
    init_parser();
    init_symbols();
    init_traversers();
  }
});
1995
+
1996
// src/indexer/liquid-chunker.ts
// Parse the JSON inside a Liquid {% schema %} block and return its "name"
// field, or undefined when the JSON is malformed or the name is not a string.
function extractSchemaName(schemaContent) {
  try {
    const jsonOnly = schemaContent
      .replace(/\{%-?\s*schema\s*-?%\}/g, "")
      .replace(/\{%-?\s*endschema\s*-?%\}/g, "")
      .trim();
    const parsed = JSON.parse(jsonOnly);
    if (typeof parsed.name === "string") {
      return parsed.name;
    }
  } catch {
    // Malformed schema JSON: fall through and report "no name".
  }
  return void 0;
}
2006
// Strip every {% comment %}...{% endcomment %} region (non-greedy, across
// newlines) so comment-only render/include tags are not picked up later.
function removeComments(content) {
  const commentBlock = /\{%-?\s*comment\s*-?%\}[\s\S]*?\{%-?\s*endcomment\s*-?%\}/g;
  return content.replace(commentBlock, "");
}
2009
// Collect the snippet/section names referenced by {% render %}, {% include %}
// and {% section %} tags. Input should already have comments removed.
// Returns unique names in first-seen order (render, then include, then section).
function extractRenderTags(contentWithoutComments) {
  const dependencies = /* @__PURE__ */ new Set();
  const tagPatterns = [
    /\{%-?\s*render\s+['"]([^'"]+)['"]/g,
    /\{%-?\s*include\s+['"]([^'"]+)['"]/g,
    /\{%-?\s*section\s+['"]([^'"]+)['"]/g
  ];
  for (const pattern of tagPatterns) {
    for (const found of contentWithoutComments.matchAll(pattern)) {
      dependencies.add(found[1]);
    }
  }
  return Array.from(dependencies);
}
2026
// Locate {% schema %}, {% style %} and {% javascript %} blocks by line.
// Returns { type, startLine, endLine, content } entries (0-indexed lines,
// inclusive), sorted by startLine. An opening tag with no matching closing
// tag is ignored.
function findLiquidBlocks(content) {
  const lines = content.split("\n");
  const blocks = [];
  const blockPatterns = [
    { type: "schema", start: /\{%-?\s*schema\s*-?%\}/, end: /\{%-?\s*endschema\s*-?%\}/ },
    { type: "style", start: /\{%-?\s*style\s*-?%\}/, end: /\{%-?\s*endstyle\s*-?%\}/ },
    { type: "javascript", start: /\{%-?\s*javascript\s*-?%\}/, end: /\{%-?\s*endjavascript\s*-?%\}/ }
  ];
  // First line index >= from whose text matches re, or -1.
  const indexOfMatch = (re, from) => {
    for (let i = from; i < lines.length; i++) {
      if (re.test(lines[i])) return i;
    }
    return -1;
  };
  for (const { type, start, end } of blockPatterns) {
    let cursor = 0;
    while (cursor < lines.length) {
      const openIdx = indexOfMatch(start, cursor);
      if (openIdx === -1) break;
      // The close may sit on the same line as the open.
      const closeIdx = indexOfMatch(end, openIdx);
      if (closeIdx === -1) break;
      blocks.push({
        type,
        startLine: openIdx,
        endLine: closeIdx,
        content: lines.slice(openIdx, closeIdx + 1).join("\n")
      });
      cursor = closeIdx + 1;
    }
  }
  return blocks.sort((a, b) => a.startLine - b.startLine);
}
2059
/**
 * Chunk a Shopify Liquid file in two passes:
 *  1. Special blocks (schema/style/javascript) become dedicated chunks,
 *     split with overlap when longer than chunkSize * 3 lines.
 *  2. Remaining template lines are grouped into rolling "template" chunks of
 *     up to chunkSize lines with chunkOverlap lines of overlap.
 * All emitted line numbers are 1-indexed; chunks are returned sorted.
 *
 * NOTE(review): imports are extracted from `linesWithoutComments` indexed by
 * the ORIGINAL line numbers — this assumes removeComments never changes the
 * line count (i.e. comments are single-line); multi-line comments would shift
 * the alignment. TODO confirm.
 */
function chunkLiquidFile(filepath, content, chunkSize = 75, chunkOverlap = 10) {
  const lines = content.split("\n");
  const blocks = findLiquidBlocks(content);
  const chunks = [];
  const contentWithoutComments = removeComments(content);
  const linesWithoutComments = contentWithoutComments.split("\n");
  // Line indices claimed by special blocks; pass 2 skips these.
  const coveredLines = /* @__PURE__ */ new Set();
  for (const block of blocks) {
    for (let i = block.startLine; i <= block.endLine; i++) {
      coveredLines.add(i);
    }
    // Only schema blocks carry a section name.
    let symbolName;
    if (block.type === "schema") {
      symbolName = extractSchemaName(block.content);
    }
    const blockContentWithoutComments = linesWithoutComments.slice(block.startLine, block.endLine + 1).join("\n");
    const imports = extractRenderTags(blockContentWithoutComments);
    const blockLineCount = block.endLine - block.startLine + 1;
    // Blocks up to 3x the chunk size are kept whole to preserve context.
    const maxBlockSize = chunkSize * 3;
    if (blockLineCount <= maxBlockSize) {
      chunks.push({
        content: block.content,
        metadata: {
          file: filepath,
          startLine: block.startLine + 1,
          // 1-indexed
          endLine: block.endLine + 1,
          language: "liquid",
          type: "block",
          symbolName,
          symbolType: block.type,
          imports: imports.length > 0 ? imports : void 0
        }
      });
    } else {
      // Oversized block: split into overlapping windows of chunkSize lines.
      const blockLines = block.content.split("\n");
      for (let offset = 0; offset < blockLines.length; offset += chunkSize - chunkOverlap) {
        const endOffset = Math.min(offset + chunkSize, blockLines.length);
        const chunkContent = blockLines.slice(offset, endOffset).join("\n");
        if (chunkContent.trim().length > 0) {
          chunks.push({
            content: chunkContent,
            metadata: {
              file: filepath,
              startLine: block.startLine + offset + 1,
              // 1-indexed
              endLine: block.startLine + endOffset,
              // 1-indexed (endOffset already accounts for exclusivity)
              language: "liquid",
              type: "block",
              symbolName,
              // Preserve symbol name for all split chunks
              symbolType: block.type,
              imports: imports.length > 0 ? imports : void 0
            }
          });
        }
        if (endOffset >= blockLines.length) break;
      }
    }
  }
  // Pass 2: roll the remaining (non-block) lines into template chunks.
  let currentChunk = [];
  let chunkStartLine = 0;
  for (let i = 0; i < lines.length; i++) {
    if (coveredLines.has(i)) {
      // Hit a special block: flush whatever template lines were accumulated.
      if (currentChunk.length > 0) {
        const chunkContent = currentChunk.join("\n");
        if (chunkContent.trim().length > 0) {
          const cleanedChunk = linesWithoutComments.slice(chunkStartLine, i).join("\n");
          const imports = extractRenderTags(cleanedChunk);
          chunks.push({
            content: chunkContent,
            metadata: {
              file: filepath,
              startLine: chunkStartLine + 1,
              endLine: i,
              language: "liquid",
              type: "template",
              imports: imports.length > 0 ? imports : void 0
            }
          });
        }
        currentChunk = [];
      }
      continue;
    }
    if (currentChunk.length === 0) {
      chunkStartLine = i;
    }
    currentChunk.push(lines[i]);
    // Window full: emit and carry chunkOverlap trailing lines forward.
    if (currentChunk.length >= chunkSize) {
      const chunkContent = currentChunk.join("\n");
      if (chunkContent.trim().length > 0) {
        const cleanedChunk = linesWithoutComments.slice(chunkStartLine, i + 1).join("\n");
        const imports = extractRenderTags(cleanedChunk);
        chunks.push({
          content: chunkContent,
          metadata: {
            file: filepath,
            startLine: chunkStartLine + 1,
            endLine: i + 1,
            language: "liquid",
            type: "template",
            imports: imports.length > 0 ? imports : void 0
          }
        });
      }
      currentChunk = currentChunk.slice(-chunkOverlap);
      chunkStartLine = Math.max(0, i + 1 - chunkOverlap);
    }
  }
  // Flush the final partial template chunk, if it has any real content.
  if (currentChunk.length > 0) {
    const chunkContent = currentChunk.join("\n");
    if (chunkContent.trim().length === 0) {
      return chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);
    }
    const cleanedChunk = linesWithoutComments.slice(chunkStartLine, lines.length).join("\n");
    const imports = extractRenderTags(cleanedChunk);
    chunks.push({
      content: chunkContent,
      metadata: {
        file: filepath,
        startLine: chunkStartLine + 1,
        endLine: lines.length,
        language: "liquid",
        type: "template",
        imports: imports.length > 0 ? imports : void 0
      }
    });
  }
  return chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);
}
2191
// Module initializer for src/indexer/liquid-chunker.ts (no dependencies).
var init_liquid_chunker = __esm({
  "src/indexer/liquid-chunker.ts"() {
    "use strict";
  }
});
2196
+
2197
// src/indexer/json-template-chunker.ts
// Return the unique section `type` values referenced by a Shopify JSON
// template's "sections" map. Logs a warning and returns [] on invalid JSON.
function extractSectionReferences(jsonContent) {
  try {
    const template = JSON.parse(jsonContent);
    const sectionTypes = /* @__PURE__ */ new Set();
    const sections = template.sections;
    if (sections && typeof sections === "object") {
      for (const section of Object.values(sections)) {
        const hasStringType = typeof section === "object" && section !== null && "type" in section && typeof section.type === "string";
        if (hasStringType) {
          sectionTypes.add(section.type);
        }
      }
    }
    return Array.from(sectionTypes);
  } catch (error) {
    console.warn(`[Lien] Failed to parse JSON template: ${error instanceof Error ? error.message : String(error)}`);
    return [];
  }
}
2215
// Derive the template name from a path like "templates/product.json"
// ("product"); returns undefined for paths outside templates/.
function extractTemplateName(filepath) {
  const found = /templates\/(.+)\.json$/.exec(filepath);
  return found ? found[1] : void 0;
}
2219
// A Shopify JSON template becomes exactly one chunk covering the whole file,
// with referenced section types recorded as imports. Empty files yield [].
function chunkJSONTemplate(filepath, content) {
  if (content.trim().length === 0) {
    return [];
  }
  const sectionRefs = extractSectionReferences(content);
  const lineCount = content.split("\n").length;
  return [{
    content,
    metadata: {
      file: filepath,
      startLine: 1,
      endLine: lineCount,
      language: "json",
      type: "template",
      symbolName: extractTemplateName(filepath),
      symbolType: "template",
      imports: sectionRefs.length > 0 ? sectionRefs : void 0
    }
  }];
}
2240
// Module initializer for src/indexer/json-template-chunker.ts (no dependencies).
var init_json_template_chunker = __esm({
  "src/indexer/json-template-chunker.ts"() {
    "use strict";
  }
});
2245
+
1308
2246
// src/indexer/chunker.ts
/**
 * Entry point for chunking any source file. Dispatch order:
 * Liquid files -> Liquid chunker; Shopify JSON templates -> JSON template
 * chunker; AST-supported files -> AST chunker (with configurable fallback);
 * everything else -> plain line-based chunking.
 *
 * @param options { chunkSize, chunkOverlap, useAST, astFallback }
 *   astFallback "error" rethrows AST failures; otherwise they degrade to
 *   line-based chunking with a warning.
 */
function chunkFile(filepath, content, options = {}) {
  const {
    chunkSize = 75,
    chunkOverlap = 10,
    useAST = true,
    astFallback = "line-based"
  } = options;
  if (filepath.endsWith(".liquid")) {
    return chunkLiquidFile(filepath, content, chunkSize, chunkOverlap);
  }
  const isJsonTemplate = filepath.endsWith(".json") && /(?:^|\/)templates\//.test(filepath);
  if (isJsonTemplate) {
    return chunkJSONTemplate(filepath, content);
  }
  if (useAST && shouldUseAST(filepath)) {
    try {
      return chunkByAST(filepath, content, {
        minChunkSize: Math.floor(chunkSize / 10)
      });
    } catch (error) {
      if (astFallback === "error") {
        throw new Error(`AST chunking failed for ${filepath}: ${error instanceof Error ? error.message : String(error)}`);
      }
      console.warn(`AST chunking failed for ${filepath}, falling back to line-based:`, error);
    }
  }
  return chunkByLines(filepath, content, chunkSize, chunkOverlap);
}
2269
+ function chunkByLines(filepath, content, chunkSize, chunkOverlap) {
1311
2270
  const lines = content.split("\n");
1312
2271
  const chunks = [];
1313
2272
  const language = detectLanguage(filepath);
@@ -1340,11 +2299,14 @@ function chunkFile(filepath, content, options = {}) {
1340
2299
  }
1341
2300
  return chunks;
1342
2301
  }
1343
- var init_chunker = __esm({
2302
+ var init_chunker2 = __esm({
1344
2303
  "src/indexer/chunker.ts"() {
1345
2304
  "use strict";
1346
2305
  init_scanner();
1347
2306
  init_symbol_extractor();
2307
+ init_chunker();
2308
+ init_liquid_chunker();
2309
+ init_json_template_chunker();
1348
2310
  }
1349
2311
  });
1350
2312
 
@@ -1455,18 +2417,11 @@ var init_intent_classifier = __esm({
1455
2417
  }
1456
2418
  });
1457
2419
 
1458
- // src/vectordb/lancedb.ts
1459
- var lancedb_exports = {};
1460
- __export(lancedb_exports, {
1461
- VectorDB: () => VectorDB
1462
- });
1463
- import * as lancedb from "vectordb";
1464
- import path11 from "path";
1465
- import os2 from "os";
1466
- import crypto2 from "crypto";
2420
+ // src/vectordb/query.ts
2421
+ import path13 from "path";
1467
2422
  function isDocumentationFile(filepath) {
1468
2423
  const lower = filepath.toLowerCase();
1469
- const filename = path11.basename(filepath).toLowerCase();
2424
+ const filename = path13.basename(filepath).toLowerCase();
1470
2425
  if (filename.startsWith("readme")) return true;
1471
2426
  if (filename.startsWith("changelog")) return true;
1472
2427
  if (filename.endsWith(".md") || filename.endsWith(".mdx") || filename.endsWith(".markdown")) {
@@ -1513,7 +2468,7 @@ function boostPathRelevance(query, filepath, baseScore) {
1513
2468
  return baseScore * boostFactor;
1514
2469
  }
1515
2470
  function boostFilenameRelevance(query, filepath, baseScore) {
1516
- const filename = path11.basename(filepath, path11.extname(filepath)).toLowerCase();
2471
+ const filename = path13.basename(filepath, path13.extname(filepath)).toLowerCase();
1517
2472
  const queryTokens = query.toLowerCase().split(/\s+/);
1518
2473
  let boostFactor = 1;
1519
2474
  for (const token of queryTokens) {
@@ -1528,7 +2483,7 @@ function boostFilenameRelevance(query, filepath, baseScore) {
1528
2483
  }
1529
2484
  function boostForLocationIntent(query, filepath, baseScore) {
1530
2485
  let score = baseScore;
1531
- const filename = path11.basename(filepath, path11.extname(filepath)).toLowerCase();
2486
+ const filename = path13.basename(filepath, path13.extname(filepath)).toLowerCase();
1532
2487
  const queryTokens = query.toLowerCase().split(/\s+/);
1533
2488
  for (const token of queryTokens) {
1534
2489
  if (token.length <= 2) continue;
@@ -1556,7 +2511,7 @@ function boostForConceptualIntent(query, filepath, baseScore) {
1556
2511
  if (isUtilityFile(filepath)) {
1557
2512
  score *= 1.05;
1558
2513
  }
1559
- const filename = path11.basename(filepath, path11.extname(filepath)).toLowerCase();
2514
+ const filename = path13.basename(filepath, path13.extname(filepath)).toLowerCase();
1560
2515
  const queryTokens = query.toLowerCase().split(/\s+/);
1561
2516
  for (const token of queryTokens) {
1562
2517
  if (token.length <= 2) continue;
@@ -1564,7 +2519,7 @@ function boostForConceptualIntent(query, filepath, baseScore) {
1564
2519
  score *= 0.9;
1565
2520
  }
1566
2521
  }
1567
- const pathSegments = filepath.toLowerCase().split(path11.sep);
2522
+ const pathSegments = filepath.toLowerCase().split(path13.sep);
1568
2523
  for (const token of queryTokens) {
1569
2524
  if (token.length <= 2) continue;
1570
2525
  for (const segment of pathSegments) {
@@ -1601,16 +2556,373 @@ function applyRelevanceBoosting(query, filepath, baseScore) {
1601
2556
  return boostForImplementationIntent(query, filepath, baseScore);
1602
2557
  }
1603
2558
  }
2559
// Convert one raw LanceDB row into a SearchResult: normalize empty-string
// sentinel fields back to undefined, apply query-aware relevance boosting to
// the raw distance, and attach a human-readable relevance label.
function dbRecordToSearchResult(r, query) {
  const rawDistance = r._distance ?? 0;
  const boostedScore = applyRelevanceBoosting(query, r.file, rawDistance);
  // Arrays are stored with a single "" placeholder when empty (Arrow typing).
  const listOrUndefined = (values) => values && values.length > 0 && values[0] !== "" ? values : void 0;
  return {
    content: r.content,
    metadata: {
      file: r.file,
      startLine: r.startLine,
      endLine: r.endLine,
      type: r.type,
      language: r.language,
      // AST-derived metadata (v0.13.0)
      symbolName: r.symbolName || void 0,
      symbolType: r.symbolType,
      parentClass: r.parentClass || void 0,
      complexity: r.complexity || void 0,
      parameters: listOrUndefined(r.parameters),
      signature: r.signature || void 0,
      imports: listOrUndefined(r.imports)
    },
    score: boostedScore,
    relevance: calculateRelevance(boostedScore)
  };
}
2583
// Vector-similarity search over the chunk table. Over-fetches (limit + 20)
// so that filtering out empty rows and re-ranking by boosted score still
// leaves `limit` results. Lower score = closer match.
async function search(table, queryVector, limit = 5, query) {
  if (!table) {
    throw new DatabaseError("Vector database not initialized");
  }
  try {
    const raw = await table.search(Array.from(queryVector)).limit(limit + 20).execute();
    const usable = raw.filter(
      (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
    );
    const ranked = usable.map((r) => dbRecordToSearchResult(r, query));
    ranked.sort((a, b) => a.score - b.score);
    return ranked.slice(0, limit);
  } catch (error) {
    const message = String(error);
    // Missing .lance fragments indicate a stale/corrupted on-disk index.
    if (message.includes("Not found:") || message.includes(".lance")) {
      throw new DatabaseError(
        `Index appears corrupted or outdated. Please restart the MCP server or run 'lien reindex' in the project directory.`,
        { originalError: error }
      );
    }
    throw wrapError(error, "Failed to search vector database");
  }
}
2604
/**
 * Non-semantic scan of the chunk table with optional language and regex
 * filters. Uses a zero-vector search purely as a table-scan mechanism, so
 * all results get score 0 (no ranking is meaningful here).
 *
 * @param options { language, pattern, limit = 100 }
 * @throws DatabaseError when the table is missing; wraps all other errors
 */
async function scanWithFilter(table, options) {
  if (!table) {
    throw new DatabaseError("Vector database not initialized");
  }
  const { language, pattern, limit = 100 } = options;
  try {
    // Zero vector + where clause = "fetch rows", not similarity search.
    const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
    // Over-fetch so post-filtering can still fill `limit` rows.
    const query = table.search(zeroVector).where('file != ""').limit(Math.max(limit * 5, 200));
    const results = await query.execute();
    let filtered = results.filter(
      (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
    );
    if (language) {
      filtered = filtered.filter(
        (r) => r.language && r.language.toLowerCase() === language.toLowerCase()
      );
    }
    if (pattern) {
      // Case-insensitive regex matched against content OR file path.
      const regex = new RegExp(pattern, "i");
      filtered = filtered.filter(
        (r) => regex.test(r.content) || regex.test(r.file)
      );
    }
    return filtered.slice(0, limit).map((r) => {
      const score = 0;
      return {
        content: r.content,
        metadata: {
          file: r.file,
          startLine: r.startLine,
          endLine: r.endLine,
          type: r.type,
          language: r.language,
          // AST-derived metadata (v0.13.0); "" / [""] placeholders -> undefined
          symbolName: r.symbolName || void 0,
          symbolType: r.symbolType,
          parentClass: r.parentClass || void 0,
          complexity: r.complexity || void 0,
          parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
          signature: r.signature || void 0,
          imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
        },
        score,
        relevance: calculateRelevance(score)
      };
    });
  } catch (error) {
    throw wrapError(error, "Failed to scan with filter");
  }
}
2654
// Decide whether a DB record matches the requested symbol type. Prefers the
// AST-derived record.symbolType ("method" counts as "function"); records
// without it fall back to "has any non-empty legacy symbol name".
function matchesSymbolType(record, symbolType, symbols) {
  if (record.symbolType) {
    switch (symbolType) {
      case "function":
        return record.symbolType === "function" || record.symbolType === "method";
      case "class":
        return record.symbolType === "class";
      case "interface":
        return record.symbolType === "interface";
      default:
        return false;
    }
  }
  // Legacy path: Arrow storage pads empty lists with "", so require a
  // genuinely non-empty name.
  return symbols.some((s) => s.length > 0 && s !== "");
}
2667
/**
 * List chunks that define symbols, optionally filtered by language, a
 * case-insensitive name pattern, and/or a symbol type. Like scanWithFilter,
 * a zero-vector search is used only as a table-scan, so scores are 0.
 *
 * Matching considers both the legacy per-kind name arrays
 * (functionNames/classNames/interfaceNames) and the newer AST symbolName.
 *
 * @param options { language, pattern, symbolType, limit = 50 }
 */
async function querySymbols(table, options) {
  if (!table) {
    throw new DatabaseError("Vector database not initialized");
  }
  const { language, pattern, symbolType, limit = 50 } = options;
  try {
    const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
    // Over-fetch heavily: most rows are filtered out below.
    const query = table.search(zeroVector).where('file != ""').limit(Math.max(limit * 10, 500));
    const results = await query.execute();
    let filtered = results.filter((r) => {
      if (!r.content || r.content.trim().length === 0) {
        return false;
      }
      if (!r.file || r.file.length === 0) {
        return false;
      }
      if (language && (!r.language || r.language.toLowerCase() !== language.toLowerCase())) {
        return false;
      }
      // Candidate legacy names: the requested kind only, or all kinds.
      const symbols = symbolType === "function" ? r.functionNames || [] : symbolType === "class" ? r.classNames || [] : symbolType === "interface" ? r.interfaceNames || [] : [...r.functionNames || [], ...r.classNames || [], ...r.interfaceNames || []];
      const astSymbolName = r.symbolName || "";
      // Keep only rows that define at least one symbol.
      if (symbols.length === 0 && !astSymbolName) {
        return false;
      }
      if (pattern) {
        const regex = new RegExp(pattern, "i");
        const matchesOldSymbols = symbols.some((s) => regex.test(s));
        const matchesASTSymbol = regex.test(astSymbolName);
        const nameMatches = matchesOldSymbols || matchesASTSymbol;
        if (!nameMatches) return false;
        if (symbolType) {
          return matchesSymbolType(r, symbolType, symbols);
        }
        return nameMatches;
      }
      if (symbolType) {
        return matchesSymbolType(r, symbolType, symbols);
      }
      return true;
    });
    return filtered.slice(0, limit).map((r) => {
      const score = 0;
      return {
        content: r.content,
        metadata: {
          file: r.file,
          startLine: r.startLine,
          endLine: r.endLine,
          type: r.type,
          language: r.language,
          // Legacy buckets; [""] placeholders are normalized back to [].
          symbols: {
            functions: r.functionNames && r.functionNames.length > 0 && r.functionNames[0] !== "" ? r.functionNames : [],
            classes: r.classNames && r.classNames.length > 0 && r.classNames[0] !== "" ? r.classNames : [],
            interfaces: r.interfaceNames && r.interfaceNames.length > 0 && r.interfaceNames[0] !== "" ? r.interfaceNames : []
          },
          // AST-derived metadata (v0.13.0)
          symbolName: r.symbolName || void 0,
          symbolType: r.symbolType,
          parentClass: r.parentClass || void 0,
          complexity: r.complexity || void 0,
          parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
          signature: r.signature || void 0,
          imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
        },
        score,
        relevance: calculateRelevance(score)
      };
    });
  } catch (error) {
    throw wrapError(error, "Failed to query symbols");
  }
}
2739
// Module initializer for src/vectordb/query.ts; wires up the error types,
// relevance scoring, and intent classification used by the helpers above.
var init_query = __esm({
  "src/vectordb/query.ts"() {
    "use strict";
    init_types();
    init_errors();
    init_relevance();
    init_intent_classifier();
  }
});
2748
+
2749
// src/vectordb/batch-insert.ts
/**
 * Insert (vector, metadata, content) triples into the table, splitting into
 * slices of at most VECTOR_DB_MAX_BATCH_SIZE. Creates the table on first
 * insert (table may be null) and returns the live table handle.
 *
 * @throws DatabaseError on missing db, mismatched array lengths, or when no
 *         table exists after inserting.
 */
async function insertBatch(db, table, tableName, vectors, metadatas, contents) {
  if (!db) {
    throw new DatabaseError("Vector database not initialized");
  }
  const lengthsAgree = vectors.length === metadatas.length && vectors.length === contents.length;
  if (!lengthsAgree) {
    throw new DatabaseError("Vectors, metadatas, and contents arrays must have the same length", {
      vectorsLength: vectors.length,
      metadatasLength: metadatas.length,
      contentsLength: contents.length
    });
  }
  if (vectors.length === 0) {
    return table;
  }
  // Small enough to go in one shot.
  if (vectors.length <= VECTOR_DB_MAX_BATCH_SIZE) {
    return insertBatchInternal(db, table, tableName, vectors, metadatas, contents);
  }
  // Otherwise walk the arrays in max-batch-size windows, threading the
  // (possibly newly created) table handle through each insert.
  let currentTable = table;
  for (let start = 0; start < vectors.length; start += VECTOR_DB_MAX_BATCH_SIZE) {
    const stop = Math.min(start + VECTOR_DB_MAX_BATCH_SIZE, vectors.length);
    currentTable = await insertBatchInternal(
      db,
      currentTable,
      tableName,
      vectors.slice(start, stop),
      metadatas.slice(start, stop),
      contents.slice(start, stop)
    );
  }
  if (!currentTable) {
    throw new DatabaseError("Failed to create table during batch insert");
  }
  return currentTable;
}
2780
/**
 * Insert one logical batch with bisecting retry: on failure a batch is split
 * in half and both halves are re-queued, until batches reach
 * VECTOR_DB_MIN_BATCH_SIZE, at which point they are recorded as failed.
 * Creates the table on first successful insert; returns the table handle.
 *
 * @throws DatabaseError when any records ultimately fail, or no table exists
 */
async function insertBatchInternal(db, table, tableName, vectors, metadatas, contents) {
  const queue = [{ vectors, metadatas, contents }];
  const failedRecords = [];
  let currentTable = table;
  while (queue.length > 0) {
    const batch = queue.shift();
    if (!batch) break;
    try {
      // Flatten chunk metadata into LanceDB's row schema.
      const records = batch.vectors.map((vector, i) => ({
        vector: Array.from(vector),
        content: batch.contents[i],
        file: batch.metadatas[i].file,
        startLine: batch.metadatas[i].startLine,
        endLine: batch.metadatas[i].endLine,
        type: batch.metadatas[i].type,
        language: batch.metadatas[i].language,
        // Ensure arrays have at least empty string for Arrow type inference
        functionNames: batch.metadatas[i].symbols?.functions && batch.metadatas[i].symbols.functions.length > 0 ? batch.metadatas[i].symbols.functions : [""],
        classNames: batch.metadatas[i].symbols?.classes && batch.metadatas[i].symbols.classes.length > 0 ? batch.metadatas[i].symbols.classes : [""],
        interfaceNames: batch.metadatas[i].symbols?.interfaces && batch.metadatas[i].symbols.interfaces.length > 0 ? batch.metadatas[i].symbols.interfaces : [""],
        // AST-derived metadata (v0.13.0)
        symbolName: batch.metadatas[i].symbolName || "",
        symbolType: batch.metadatas[i].symbolType || "",
        parentClass: batch.metadatas[i].parentClass || "",
        complexity: batch.metadatas[i].complexity || 0,
        parameters: batch.metadatas[i].parameters && batch.metadatas[i].parameters.length > 0 ? batch.metadatas[i].parameters : [""],
        signature: batch.metadatas[i].signature || "",
        imports: batch.metadatas[i].imports && batch.metadatas[i].imports.length > 0 ? batch.metadatas[i].imports : [""]
      }));
      if (!currentTable) {
        // First successful insert creates the table with an inferred schema.
        currentTable = await db.createTable(tableName, records);
      } else {
        await currentTable.add(records);
      }
    } catch (error) {
      if (batch.vectors.length > VECTOR_DB_MIN_BATCH_SIZE) {
        // Bisect and retry both halves; isolates poison rows.
        const half = Math.floor(batch.vectors.length / 2);
        queue.push({
          vectors: batch.vectors.slice(0, half),
          metadatas: batch.metadatas.slice(0, half),
          contents: batch.contents.slice(0, half)
        });
        queue.push({
          vectors: batch.vectors.slice(half),
          metadatas: batch.metadatas.slice(half),
          contents: batch.contents.slice(half)
        });
      } else {
        // Too small to split further: give up on this batch.
        failedRecords.push(batch);
      }
    }
  }
  if (failedRecords.length > 0) {
    const totalFailed = failedRecords.reduce((sum, batch) => sum + batch.vectors.length, 0);
    throw new DatabaseError(
      `Failed to insert ${totalFailed} record(s) after retry attempts`,
      {
        failedBatches: failedRecords.length,
        totalRecords: totalFailed,
        sampleFile: failedRecords[0].metadatas[0].file
      }
    );
  }
  if (!currentTable) {
    throw new DatabaseError("Failed to create table during batch insert");
  }
  return currentTable;
}
2848
+ var init_batch_insert = __esm({
2849
+ "src/vectordb/batch-insert.ts"() {
2850
+ "use strict";
2851
+ init_errors();
2852
+ init_constants();
2853
+ }
2854
+ });
2855
+
2856
+ // src/vectordb/maintenance.ts
2857
+ async function clear(db, table, tableName) {
2858
+ if (!db) {
2859
+ throw new DatabaseError("Vector database not initialized");
2860
+ }
2861
+ try {
2862
+ if (table) {
2863
+ await db.dropTable(tableName);
2864
+ }
2865
+ } catch (error) {
2866
+ throw wrapError(error, "Failed to clear vector database");
2867
+ }
2868
+ }
2869
+ async function deleteByFile(table, filepath) {
2870
+ if (!table) {
2871
+ throw new DatabaseError("Vector database not initialized");
2872
+ }
2873
+ try {
2874
+ await table.delete(`file = "${filepath}"`);
2875
+ } catch (error) {
2876
+ throw wrapError(error, "Failed to delete file from vector database");
2877
+ }
2878
+ }
2879
+ async function updateFile(db, table, tableName, dbPath, filepath, vectors, metadatas, contents) {
2880
+ if (!table) {
2881
+ throw new DatabaseError("Vector database not initialized");
2882
+ }
2883
+ try {
2884
+ await deleteByFile(table, filepath);
2885
+ let updatedTable = table;
2886
+ if (vectors.length > 0) {
2887
+ updatedTable = await insertBatch(db, table, tableName, vectors, metadatas, contents);
2888
+ if (!updatedTable) {
2889
+ throw new DatabaseError("insertBatch unexpectedly returned null");
2890
+ }
2891
+ }
2892
+ await writeVersionFile(dbPath);
2893
+ return updatedTable;
2894
+ } catch (error) {
2895
+ throw wrapError(error, "Failed to update file in vector database");
2896
+ }
2897
+ }
2898
+ var init_maintenance = __esm({
2899
+ "src/vectordb/maintenance.ts"() {
2900
+ "use strict";
2901
+ init_errors();
2902
+ init_version2();
2903
+ init_batch_insert();
2904
+ }
2905
+ });
2906
+
2907
+ // src/vectordb/lancedb.ts
2908
+ var lancedb_exports = {};
2909
+ __export(lancedb_exports, {
2910
+ VectorDB: () => VectorDB
2911
+ });
2912
+ import * as lancedb from "vectordb";
2913
+ import path14 from "path";
2914
+ import os2 from "os";
2915
+ import crypto2 from "crypto";
1604
2916
  var VectorDB;
1605
2917
  var init_lancedb = __esm({
1606
2918
  "src/vectordb/lancedb.ts"() {
1607
2919
  "use strict";
1608
2920
  init_types();
1609
- init_version();
2921
+ init_version2();
1610
2922
  init_errors();
1611
- init_relevance();
1612
- init_intent_classifier();
1613
- init_constants();
2923
+ init_query();
2924
+ init_batch_insert();
2925
+ init_maintenance();
1614
2926
  VectorDB = class _VectorDB {
1615
2927
  db = null;
1616
2928
  table = null;
@@ -1619,9 +2931,9 @@ var init_lancedb = __esm({
1619
2931
  lastVersionCheck = 0;
1620
2932
  currentVersion = 0;
1621
2933
  constructor(projectRoot) {
1622
- const projectName = path11.basename(projectRoot);
2934
+ const projectName = path14.basename(projectRoot);
1623
2935
  const pathHash = crypto2.createHash("md5").update(projectRoot).digest("hex").substring(0, 8);
1624
- this.dbPath = path11.join(
2936
+ this.dbPath = path14.join(
1625
2937
  os2.homedir(),
1626
2938
  ".lien",
1627
2939
  "indices",
@@ -1649,134 +2961,30 @@ var init_lancedb = __esm({
1649
2961
  if (!this.db) {
1650
2962
  throw new DatabaseError("Vector database not initialized");
1651
2963
  }
1652
- if (vectors.length !== metadatas.length || vectors.length !== contents.length) {
1653
- throw new DatabaseError("Vectors, metadatas, and contents arrays must have the same length", {
1654
- vectorsLength: vectors.length,
1655
- metadatasLength: metadatas.length,
1656
- contentsLength: contents.length
1657
- });
1658
- }
1659
- if (vectors.length === 0) {
1660
- return;
1661
- }
1662
- if (vectors.length > VECTOR_DB_MAX_BATCH_SIZE) {
1663
- for (let i = 0; i < vectors.length; i += VECTOR_DB_MAX_BATCH_SIZE) {
1664
- const batchVectors = vectors.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
1665
- const batchMetadata = metadatas.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
1666
- const batchContents = contents.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
1667
- await this._insertBatchInternal(batchVectors, batchMetadata, batchContents);
1668
- }
1669
- } else {
1670
- await this._insertBatchInternal(vectors, metadatas, contents);
1671
- }
1672
- }
1673
- /**
1674
- * Internal method to insert a single batch with iterative retry logic.
1675
- * Uses a queue-based approach to avoid deep recursion on large batch failures.
1676
- */
1677
- async _insertBatchInternal(vectors, metadatas, contents) {
1678
- const queue = [{ vectors, metadatas, contents }];
1679
- const failedRecords = [];
1680
- while (queue.length > 0) {
1681
- const batch = queue.shift();
1682
- try {
1683
- const records = batch.vectors.map((vector, i) => ({
1684
- vector: Array.from(vector),
1685
- content: batch.contents[i],
1686
- file: batch.metadatas[i].file,
1687
- startLine: batch.metadatas[i].startLine,
1688
- endLine: batch.metadatas[i].endLine,
1689
- type: batch.metadatas[i].type,
1690
- language: batch.metadatas[i].language,
1691
- // Ensure arrays have at least empty string for Arrow type inference
1692
- functionNames: batch.metadatas[i].symbols?.functions && batch.metadatas[i].symbols.functions.length > 0 ? batch.metadatas[i].symbols.functions : [""],
1693
- classNames: batch.metadatas[i].symbols?.classes && batch.metadatas[i].symbols.classes.length > 0 ? batch.metadatas[i].symbols.classes : [""],
1694
- interfaceNames: batch.metadatas[i].symbols?.interfaces && batch.metadatas[i].symbols.interfaces.length > 0 ? batch.metadatas[i].symbols.interfaces : [""]
1695
- }));
1696
- if (!this.table) {
1697
- this.table = await this.db.createTable(this.tableName, records);
1698
- } else {
1699
- await this.table.add(records);
1700
- }
1701
- } catch (error) {
1702
- if (batch.vectors.length > VECTOR_DB_MIN_BATCH_SIZE) {
1703
- const half = Math.floor(batch.vectors.length / 2);
1704
- queue.push({
1705
- vectors: batch.vectors.slice(0, half),
1706
- metadatas: batch.metadatas.slice(0, half),
1707
- contents: batch.contents.slice(0, half)
1708
- });
1709
- queue.push({
1710
- vectors: batch.vectors.slice(half),
1711
- metadatas: batch.metadatas.slice(half),
1712
- contents: batch.contents.slice(half)
1713
- });
1714
- } else {
1715
- failedRecords.push(batch);
1716
- }
1717
- }
1718
- }
1719
- if (failedRecords.length > 0) {
1720
- const totalFailed = failedRecords.reduce((sum, batch) => sum + batch.vectors.length, 0);
1721
- throw new DatabaseError(
1722
- `Failed to insert ${totalFailed} record(s) after retry attempts`,
1723
- {
1724
- failedBatches: failedRecords.length,
1725
- totalRecords: totalFailed,
1726
- sampleFile: failedRecords[0].metadatas[0].file
1727
- }
1728
- );
1729
- }
2964
+ this.table = await insertBatch(
2965
+ this.db,
2966
+ this.table,
2967
+ this.tableName,
2968
+ vectors,
2969
+ metadatas,
2970
+ contents
2971
+ );
1730
2972
  }
1731
2973
  async search(queryVector, limit = 5, query) {
1732
2974
  if (!this.table) {
1733
2975
  throw new DatabaseError("Vector database not initialized");
1734
2976
  }
1735
2977
  try {
1736
- const results = await this.table.search(Array.from(queryVector)).limit(limit + 20).execute();
1737
- const filtered = results.filter(
1738
- (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
1739
- ).map((r) => {
1740
- const baseScore = r._distance ?? 0;
1741
- const boostedScore = applyRelevanceBoosting(query, r.file, baseScore);
1742
- return {
1743
- content: r.content,
1744
- metadata: {
1745
- file: r.file,
1746
- startLine: r.startLine,
1747
- endLine: r.endLine,
1748
- type: r.type,
1749
- language: r.language
1750
- },
1751
- score: boostedScore,
1752
- relevance: calculateRelevance(boostedScore)
1753
- };
1754
- }).sort((a, b) => a.score - b.score).slice(0, limit);
1755
- return filtered;
2978
+ return await search(this.table, queryVector, limit, query);
1756
2979
  } catch (error) {
1757
2980
  const errorMsg = String(error);
1758
2981
  if (errorMsg.includes("Not found:") || errorMsg.includes(".lance")) {
1759
2982
  try {
1760
2983
  await this.initialize();
1761
- const results = await this.table.search(Array.from(queryVector)).limit(limit + 20).execute();
1762
- return results.filter(
1763
- (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
1764
- ).map((r) => {
1765
- const baseScore = r._distance ?? 0;
1766
- const boostedScore = applyRelevanceBoosting(query, r.file, baseScore);
1767
- return {
1768
- content: r.content,
1769
- metadata: {
1770
- file: r.file,
1771
- startLine: r.startLine,
1772
- endLine: r.endLine,
1773
- type: r.type,
1774
- language: r.language
1775
- },
1776
- score: boostedScore,
1777
- relevance: calculateRelevance(boostedScore)
1778
- };
1779
- }).sort((a, b) => a.score - b.score).slice(0, limit);
2984
+ if (!this.table) {
2985
+ throw new DatabaseError("Vector database not initialized after reconnection");
2986
+ }
2987
+ return await search(this.table, queryVector, limit, query);
1780
2988
  } catch (retryError) {
1781
2989
  throw new DatabaseError(
1782
2990
  `Index appears corrupted or outdated. Please restart the MCP server or run 'lien reindex' in the project directory.`,
@@ -1784,162 +2992,52 @@ var init_lancedb = __esm({
1784
2992
  );
1785
2993
  }
1786
2994
  }
1787
- throw wrapError(error, "Failed to search vector database");
2995
+ throw error;
1788
2996
  }
1789
2997
  }
1790
2998
  async scanWithFilter(options) {
1791
2999
  if (!this.table) {
1792
3000
  throw new DatabaseError("Vector database not initialized");
1793
3001
  }
1794
- const { language, pattern, limit = 100 } = options;
1795
- try {
1796
- const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
1797
- const query = this.table.search(zeroVector).where('file != ""').limit(Math.max(limit * 5, 200));
1798
- const results = await query.execute();
1799
- let filtered = results.filter(
1800
- (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
1801
- );
1802
- if (language) {
1803
- filtered = filtered.filter(
1804
- (r) => r.language && r.language.toLowerCase() === language.toLowerCase()
1805
- );
1806
- }
1807
- if (pattern) {
1808
- const regex = new RegExp(pattern, "i");
1809
- filtered = filtered.filter(
1810
- (r) => regex.test(r.content) || regex.test(r.file)
1811
- );
1812
- }
1813
- return filtered.slice(0, limit).map((r) => {
1814
- const score = 0;
1815
- return {
1816
- content: r.content,
1817
- metadata: {
1818
- file: r.file,
1819
- startLine: r.startLine,
1820
- endLine: r.endLine,
1821
- type: r.type,
1822
- language: r.language
1823
- },
1824
- score,
1825
- relevance: calculateRelevance(score)
1826
- };
1827
- });
1828
- } catch (error) {
1829
- throw wrapError(error, "Failed to scan with filter");
1830
- }
3002
+ return scanWithFilter(this.table, options);
1831
3003
  }
1832
3004
  async querySymbols(options) {
1833
3005
  if (!this.table) {
1834
3006
  throw new DatabaseError("Vector database not initialized");
1835
3007
  }
1836
- const { language, pattern, symbolType, limit = 50 } = options;
1837
- try {
1838
- const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
1839
- const query = this.table.search(zeroVector).where('file != ""').limit(Math.max(limit * 10, 500));
1840
- const results = await query.execute();
1841
- let filtered = results.filter((r) => {
1842
- if (!r.content || r.content.trim().length === 0) {
1843
- return false;
1844
- }
1845
- if (!r.file || r.file.length === 0) {
1846
- return false;
1847
- }
1848
- if (language && (!r.language || r.language.toLowerCase() !== language.toLowerCase())) {
1849
- return false;
1850
- }
1851
- const symbols = symbolType === "function" ? r.functionNames || [] : symbolType === "class" ? r.classNames || [] : symbolType === "interface" ? r.interfaceNames || [] : [...r.functionNames || [], ...r.classNames || [], ...r.interfaceNames || []];
1852
- if (symbols.length === 0) {
1853
- return false;
1854
- }
1855
- if (pattern) {
1856
- const regex = new RegExp(pattern, "i");
1857
- return symbols.some((s) => regex.test(s));
1858
- }
1859
- return true;
1860
- });
1861
- return filtered.slice(0, limit).map((r) => {
1862
- const score = 0;
1863
- return {
1864
- content: r.content,
1865
- metadata: {
1866
- file: r.file,
1867
- startLine: r.startLine,
1868
- endLine: r.endLine,
1869
- type: r.type,
1870
- language: r.language,
1871
- symbols: {
1872
- functions: r.functionNames || [],
1873
- classes: r.classNames || [],
1874
- interfaces: r.interfaceNames || []
1875
- }
1876
- },
1877
- score,
1878
- relevance: calculateRelevance(score)
1879
- };
1880
- });
1881
- } catch (error) {
1882
- throw wrapError(error, "Failed to query symbols");
1883
- }
3008
+ return querySymbols(this.table, options);
1884
3009
  }
1885
3010
  async clear() {
1886
3011
  if (!this.db) {
1887
3012
  throw new DatabaseError("Vector database not initialized");
1888
3013
  }
1889
- try {
1890
- if (this.table) {
1891
- await this.db.dropTable(this.tableName);
1892
- }
1893
- this.table = null;
1894
- } catch (error) {
1895
- throw wrapError(error, "Failed to clear vector database");
1896
- }
3014
+ await clear(this.db, this.table, this.tableName);
3015
+ this.table = null;
1897
3016
  }
1898
- /**
1899
- * Deletes all chunks from a specific file.
1900
- * Used for incremental reindexing when a file is deleted or needs to be re-indexed.
1901
- *
1902
- * @param filepath - Path to the file whose chunks should be deleted
1903
- */
1904
3017
  async deleteByFile(filepath) {
1905
3018
  if (!this.table) {
1906
3019
  throw new DatabaseError("Vector database not initialized");
1907
3020
  }
1908
- try {
1909
- await this.table.delete(`file = "${filepath}"`);
1910
- } catch (error) {
1911
- throw wrapError(error, "Failed to delete file from vector database");
1912
- }
3021
+ await deleteByFile(this.table, filepath);
1913
3022
  }
1914
- /**
1915
- * Updates a file in the index by atomically deleting old chunks and inserting new ones.
1916
- * This is the primary method for incremental reindexing.
1917
- *
1918
- * @param filepath - Path to the file being updated
1919
- * @param vectors - New embedding vectors
1920
- * @param metadatas - New chunk metadata
1921
- * @param contents - New chunk contents
1922
- */
1923
3023
  async updateFile(filepath, vectors, metadatas, contents) {
1924
- if (!this.table) {
1925
- throw new DatabaseError("Vector database not initialized");
3024
+ if (!this.db) {
3025
+ throw new DatabaseError("Vector database connection not initialized");
1926
3026
  }
1927
- try {
1928
- await this.deleteByFile(filepath);
1929
- if (vectors.length > 0) {
1930
- await this.insertBatch(vectors, metadatas, contents);
1931
- }
1932
- await writeVersionFile(this.dbPath);
1933
- } catch (error) {
1934
- throw wrapError(error, "Failed to update file in vector database");
3027
+ if (!this.table) {
3028
+ throw new DatabaseError("Vector database table not initialized");
1935
3029
  }
3030
+ this.table = await updateFile(
3031
+ this.db,
3032
+ this.table,
3033
+ this.tableName,
3034
+ this.dbPath,
3035
+ filepath,
3036
+ vectors,
3037
+ metadatas,
3038
+ contents
3039
+ );
1936
3040
  }
1937
- /**
1938
- * Checks if the index version has changed since last check.
1939
- * Uses caching to minimize I/O overhead (checks at most once per second).
1940
- *
1941
- * @returns true if version has changed, false otherwise
1942
- */
1943
3041
  async checkVersion() {
1944
3042
  const now = Date.now();
1945
3043
  if (now - this.lastVersionCheck < 1e3) {
@@ -1957,11 +3055,6 @@ var init_lancedb = __esm({
1957
3055
  return false;
1958
3056
  }
1959
3057
  }
1960
- /**
1961
- * Reconnects to the database by reinitializing the connection.
1962
- * Used when the index has been rebuilt/reindexed.
1963
- * Forces a complete reload from disk by closing existing connections first.
1964
- */
1965
3058
  async reconnect() {
1966
3059
  try {
1967
3060
  this.table = null;
@@ -1971,31 +3064,15 @@ var init_lancedb = __esm({
1971
3064
  throw wrapError(error, "Failed to reconnect to vector database");
1972
3065
  }
1973
3066
  }
1974
- /**
1975
- * Gets the current index version (timestamp of last reindex).
1976
- *
1977
- * @returns Version timestamp, or 0 if unknown
1978
- */
1979
3067
  getCurrentVersion() {
1980
3068
  return this.currentVersion;
1981
3069
  }
1982
- /**
1983
- * Gets the current index version as a human-readable date string.
1984
- *
1985
- * @returns Formatted date string, or 'Unknown' if no version
1986
- */
1987
3070
  getVersionDate() {
1988
3071
  if (this.currentVersion === 0) {
1989
3072
  return "Unknown";
1990
3073
  }
1991
3074
  return new Date(this.currentVersion).toLocaleString();
1992
3075
  }
1993
- /**
1994
- * Checks if the database contains real indexed data.
1995
- * Used to detect first run and trigger auto-indexing.
1996
- *
1997
- * @returns true if database has real code chunks, false if empty or only schema rows
1998
- */
1999
3076
  async hasData() {
2000
3077
  if (!this.table) {
2001
3078
  return false;
@@ -2023,46 +3100,19 @@ var init_lancedb = __esm({
2023
3100
  }
2024
3101
  });
2025
3102
 
2026
- // src/utils/version.ts
2027
- import { createRequire as createRequire2 } from "module";
2028
- import { fileURLToPath as fileURLToPath3 } from "url";
2029
- import { dirname as dirname2, join as join2 } from "path";
2030
- function getPackageVersion() {
2031
- return packageJson2.version;
2032
- }
2033
- var __filename3, __dirname3, require3, packageJson2;
2034
- var init_version2 = __esm({
2035
- "src/utils/version.ts"() {
2036
- "use strict";
2037
- __filename3 = fileURLToPath3(import.meta.url);
2038
- __dirname3 = dirname2(__filename3);
2039
- require3 = createRequire2(import.meta.url);
2040
- try {
2041
- packageJson2 = require3(join2(__dirname3, "../package.json"));
2042
- } catch {
2043
- try {
2044
- packageJson2 = require3(join2(__dirname3, "../../package.json"));
2045
- } catch {
2046
- console.warn("[Lien] Warning: Could not load package.json, using fallback version");
2047
- packageJson2 = { version: "0.0.0-unknown" };
2048
- }
2049
- }
2050
- }
2051
- });
2052
-
2053
3103
  // src/indexer/manifest.ts
2054
3104
  var manifest_exports = {};
2055
3105
  __export(manifest_exports, {
2056
3106
  ManifestManager: () => ManifestManager
2057
3107
  });
2058
- import fs11 from "fs/promises";
2059
- import path12 from "path";
3108
+ import fs13 from "fs/promises";
3109
+ import path15 from "path";
2060
3110
  var MANIFEST_FILE, ManifestManager;
2061
3111
  var init_manifest = __esm({
2062
3112
  "src/indexer/manifest.ts"() {
2063
3113
  "use strict";
2064
3114
  init_constants();
2065
- init_version2();
3115
+ init_version();
2066
3116
  MANIFEST_FILE = "manifest.json";
2067
3117
  ManifestManager = class {
2068
3118
  manifestPath;
@@ -2078,7 +3128,7 @@ var init_manifest = __esm({
2078
3128
  */
2079
3129
  constructor(indexPath) {
2080
3130
  this.indexPath = indexPath;
2081
- this.manifestPath = path12.join(indexPath, MANIFEST_FILE);
3131
+ this.manifestPath = path15.join(indexPath, MANIFEST_FILE);
2082
3132
  }
2083
3133
  /**
2084
3134
  * Loads the manifest from disk.
@@ -2091,7 +3141,7 @@ var init_manifest = __esm({
2091
3141
  */
2092
3142
  async load() {
2093
3143
  try {
2094
- const content = await fs11.readFile(this.manifestPath, "utf-8");
3144
+ const content = await fs13.readFile(this.manifestPath, "utf-8");
2095
3145
  const manifest = JSON.parse(content);
2096
3146
  if (manifest.formatVersion !== INDEX_FORMAT_VERSION) {
2097
3147
  console.error(
@@ -2118,7 +3168,7 @@ var init_manifest = __esm({
2118
3168
  */
2119
3169
  async save(manifest) {
2120
3170
  try {
2121
- await fs11.mkdir(this.indexPath, { recursive: true });
3171
+ await fs13.mkdir(this.indexPath, { recursive: true });
2122
3172
  const manifestToSave = {
2123
3173
  ...manifest,
2124
3174
  formatVersion: INDEX_FORMAT_VERSION,
@@ -2126,7 +3176,7 @@ var init_manifest = __esm({
2126
3176
  lastIndexed: Date.now()
2127
3177
  };
2128
3178
  const content = JSON.stringify(manifestToSave, null, 2);
2129
- await fs11.writeFile(this.manifestPath, content, "utf-8");
3179
+ await fs13.writeFile(this.manifestPath, content, "utf-8");
2130
3180
  } catch (error) {
2131
3181
  console.error(`[Lien] Warning: Failed to save manifest: ${error}`);
2132
3182
  }
@@ -2263,7 +3313,7 @@ var init_manifest = __esm({
2263
3313
  */
2264
3314
  async clear() {
2265
3315
  try {
2266
- await fs11.unlink(this.manifestPath);
3316
+ await fs13.unlink(this.manifestPath);
2267
3317
  } catch (error) {
2268
3318
  if (error.code !== "ENOENT") {
2269
3319
  console.error(`[Lien] Warning: Failed to clear manifest: ${error}`);
@@ -2292,8 +3342,8 @@ var tracker_exports = {};
2292
3342
  __export(tracker_exports, {
2293
3343
  GitStateTracker: () => GitStateTracker
2294
3344
  });
2295
- import fs12 from "fs/promises";
2296
- import path13 from "path";
3345
+ import fs14 from "fs/promises";
3346
+ import path16 from "path";
2297
3347
  var GitStateTracker;
2298
3348
  var init_tracker = __esm({
2299
3349
  "src/git/tracker.ts"() {
@@ -2305,7 +3355,7 @@ var init_tracker = __esm({
2305
3355
  currentState = null;
2306
3356
  constructor(rootDir, indexPath) {
2307
3357
  this.rootDir = rootDir;
2308
- this.stateFile = path13.join(indexPath, ".git-state.json");
3358
+ this.stateFile = path16.join(indexPath, ".git-state.json");
2309
3359
  }
2310
3360
  /**
2311
3361
  * Loads the last known git state from disk.
@@ -2313,7 +3363,7 @@ var init_tracker = __esm({
2313
3363
  */
2314
3364
  async loadState() {
2315
3365
  try {
2316
- const content = await fs12.readFile(this.stateFile, "utf-8");
3366
+ const content = await fs14.readFile(this.stateFile, "utf-8");
2317
3367
  return JSON.parse(content);
2318
3368
  } catch {
2319
3369
  return null;
@@ -2325,7 +3375,7 @@ var init_tracker = __esm({
2325
3375
  async saveState(state) {
2326
3376
  try {
2327
3377
  const content = JSON.stringify(state, null, 2);
2328
- await fs12.writeFile(this.stateFile, content, "utf-8");
3378
+ await fs14.writeFile(this.stateFile, content, "utf-8");
2329
3379
  } catch (error) {
2330
3380
  console.error(`[Lien] Warning: Failed to save git state: ${error}`);
2331
3381
  }
@@ -2476,7 +3526,7 @@ var init_tracker = __esm({
2476
3526
  });
2477
3527
 
2478
3528
  // src/indexer/change-detector.ts
2479
- import fs13 from "fs/promises";
3529
+ import fs15 from "fs/promises";
2480
3530
  async function detectChanges(rootDir, vectorDB, config) {
2481
3531
  const manifest = new ManifestManager(vectorDB.dbPath);
2482
3532
  const savedManifest = await manifest.load();
@@ -2580,7 +3630,7 @@ async function mtimeBasedDetection(rootDir, savedManifest, config) {
2580
3630
  const fileStats = /* @__PURE__ */ new Map();
2581
3631
  for (const filepath of currentFiles) {
2582
3632
  try {
2583
- const stats = await fs13.stat(filepath);
3633
+ const stats = await fs15.stat(filepath);
2584
3634
  fileStats.set(filepath, stats.mtimeMs);
2585
3635
  } catch {
2586
3636
  continue;
@@ -2618,13 +3668,17 @@ var init_change_detector = __esm({
2618
3668
  });
2619
3669
 
2620
3670
  // src/indexer/incremental.ts
2621
- import fs14 from "fs/promises";
3671
+ import fs16 from "fs/promises";
2622
3672
  async function processFileContent(filepath, content, embeddings, config, verbose) {
2623
3673
  const chunkSize = isModernConfig(config) ? config.core.chunkSize : isLegacyConfig(config) ? config.indexing.chunkSize : 75;
2624
3674
  const chunkOverlap = isModernConfig(config) ? config.core.chunkOverlap : isLegacyConfig(config) ? config.indexing.chunkOverlap : 10;
3675
+ const useAST = isModernConfig(config) ? config.chunking.useAST : true;
3676
+ const astFallback = isModernConfig(config) ? config.chunking.astFallback : "line-based";
2625
3677
  const chunks = chunkFile(filepath, content, {
2626
3678
  chunkSize,
2627
- chunkOverlap
3679
+ chunkOverlap,
3680
+ useAST,
3681
+ astFallback
2628
3682
  });
2629
3683
  if (chunks.length === 0) {
2630
3684
  if (verbose) {
@@ -2653,7 +3707,7 @@ async function indexSingleFile(filepath, vectorDB, embeddings, config, options =
2653
3707
  const { verbose } = options;
2654
3708
  try {
2655
3709
  try {
2656
- await fs14.access(filepath);
3710
+ await fs16.access(filepath);
2657
3711
  } catch {
2658
3712
  if (verbose) {
2659
3713
  console.error(`[Lien] File deleted: ${filepath}`);
@@ -2663,9 +3717,9 @@ async function indexSingleFile(filepath, vectorDB, embeddings, config, options =
2663
3717
  await manifest2.removeFile(filepath);
2664
3718
  return;
2665
3719
  }
2666
- const content = await fs14.readFile(filepath, "utf-8");
3720
+ const content = await fs16.readFile(filepath, "utf-8");
2667
3721
  const result = await processFileContent(filepath, content, embeddings, config, verbose || false);
2668
- const stats = await fs14.stat(filepath);
3722
+ const stats = await fs16.stat(filepath);
2669
3723
  const manifest = new ManifestManager(vectorDB.dbPath);
2670
3724
  if (result === null) {
2671
3725
  await vectorDB.deleteByFile(filepath);
@@ -2702,9 +3756,9 @@ async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, optio
2702
3756
  let content;
2703
3757
  let fileMtime;
2704
3758
  try {
2705
- const stats = await fs14.stat(filepath);
3759
+ const stats = await fs16.stat(filepath);
2706
3760
  fileMtime = stats.mtimeMs;
2707
- content = await fs14.readFile(filepath, "utf-8");
3761
+ content = await fs16.readFile(filepath, "utf-8");
2708
3762
  } catch (error) {
2709
3763
  if (verbose) {
2710
3764
  console.error(`[Lien] File not readable: ${filepath}`);
@@ -2775,7 +3829,7 @@ async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, optio
2775
3829
  var init_incremental = __esm({
2776
3830
  "src/indexer/incremental.ts"() {
2777
3831
  "use strict";
2778
- init_chunker();
3832
+ init_chunker2();
2779
3833
  init_schema();
2780
3834
  init_manifest();
2781
3835
  init_constants();
@@ -2869,9 +3923,9 @@ var indexer_exports = {};
2869
3923
  __export(indexer_exports, {
2870
3924
  indexCodebase: () => indexCodebase
2871
3925
  });
2872
- import fs15 from "fs/promises";
3926
+ import fs17 from "fs/promises";
2873
3927
  import ora from "ora";
2874
- import chalk4 from "chalk";
3928
+ import chalk5 from "chalk";
2875
3929
  import pLimit from "p-limit";
2876
3930
  async function indexCodebase(options = {}) {
2877
3931
  const rootDir = options.rootDir ?? process.cwd();
@@ -2944,7 +3998,7 @@ async function indexCodebase(options = {}) {
2944
3998
  await manifest2.updateGitState(gitState);
2945
3999
  }
2946
4000
  }
2947
- console.log(chalk4.dim("\nNext step: Run"), chalk4.bold("lien serve"), chalk4.dim("to start the MCP server"));
4001
+ console.log(chalk5.dim("\nNext step: Run"), chalk5.bold("lien serve"), chalk5.dim("to start the MCP server"));
2948
4002
  return;
2949
4003
  }
2950
4004
  spinner.text = "Full reindex required...";
@@ -3033,13 +4087,17 @@ async function indexCodebase(options = {}) {
3033
4087
  const filePromises = files.map(
3034
4088
  (file) => limit(async () => {
3035
4089
  try {
3036
- const stats = await fs15.stat(file);
3037
- const content = await fs15.readFile(file, "utf-8");
4090
+ const stats = await fs17.stat(file);
4091
+ const content = await fs17.readFile(file, "utf-8");
3038
4092
  const chunkSize = isModernConfig(config) ? config.core.chunkSize : 75;
3039
4093
  const chunkOverlap = isModernConfig(config) ? config.core.chunkOverlap : 10;
4094
+ const useAST = isModernConfig(config) ? config.chunking.useAST : true;
4095
+ const astFallback = isModernConfig(config) ? config.chunking.astFallback : "line-based";
3040
4096
  const chunks = chunkFile(file, content, {
3041
4097
  chunkSize,
3042
- chunkOverlap
4098
+ chunkOverlap,
4099
+ useAST,
4100
+ astFallback
3043
4101
  });
3044
4102
  if (chunks.length === 0) {
3045
4103
  processedFiles++;
@@ -3064,7 +4122,7 @@ async function indexCodebase(options = {}) {
3064
4122
  }
3065
4123
  } catch (error) {
3066
4124
  if (options.verbose) {
3067
- console.error(chalk4.yellow(`
4125
+ console.error(chalk5.yellow(`
3068
4126
  \u26A0\uFE0F Skipping ${file}: ${error}`));
3069
4127
  }
3070
4128
  processedFiles++;
@@ -3104,7 +4162,7 @@ async function indexCodebase(options = {}) {
3104
4162
  spinner.succeed(
3105
4163
  `Indexed ${processedFiles} files (${processedChunks} chunks) in ${totalTime}s using ${concurrency}x concurrency`
3106
4164
  );
3107
- console.log(chalk4.dim("\nNext step: Run"), chalk4.bold("lien serve"), chalk4.dim("to start the MCP server"));
4165
+ console.log(chalk5.dim("\nNext step: Run"), chalk5.bold("lien serve"), chalk5.dim("to start the MCP server"));
3108
4166
  } catch (error) {
3109
4167
  if (updateInterval) {
3110
4168
  clearInterval(updateInterval);
@@ -3117,11 +4175,11 @@ var init_indexer = __esm({
3117
4175
  "src/indexer/index.ts"() {
3118
4176
  "use strict";
3119
4177
  init_scanner();
3120
- init_chunker();
4178
+ init_chunker2();
3121
4179
  init_local();
3122
4180
  init_lancedb();
3123
4181
  init_service();
3124
- init_version();
4182
+ init_version2();
3125
4183
  init_schema();
3126
4184
  init_manifest();
3127
4185
  init_change_detector();
@@ -3139,30 +4197,29 @@ import { dirname as dirname4, join as join4 } from "path";
3139
4197
 
3140
4198
  // src/cli/init.ts
3141
4199
  init_schema();
3142
- init_merge();
3143
- import fs5 from "fs/promises";
3144
- import path5 from "path";
3145
- import { fileURLToPath as fileURLToPath2 } from "url";
3146
- import chalk2 from "chalk";
4200
+ import fs7 from "fs/promises";
4201
+ import path7 from "path";
4202
+ import { fileURLToPath as fileURLToPath3 } from "url";
4203
+ import chalk3 from "chalk";
3147
4204
  import inquirer from "inquirer";
3148
4205
 
3149
4206
  // src/utils/banner.ts
3150
4207
  import figlet from "figlet";
3151
4208
  import chalk from "chalk";
3152
- import { createRequire } from "module";
3153
- import { fileURLToPath } from "url";
3154
- import { dirname, join } from "path";
3155
- var __filename = fileURLToPath(import.meta.url);
3156
- var __dirname = dirname(__filename);
3157
- var require2 = createRequire(import.meta.url);
3158
- var packageJson;
4209
+ import { createRequire as createRequire2 } from "module";
4210
+ import { fileURLToPath as fileURLToPath2 } from "url";
4211
+ import { dirname as dirname2, join as join2 } from "path";
4212
+ var __filename2 = fileURLToPath2(import.meta.url);
4213
+ var __dirname2 = dirname2(__filename2);
4214
+ var require3 = createRequire2(import.meta.url);
4215
+ var packageJson2;
3159
4216
  try {
3160
- packageJson = require2(join(__dirname, "../package.json"));
4217
+ packageJson2 = require3(join2(__dirname2, "../package.json"));
3161
4218
  } catch {
3162
- packageJson = require2(join(__dirname, "../../package.json"));
4219
+ packageJson2 = require3(join2(__dirname2, "../../package.json"));
3163
4220
  }
3164
- var PACKAGE_NAME = packageJson.name;
3165
- var VERSION = packageJson.version;
4221
+ var PACKAGE_NAME = packageJson2.name;
4222
+ var VERSION = packageJson2.version;
3166
4223
  function wrapInBox(text, footer, padding = 1) {
3167
4224
  const lines = text.split("\n").filter((line) => line.trim().length > 0);
3168
4225
  const maxLength = Math.max(...lines.map((line) => line.length));
@@ -3205,12 +4262,111 @@ function showCompactBanner() {
3205
4262
  console.log();
3206
4263
  }
3207
4264
 
3208
- // src/cli/init.ts
4265
+ // src/config/migration-manager.ts
4266
+ init_schema();
3209
4267
  init_migration();
4268
+ init_merge();
4269
+ init_constants();
4270
+ import fs2 from "fs/promises";
4271
+ import path2 from "path";
4272
+ import chalk2 from "chalk";
4273
+ var MigrationManager = class {
4274
+ constructor(rootDir = process.cwd()) {
4275
+ this.rootDir = rootDir;
4276
+ }
4277
+ /**
4278
+ * Get the config file path
4279
+ */
4280
+ getConfigPath() {
4281
+ return path2.join(this.rootDir, ".lien.config.json");
4282
+ }
4283
+ /**
4284
+ * Check if the current config needs migration
4285
+ */
4286
+ async needsMigration() {
4287
+ try {
4288
+ const configPath = this.getConfigPath();
4289
+ const content = await fs2.readFile(configPath, "utf-8");
4290
+ const config = JSON.parse(content);
4291
+ return needsMigration(config);
4292
+ } catch (error) {
4293
+ return false;
4294
+ }
4295
+ }
4296
+ /**
4297
+ * Perform silent migration (for auto-migration during load)
4298
+ * Returns the migrated config without user interaction
4299
+ */
4300
+ async autoMigrate() {
4301
+ const result = await migrateConfigFile(this.rootDir);
4302
+ if (result.migrated && result.backupPath) {
4303
+ const backupFilename = path2.basename(result.backupPath);
4304
+ console.log(`\u2705 Migration complete! Backup saved as ${backupFilename}`);
4305
+ console.log("\u{1F4DD} Your config now uses the framework-based structure.");
4306
+ }
4307
+ return result.config;
4308
+ }
4309
+ /**
4310
+ * Perform interactive upgrade (for CLI upgrade command)
4311
+ * Provides detailed feedback and handles edge cases
4312
+ */
4313
+ async upgradeInteractive() {
4314
+ const configPath = this.getConfigPath();
4315
+ try {
4316
+ const existingContent = await fs2.readFile(configPath, "utf-8");
4317
+ const existingConfig = JSON.parse(existingContent);
4318
+ const migrationNeeded = needsMigration(existingConfig);
4319
+ const newFields = migrationNeeded ? [] : detectNewFields(existingConfig, defaultConfig);
4320
+ const hasChanges = migrationNeeded || newFields.length > 0;
4321
+ if (!hasChanges) {
4322
+ console.log(chalk2.green("\u2713 Config is already up to date"));
4323
+ console.log(chalk2.dim("No changes needed"));
4324
+ return;
4325
+ }
4326
+ const backupPath = `${configPath}.backup`;
4327
+ await fs2.copyFile(configPath, backupPath);
4328
+ let upgradedConfig;
4329
+ let migrated = false;
4330
+ if (migrationNeeded) {
4331
+ console.log(chalk2.blue(`\u{1F504} Migrating config from v0.2.0 to v${CURRENT_CONFIG_VERSION}...`));
4332
+ upgradedConfig = migrateConfig(existingConfig);
4333
+ migrated = true;
4334
+ } else {
4335
+ upgradedConfig = deepMergeConfig(defaultConfig, existingConfig);
4336
+ console.log(chalk2.dim("\nNew options added:"));
4337
+ newFields.forEach((field) => console.log(chalk2.dim(" \u2022"), chalk2.bold(field)));
4338
+ }
4339
+ await fs2.writeFile(
4340
+ configPath,
4341
+ JSON.stringify(upgradedConfig, null, 2) + "\n",
4342
+ "utf-8"
4343
+ );
4344
+ console.log(chalk2.green("\u2713 Config upgraded successfully"));
4345
+ console.log(chalk2.dim("Backup saved to:"), backupPath);
4346
+ if (migrated) {
4347
+ console.log(chalk2.dim("\n\u{1F4DD} Your config now uses the framework-based structure."));
4348
+ }
4349
+ } catch (error) {
4350
+ if (error.code === "ENOENT") {
4351
+ console.log(chalk2.red("Error: No config file found"));
4352
+ console.log(chalk2.dim("Run"), chalk2.bold("lien init"), chalk2.dim("to create a config file"));
4353
+ return;
4354
+ }
4355
+ throw error;
4356
+ }
4357
+ }
4358
+ /**
4359
+ * Perform migration and return result
4360
+ * Used when programmatic access to migration result is needed
4361
+ */
4362
+ async migrate() {
4363
+ return migrateConfigFile(this.rootDir);
4364
+ }
4365
+ };
3210
4366
 
3211
4367
  // src/frameworks/detector-service.ts
3212
- import fs4 from "fs/promises";
3213
- import path4 from "path";
4368
+ import fs6 from "fs/promises";
4369
+ import path6 from "path";
3214
4370
 
3215
4371
  // src/frameworks/types.ts
3216
4372
  var defaultDetectionOptions = {
@@ -3232,8 +4388,8 @@ var defaultDetectionOptions = {
3232
4388
  };
3233
4389
 
3234
4390
  // src/frameworks/nodejs/detector.ts
3235
- import fs from "fs/promises";
3236
- import path from "path";
4391
+ import fs3 from "fs/promises";
4392
+ import path3 from "path";
3237
4393
 
3238
4394
  // src/frameworks/nodejs/config.ts
3239
4395
  async function generateNodeJsConfig(_rootDir, _relativePath) {
@@ -3284,7 +4440,7 @@ var nodejsDetector = {
3284
4440
  priority: 50,
3285
4441
  // Generic, yields to specific frameworks like Laravel
3286
4442
  async detect(rootDir, relativePath) {
3287
- const fullPath = path.join(rootDir, relativePath);
4443
+ const fullPath = path3.join(rootDir, relativePath);
3288
4444
  const result = {
3289
4445
  detected: false,
3290
4446
  name: "nodejs",
@@ -3292,10 +4448,10 @@ var nodejsDetector = {
3292
4448
  confidence: "low",
3293
4449
  evidence: []
3294
4450
  };
3295
- const packageJsonPath = path.join(fullPath, "package.json");
4451
+ const packageJsonPath = path3.join(fullPath, "package.json");
3296
4452
  let packageJson5 = null;
3297
4453
  try {
3298
- const content = await fs.readFile(packageJsonPath, "utf-8");
4454
+ const content = await fs3.readFile(packageJsonPath, "utf-8");
3299
4455
  packageJson5 = JSON.parse(content);
3300
4456
  result.evidence.push("Found package.json");
3301
4457
  } catch {
@@ -3343,8 +4499,8 @@ var nodejsDetector = {
3343
4499
  };
3344
4500
 
3345
4501
  // src/frameworks/laravel/detector.ts
3346
- import fs2 from "fs/promises";
3347
- import path2 from "path";
4502
+ import fs4 from "fs/promises";
4503
+ import path4 from "path";
3348
4504
 
3349
4505
  // src/frameworks/laravel/config.ts
3350
4506
  async function generateLaravelConfig(_rootDir, _relativePath) {
@@ -3401,7 +4557,7 @@ var laravelDetector = {
3401
4557
  priority: 100,
3402
4558
  // Laravel takes precedence over Node.js
3403
4559
  async detect(rootDir, relativePath) {
3404
- const fullPath = path2.join(rootDir, relativePath);
4560
+ const fullPath = path4.join(rootDir, relativePath);
3405
4561
  const result = {
3406
4562
  detected: false,
3407
4563
  name: "laravel",
@@ -3409,10 +4565,10 @@ var laravelDetector = {
3409
4565
  confidence: "low",
3410
4566
  evidence: []
3411
4567
  };
3412
- const composerJsonPath = path2.join(fullPath, "composer.json");
4568
+ const composerJsonPath = path4.join(fullPath, "composer.json");
3413
4569
  let composerJson = null;
3414
4570
  try {
3415
- const content = await fs2.readFile(composerJsonPath, "utf-8");
4571
+ const content = await fs4.readFile(composerJsonPath, "utf-8");
3416
4572
  composerJson = JSON.parse(content);
3417
4573
  result.evidence.push("Found composer.json");
3418
4574
  } catch {
@@ -3423,9 +4579,9 @@ var laravelDetector = {
3423
4579
  return result;
3424
4580
  }
3425
4581
  result.evidence.push("Laravel framework detected in composer.json");
3426
- const artisanPath = path2.join(fullPath, "artisan");
4582
+ const artisanPath = path4.join(fullPath, "artisan");
3427
4583
  try {
3428
- await fs2.access(artisanPath);
4584
+ await fs4.access(artisanPath);
3429
4585
  result.evidence.push("Found artisan file");
3430
4586
  result.confidence = "high";
3431
4587
  } catch {
@@ -3435,8 +4591,8 @@ var laravelDetector = {
3435
4591
  let foundDirs = 0;
3436
4592
  for (const dir of laravelDirs) {
3437
4593
  try {
3438
- const dirPath = path2.join(fullPath, dir);
3439
- const stats = await fs2.stat(dirPath);
4594
+ const dirPath = path4.join(fullPath, dir);
4595
+ const stats = await fs4.stat(dirPath);
3440
4596
  if (stats.isDirectory()) {
3441
4597
  foundDirs++;
3442
4598
  }
@@ -3448,14 +4604,14 @@ var laravelDetector = {
3448
4604
  result.confidence = "high";
3449
4605
  }
3450
4606
  const testDirsToCheck = [
3451
- path2.join(fullPath, "tests", "Feature"),
3452
- path2.join(fullPath, "tests", "Unit")
4607
+ path4.join(fullPath, "tests", "Feature"),
4608
+ path4.join(fullPath, "tests", "Unit")
3453
4609
  ];
3454
4610
  for (const testDir of testDirsToCheck) {
3455
4611
  try {
3456
- const stats = await fs2.stat(testDir);
4612
+ const stats = await fs4.stat(testDir);
3457
4613
  if (stats.isDirectory()) {
3458
- const dirName = path2.basename(path2.dirname(testDir)) + "/" + path2.basename(testDir);
4614
+ const dirName = path4.basename(path4.dirname(testDir)) + "/" + path4.basename(testDir);
3459
4615
  result.evidence.push(`Found ${dirName} test directory`);
3460
4616
  }
3461
4617
  } catch {
@@ -3473,8 +4629,8 @@ var laravelDetector = {
3473
4629
  };
3474
4630
 
3475
4631
  // src/frameworks/shopify/detector.ts
3476
- import fs3 from "fs/promises";
3477
- import path3 from "path";
4632
+ import fs5 from "fs/promises";
4633
+ import path5 from "path";
3478
4634
 
3479
4635
  // src/frameworks/shopify/config.ts
3480
4636
  async function generateShopifyConfig(_rootDir, _relativePath) {
@@ -3486,6 +4642,8 @@ async function generateShopifyConfig(_rootDir, _relativePath) {
3486
4642
  "snippets/**/*.liquid",
3487
4643
  "templates/**/*.liquid",
3488
4644
  // Matches any nesting level (e.g., templates/customers/account.liquid)
4645
+ "templates/**/*.json",
4646
+ // JSON template definitions (Shopify 2.0+)
3489
4647
  // Theme editor blocks (Online Store 2.0)
3490
4648
  "blocks/**/*.liquid",
3491
4649
  // Assets (CSS, JS with optional Liquid templating)
@@ -3530,7 +4688,7 @@ var shopifyDetector = {
3530
4688
  priority: 100,
3531
4689
  // High priority (same as Laravel)
3532
4690
  async detect(rootDir, relativePath) {
3533
- const fullPath = path3.join(rootDir, relativePath);
4691
+ const fullPath = path5.join(rootDir, relativePath);
3534
4692
  const result = {
3535
4693
  detected: false,
3536
4694
  name: "shopify",
@@ -3538,18 +4696,18 @@ var shopifyDetector = {
3538
4696
  confidence: "low",
3539
4697
  evidence: []
3540
4698
  };
3541
- const settingsSchemaPath = path3.join(fullPath, "config", "settings_schema.json");
4699
+ const settingsSchemaPath = path5.join(fullPath, "config", "settings_schema.json");
3542
4700
  let hasSettingsSchema = false;
3543
4701
  try {
3544
- await fs3.access(settingsSchemaPath);
4702
+ await fs5.access(settingsSchemaPath);
3545
4703
  hasSettingsSchema = true;
3546
4704
  result.evidence.push("Found config/settings_schema.json");
3547
4705
  } catch {
3548
4706
  }
3549
- const themeLayoutPath = path3.join(fullPath, "layout", "theme.liquid");
4707
+ const themeLayoutPath = path5.join(fullPath, "layout", "theme.liquid");
3550
4708
  let hasThemeLayout = false;
3551
4709
  try {
3552
- await fs3.access(themeLayoutPath);
4710
+ await fs5.access(themeLayoutPath);
3553
4711
  hasThemeLayout = true;
3554
4712
  result.evidence.push("Found layout/theme.liquid");
3555
4713
  } catch {
@@ -3558,8 +4716,8 @@ var shopifyDetector = {
3558
4716
  let foundDirs = 0;
3559
4717
  for (const dir of shopifyDirs) {
3560
4718
  try {
3561
- const dirPath = path3.join(fullPath, dir);
3562
- const stats = await fs3.stat(dirPath);
4719
+ const dirPath = path5.join(fullPath, dir);
4720
+ const stats = await fs5.stat(dirPath);
3563
4721
  if (stats.isDirectory()) {
3564
4722
  foundDirs++;
3565
4723
  }
@@ -3570,14 +4728,14 @@ var shopifyDetector = {
3570
4728
  result.evidence.push(`Shopify directory structure detected (${foundDirs}/${shopifyDirs.length} dirs)`);
3571
4729
  }
3572
4730
  try {
3573
- const tomlPath = path3.join(fullPath, "shopify.theme.toml");
3574
- await fs3.access(tomlPath);
4731
+ const tomlPath = path5.join(fullPath, "shopify.theme.toml");
4732
+ await fs5.access(tomlPath);
3575
4733
  result.evidence.push("Found shopify.theme.toml");
3576
4734
  } catch {
3577
4735
  }
3578
4736
  try {
3579
- const ignorePath = path3.join(fullPath, ".shopifyignore");
3580
- await fs3.access(ignorePath);
4737
+ const ignorePath = path5.join(fullPath, ".shopifyignore");
4738
+ await fs5.access(ignorePath);
3581
4739
  result.evidence.push("Found .shopifyignore");
3582
4740
  } catch {
3583
4741
  }
@@ -3623,7 +4781,7 @@ async function detectAllFrameworks(rootDir, options = {}) {
3623
4781
  return results;
3624
4782
  }
3625
4783
  async function detectAtPath(rootDir, relativePath, results, visited) {
3626
- const fullPath = path4.join(rootDir, relativePath);
4784
+ const fullPath = path6.join(rootDir, relativePath);
3627
4785
  if (visited.has(fullPath)) {
3628
4786
  return;
3629
4787
  }
@@ -3690,9 +4848,9 @@ async function scanSubdirectories(rootDir, relativePath, results, visited, depth
3690
4848
  if (depth >= options.maxDepth) {
3691
4849
  return;
3692
4850
  }
3693
- const fullPath = path4.join(rootDir, relativePath);
4851
+ const fullPath = path6.join(rootDir, relativePath);
3694
4852
  try {
3695
- const entries = await fs4.readdir(fullPath, { withFileTypes: true });
4853
+ const entries = await fs6.readdir(fullPath, { withFileTypes: true });
3696
4854
  const dirs = entries.filter((e) => e.isDirectory());
3697
4855
  for (const dir of dirs) {
3698
4856
  if (options.skipDirs.includes(dir.name)) {
@@ -3701,7 +4859,7 @@ async function scanSubdirectories(rootDir, relativePath, results, visited, depth
3701
4859
  if (dir.name.startsWith(".")) {
3702
4860
  continue;
3703
4861
  }
3704
- const subPath = relativePath === "." ? dir.name : path4.join(relativePath, dir.name);
4862
+ const subPath = relativePath === "." ? dir.name : path6.join(relativePath, dir.name);
3705
4863
  await detectAtPath(rootDir, subPath, results, visited);
3706
4864
  await scanSubdirectories(rootDir, subPath, results, visited, depth + 1, options);
3707
4865
  }
@@ -3711,43 +4869,44 @@ async function scanSubdirectories(rootDir, relativePath, results, visited, depth
3711
4869
  }
3712
4870
 
3713
4871
  // src/cli/init.ts
3714
- var __filename2 = fileURLToPath2(import.meta.url);
3715
- var __dirname2 = path5.dirname(__filename2);
4872
+ var __filename3 = fileURLToPath3(import.meta.url);
4873
+ var __dirname3 = path7.dirname(__filename3);
3716
4874
  async function initCommand(options = {}) {
3717
4875
  const rootDir = options.path || process.cwd();
3718
- const configPath = path5.join(rootDir, ".lien.config.json");
4876
+ const configPath = path7.join(rootDir, ".lien.config.json");
3719
4877
  try {
3720
4878
  let configExists = false;
3721
4879
  try {
3722
- await fs5.access(configPath);
4880
+ await fs7.access(configPath);
3723
4881
  configExists = true;
3724
4882
  } catch {
3725
4883
  }
3726
4884
  if (configExists && options.upgrade) {
3727
- await upgradeConfig(configPath);
4885
+ const migrationManager = new MigrationManager(rootDir);
4886
+ await migrationManager.upgradeInteractive();
3728
4887
  return;
3729
4888
  }
3730
4889
  if (configExists && !options.upgrade) {
3731
- console.log(chalk2.yellow("\u26A0\uFE0F .lien.config.json already exists"));
3732
- console.log(chalk2.dim("Run"), chalk2.bold("lien init --upgrade"), chalk2.dim("to merge new config options"));
4890
+ console.log(chalk3.yellow("\u26A0\uFE0F .lien.config.json already exists"));
4891
+ console.log(chalk3.dim("Run"), chalk3.bold("lien init --upgrade"), chalk3.dim("to merge new config options"));
3733
4892
  return;
3734
4893
  }
3735
4894
  if (!configExists) {
3736
4895
  await createNewConfig(rootDir, options);
3737
4896
  }
3738
4897
  } catch (error) {
3739
- console.error(chalk2.red("Error creating config file:"), error);
4898
+ console.error(chalk3.red("Error creating config file:"), error);
3740
4899
  process.exit(1);
3741
4900
  }
3742
4901
  }
3743
4902
  async function createNewConfig(rootDir, options) {
3744
4903
  showCompactBanner();
3745
- console.log(chalk2.bold("Initializing Lien...\n"));
3746
- console.log(chalk2.dim("\u{1F50D} Detecting frameworks in"), chalk2.bold(rootDir));
4904
+ console.log(chalk3.bold("Initializing Lien...\n"));
4905
+ console.log(chalk3.dim("\u{1F50D} Detecting frameworks in"), chalk3.bold(rootDir));
3747
4906
  const detections = await detectAllFrameworks(rootDir);
3748
4907
  let frameworks = [];
3749
4908
  if (detections.length === 0) {
3750
- console.log(chalk2.yellow("\n\u26A0\uFE0F No frameworks detected"));
4909
+ console.log(chalk3.yellow("\n\u26A0\uFE0F No frameworks detected"));
3751
4910
  if (!options.yes) {
3752
4911
  const { useGeneric } = await inquirer.prompt([
3753
4912
  {
@@ -3758,7 +4917,7 @@ async function createNewConfig(rootDir, options) {
3758
4917
  }
3759
4918
  ]);
3760
4919
  if (!useGeneric) {
3761
- console.log(chalk2.dim("Aborted."));
4920
+ console.log(chalk3.dim("Aborted."));
3762
4921
  return;
3763
4922
  }
3764
4923
  }
@@ -3781,16 +4940,16 @@ async function createNewConfig(rootDir, options) {
3781
4940
  }
3782
4941
  });
3783
4942
  } else {
3784
- console.log(chalk2.green(`
4943
+ console.log(chalk3.green(`
3785
4944
  \u2713 Found ${detections.length} framework(s):
3786
4945
  `));
3787
4946
  for (const det of detections) {
3788
4947
  const pathDisplay = det.path === "." ? "root" : det.path;
3789
- console.log(chalk2.bold(` ${det.name}`), chalk2.dim(`(${det.confidence} confidence)`));
3790
- console.log(chalk2.dim(` Location: ${pathDisplay}`));
4948
+ console.log(chalk3.bold(` ${det.name}`), chalk3.dim(`(${det.confidence} confidence)`));
4949
+ console.log(chalk3.dim(` Location: ${pathDisplay}`));
3791
4950
  if (det.evidence.length > 0) {
3792
4951
  det.evidence.forEach((e) => {
3793
- console.log(chalk2.dim(` \u2022 ${e}`));
4952
+ console.log(chalk3.dim(` \u2022 ${e}`));
3794
4953
  });
3795
4954
  }
3796
4955
  console.log();
@@ -3805,14 +4964,14 @@ async function createNewConfig(rootDir, options) {
3805
4964
  }
3806
4965
  ]);
3807
4966
  if (!confirm) {
3808
- console.log(chalk2.dim("Aborted."));
4967
+ console.log(chalk3.dim("Aborted."));
3809
4968
  return;
3810
4969
  }
3811
4970
  }
3812
4971
  for (const det of detections) {
3813
4972
  const detector = getFrameworkDetector(det.name);
3814
4973
  if (!detector) {
3815
- console.warn(chalk2.yellow(`\u26A0\uFE0F No detector found for ${det.name}, skipping`));
4974
+ console.warn(chalk3.yellow(`\u26A0\uFE0F No detector found for ${det.name}, skipping`));
3816
4975
  continue;
3817
4976
  }
3818
4977
  const frameworkConfig = await detector.generateConfig(rootDir, det.path);
@@ -3834,7 +4993,7 @@ async function createNewConfig(rootDir, options) {
3834
4993
  finalConfig = { ...frameworkConfig, ...customized };
3835
4994
  } else {
3836
4995
  const pathDisplay = det.path === "." ? "root" : det.path;
3837
- console.log(chalk2.dim(` \u2192 Using defaults for ${det.name} at ${pathDisplay}`));
4996
+ console.log(chalk3.dim(` \u2192 Using defaults for ${det.name} at ${pathDisplay}`));
3838
4997
  }
3839
4998
  frameworks.push({
3840
4999
  name: det.name,
@@ -3855,23 +5014,23 @@ async function createNewConfig(rootDir, options) {
3855
5014
  ]);
3856
5015
  if (installCursorRules) {
3857
5016
  try {
3858
- const cursorRulesDir = path5.join(rootDir, ".cursor");
3859
- await fs5.mkdir(cursorRulesDir, { recursive: true });
3860
- const templatePath = path5.join(__dirname2, "../CURSOR_RULES_TEMPLATE.md");
3861
- const rulesPath = path5.join(cursorRulesDir, "rules");
5017
+ const cursorRulesDir = path7.join(rootDir, ".cursor");
5018
+ await fs7.mkdir(cursorRulesDir, { recursive: true });
5019
+ const templatePath = path7.join(__dirname3, "../CURSOR_RULES_TEMPLATE.md");
5020
+ const rulesPath = path7.join(cursorRulesDir, "rules");
3862
5021
  let targetPath;
3863
5022
  let isDirectory = false;
3864
5023
  let isFile = false;
3865
5024
  try {
3866
- const stats = await fs5.stat(rulesPath);
5025
+ const stats = await fs7.stat(rulesPath);
3867
5026
  isDirectory = stats.isDirectory();
3868
5027
  isFile = stats.isFile();
3869
5028
  } catch {
3870
5029
  }
3871
5030
  if (isDirectory) {
3872
- targetPath = path5.join(rulesPath, "lien.mdc");
3873
- await fs5.copyFile(templatePath, targetPath);
3874
- console.log(chalk2.green("\u2713 Installed Cursor rules as .cursor/rules/lien.mdc"));
5031
+ targetPath = path7.join(rulesPath, "lien.mdc");
5032
+ await fs7.copyFile(templatePath, targetPath);
5033
+ console.log(chalk3.green("\u2713 Installed Cursor rules as .cursor/rules/lien.mdc"));
3875
5034
  } else if (isFile) {
3876
5035
  const { convertToDir } = await inquirer.prompt([
3877
5036
  {
@@ -3882,27 +5041,27 @@ async function createNewConfig(rootDir, options) {
3882
5041
  }
3883
5042
  ]);
3884
5043
  if (convertToDir) {
3885
- const existingRules = await fs5.readFile(rulesPath, "utf-8");
3886
- await fs5.unlink(rulesPath);
3887
- await fs5.mkdir(rulesPath);
3888
- await fs5.writeFile(path5.join(rulesPath, "project.mdc"), existingRules);
3889
- await fs5.copyFile(templatePath, path5.join(rulesPath, "lien.mdc"));
3890
- console.log(chalk2.green("\u2713 Converted .cursor/rules to directory"));
3891
- console.log(chalk2.green(" - Your project rules: .cursor/rules/project.mdc"));
3892
- console.log(chalk2.green(" - Lien rules: .cursor/rules/lien.mdc"));
5044
+ const existingRules = await fs7.readFile(rulesPath, "utf-8");
5045
+ await fs7.unlink(rulesPath);
5046
+ await fs7.mkdir(rulesPath);
5047
+ await fs7.writeFile(path7.join(rulesPath, "project.mdc"), existingRules);
5048
+ await fs7.copyFile(templatePath, path7.join(rulesPath, "lien.mdc"));
5049
+ console.log(chalk3.green("\u2713 Converted .cursor/rules to directory"));
5050
+ console.log(chalk3.green(" - Your project rules: .cursor/rules/project.mdc"));
5051
+ console.log(chalk3.green(" - Lien rules: .cursor/rules/lien.mdc"));
3893
5052
  } else {
3894
- console.log(chalk2.dim("Skipped Cursor rules installation (preserving existing file)"));
5053
+ console.log(chalk3.dim("Skipped Cursor rules installation (preserving existing file)"));
3895
5054
  }
3896
5055
  } else {
3897
- await fs5.mkdir(rulesPath, { recursive: true });
3898
- targetPath = path5.join(rulesPath, "lien.mdc");
3899
- await fs5.copyFile(templatePath, targetPath);
3900
- console.log(chalk2.green("\u2713 Installed Cursor rules as .cursor/rules/lien.mdc"));
5056
+ await fs7.mkdir(rulesPath, { recursive: true });
5057
+ targetPath = path7.join(rulesPath, "lien.mdc");
5058
+ await fs7.copyFile(templatePath, targetPath);
5059
+ console.log(chalk3.green("\u2713 Installed Cursor rules as .cursor/rules/lien.mdc"));
3901
5060
  }
3902
5061
  } catch (error) {
3903
- console.log(chalk2.yellow("\u26A0\uFE0F Could not install Cursor rules"));
3904
- console.log(chalk2.dim(`Error: ${error instanceof Error ? error.message : "Unknown error"}`));
3905
- console.log(chalk2.dim("You can manually copy CURSOR_RULES_TEMPLATE.md to .cursor/rules/lien.mdc"));
5062
+ console.log(chalk3.yellow("\u26A0\uFE0F Could not install Cursor rules"));
5063
+ console.log(chalk3.dim(`Error: ${error instanceof Error ? error.message : "Unknown error"}`));
5064
+ console.log(chalk3.dim("You can manually copy CURSOR_RULES_TEMPLATE.md to .cursor/rules/lien.mdc"));
3906
5065
  }
3907
5066
  }
3908
5067
  }
@@ -3910,17 +5069,17 @@ async function createNewConfig(rootDir, options) {
3910
5069
  ...defaultConfig,
3911
5070
  frameworks
3912
5071
  };
3913
- const configPath = path5.join(rootDir, ".lien.config.json");
3914
- await fs5.writeFile(configPath, JSON.stringify(config, null, 2) + "\n", "utf-8");
3915
- console.log(chalk2.green("\n\u2713 Created .lien.config.json"));
3916
- console.log(chalk2.green(`\u2713 Configured ${frameworks.length} framework(s)`));
3917
- console.log(chalk2.dim("\nNext steps:"));
3918
- console.log(chalk2.dim(" 1. Run"), chalk2.bold("lien index"), chalk2.dim("to index your codebase"));
3919
- console.log(chalk2.dim(" 2. Run"), chalk2.bold("lien serve"), chalk2.dim("to start the MCP server"));
3920
- console.log(chalk2.dim(" 3. Configure Cursor to use the MCP server (see README.md)"));
5072
+ const configPath = path7.join(rootDir, ".lien.config.json");
5073
+ await fs7.writeFile(configPath, JSON.stringify(config, null, 2) + "\n", "utf-8");
5074
+ console.log(chalk3.green("\n\u2713 Created .lien.config.json"));
5075
+ console.log(chalk3.green(`\u2713 Configured ${frameworks.length} framework(s)`));
5076
+ console.log(chalk3.dim("\nNext steps:"));
5077
+ console.log(chalk3.dim(" 1. Run"), chalk3.bold("lien index"), chalk3.dim("to index your codebase"));
5078
+ console.log(chalk3.dim(" 2. Run"), chalk3.bold("lien serve"), chalk3.dim("to start the MCP server"));
5079
+ console.log(chalk3.dim(" 3. Configure Cursor to use the MCP server (see README.md)"));
3921
5080
  }
3922
5081
  async function promptForCustomization(frameworkName, config) {
3923
- console.log(chalk2.bold(`
5082
+ console.log(chalk3.bold(`
3924
5083
  Customizing ${frameworkName} settings:`));
3925
5084
  const answers = await inquirer.prompt([
3926
5085
  {
@@ -3943,150 +5102,115 @@ Customizing ${frameworkName} settings:`));
3943
5102
  exclude: answers.exclude
3944
5103
  };
3945
5104
  }
3946
- async function upgradeConfig(configPath) {
3947
- try {
3948
- const backupPath = `${configPath}.backup`;
3949
- await fs5.copyFile(configPath, backupPath);
3950
- const existingContent = await fs5.readFile(configPath, "utf-8");
3951
- const existingConfig = JSON.parse(existingContent);
3952
- let upgradedConfig;
3953
- let migrated = false;
3954
- if (needsMigration(existingConfig)) {
3955
- console.log(chalk2.blue("\u{1F504} Migrating config from v0.2.0 to v0.3.0..."));
3956
- upgradedConfig = migrateConfig(existingConfig);
3957
- migrated = true;
3958
- } else {
3959
- const newFields = detectNewFields(existingConfig, defaultConfig);
3960
- upgradedConfig = deepMergeConfig(defaultConfig, existingConfig);
3961
- if (newFields.length > 0) {
3962
- console.log(chalk2.dim("\nNew options added:"));
3963
- newFields.forEach((field) => console.log(chalk2.dim(" \u2022"), chalk2.bold(field)));
3964
- }
3965
- }
3966
- await fs5.writeFile(
3967
- configPath,
3968
- JSON.stringify(upgradedConfig, null, 2) + "\n",
3969
- "utf-8"
3970
- );
3971
- console.log(chalk2.green("\u2713 Config upgraded successfully"));
3972
- console.log(chalk2.dim("Backup saved to:"), backupPath);
3973
- if (migrated) {
3974
- console.log(chalk2.dim("\n\u{1F4DD} Your config now uses the framework-based structure."));
3975
- }
3976
- } catch (error) {
3977
- console.error(chalk2.red("Error upgrading config:"), error);
3978
- throw error;
3979
- }
3980
- }
3981
5105
 
3982
5106
  // src/cli/status.ts
3983
5107
  init_service();
3984
5108
  init_utils();
3985
- init_version();
3986
- import chalk3 from "chalk";
3987
- import fs9 from "fs/promises";
3988
- import path9 from "path";
5109
+ init_version2();
5110
+ import chalk4 from "chalk";
5111
+ import fs11 from "fs/promises";
5112
+ import path11 from "path";
3989
5113
  import os from "os";
3990
5114
  import crypto from "crypto";
3991
5115
  init_schema();
3992
5116
  async function statusCommand() {
3993
5117
  const rootDir = process.cwd();
3994
- const projectName = path9.basename(rootDir);
5118
+ const projectName = path11.basename(rootDir);
3995
5119
  const pathHash = crypto.createHash("md5").update(rootDir).digest("hex").substring(0, 8);
3996
- const indexPath = path9.join(os.homedir(), ".lien", "indices", `${projectName}-${pathHash}`);
5120
+ const indexPath = path11.join(os.homedir(), ".lien", "indices", `${projectName}-${pathHash}`);
3997
5121
  showCompactBanner();
3998
- console.log(chalk3.bold("Status\n"));
5122
+ console.log(chalk4.bold("Status\n"));
3999
5123
  const hasConfig = await configService.exists(rootDir);
4000
- console.log(chalk3.dim("Configuration:"), hasConfig ? chalk3.green("\u2713 Found") : chalk3.red("\u2717 Not initialized"));
5124
+ console.log(chalk4.dim("Configuration:"), hasConfig ? chalk4.green("\u2713 Found") : chalk4.red("\u2717 Not initialized"));
4001
5125
  if (!hasConfig) {
4002
- console.log(chalk3.yellow("\nRun"), chalk3.bold("lien init"), chalk3.yellow("to initialize"));
5126
+ console.log(chalk4.yellow("\nRun"), chalk4.bold("lien init"), chalk4.yellow("to initialize"));
4003
5127
  return;
4004
5128
  }
4005
5129
  try {
4006
- const stats = await fs9.stat(indexPath);
4007
- console.log(chalk3.dim("Index location:"), indexPath);
4008
- console.log(chalk3.dim("Index status:"), chalk3.green("\u2713 Exists"));
5130
+ const stats = await fs11.stat(indexPath);
5131
+ console.log(chalk4.dim("Index location:"), indexPath);
5132
+ console.log(chalk4.dim("Index status:"), chalk4.green("\u2713 Exists"));
4009
5133
  try {
4010
- const files = await fs9.readdir(indexPath, { recursive: true });
4011
- console.log(chalk3.dim("Index files:"), files.length);
5134
+ const files = await fs11.readdir(indexPath, { recursive: true });
5135
+ console.log(chalk4.dim("Index files:"), files.length);
4012
5136
  } catch (e) {
4013
5137
  }
4014
- console.log(chalk3.dim("Last modified:"), stats.mtime.toLocaleString());
5138
+ console.log(chalk4.dim("Last modified:"), stats.mtime.toLocaleString());
4015
5139
  try {
4016
5140
  const version = await readVersionFile(indexPath);
4017
5141
  if (version > 0) {
4018
5142
  const versionDate = new Date(version);
4019
- console.log(chalk3.dim("Last reindex:"), versionDate.toLocaleString());
5143
+ console.log(chalk4.dim("Last reindex:"), versionDate.toLocaleString());
4020
5144
  }
4021
5145
  } catch {
4022
5146
  }
4023
5147
  } catch (error) {
4024
- console.log(chalk3.dim("Index status:"), chalk3.yellow("\u2717 Not indexed"));
4025
- console.log(chalk3.yellow("\nRun"), chalk3.bold("lien index"), chalk3.yellow("to index your codebase"));
5148
+ console.log(chalk4.dim("Index status:"), chalk4.yellow("\u2717 Not indexed"));
5149
+ console.log(chalk4.yellow("\nRun"), chalk4.bold("lien index"), chalk4.yellow("to index your codebase"));
4026
5150
  }
4027
5151
  try {
4028
5152
  const config = await configService.load(rootDir);
4029
- console.log(chalk3.bold("\nFeatures:"));
5153
+ console.log(chalk4.bold("\nFeatures:"));
4030
5154
  const isRepo = await isGitRepo(rootDir);
4031
5155
  if (config.gitDetection.enabled && isRepo) {
4032
- console.log(chalk3.dim("Git detection:"), chalk3.green("\u2713 Enabled"));
4033
- console.log(chalk3.dim(" Poll interval:"), `${config.gitDetection.pollIntervalMs / 1e3}s`);
5156
+ console.log(chalk4.dim("Git detection:"), chalk4.green("\u2713 Enabled"));
5157
+ console.log(chalk4.dim(" Poll interval:"), `${config.gitDetection.pollIntervalMs / 1e3}s`);
4034
5158
  try {
4035
5159
  const branch = await getCurrentBranch(rootDir);
4036
5160
  const commit = await getCurrentCommit(rootDir);
4037
- console.log(chalk3.dim(" Current branch:"), branch);
4038
- console.log(chalk3.dim(" Current commit:"), commit.substring(0, 8));
4039
- const gitStateFile = path9.join(indexPath, ".git-state.json");
5161
+ console.log(chalk4.dim(" Current branch:"), branch);
5162
+ console.log(chalk4.dim(" Current commit:"), commit.substring(0, 8));
5163
+ const gitStateFile = path11.join(indexPath, ".git-state.json");
4040
5164
  try {
4041
- const gitStateContent = await fs9.readFile(gitStateFile, "utf-8");
5165
+ const gitStateContent = await fs11.readFile(gitStateFile, "utf-8");
4042
5166
  const gitState = JSON.parse(gitStateContent);
4043
5167
  if (gitState.branch !== branch || gitState.commit !== commit) {
4044
- console.log(chalk3.yellow(" \u26A0\uFE0F Git state changed - will reindex on next serve"));
5168
+ console.log(chalk4.yellow(" \u26A0\uFE0F Git state changed - will reindex on next serve"));
4045
5169
  }
4046
5170
  } catch {
4047
5171
  }
4048
5172
  } catch {
4049
5173
  }
4050
5174
  } else if (config.gitDetection.enabled && !isRepo) {
4051
- console.log(chalk3.dim("Git detection:"), chalk3.yellow("Enabled (not a git repo)"));
5175
+ console.log(chalk4.dim("Git detection:"), chalk4.yellow("Enabled (not a git repo)"));
4052
5176
  } else {
4053
- console.log(chalk3.dim("Git detection:"), chalk3.gray("Disabled"));
5177
+ console.log(chalk4.dim("Git detection:"), chalk4.gray("Disabled"));
4054
5178
  }
4055
5179
  if (config.fileWatching.enabled) {
4056
- console.log(chalk3.dim("File watching:"), chalk3.green("\u2713 Enabled"));
4057
- console.log(chalk3.dim(" Debounce:"), `${config.fileWatching.debounceMs}ms`);
5180
+ console.log(chalk4.dim("File watching:"), chalk4.green("\u2713 Enabled"));
5181
+ console.log(chalk4.dim(" Debounce:"), `${config.fileWatching.debounceMs}ms`);
4058
5182
  } else {
4059
- console.log(chalk3.dim("File watching:"), chalk3.gray("Disabled"));
4060
- console.log(chalk3.dim(" Enable with:"), chalk3.bold("lien serve --watch"));
5183
+ console.log(chalk4.dim("File watching:"), chalk4.gray("Disabled"));
5184
+ console.log(chalk4.dim(" Enable with:"), chalk4.bold("lien serve --watch"));
4061
5185
  }
4062
- console.log(chalk3.bold("\nIndexing Settings:"));
5186
+ console.log(chalk4.bold("\nIndexing Settings:"));
4063
5187
  if (isModernConfig(config)) {
4064
- console.log(chalk3.dim("Concurrency:"), config.core.concurrency);
4065
- console.log(chalk3.dim("Batch size:"), config.core.embeddingBatchSize);
4066
- console.log(chalk3.dim("Chunk size:"), config.core.chunkSize);
4067
- console.log(chalk3.dim("Chunk overlap:"), config.core.chunkOverlap);
5188
+ console.log(chalk4.dim("Concurrency:"), config.core.concurrency);
5189
+ console.log(chalk4.dim("Batch size:"), config.core.embeddingBatchSize);
5190
+ console.log(chalk4.dim("Chunk size:"), config.core.chunkSize);
5191
+ console.log(chalk4.dim("Chunk overlap:"), config.core.chunkOverlap);
4068
5192
  }
4069
5193
  } catch (error) {
4070
- console.log(chalk3.yellow("\nWarning: Could not load configuration"));
5194
+ console.log(chalk4.yellow("\nWarning: Could not load configuration"));
4071
5195
  }
4072
5196
  }
4073
5197
 
4074
5198
  // src/cli/index-cmd.ts
4075
5199
  init_indexer();
4076
- import chalk5 from "chalk";
5200
+ import chalk6 from "chalk";
4077
5201
  async function indexCommand(options) {
4078
5202
  showCompactBanner();
4079
5203
  try {
4080
5204
  if (options.force) {
4081
5205
  const { VectorDB: VectorDB2 } = await Promise.resolve().then(() => (init_lancedb(), lancedb_exports));
4082
5206
  const { ManifestManager: ManifestManager2 } = await Promise.resolve().then(() => (init_manifest(), manifest_exports));
4083
- console.log(chalk5.yellow("Clearing existing index and manifest..."));
5207
+ console.log(chalk6.yellow("Clearing existing index and manifest..."));
4084
5208
  const vectorDB = new VectorDB2(process.cwd());
4085
5209
  await vectorDB.initialize();
4086
5210
  await vectorDB.clear();
4087
5211
  const manifest = new ManifestManager2(vectorDB.dbPath);
4088
5212
  await manifest.clear();
4089
- console.log(chalk5.green("\u2713 Index and manifest cleared\n"));
5213
+ console.log(chalk6.green("\u2713 Index and manifest cleared\n"));
4090
5214
  }
4091
5215
  await indexCodebase({
4092
5216
  rootDir: process.cwd(),
@@ -4094,18 +5218,18 @@ async function indexCommand(options) {
4094
5218
  force: options.force || false
4095
5219
  });
4096
5220
  if (options.watch) {
4097
- console.log(chalk5.yellow("\n\u26A0\uFE0F Watch mode not yet implemented"));
5221
+ console.log(chalk6.yellow("\n\u26A0\uFE0F Watch mode not yet implemented"));
4098
5222
  }
4099
5223
  } catch (error) {
4100
- console.error(chalk5.red("Error during indexing:"), error);
5224
+ console.error(chalk6.red("Error during indexing:"), error);
4101
5225
  process.exit(1);
4102
5226
  }
4103
5227
  }
4104
5228
 
4105
5229
  // src/cli/serve.ts
4106
- import chalk6 from "chalk";
4107
- import fs16 from "fs/promises";
4108
- import path14 from "path";
5230
+ import chalk7 from "chalk";
5231
+ import fs18 from "fs/promises";
5232
+ import path17 from "path";
4109
5233
 
4110
5234
  // src/mcp/server.ts
4111
5235
  import { Server } from "@modelcontextprotocol/sdk/server/index.js";
@@ -4735,36 +5859,36 @@ async function startMCPServer(options) {
4735
5859
 
4736
5860
  // src/cli/serve.ts
4737
5861
  async function serveCommand(options) {
4738
- const rootDir = options.root ? path14.resolve(options.root) : process.cwd();
5862
+ const rootDir = options.root ? path17.resolve(options.root) : process.cwd();
4739
5863
  try {
4740
5864
  if (options.root) {
4741
5865
  try {
4742
- const stats = await fs16.stat(rootDir);
5866
+ const stats = await fs18.stat(rootDir);
4743
5867
  if (!stats.isDirectory()) {
4744
- console.error(chalk6.red(`Error: --root path is not a directory: ${rootDir}`));
5868
+ console.error(chalk7.red(`Error: --root path is not a directory: ${rootDir}`));
4745
5869
  process.exit(1);
4746
5870
  }
4747
5871
  } catch (error) {
4748
5872
  if (error.code === "ENOENT") {
4749
- console.error(chalk6.red(`Error: --root directory does not exist: ${rootDir}`));
5873
+ console.error(chalk7.red(`Error: --root directory does not exist: ${rootDir}`));
4750
5874
  } else if (error.code === "EACCES") {
4751
- console.error(chalk6.red(`Error: --root directory is not accessible: ${rootDir}`));
5875
+ console.error(chalk7.red(`Error: --root directory is not accessible: ${rootDir}`));
4752
5876
  } else {
4753
- console.error(chalk6.red(`Error: Failed to access --root directory: ${rootDir}`));
4754
- console.error(chalk6.dim(error.message));
5877
+ console.error(chalk7.red(`Error: Failed to access --root directory: ${rootDir}`));
5878
+ console.error(chalk7.dim(error.message));
4755
5879
  }
4756
5880
  process.exit(1);
4757
5881
  }
4758
5882
  }
4759
5883
  showBanner();
4760
- console.error(chalk6.bold("Starting MCP server...\n"));
5884
+ console.error(chalk7.bold("Starting MCP server...\n"));
4761
5885
  if (options.root) {
4762
- console.error(chalk6.dim(`Serving from: ${rootDir}
5886
+ console.error(chalk7.dim(`Serving from: ${rootDir}
4763
5887
  `));
4764
5888
  }
4765
5889
  if (options.watch) {
4766
- console.error(chalk6.yellow("\u26A0\uFE0F --watch flag is deprecated (file watching is now default)"));
4767
- console.error(chalk6.dim(" Use --no-watch to disable file watching\n"));
5890
+ console.error(chalk7.yellow("\u26A0\uFE0F --watch flag is deprecated (file watching is now default)"));
5891
+ console.error(chalk7.dim(" Use --no-watch to disable file watching\n"));
4768
5892
  }
4769
5893
  const watch = options.noWatch ? false : options.watch ? true : void 0;
4770
5894
  await startMCPServer({
@@ -4773,7 +5897,7 @@ async function serveCommand(options) {
4773
5897
  watch
4774
5898
  });
4775
5899
  } catch (error) {
4776
- console.error(chalk6.red("Failed to start MCP server:"), error);
5900
+ console.error(chalk7.red("Failed to start MCP server:"), error);
4777
5901
  process.exit(1);
4778
5902
  }
4779
5903
  }