@liendev/lien 0.34.0 → 0.36.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/index.js +1457 -974
- package/dist/index.js.map +1 -1
- package/package.json +3 -3
package/dist/index.js
CHANGED
|
@@ -5,6 +5,9 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
|
5
5
|
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
6
|
var __getProtoOf = Object.getPrototypeOf;
|
|
7
7
|
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __esm = (fn, res) => function __init() {
|
|
9
|
+
return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
|
|
10
|
+
};
|
|
8
11
|
var __commonJS = (cb, mod) => function __require() {
|
|
9
12
|
return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
|
|
10
13
|
};
|
|
@@ -29,6 +32,70 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
|
|
|
29
32
|
mod
|
|
30
33
|
));
|
|
31
34
|
|
|
35
|
+
// src/utils/banner.ts
|
|
36
|
+
import figlet from "figlet";
|
|
37
|
+
import chalk from "chalk";
|
|
38
|
+
import { createRequire } from "module";
|
|
39
|
+
import { fileURLToPath } from "url";
|
|
40
|
+
import { dirname, join } from "path";
|
|
41
|
+
function wrapInBox(text, footer, padding = 1) {
|
|
42
|
+
const lines = text.split("\n").filter((line) => line.trim().length > 0);
|
|
43
|
+
const maxLength = Math.max(...lines.map((line) => line.length));
|
|
44
|
+
const horizontalBorder = "\u2500".repeat(maxLength + padding * 2);
|
|
45
|
+
const top = `\u250C${horizontalBorder}\u2510`;
|
|
46
|
+
const bottom = `\u2514${horizontalBorder}\u2518`;
|
|
47
|
+
const separator = `\u251C${horizontalBorder}\u2524`;
|
|
48
|
+
const paddedLines = lines.map((line) => {
|
|
49
|
+
const padRight = " ".repeat(maxLength - line.length + padding);
|
|
50
|
+
const padLeft = " ".repeat(padding);
|
|
51
|
+
return `\u2502${padLeft}${line}${padRight}\u2502`;
|
|
52
|
+
});
|
|
53
|
+
const totalPad = maxLength - footer.length;
|
|
54
|
+
const leftPad = Math.floor(totalPad / 2);
|
|
55
|
+
const rightPad = totalPad - leftPad;
|
|
56
|
+
const centeredFooter = " ".repeat(leftPad) + footer + " ".repeat(rightPad);
|
|
57
|
+
const paddedFooter = `\u2502${" ".repeat(padding)}${centeredFooter}${" ".repeat(padding)}\u2502`;
|
|
58
|
+
return [top, ...paddedLines, separator, paddedFooter, bottom].join("\n");
|
|
59
|
+
}
|
|
60
|
+
function showBanner() {
|
|
61
|
+
const banner = figlet.textSync("LIEN", {
|
|
62
|
+
font: "ANSI Shadow",
|
|
63
|
+
horizontalLayout: "fitted",
|
|
64
|
+
verticalLayout: "fitted"
|
|
65
|
+
});
|
|
66
|
+
const footer = `${PACKAGE_NAME} - v${VERSION}`;
|
|
67
|
+
const boxedBanner = wrapInBox(banner.trim(), footer);
|
|
68
|
+
console.error(chalk.cyan(boxedBanner));
|
|
69
|
+
console.error();
|
|
70
|
+
}
|
|
71
|
+
function showCompactBanner() {
|
|
72
|
+
const banner = figlet.textSync("LIEN", {
|
|
73
|
+
font: "ANSI Shadow",
|
|
74
|
+
horizontalLayout: "fitted",
|
|
75
|
+
verticalLayout: "fitted"
|
|
76
|
+
});
|
|
77
|
+
const footer = `${PACKAGE_NAME} - v${VERSION}`;
|
|
78
|
+
const boxedBanner = wrapInBox(banner.trim(), footer);
|
|
79
|
+
console.log(chalk.cyan(boxedBanner));
|
|
80
|
+
console.log();
|
|
81
|
+
}
|
|
82
|
+
var __filename, __dirname, require2, packageJson, PACKAGE_NAME, VERSION;
|
|
83
|
+
var init_banner = __esm({
|
|
84
|
+
"src/utils/banner.ts"() {
|
|
85
|
+
"use strict";
|
|
86
|
+
__filename = fileURLToPath(import.meta.url);
|
|
87
|
+
__dirname = dirname(__filename);
|
|
88
|
+
require2 = createRequire(import.meta.url);
|
|
89
|
+
try {
|
|
90
|
+
packageJson = require2(join(__dirname, "../package.json"));
|
|
91
|
+
} catch {
|
|
92
|
+
packageJson = require2(join(__dirname, "../../package.json"));
|
|
93
|
+
}
|
|
94
|
+
PACKAGE_NAME = packageJson.name;
|
|
95
|
+
VERSION = packageJson.version;
|
|
96
|
+
}
|
|
97
|
+
});
|
|
98
|
+
|
|
32
99
|
// ../../node_modules/collect.js/dist/methods/symbol.iterator.js
|
|
33
100
|
var require_symbol_iterator = __commonJS({
|
|
34
101
|
"../../node_modules/collect.js/dist/methods/symbol.iterator.js"(exports, module) {
|
|
@@ -3595,115 +3662,65 @@ import { fileURLToPath as fileURLToPath3 } from "url";
|
|
|
3595
3662
|
import { dirname as dirname3, join as join3 } from "path";
|
|
3596
3663
|
|
|
3597
3664
|
// src/cli/init.ts
|
|
3665
|
+
init_banner();
|
|
3598
3666
|
import fs from "fs/promises";
|
|
3599
3667
|
import path from "path";
|
|
3600
3668
|
import chalk2 from "chalk";
|
|
3601
|
-
|
|
3602
|
-
|
|
3603
|
-
|
|
3604
|
-
|
|
3605
|
-
import { createRequire } from "module";
|
|
3606
|
-
import { fileURLToPath } from "url";
|
|
3607
|
-
import { dirname, join } from "path";
|
|
3608
|
-
var __filename = fileURLToPath(import.meta.url);
|
|
3609
|
-
var __dirname = dirname(__filename);
|
|
3610
|
-
var require2 = createRequire(import.meta.url);
|
|
3611
|
-
var packageJson;
|
|
3612
|
-
try {
|
|
3613
|
-
packageJson = require2(join(__dirname, "../package.json"));
|
|
3614
|
-
} catch {
|
|
3615
|
-
packageJson = require2(join(__dirname, "../../package.json"));
|
|
3616
|
-
}
|
|
3617
|
-
var PACKAGE_NAME = packageJson.name;
|
|
3618
|
-
var VERSION = packageJson.version;
|
|
3619
|
-
function wrapInBox(text, footer, padding = 1) {
|
|
3620
|
-
const lines = text.split("\n").filter((line) => line.trim().length > 0);
|
|
3621
|
-
const maxLength = Math.max(...lines.map((line) => line.length));
|
|
3622
|
-
const horizontalBorder = "\u2500".repeat(maxLength + padding * 2);
|
|
3623
|
-
const top = `\u250C${horizontalBorder}\u2510`;
|
|
3624
|
-
const bottom = `\u2514${horizontalBorder}\u2518`;
|
|
3625
|
-
const separator = `\u251C${horizontalBorder}\u2524`;
|
|
3626
|
-
const paddedLines = lines.map((line) => {
|
|
3627
|
-
const padRight = " ".repeat(maxLength - line.length + padding);
|
|
3628
|
-
const padLeft = " ".repeat(padding);
|
|
3629
|
-
return `\u2502${padLeft}${line}${padRight}\u2502`;
|
|
3630
|
-
});
|
|
3631
|
-
const totalPad = maxLength - footer.length;
|
|
3632
|
-
const leftPad = Math.floor(totalPad / 2);
|
|
3633
|
-
const rightPad = totalPad - leftPad;
|
|
3634
|
-
const centeredFooter = " ".repeat(leftPad) + footer + " ".repeat(rightPad);
|
|
3635
|
-
const paddedFooter = `\u2502${" ".repeat(padding)}${centeredFooter}${" ".repeat(padding)}\u2502`;
|
|
3636
|
-
return [top, ...paddedLines, separator, paddedFooter, bottom].join("\n");
|
|
3637
|
-
}
|
|
3638
|
-
function showBanner() {
|
|
3639
|
-
const banner = figlet.textSync("LIEN", {
|
|
3640
|
-
font: "ANSI Shadow",
|
|
3641
|
-
horizontalLayout: "fitted",
|
|
3642
|
-
verticalLayout: "fitted"
|
|
3643
|
-
});
|
|
3644
|
-
const footer = `${PACKAGE_NAME} - v${VERSION}`;
|
|
3645
|
-
const boxedBanner = wrapInBox(banner.trim(), footer);
|
|
3646
|
-
console.error(chalk.cyan(boxedBanner));
|
|
3647
|
-
console.error();
|
|
3648
|
-
}
|
|
3649
|
-
function showCompactBanner() {
|
|
3650
|
-
const banner = figlet.textSync("LIEN", {
|
|
3651
|
-
font: "ANSI Shadow",
|
|
3652
|
-
horizontalLayout: "fitted",
|
|
3653
|
-
verticalLayout: "fitted"
|
|
3654
|
-
});
|
|
3655
|
-
const footer = `${PACKAGE_NAME} - v${VERSION}`;
|
|
3656
|
-
const boxedBanner = wrapInBox(banner.trim(), footer);
|
|
3657
|
-
console.log(chalk.cyan(boxedBanner));
|
|
3658
|
-
console.log();
|
|
3659
|
-
}
|
|
3660
|
-
|
|
3661
|
-
// src/cli/init.ts
|
|
3669
|
+
var MCP_CONFIG = {
|
|
3670
|
+
command: "lien",
|
|
3671
|
+
args: ["serve"]
|
|
3672
|
+
};
|
|
3662
3673
|
async function initCommand(options = {}) {
|
|
3663
3674
|
showCompactBanner();
|
|
3664
|
-
console.log(chalk2.bold("\nLien Initialization\n"));
|
|
3665
|
-
console.log(chalk2.green("\u2713 No per-project configuration needed!"));
|
|
3666
|
-
console.log(chalk2.dim("\nLien now uses:"));
|
|
3667
|
-
console.log(chalk2.dim(" \u2022 Auto-detected frameworks"));
|
|
3668
|
-
console.log(chalk2.dim(" \u2022 Sensible defaults for all settings"));
|
|
3669
|
-
console.log(chalk2.dim(" \u2022 Global config (optional) at ~/.lien/config.json"));
|
|
3670
|
-
console.log(chalk2.bold("\nNext steps:"));
|
|
3671
|
-
console.log(chalk2.dim(" 1. Run"), chalk2.bold("lien index"), chalk2.dim("to index your codebase"));
|
|
3672
|
-
console.log(chalk2.dim(" 2. Run"), chalk2.bold("lien serve"), chalk2.dim("to start the MCP server"));
|
|
3673
|
-
console.log(chalk2.bold("\nGlobal Configuration (optional):"));
|
|
3674
|
-
console.log(chalk2.dim(" To use Qdrant backend, create ~/.lien/config.json:"));
|
|
3675
|
-
console.log(chalk2.dim(" {"));
|
|
3676
|
-
console.log(chalk2.dim(' "backend": "qdrant",'));
|
|
3677
|
-
console.log(chalk2.dim(' "qdrant": {'));
|
|
3678
|
-
console.log(chalk2.dim(' "url": "http://localhost:6333",'));
|
|
3679
|
-
console.log(chalk2.dim(' "apiKey": "optional-api-key"'));
|
|
3680
|
-
console.log(chalk2.dim(" }"));
|
|
3681
|
-
console.log(chalk2.dim(" }"));
|
|
3682
|
-
console.log(chalk2.dim("\n Or use environment variables:"));
|
|
3683
|
-
console.log(chalk2.dim(" LIEN_BACKEND=qdrant"));
|
|
3684
|
-
console.log(chalk2.dim(" LIEN_QDRANT_URL=http://localhost:6333"));
|
|
3685
|
-
console.log(chalk2.dim(" LIEN_QDRANT_API_KEY=your-key"));
|
|
3686
3675
|
const rootDir = options.path || process.cwd();
|
|
3687
|
-
const
|
|
3676
|
+
const cursorDir = path.join(rootDir, ".cursor");
|
|
3677
|
+
const mcpConfigPath = path.join(cursorDir, "mcp.json");
|
|
3678
|
+
let existingConfig = null;
|
|
3679
|
+
try {
|
|
3680
|
+
const raw = await fs.readFile(mcpConfigPath, "utf-8");
|
|
3681
|
+
const parsed = JSON.parse(raw);
|
|
3682
|
+
if (parsed && typeof parsed === "object" && !Array.isArray(parsed)) {
|
|
3683
|
+
existingConfig = parsed;
|
|
3684
|
+
}
|
|
3685
|
+
} catch {
|
|
3686
|
+
}
|
|
3687
|
+
if (existingConfig?.mcpServers?.lien) {
|
|
3688
|
+
console.log(chalk2.green("\n\u2713 Already configured \u2014 .cursor/mcp.json contains lien entry"));
|
|
3689
|
+
} else if (existingConfig) {
|
|
3690
|
+
const servers = existingConfig.mcpServers;
|
|
3691
|
+
const safeServers = servers && typeof servers === "object" && !Array.isArray(servers) ? servers : {};
|
|
3692
|
+
safeServers.lien = MCP_CONFIG;
|
|
3693
|
+
existingConfig.mcpServers = safeServers;
|
|
3694
|
+
await fs.writeFile(mcpConfigPath, JSON.stringify(existingConfig, null, 2) + "\n");
|
|
3695
|
+
console.log(chalk2.green("\n\u2713 Added lien to existing .cursor/mcp.json"));
|
|
3696
|
+
} else {
|
|
3697
|
+
await fs.mkdir(cursorDir, { recursive: true });
|
|
3698
|
+
const config = { mcpServers: { lien: MCP_CONFIG } };
|
|
3699
|
+
await fs.writeFile(mcpConfigPath, JSON.stringify(config, null, 2) + "\n");
|
|
3700
|
+
console.log(chalk2.green("\n\u2713 Created .cursor/mcp.json"));
|
|
3701
|
+
}
|
|
3702
|
+
console.log(chalk2.dim(" Restart Cursor to activate.\n"));
|
|
3703
|
+
const legacyConfigPath = path.join(rootDir, ".lien.config.json");
|
|
3688
3704
|
try {
|
|
3689
|
-
await fs.access(
|
|
3690
|
-
console.log(chalk2.yellow("\
|
|
3705
|
+
await fs.access(legacyConfigPath);
|
|
3706
|
+
console.log(chalk2.yellow("\u26A0\uFE0F Note: .lien.config.json found but no longer used"));
|
|
3691
3707
|
console.log(chalk2.dim(" You can safely delete it."));
|
|
3692
3708
|
} catch {
|
|
3693
3709
|
}
|
|
3694
3710
|
}
|
|
3695
3711
|
|
|
3696
3712
|
// src/cli/status.ts
|
|
3713
|
+
init_banner();
|
|
3697
3714
|
import chalk3 from "chalk";
|
|
3698
3715
|
import fs2 from "fs/promises";
|
|
3699
3716
|
import path2 from "path";
|
|
3700
3717
|
import os from "os";
|
|
3701
|
-
import crypto from "crypto";
|
|
3702
3718
|
import {
|
|
3703
3719
|
isGitRepo,
|
|
3704
3720
|
getCurrentBranch,
|
|
3705
3721
|
getCurrentCommit,
|
|
3706
3722
|
readVersionFile,
|
|
3723
|
+
extractRepoId,
|
|
3707
3724
|
DEFAULT_CONCURRENCY,
|
|
3708
3725
|
DEFAULT_EMBEDDING_BATCH_SIZE,
|
|
3709
3726
|
DEFAULT_CHUNK_SIZE,
|
|
@@ -3712,12 +3729,14 @@ import {
|
|
|
3712
3729
|
} from "@liendev/core";
|
|
3713
3730
|
async function statusCommand() {
|
|
3714
3731
|
const rootDir = process.cwd();
|
|
3715
|
-
const
|
|
3716
|
-
const
|
|
3717
|
-
const indexPath = path2.join(os.homedir(), ".lien", "indices", `${projectName}-${pathHash}`);
|
|
3732
|
+
const repoId = extractRepoId(rootDir);
|
|
3733
|
+
const indexPath = path2.join(os.homedir(), ".lien", "indices", repoId);
|
|
3718
3734
|
showCompactBanner();
|
|
3719
3735
|
console.log(chalk3.bold("Status\n"));
|
|
3720
|
-
console.log(
|
|
3736
|
+
console.log(
|
|
3737
|
+
chalk3.dim("Configuration:"),
|
|
3738
|
+
chalk3.green("\u2713 Using defaults (no per-project config needed)")
|
|
3739
|
+
);
|
|
3721
3740
|
try {
|
|
3722
3741
|
const stats = await fs2.stat(indexPath);
|
|
3723
3742
|
console.log(chalk3.dim("Index location:"), indexPath);
|
|
@@ -3725,7 +3744,7 @@ async function statusCommand() {
|
|
|
3725
3744
|
try {
|
|
3726
3745
|
const files = await fs2.readdir(indexPath, { recursive: true });
|
|
3727
3746
|
console.log(chalk3.dim("Index files:"), files.length);
|
|
3728
|
-
} catch
|
|
3747
|
+
} catch {
|
|
3729
3748
|
}
|
|
3730
3749
|
console.log(chalk3.dim("Last modified:"), stats.mtime.toLocaleString());
|
|
3731
3750
|
try {
|
|
@@ -3736,9 +3755,13 @@ async function statusCommand() {
|
|
|
3736
3755
|
}
|
|
3737
3756
|
} catch {
|
|
3738
3757
|
}
|
|
3739
|
-
} catch
|
|
3758
|
+
} catch {
|
|
3740
3759
|
console.log(chalk3.dim("Index status:"), chalk3.yellow("\u2717 Not indexed"));
|
|
3741
|
-
console.log(
|
|
3760
|
+
console.log(
|
|
3761
|
+
chalk3.yellow("\nRun"),
|
|
3762
|
+
chalk3.bold("lien index"),
|
|
3763
|
+
chalk3.yellow("to index your codebase")
|
|
3764
|
+
);
|
|
3742
3765
|
}
|
|
3743
3766
|
console.log(chalk3.bold("\nFeatures:"));
|
|
3744
3767
|
const isRepo = await isGitRepo(rootDir);
|
|
@@ -3775,8 +3798,9 @@ async function statusCommand() {
|
|
|
3775
3798
|
}
|
|
3776
3799
|
|
|
3777
3800
|
// src/cli/index-cmd.ts
|
|
3778
|
-
|
|
3779
|
-
import
|
|
3801
|
+
init_banner();
|
|
3802
|
+
import chalk5 from "chalk";
|
|
3803
|
+
import ora2 from "ora";
|
|
3780
3804
|
import { indexCodebase } from "@liendev/core";
|
|
3781
3805
|
|
|
3782
3806
|
// src/utils/loading-messages.ts
|
|
@@ -3855,17 +3879,28 @@ function getModelLoadingMessage() {
|
|
|
3855
3879
|
return message;
|
|
3856
3880
|
}
|
|
3857
3881
|
|
|
3882
|
+
// src/cli/utils.ts
|
|
3883
|
+
import ora from "ora";
|
|
3884
|
+
import chalk4 from "chalk";
|
|
3885
|
+
import { isLienError, getErrorMessage, getErrorStack } from "@liendev/core";
|
|
3886
|
+
function formatDuration(ms) {
|
|
3887
|
+
if (ms < 1e3) {
|
|
3888
|
+
return `${Math.round(ms)}ms`;
|
|
3889
|
+
}
|
|
3890
|
+
return `${(ms / 1e3).toFixed(1)}s`;
|
|
3891
|
+
}
|
|
3892
|
+
|
|
3858
3893
|
// src/cli/index-cmd.ts
|
|
3859
3894
|
async function clearExistingIndex() {
|
|
3860
3895
|
const { VectorDB: VectorDB2 } = await import("@liendev/core");
|
|
3861
3896
|
const { ManifestManager: ManifestManager2 } = await import("@liendev/core");
|
|
3862
|
-
console.log(
|
|
3897
|
+
console.log(chalk5.yellow("Clearing existing index and manifest..."));
|
|
3863
3898
|
const vectorDB = new VectorDB2(process.cwd());
|
|
3864
3899
|
await vectorDB.initialize();
|
|
3865
3900
|
await vectorDB.clear();
|
|
3866
3901
|
const manifest = new ManifestManager2(vectorDB.dbPath);
|
|
3867
3902
|
await manifest.clear();
|
|
3868
|
-
console.log(
|
|
3903
|
+
console.log(chalk5.green("\u2713 Index and manifest cleared\n"));
|
|
3869
3904
|
}
|
|
3870
3905
|
function createProgressTracker() {
|
|
3871
3906
|
return {
|
|
@@ -3932,19 +3967,24 @@ function createProgressCallback(spinner, tracker) {
|
|
|
3932
3967
|
if (progress.filesTotal && progress.filesProcessed !== void 0) {
|
|
3933
3968
|
message = `${message} (${progress.filesProcessed}/${progress.filesTotal})`;
|
|
3934
3969
|
}
|
|
3935
|
-
spinner.succeed(
|
|
3970
|
+
spinner.succeed(chalk5.green(message));
|
|
3936
3971
|
} else {
|
|
3937
3972
|
updateSpinner(spinner, tracker);
|
|
3938
3973
|
}
|
|
3939
3974
|
};
|
|
3940
3975
|
}
|
|
3941
|
-
function displayFinalResult(spinner, tracker, result) {
|
|
3942
|
-
|
|
3943
|
-
|
|
3944
|
-
|
|
3945
|
-
|
|
3946
|
-
|
|
3947
|
-
|
|
3976
|
+
function displayFinalResult(spinner, tracker, result, durationMs) {
|
|
3977
|
+
const timing = formatDuration(durationMs);
|
|
3978
|
+
if (tracker.completedViaProgress) {
|
|
3979
|
+
console.log(chalk5.dim(` Completed in ${timing}`));
|
|
3980
|
+
} else if (result.filesIndexed === 0) {
|
|
3981
|
+
spinner.succeed(chalk5.green(`Index is up to date - no changes detected in ${timing}`));
|
|
3982
|
+
} else {
|
|
3983
|
+
spinner.succeed(
|
|
3984
|
+
chalk5.green(
|
|
3985
|
+
`Indexed ${result.filesIndexed} files, ${result.chunksCreated} chunks in ${timing}`
|
|
3986
|
+
)
|
|
3987
|
+
);
|
|
3948
3988
|
}
|
|
3949
3989
|
}
|
|
3950
3990
|
async function indexCommand(options) {
|
|
@@ -3953,7 +3993,7 @@ async function indexCommand(options) {
|
|
|
3953
3993
|
if (options.force) {
|
|
3954
3994
|
await clearExistingIndex();
|
|
3955
3995
|
}
|
|
3956
|
-
const spinner =
|
|
3996
|
+
const spinner = ora2({
|
|
3957
3997
|
text: "Starting indexing...",
|
|
3958
3998
|
interval: 30
|
|
3959
3999
|
// Faster refresh rate for smoother progress
|
|
@@ -3968,23 +4008,20 @@ async function indexCommand(options) {
|
|
|
3968
4008
|
});
|
|
3969
4009
|
stopMessageRotation(tracker);
|
|
3970
4010
|
if (!result.success && result.error) {
|
|
3971
|
-
spinner.fail(
|
|
3972
|
-
console.error(
|
|
4011
|
+
spinner.fail(chalk5.red("Indexing failed"));
|
|
4012
|
+
console.error(chalk5.red("\n" + result.error));
|
|
3973
4013
|
process.exit(1);
|
|
3974
4014
|
}
|
|
3975
|
-
displayFinalResult(spinner, tracker, result);
|
|
3976
|
-
if (options.watch) {
|
|
3977
|
-
console.log(chalk4.yellow("\n\u26A0\uFE0F Watch mode not yet implemented"));
|
|
3978
|
-
}
|
|
4015
|
+
displayFinalResult(spinner, tracker, result, result.durationMs);
|
|
3979
4016
|
} catch (error) {
|
|
3980
|
-
console.error(
|
|
4017
|
+
console.error(chalk5.red("Error during indexing:"), error);
|
|
3981
4018
|
process.exit(1);
|
|
3982
4019
|
}
|
|
3983
4020
|
}
|
|
3984
4021
|
|
|
3985
4022
|
// src/cli/serve.ts
|
|
3986
|
-
import
|
|
3987
|
-
import
|
|
4023
|
+
import chalk6 from "chalk";
|
|
4024
|
+
import fs5 from "fs/promises";
|
|
3988
4025
|
import path4 from "path";
|
|
3989
4026
|
|
|
3990
4027
|
// src/mcp/server.ts
|
|
@@ -3992,26 +4029,17 @@ import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
|
|
3992
4029
|
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
3993
4030
|
import { createRequire as createRequire2 } from "module";
|
|
3994
4031
|
import { fileURLToPath as fileURLToPath2 } from "url";
|
|
3995
|
-
import { dirname as dirname2, join as join2
|
|
3996
|
-
import {
|
|
3997
|
-
LocalEmbeddings,
|
|
3998
|
-
GitStateTracker,
|
|
3999
|
-
indexMultipleFiles,
|
|
4000
|
-
indexSingleFile,
|
|
4001
|
-
ManifestManager,
|
|
4002
|
-
isGitAvailable,
|
|
4003
|
-
isGitRepo as isGitRepo2,
|
|
4004
|
-
VERSION_CHECK_INTERVAL_MS,
|
|
4005
|
-
DEFAULT_GIT_POLL_INTERVAL_MS as DEFAULT_GIT_POLL_INTERVAL_MS2,
|
|
4006
|
-
createVectorDB,
|
|
4007
|
-
computeContentHash,
|
|
4008
|
-
normalizeToRelativePath
|
|
4009
|
-
} from "@liendev/core";
|
|
4032
|
+
import { dirname as dirname2, join as join2 } from "path";
|
|
4033
|
+
import { WorkerEmbeddings, VERSION_CHECK_INTERVAL_MS, createVectorDB } from "@liendev/core";
|
|
4010
4034
|
|
|
4011
4035
|
// src/watcher/index.ts
|
|
4012
4036
|
import chokidar from "chokidar";
|
|
4013
4037
|
import path3 from "path";
|
|
4014
|
-
import {
|
|
4038
|
+
import {
|
|
4039
|
+
detectEcosystems,
|
|
4040
|
+
getEcosystemExcludePatterns,
|
|
4041
|
+
ALWAYS_IGNORE_PATTERNS
|
|
4042
|
+
} from "@liendev/core";
|
|
4015
4043
|
var FileWatcher = class {
|
|
4016
4044
|
watcher = null;
|
|
4017
4045
|
rootDir;
|
|
@@ -4028,6 +4056,7 @@ var FileWatcher = class {
|
|
|
4028
4056
|
firstChangeTimestamp = null;
|
|
4029
4057
|
// Track when batch started
|
|
4030
4058
|
// Git watching state
|
|
4059
|
+
gitWatcher = null;
|
|
4031
4060
|
gitChangeTimer = null;
|
|
4032
4061
|
gitChangeHandler = null;
|
|
4033
4062
|
GIT_DEBOUNCE_MS = 1e3;
|
|
@@ -4036,38 +4065,15 @@ var FileWatcher = class {
|
|
|
4036
4065
|
this.rootDir = rootDir;
|
|
4037
4066
|
}
|
|
4038
4067
|
/**
|
|
4039
|
-
* Detect watch patterns from
|
|
4068
|
+
* Detect watch patterns from ecosystem presets or use defaults.
|
|
4040
4069
|
*/
|
|
4041
4070
|
async getWatchPatterns() {
|
|
4042
4071
|
try {
|
|
4043
|
-
const
|
|
4044
|
-
|
|
4045
|
-
|
|
4046
|
-
|
|
4047
|
-
|
|
4048
|
-
if (!detector) {
|
|
4049
|
-
return null;
|
|
4050
|
-
}
|
|
4051
|
-
const config = await detector.generateConfig(this.rootDir, detection.path);
|
|
4052
|
-
return {
|
|
4053
|
-
name: detection.name,
|
|
4054
|
-
path: detection.path,
|
|
4055
|
-
enabled: true,
|
|
4056
|
-
config
|
|
4057
|
-
};
|
|
4058
|
-
})
|
|
4059
|
-
);
|
|
4060
|
-
const validFrameworks = frameworks.filter((f) => f !== null);
|
|
4061
|
-
const includePatterns = validFrameworks.flatMap((f) => f.config.include);
|
|
4062
|
-
const excludePatterns = validFrameworks.flatMap((f) => f.config.exclude);
|
|
4063
|
-
if (includePatterns.length === 0) {
|
|
4064
|
-
return this.getDefaultPatterns();
|
|
4065
|
-
}
|
|
4066
|
-
return { include: includePatterns, exclude: excludePatterns };
|
|
4067
|
-
} else {
|
|
4068
|
-
return this.getDefaultPatterns();
|
|
4069
|
-
}
|
|
4070
|
-
} catch (error) {
|
|
4072
|
+
const ecosystems = await detectEcosystems(this.rootDir);
|
|
4073
|
+
const ecosystemExcludes = getEcosystemExcludePatterns(ecosystems);
|
|
4074
|
+
const mergedExcludes = [.../* @__PURE__ */ new Set([...ALWAYS_IGNORE_PATTERNS, ...ecosystemExcludes])];
|
|
4075
|
+
return { include: ["**/*"], exclude: mergedExcludes };
|
|
4076
|
+
} catch {
|
|
4071
4077
|
return this.getDefaultPatterns();
|
|
4072
4078
|
}
|
|
4073
4079
|
}
|
|
@@ -4077,13 +4083,7 @@ var FileWatcher = class {
|
|
|
4077
4083
|
getDefaultPatterns() {
|
|
4078
4084
|
return {
|
|
4079
4085
|
include: ["**/*"],
|
|
4080
|
-
exclude: [
|
|
4081
|
-
"**/node_modules/**",
|
|
4082
|
-
"**/vendor/**",
|
|
4083
|
-
"**/dist/**",
|
|
4084
|
-
"**/build/**",
|
|
4085
|
-
"**/.git/**"
|
|
4086
|
-
]
|
|
4086
|
+
exclude: [...ALWAYS_IGNORE_PATTERNS]
|
|
4087
4087
|
};
|
|
4088
4088
|
}
|
|
4089
4089
|
/**
|
|
@@ -4134,17 +4134,17 @@ var FileWatcher = class {
|
|
|
4134
4134
|
}
|
|
4135
4135
|
let readyFired = false;
|
|
4136
4136
|
await Promise.race([
|
|
4137
|
-
new Promise((
|
|
4137
|
+
new Promise((resolve) => {
|
|
4138
4138
|
const readyHandler = () => {
|
|
4139
4139
|
readyFired = true;
|
|
4140
|
-
|
|
4140
|
+
resolve();
|
|
4141
4141
|
};
|
|
4142
4142
|
this.watcher.once("ready", readyHandler);
|
|
4143
4143
|
}),
|
|
4144
|
-
new Promise((
|
|
4144
|
+
new Promise((resolve) => {
|
|
4145
4145
|
setTimeout(() => {
|
|
4146
4146
|
if (!readyFired) {
|
|
4147
|
-
|
|
4147
|
+
resolve();
|
|
4148
4148
|
}
|
|
4149
4149
|
}, 1e3);
|
|
4150
4150
|
})
|
|
@@ -4152,7 +4152,7 @@ var FileWatcher = class {
|
|
|
4152
4152
|
}
|
|
4153
4153
|
/**
|
|
4154
4154
|
* Starts watching files for changes.
|
|
4155
|
-
*
|
|
4155
|
+
*
|
|
4156
4156
|
* @param handler - Callback function called when files change
|
|
4157
4157
|
*/
|
|
4158
4158
|
async start(handler) {
|
|
@@ -4168,43 +4168,61 @@ var FileWatcher = class {
|
|
|
4168
4168
|
/**
|
|
4169
4169
|
* Enable watching .git directory for git operations.
|
|
4170
4170
|
* Call this after start() to enable event-driven git detection.
|
|
4171
|
-
*
|
|
4171
|
+
*
|
|
4172
4172
|
* @param onGitChange - Callback invoked when git operations detected
|
|
4173
4173
|
*/
|
|
4174
4174
|
watchGit(onGitChange) {
|
|
4175
4175
|
if (!this.watcher) {
|
|
4176
4176
|
throw new Error("Cannot watch git - watcher not started");
|
|
4177
4177
|
}
|
|
4178
|
+
if (this.gitWatcher) {
|
|
4179
|
+
void this.gitWatcher.close().catch(() => {
|
|
4180
|
+
});
|
|
4181
|
+
this.gitWatcher = null;
|
|
4182
|
+
}
|
|
4178
4183
|
this.gitChangeHandler = onGitChange;
|
|
4179
|
-
|
|
4180
|
-
|
|
4181
|
-
|
|
4182
|
-
|
|
4183
|
-
|
|
4184
|
-
|
|
4185
|
-
|
|
4186
|
-
|
|
4187
|
-
|
|
4188
|
-
|
|
4184
|
+
const gitPaths = [
|
|
4185
|
+
"HEAD",
|
|
4186
|
+
"index",
|
|
4187
|
+
"refs/**",
|
|
4188
|
+
"MERGE_HEAD",
|
|
4189
|
+
"REBASE_HEAD",
|
|
4190
|
+
"CHERRY_PICK_HEAD",
|
|
4191
|
+
"logs/refs/stash"
|
|
4192
|
+
].map((p) => path3.join(this.rootDir, ".git", p).replace(/\\/g, "/"));
|
|
4193
|
+
this.gitWatcher = chokidar.watch(gitPaths, {
|
|
4194
|
+
persistent: true,
|
|
4195
|
+
ignoreInitial: true
|
|
4196
|
+
});
|
|
4197
|
+
this.gitWatcher.on("add", () => this.handleGitChange()).on("change", () => this.handleGitChange()).on("unlink", () => this.handleGitChange()).on("error", (error) => {
|
|
4198
|
+
try {
|
|
4199
|
+
const message = "[FileWatcher] Git watcher error: " + (error instanceof Error ? error.stack || error.message : String(error)) + "\n";
|
|
4200
|
+
process.stderr.write(message);
|
|
4201
|
+
} catch {
|
|
4202
|
+
}
|
|
4203
|
+
});
|
|
4189
4204
|
}
|
|
4190
4205
|
/**
|
|
4191
4206
|
* Check if a filepath is a git-related change
|
|
4192
4207
|
*/
|
|
4193
4208
|
isGitChange(filepath) {
|
|
4194
4209
|
const normalized = filepath.replace(/\\/g, "/");
|
|
4195
|
-
return normalized.
|
|
4210
|
+
return normalized.startsWith(".git/") || normalized.includes("/.git/");
|
|
4196
4211
|
}
|
|
4197
4212
|
/**
|
|
4198
4213
|
* Handle git-related file changes with debouncing
|
|
4199
4214
|
*/
|
|
4200
4215
|
handleGitChange() {
|
|
4216
|
+
if (!this.gitChangeHandler) {
|
|
4217
|
+
return;
|
|
4218
|
+
}
|
|
4201
4219
|
if (this.gitChangeTimer) {
|
|
4202
4220
|
clearTimeout(this.gitChangeTimer);
|
|
4203
4221
|
}
|
|
4204
4222
|
this.gitChangeTimer = setTimeout(async () => {
|
|
4205
4223
|
try {
|
|
4206
4224
|
await this.gitChangeHandler?.();
|
|
4207
|
-
} catch
|
|
4225
|
+
} catch {
|
|
4208
4226
|
}
|
|
4209
4227
|
this.gitChangeTimer = null;
|
|
4210
4228
|
}, this.GIT_DEBOUNCE_MS);
|
|
@@ -4213,13 +4231,12 @@ var FileWatcher = class {
|
|
|
4213
4231
|
* Handles a file change event with smart batching.
|
|
4214
4232
|
* Collects rapid changes across multiple files and processes them together.
|
|
4215
4233
|
* Forces flush after MAX_BATCH_WAIT_MS even if changes keep arriving.
|
|
4216
|
-
*
|
|
4234
|
+
*
|
|
4217
4235
|
* If a batch is currently being processed by an async handler, waits for completion
|
|
4218
4236
|
* before starting a new batch to prevent race conditions.
|
|
4219
4237
|
*/
|
|
4220
4238
|
handleChange(type, filepath) {
|
|
4221
|
-
if (this.
|
|
4222
|
-
this.handleGitChange();
|
|
4239
|
+
if (this.isGitChange(filepath)) {
|
|
4223
4240
|
return;
|
|
4224
4241
|
}
|
|
4225
4242
|
if (!this.onChangeHandler) {
|
|
@@ -4310,7 +4327,7 @@ var FileWatcher = class {
|
|
|
4310
4327
|
} else {
|
|
4311
4328
|
this.handleBatchComplete();
|
|
4312
4329
|
}
|
|
4313
|
-
} catch
|
|
4330
|
+
} catch {
|
|
4314
4331
|
this.handleBatchComplete();
|
|
4315
4332
|
}
|
|
4316
4333
|
}
|
|
@@ -4358,7 +4375,7 @@ var FileWatcher = class {
|
|
|
4358
4375
|
modified,
|
|
4359
4376
|
deleted
|
|
4360
4377
|
});
|
|
4361
|
-
} catch
|
|
4378
|
+
} catch {
|
|
4362
4379
|
}
|
|
4363
4380
|
}
|
|
4364
4381
|
/**
|
|
@@ -4368,6 +4385,13 @@ var FileWatcher = class {
|
|
|
4368
4385
|
if (!this.watcher) {
|
|
4369
4386
|
return;
|
|
4370
4387
|
}
|
|
4388
|
+
if (this.gitWatcher) {
|
|
4389
|
+
try {
|
|
4390
|
+
await this.gitWatcher.close();
|
|
4391
|
+
} catch {
|
|
4392
|
+
}
|
|
4393
|
+
this.gitWatcher = null;
|
|
4394
|
+
}
|
|
4371
4395
|
const handler = this.onChangeHandler;
|
|
4372
4396
|
this.onChangeHandler = null;
|
|
4373
4397
|
this.gitChangeHandler = null;
|
|
@@ -4376,7 +4400,7 @@ var FileWatcher = class {
|
|
|
4376
4400
|
this.gitChangeTimer = null;
|
|
4377
4401
|
}
|
|
4378
4402
|
while (this.batchInProgress) {
|
|
4379
|
-
await new Promise((
|
|
4403
|
+
await new Promise((resolve) => setTimeout(resolve, 50));
|
|
4380
4404
|
}
|
|
4381
4405
|
if (this.batchTimer) {
|
|
4382
4406
|
clearTimeout(this.batchTimer);
|
|
@@ -4413,10 +4437,7 @@ var FileWatcher = class {
|
|
|
4413
4437
|
};
|
|
4414
4438
|
|
|
4415
4439
|
// src/mcp/server-config.ts
|
|
4416
|
-
import {
|
|
4417
|
-
CallToolRequestSchema,
|
|
4418
|
-
ListToolsRequestSchema
|
|
4419
|
-
} from "@modelcontextprotocol/sdk/types.js";
|
|
4440
|
+
import { CallToolRequestSchema, ListToolsRequestSchema } from "@modelcontextprotocol/sdk/types.js";
|
|
4420
4441
|
|
|
4421
4442
|
// src/mcp/utils/zod-to-json-schema.ts
|
|
4422
4443
|
import { zodToJsonSchema } from "zod-to-json-schema";
|
|
@@ -4909,8 +4930,8 @@ function getErrorMap() {
|
|
|
4909
4930
|
|
|
4910
4931
|
// ../../node_modules/zod/v3/helpers/parseUtil.js
|
|
4911
4932
|
var makeIssue = (params) => {
|
|
4912
|
-
const { data, path:
|
|
4913
|
-
const fullPath = [...
|
|
4933
|
+
const { data, path: path7, errorMaps, issueData } = params;
|
|
4934
|
+
const fullPath = [...path7, ...issueData.path || []];
|
|
4914
4935
|
const fullIssue = {
|
|
4915
4936
|
...issueData,
|
|
4916
4937
|
path: fullPath
|
|
@@ -5026,11 +5047,11 @@ var errorUtil;
|
|
|
5026
5047
|
|
|
5027
5048
|
// ../../node_modules/zod/v3/types.js
|
|
5028
5049
|
var ParseInputLazyPath = class {
|
|
5029
|
-
constructor(parent, value,
|
|
5050
|
+
constructor(parent, value, path7, key) {
|
|
5030
5051
|
this._cachedPath = [];
|
|
5031
5052
|
this.parent = parent;
|
|
5032
5053
|
this.data = value;
|
|
5033
|
-
this._path =
|
|
5054
|
+
this._path = path7;
|
|
5034
5055
|
this._key = key;
|
|
5035
5056
|
}
|
|
5036
5057
|
get path() {
|
|
@@ -8481,25 +8502,23 @@ var SemanticSearchSchema = external_exports.object({
|
|
|
8481
8502
|
"Number of results to return.\n\nDefault: 5\nIncrease to 10-15 for broad exploration."
|
|
8482
8503
|
),
|
|
8483
8504
|
crossRepo: external_exports.boolean().default(false).describe(
|
|
8484
|
-
"If true, search across all repos in the organization (requires
|
|
8505
|
+
"If true, search across all repos in the organization (requires a cross-repo-capable backend, currently Qdrant).\n\nDefault: false (single-repo search)\nWhen enabled, results are grouped by repository."
|
|
8485
8506
|
),
|
|
8486
|
-
repoIds: external_exports.array(external_exports.string()).optional().describe(
|
|
8507
|
+
repoIds: external_exports.array(external_exports.string().max(255)).optional().describe(
|
|
8487
8508
|
"Optional: Filter to specific repos when crossRepo=true.\n\nIf provided, only searches within the specified repositories.\nIf omitted and crossRepo=true, searches all repos in the organization."
|
|
8488
8509
|
)
|
|
8489
8510
|
});
|
|
8490
8511
|
|
|
8491
8512
|
// src/mcp/schemas/similarity.schema.ts
|
|
8492
8513
|
var FindSimilarSchema = external_exports.object({
|
|
8493
|
-
code: external_exports.string().min(24, "Code snippet must be at least 24 characters").describe(
|
|
8514
|
+
code: external_exports.string().min(24, "Code snippet must be at least 24 characters").max(5e4, "Code snippet too long (max 50000 characters)").describe(
|
|
8494
8515
|
"Code snippet to find similar implementations for.\n\nProvide a representative code sample that demonstrates the pattern you want to find similar examples of in the codebase."
|
|
8495
8516
|
),
|
|
8496
|
-
limit: external_exports.number().int().min(1, "Limit must be at least 1").max(20, "Limit cannot exceed 20").default(5).describe(
|
|
8497
|
-
|
|
8498
|
-
),
|
|
8499
|
-
language: external_exports.string().min(1, "Language filter cannot be empty").optional().describe(
|
|
8517
|
+
limit: external_exports.number().int().min(1, "Limit must be at least 1").max(20, "Limit cannot exceed 20").default(5).describe("Number of similar code blocks to return.\n\nDefault: 5"),
|
|
8518
|
+
language: external_exports.string().min(1, "Language filter cannot be empty").max(50).optional().describe(
|
|
8500
8519
|
"Filter by programming language.\n\nExamples: 'typescript', 'python', 'javascript', 'php'\n\nIf omitted, searches all languages."
|
|
8501
8520
|
),
|
|
8502
|
-
pathHint: external_exports.string().min(1, "Path hint cannot be empty").optional().describe(
|
|
8521
|
+
pathHint: external_exports.string().min(1, "Path hint cannot be empty").max(500).optional().describe(
|
|
8503
8522
|
"Filter by file path substring.\n\nOnly returns results where the file path contains this string (case-insensitive).\n\nExamples: 'src/api', 'components', 'utils'"
|
|
8504
8523
|
)
|
|
8505
8524
|
});
|
|
@@ -8507,8 +8526,8 @@ var FindSimilarSchema = external_exports.object({
|
|
|
8507
8526
|
// src/mcp/schemas/file.schema.ts
|
|
8508
8527
|
var GetFilesContextSchema = external_exports.object({
|
|
8509
8528
|
filepaths: external_exports.union([
|
|
8510
|
-
external_exports.string().min(1, "Filepath cannot be empty"),
|
|
8511
|
-
external_exports.array(external_exports.string().min(1, "Filepath cannot be empty")).min(1, "Array must contain at least one filepath").max(50, "Maximum 50 files per request")
|
|
8529
|
+
external_exports.string().min(1, "Filepath cannot be empty").max(1e3),
|
|
8530
|
+
external_exports.array(external_exports.string().min(1, "Filepath cannot be empty").max(1e3)).min(1, "Array must contain at least one filepath").max(50, "Maximum 50 files per request")
|
|
8512
8531
|
]).describe(
|
|
8513
8532
|
"Single filepath or array of filepaths (relative to workspace root).\n\nSingle file: 'src/components/Button.tsx'\nMultiple files: ['src/auth.ts', 'src/user.ts']\n\nMaximum 50 files per request for batch operations."
|
|
8514
8533
|
),
|
|
@@ -8519,10 +8538,10 @@ var GetFilesContextSchema = external_exports.object({
|
|
|
8519
8538
|
|
|
8520
8539
|
// src/mcp/schemas/symbols.schema.ts
|
|
8521
8540
|
var ListFunctionsSchema = external_exports.object({
|
|
8522
|
-
pattern: external_exports.string().optional().describe(
|
|
8541
|
+
pattern: external_exports.string().max(200).optional().describe(
|
|
8523
8542
|
"Regex pattern to match symbol names.\n\nExamples:\n - '.*Controller.*' to find all Controllers\n - 'handle.*' to find handlers\n - '.*Service$' to find Services\n\nIf omitted, returns all symbols."
|
|
8524
8543
|
),
|
|
8525
|
-
language: external_exports.string().optional().describe(
|
|
8544
|
+
language: external_exports.string().max(50).optional().describe(
|
|
8526
8545
|
"Filter by programming language.\n\nExamples: 'typescript', 'python', 'javascript', 'php'\n\nIf omitted, searches all languages."
|
|
8527
8546
|
),
|
|
8528
8547
|
symbolType: external_exports.enum(["function", "method", "class", "interface"]).optional().describe("Filter by symbol type. If omitted, returns all types."),
|
|
@@ -8536,23 +8555,23 @@ var ListFunctionsSchema = external_exports.object({
|
|
|
8536
8555
|
|
|
8537
8556
|
// src/mcp/schemas/dependents.schema.ts
|
|
8538
8557
|
var GetDependentsSchema = external_exports.object({
|
|
8539
|
-
filepath: external_exports.string().min(1, "Filepath cannot be empty").describe(
|
|
8558
|
+
filepath: external_exports.string().min(1, "Filepath cannot be empty").max(1e3).describe(
|
|
8540
8559
|
"Path to file to find dependents for (relative to workspace root).\n\nExample: 'src/utils/validate.ts'\n\nReturns all files that import or depend on this file.\n\nNote: Scans up to 10,000 code chunks. For very large codebases,\nresults may be incomplete (a warning will be included if truncated)."
|
|
8541
8560
|
),
|
|
8542
|
-
symbol: external_exports.string().min(1, "Symbol cannot be an empty string").optional().describe(
|
|
8561
|
+
symbol: external_exports.string().min(1, "Symbol cannot be an empty string").max(500).optional().describe(
|
|
8543
8562
|
"Optional: specific exported symbol to find usages of.\n\nWhen provided, returns call sites instead of just importing files.\n\nExample: 'validateEmail' to find where validateEmail() is called.\n\nResponse includes 'usages' array showing which functions call this symbol."
|
|
8544
8563
|
),
|
|
8545
8564
|
depth: external_exports.number().int().min(1).max(1).default(1).describe(
|
|
8546
8565
|
"Depth of transitive dependencies. Only depth=1 (direct dependents) is currently supported.\n\n1 = Direct dependents only"
|
|
8547
8566
|
),
|
|
8548
8567
|
crossRepo: external_exports.boolean().default(false).describe(
|
|
8549
|
-
"If true, find dependents across all repos in the organization (requires
|
|
8568
|
+
"If true, find dependents across all repos in the organization (requires a cross-repo-capable backend, currently Qdrant).\n\nDefault: false (single-repo search)\nWhen enabled, results are grouped by repository."
|
|
8550
8569
|
)
|
|
8551
8570
|
});
|
|
8552
8571
|
|
|
8553
8572
|
// src/mcp/schemas/complexity.schema.ts
|
|
8554
8573
|
var GetComplexitySchema = external_exports.object({
|
|
8555
|
-
files: external_exports.array(external_exports.string().min(1, "Filepath cannot be empty")).optional().describe(
|
|
8574
|
+
files: external_exports.array(external_exports.string().min(1, "Filepath cannot be empty").max(1e3)).optional().describe(
|
|
8556
8575
|
"Specific files to analyze. If omitted, analyzes entire codebase.\n\nExample: ['src/auth.ts', 'src/api/user.ts']"
|
|
8557
8576
|
),
|
|
8558
8577
|
top: external_exports.number().int().min(1, "Top must be at least 1").max(50, "Top cannot exceed 50").default(10).describe(
|
|
@@ -8562,9 +8581,9 @@ var GetComplexitySchema = external_exports.object({
|
|
|
8562
8581
|
"Only return functions above this complexity threshold.\n\nNote: Violations are first identified using the threshold from lien.config.json (default: 15). This parameter filters those violations to show only items above the specified value. Setting threshold below the config threshold will not show additional functions."
|
|
8563
8582
|
),
|
|
8564
8583
|
crossRepo: external_exports.boolean().default(false).describe(
|
|
8565
|
-
"If true, analyze complexity across all repos in the organization (requires
|
|
8584
|
+
"If true, analyze complexity across all repos in the organization (requires a cross-repo-capable backend, currently Qdrant).\n\nDefault: false (single-repo analysis)\nWhen enabled, results are aggregated by repository."
|
|
8566
8585
|
),
|
|
8567
|
-
repoIds: external_exports.array(external_exports.string()).optional().describe(
|
|
8586
|
+
repoIds: external_exports.array(external_exports.string().max(255)).optional().describe(
|
|
8568
8587
|
"Optional: Filter to specific repos when crossRepo=true.\n\nIf provided, only analyzes the specified repositories.\nIf omitted and crossRepo=true, analyzes all repos in the organization."
|
|
8569
8588
|
)
|
|
8570
8589
|
});
|
|
@@ -8744,30 +8763,33 @@ function applyResponseBudget(result, maxChars = MAX_RESPONSE_CHARS) {
|
|
|
8744
8763
|
if (arrays.length === 0) {
|
|
8745
8764
|
return { result };
|
|
8746
8765
|
}
|
|
8747
|
-
|
|
8748
|
-
|
|
8749
|
-
|
|
8750
|
-
|
|
8766
|
+
const originalItemCount = arrays.reduce((sum, arr) => sum + arr.length, 0);
|
|
8767
|
+
truncateArrays(arrays, 10);
|
|
8768
|
+
if (measureSize(cloned) <= maxChars) {
|
|
8769
|
+
return buildResult(cloned, originalChars, 1, arrays, originalItemCount);
|
|
8751
8770
|
}
|
|
8771
|
+
dropArrayItems(arrays, cloned, maxChars);
|
|
8752
8772
|
if (measureSize(cloned) <= maxChars) {
|
|
8753
|
-
return buildResult(cloned, originalChars,
|
|
8773
|
+
return buildResult(cloned, originalChars, 2, arrays, originalItemCount);
|
|
8754
8774
|
}
|
|
8755
|
-
|
|
8775
|
+
truncateArrays(arrays, 3);
|
|
8776
|
+
return buildResult(cloned, originalChars, 3, arrays, originalItemCount);
|
|
8777
|
+
}
|
|
8778
|
+
function truncateArrays(arrays, maxLines) {
|
|
8756
8779
|
for (const arr of arrays) {
|
|
8757
|
-
|
|
8758
|
-
|
|
8759
|
-
currentSize = measureSize(cloned);
|
|
8780
|
+
for (const item of arr) {
|
|
8781
|
+
item.content = truncateContent(item.content, maxLines);
|
|
8760
8782
|
}
|
|
8761
8783
|
}
|
|
8762
|
-
|
|
8763
|
-
|
|
8764
|
-
|
|
8784
|
+
}
|
|
8785
|
+
function dropArrayItems(arrays, root, maxChars) {
|
|
8786
|
+
let currentSize = measureSize(root);
|
|
8765
8787
|
for (const arr of arrays) {
|
|
8766
|
-
|
|
8767
|
-
|
|
8788
|
+
while (arr.length > 1 && currentSize > maxChars) {
|
|
8789
|
+
arr.pop();
|
|
8790
|
+
currentSize = measureSize(root);
|
|
8768
8791
|
}
|
|
8769
8792
|
}
|
|
8770
|
-
return buildResult(cloned, originalChars, 3);
|
|
8771
8793
|
}
|
|
8772
8794
|
function truncateContent(content, maxLines) {
|
|
8773
8795
|
const lines = content.split("\n");
|
|
@@ -8796,15 +8818,19 @@ function walk(node, found) {
|
|
|
8796
8818
|
walk(value, found);
|
|
8797
8819
|
}
|
|
8798
8820
|
}
|
|
8799
|
-
function buildResult(cloned, originalChars, phase) {
|
|
8821
|
+
function buildResult(cloned, originalChars, phase, arrays, originalItemCount) {
|
|
8800
8822
|
const finalChars = measureSize(cloned);
|
|
8823
|
+
const finalItemCount = arrays.reduce((sum, arr) => sum + arr.length, 0);
|
|
8824
|
+
const message = finalItemCount < originalItemCount ? `Showing ${finalItemCount} of ${originalItemCount} results (truncated). Use narrower filters or smaller limit for complete results.` : `Showing all ${finalItemCount} results (content trimmed to fit). Use narrower filters or smaller limit for complete results.`;
|
|
8801
8825
|
return {
|
|
8802
8826
|
result: cloned,
|
|
8803
8827
|
truncation: {
|
|
8804
8828
|
originalChars,
|
|
8805
8829
|
finalChars,
|
|
8830
|
+
originalItemCount,
|
|
8831
|
+
finalItemCount,
|
|
8806
8832
|
phase,
|
|
8807
|
-
message
|
|
8833
|
+
message
|
|
8808
8834
|
}
|
|
8809
8835
|
};
|
|
8810
8836
|
}
|
|
@@ -8822,10 +8848,12 @@ function wrapToolHandler(schema, handler) {
|
|
|
8822
8848
|
${truncation.message}` : truncation.message;
|
|
8823
8849
|
}
|
|
8824
8850
|
return {
|
|
8825
|
-
content: [
|
|
8826
|
-
|
|
8827
|
-
|
|
8828
|
-
|
|
8851
|
+
content: [
|
|
8852
|
+
{
|
|
8853
|
+
type: "text",
|
|
8854
|
+
text: JSON.stringify(result, null, 2)
|
|
8855
|
+
}
|
|
8856
|
+
]
|
|
8829
8857
|
};
|
|
8830
8858
|
} catch (error) {
|
|
8831
8859
|
return formatErrorResponse(error);
|
|
@@ -8836,42 +8864,57 @@ function formatErrorResponse(error) {
|
|
|
8836
8864
|
if (error instanceof ZodError) {
|
|
8837
8865
|
return {
|
|
8838
8866
|
isError: true,
|
|
8839
|
-
content: [
|
|
8840
|
-
|
|
8841
|
-
|
|
8842
|
-
|
|
8843
|
-
|
|
8844
|
-
|
|
8845
|
-
|
|
8846
|
-
|
|
8847
|
-
|
|
8848
|
-
|
|
8849
|
-
|
|
8867
|
+
content: [
|
|
8868
|
+
{
|
|
8869
|
+
type: "text",
|
|
8870
|
+
text: JSON.stringify(
|
|
8871
|
+
{
|
|
8872
|
+
error: "Invalid parameters",
|
|
8873
|
+
code: LienErrorCode.INVALID_INPUT,
|
|
8874
|
+
details: error.errors.map((e) => ({
|
|
8875
|
+
field: e.path.join("."),
|
|
8876
|
+
message: e.message
|
|
8877
|
+
}))
|
|
8878
|
+
},
|
|
8879
|
+
null,
|
|
8880
|
+
2
|
|
8881
|
+
)
|
|
8882
|
+
}
|
|
8883
|
+
]
|
|
8850
8884
|
};
|
|
8851
8885
|
}
|
|
8852
8886
|
if (error instanceof LienError) {
|
|
8853
8887
|
return {
|
|
8854
8888
|
isError: true,
|
|
8855
|
-
content: [
|
|
8856
|
-
|
|
8857
|
-
|
|
8858
|
-
|
|
8889
|
+
content: [
|
|
8890
|
+
{
|
|
8891
|
+
type: "text",
|
|
8892
|
+
text: JSON.stringify(error.toJSON(), null, 2)
|
|
8893
|
+
}
|
|
8894
|
+
]
|
|
8859
8895
|
};
|
|
8860
8896
|
}
|
|
8861
8897
|
console.error("Unexpected error in tool handler:", error);
|
|
8862
8898
|
return {
|
|
8863
8899
|
isError: true,
|
|
8864
|
-
content: [
|
|
8865
|
-
|
|
8866
|
-
|
|
8867
|
-
|
|
8868
|
-
|
|
8869
|
-
|
|
8870
|
-
|
|
8900
|
+
content: [
|
|
8901
|
+
{
|
|
8902
|
+
type: "text",
|
|
8903
|
+
text: JSON.stringify(
|
|
8904
|
+
{
|
|
8905
|
+
error: error instanceof Error ? error.message : "Unknown error",
|
|
8906
|
+
code: LienErrorCode.INTERNAL_ERROR
|
|
8907
|
+
},
|
|
8908
|
+
null,
|
|
8909
|
+
2
|
|
8910
|
+
)
|
|
8911
|
+
}
|
|
8912
|
+
]
|
|
8871
8913
|
};
|
|
8872
8914
|
}
|
|
8873
8915
|
|
|
8874
8916
|
// src/mcp/utils/metadata-shaper.ts
|
|
8917
|
+
import { normalizeToRelativePath } from "@liendev/core";
|
|
8875
8918
|
var FIELD_ALLOWLISTS = {
|
|
8876
8919
|
semantic_search: /* @__PURE__ */ new Set([
|
|
8877
8920
|
"language",
|
|
@@ -8923,7 +8966,12 @@ var FIELD_ALLOWLISTS = {
|
|
|
8923
8966
|
function deduplicateResults(results) {
|
|
8924
8967
|
const seen = /* @__PURE__ */ new Set();
|
|
8925
8968
|
return results.filter((r) => {
|
|
8926
|
-
const key = JSON.stringify([
|
|
8969
|
+
const key = JSON.stringify([
|
|
8970
|
+
r.metadata.repoId ?? "",
|
|
8971
|
+
r.metadata.file ? normalizeToRelativePath(r.metadata.file) : "",
|
|
8972
|
+
r.metadata.startLine,
|
|
8973
|
+
r.metadata.endLine
|
|
8974
|
+
]);
|
|
8927
8975
|
if (seen.has(key)) return false;
|
|
8928
8976
|
seen.add(key);
|
|
8929
8977
|
return true;
|
|
@@ -8980,7 +9028,6 @@ function shapeResults(results, tool) {
|
|
|
8980
9028
|
}
|
|
8981
9029
|
|
|
8982
9030
|
// src/mcp/handlers/semantic-search.ts
|
|
8983
|
-
import { QdrantDB } from "@liendev/core";
|
|
8984
9031
|
function groupResultsByRepo(results) {
|
|
8985
9032
|
const grouped = {};
|
|
8986
9033
|
for (const result of results) {
|
|
@@ -8994,13 +9041,18 @@ function groupResultsByRepo(results) {
|
|
|
8994
9041
|
}
|
|
8995
9042
|
async function executeSearch(vectorDB, queryEmbedding, params, log) {
|
|
8996
9043
|
const { query, limit, crossRepo, repoIds } = params;
|
|
8997
|
-
if (crossRepo && vectorDB
|
|
9044
|
+
if (crossRepo && vectorDB.supportsCrossRepo) {
|
|
8998
9045
|
const results2 = await vectorDB.searchCrossRepo(queryEmbedding, limit, { repoIds });
|
|
8999
|
-
log(
|
|
9046
|
+
log(
|
|
9047
|
+
`Found ${results2.length} results across ${Object.keys(groupResultsByRepo(results2)).length} repos`
|
|
9048
|
+
);
|
|
9000
9049
|
return { results: results2, crossRepoFallback: false };
|
|
9001
9050
|
}
|
|
9002
9051
|
if (crossRepo) {
|
|
9003
|
-
log(
|
|
9052
|
+
log(
|
|
9053
|
+
"Warning: crossRepo=true requires a cross-repo-capable backend. Falling back to single-repo search.",
|
|
9054
|
+
"warning"
|
|
9055
|
+
);
|
|
9004
9056
|
}
|
|
9005
9057
|
const results = await vectorDB.search(queryEmbedding, limit, query);
|
|
9006
9058
|
log(`Found ${results.length} results`);
|
|
@@ -9009,7 +9061,9 @@ async function executeSearch(vectorDB, queryEmbedding, params, log) {
|
|
|
9009
9061
|
function processResults(rawResults, crossRepoFallback, log) {
|
|
9010
9062
|
const notes = [];
|
|
9011
9063
|
if (crossRepoFallback) {
|
|
9012
|
-
notes.push(
|
|
9064
|
+
notes.push(
|
|
9065
|
+
"Cross-repo search requires a cross-repo-capable backend. Fell back to single-repo search."
|
|
9066
|
+
);
|
|
9013
9067
|
}
|
|
9014
9068
|
const results = deduplicateResults(rawResults);
|
|
9015
9069
|
if (results.length > 0 && results.every((r) => r.relevance === "not_relevant")) {
|
|
@@ -9021,33 +9075,32 @@ function processResults(rawResults, crossRepoFallback, log) {
|
|
|
9021
9075
|
}
|
|
9022
9076
|
async function handleSemanticSearch(args, ctx) {
|
|
9023
9077
|
const { vectorDB, embeddings, log, checkAndReconnect, getIndexMetadata } = ctx;
|
|
9024
|
-
return await wrapToolHandler(
|
|
9025
|
-
|
|
9026
|
-
|
|
9027
|
-
|
|
9028
|
-
|
|
9029
|
-
|
|
9030
|
-
|
|
9031
|
-
|
|
9032
|
-
|
|
9033
|
-
|
|
9034
|
-
|
|
9035
|
-
|
|
9078
|
+
return await wrapToolHandler(SemanticSearchSchema, async (validatedArgs) => {
|
|
9079
|
+
const { crossRepo, repoIds, query, limit } = validatedArgs;
|
|
9080
|
+
log(`Searching for: "${query}"${crossRepo ? " (cross-repo)" : ""}`);
|
|
9081
|
+
await checkAndReconnect();
|
|
9082
|
+
const queryEmbedding = await embeddings.embed(query);
|
|
9083
|
+
const { results: rawResults, crossRepoFallback } = await executeSearch(
|
|
9084
|
+
vectorDB,
|
|
9085
|
+
queryEmbedding,
|
|
9086
|
+
{ query, limit: limit ?? 5, crossRepo, repoIds },
|
|
9087
|
+
log
|
|
9088
|
+
);
|
|
9089
|
+
const { results, notes } = processResults(rawResults, crossRepoFallback, log);
|
|
9090
|
+
log(`Returning ${results.length} results`);
|
|
9091
|
+
const shaped = shapeResults(results, "semantic_search");
|
|
9092
|
+
if (shaped.length === 0) {
|
|
9093
|
+
notes.push(
|
|
9094
|
+
'0 results. Try rephrasing as a full question (e.g. "How does X work?"), or use grep for exact string matches. If the codebase was recently updated, run "lien reindex".'
|
|
9036
9095
|
);
|
|
9037
|
-
const { results, notes } = processResults(rawResults, crossRepoFallback, log);
|
|
9038
|
-
log(`Returning ${results.length} results`);
|
|
9039
|
-
const shaped = shapeResults(results, "semantic_search");
|
|
9040
|
-
if (shaped.length === 0) {
|
|
9041
|
-
notes.push('0 results. Try rephrasing as a full question (e.g. "How does X work?"), or use grep for exact string matches. If the codebase was recently updated, run "lien reindex".');
|
|
9042
|
-
}
|
|
9043
|
-
return {
|
|
9044
|
-
indexInfo: getIndexMetadata(),
|
|
9045
|
-
results: shaped,
|
|
9046
|
-
...crossRepo && vectorDB instanceof QdrantDB && { groupedByRepo: groupResultsByRepo(shaped) },
|
|
9047
|
-
...notes.length > 0 && { note: notes.join(" ") }
|
|
9048
|
-
};
|
|
9049
9096
|
}
|
|
9050
|
-
|
|
9097
|
+
return {
|
|
9098
|
+
indexInfo: getIndexMetadata(),
|
|
9099
|
+
results: shaped,
|
|
9100
|
+
...crossRepo && vectorDB.supportsCrossRepo && { groupedByRepo: groupResultsByRepo(shaped) },
|
|
9101
|
+
...notes.length > 0 && { note: notes.join(" ") }
|
|
9102
|
+
};
|
|
9103
|
+
})(args);
|
|
9051
9104
|
}
|
|
9052
9105
|
|
|
9053
9106
|
// src/mcp/handlers/find-similar.ts
|
|
@@ -9066,139 +9119,52 @@ function pruneIrrelevantResults(results) {
|
|
|
9066
9119
|
}
|
|
9067
9120
|
async function handleFindSimilar(args, ctx) {
|
|
9068
9121
|
const { vectorDB, embeddings, log, checkAndReconnect, getIndexMetadata } = ctx;
|
|
9069
|
-
return await wrapToolHandler(
|
|
9070
|
-
|
|
9071
|
-
|
|
9072
|
-
|
|
9073
|
-
|
|
9074
|
-
|
|
9075
|
-
|
|
9076
|
-
|
|
9077
|
-
|
|
9078
|
-
|
|
9079
|
-
|
|
9080
|
-
|
|
9081
|
-
|
|
9082
|
-
|
|
9083
|
-
|
|
9084
|
-
|
|
9085
|
-
|
|
9086
|
-
|
|
9087
|
-
|
|
9088
|
-
|
|
9089
|
-
|
|
9090
|
-
|
|
9091
|
-
|
|
9092
|
-
|
|
9093
|
-
|
|
9094
|
-
|
|
9095
|
-
|
|
9096
|
-
|
|
9097
|
-
|
|
9098
|
-
|
|
9099
|
-
|
|
9100
|
-
|
|
9101
|
-
|
|
9102
|
-
|
|
9103
|
-
|
|
9104
|
-
|
|
9105
|
-
)(args);
|
|
9106
|
-
}
|
|
9107
|
-
|
|
9108
|
-
// src/mcp/utils/path-matching.ts
|
|
9109
|
-
function normalizePath(path6, workspaceRoot) {
|
|
9110
|
-
let normalized = path6.replace(/['"]/g, "").trim().replace(/\\/g, "/");
|
|
9111
|
-
normalized = normalized.replace(/\.(ts|tsx|js|jsx|php|py)$/, "");
|
|
9112
|
-
if (normalized.startsWith(workspaceRoot + "/")) {
|
|
9113
|
-
normalized = normalized.substring(workspaceRoot.length + 1);
|
|
9114
|
-
}
|
|
9115
|
-
return normalized;
|
|
9116
|
-
}
|
|
9117
|
-
function matchesAtBoundary(str, pattern) {
|
|
9118
|
-
const index = str.indexOf(pattern);
|
|
9119
|
-
if (index === -1) return false;
|
|
9120
|
-
const charBefore = index > 0 ? str[index - 1] : "/";
|
|
9121
|
-
if (charBefore !== "/" && index !== 0) return false;
|
|
9122
|
-
const endIndex = index + pattern.length;
|
|
9123
|
-
if (endIndex === str.length) return true;
|
|
9124
|
-
const charAfter = str[endIndex];
|
|
9125
|
-
return charAfter === "/";
|
|
9126
|
-
}
|
|
9127
|
-
function matchesFile(normalizedImport, normalizedTarget) {
|
|
9128
|
-
if (normalizedImport === normalizedTarget) return true;
|
|
9129
|
-
if (matchesAtBoundary(normalizedImport, normalizedTarget)) {
|
|
9130
|
-
return true;
|
|
9131
|
-
}
|
|
9132
|
-
if (matchesAtBoundary(normalizedTarget, normalizedImport)) {
|
|
9133
|
-
return true;
|
|
9134
|
-
}
|
|
9135
|
-
const cleanedImport = normalizedImport.replace(/^(\.\.?\/)+/, "");
|
|
9136
|
-
if (matchesAtBoundary(cleanedImport, normalizedTarget) || matchesAtBoundary(normalizedTarget, cleanedImport)) {
|
|
9137
|
-
return true;
|
|
9138
|
-
}
|
|
9139
|
-
if (matchesPHPNamespace(normalizedImport, normalizedTarget)) {
|
|
9140
|
-
return true;
|
|
9141
|
-
}
|
|
9142
|
-
if (matchesPythonModule(normalizedImport, normalizedTarget)) {
|
|
9143
|
-
return true;
|
|
9144
|
-
}
|
|
9145
|
-
return false;
|
|
9146
|
-
}
|
|
9147
|
-
function matchesDirectPythonModule(moduleAsPath, targetWithoutPy) {
|
|
9148
|
-
return targetWithoutPy === moduleAsPath || targetWithoutPy === moduleAsPath + "/__init__" || targetWithoutPy.replace(/\/__init__$/, "") === moduleAsPath;
|
|
9149
|
-
}
|
|
9150
|
-
function matchesParentPythonPackage(moduleAsPath, targetWithoutPy) {
|
|
9151
|
-
return targetWithoutPy.startsWith(moduleAsPath + "/");
|
|
9152
|
-
}
|
|
9153
|
-
function matchesSuffixPythonModule(moduleAsPath, targetWithoutPy) {
|
|
9154
|
-
return targetWithoutPy.endsWith("/" + moduleAsPath) || targetWithoutPy.endsWith("/" + moduleAsPath + "/__init__");
|
|
9155
|
-
}
|
|
9156
|
-
function matchesWithSourcePrefix(moduleAsPath, targetWithoutPy) {
|
|
9157
|
-
const moduleIndex = targetWithoutPy.indexOf(moduleAsPath);
|
|
9158
|
-
if (moduleIndex < 0) return false;
|
|
9159
|
-
const prefix = targetWithoutPy.substring(0, moduleIndex);
|
|
9160
|
-
const prefixSlashes = (prefix.match(/\//g) || []).length;
|
|
9161
|
-
return prefixSlashes <= 1 && (prefix === "" || prefix.endsWith("/"));
|
|
9162
|
-
}
|
|
9163
|
-
function matchesPythonModule(importPath, targetPath) {
|
|
9164
|
-
if (!importPath.includes(".")) {
|
|
9165
|
-
return false;
|
|
9166
|
-
}
|
|
9167
|
-
const moduleAsPath = importPath.replace(/\./g, "/");
|
|
9168
|
-
const targetWithoutPy = targetPath.replace(/\.py$/, "");
|
|
9169
|
-
return matchesDirectPythonModule(moduleAsPath, targetWithoutPy) || matchesParentPythonPackage(moduleAsPath, targetWithoutPy) || matchesSuffixPythonModule(moduleAsPath, targetWithoutPy) || matchesWithSourcePrefix(moduleAsPath, targetWithoutPy);
|
|
9170
|
-
}
|
|
9171
|
-
function matchesPHPNamespace(importPath, targetPath) {
|
|
9172
|
-
const importComponents = importPath.split("/").filter(Boolean);
|
|
9173
|
-
const targetComponents = targetPath.split("/").filter(Boolean);
|
|
9174
|
-
if (importComponents.length === 0 || targetComponents.length === 0) {
|
|
9175
|
-
return false;
|
|
9176
|
-
}
|
|
9177
|
-
let matched = 0;
|
|
9178
|
-
for (let i = 1; i <= importComponents.length && i <= targetComponents.length; i++) {
|
|
9179
|
-
const impComp = importComponents[importComponents.length - i].toLowerCase();
|
|
9180
|
-
const targetComp = targetComponents[targetComponents.length - i].toLowerCase();
|
|
9181
|
-
if (impComp === targetComp) {
|
|
9182
|
-
matched++;
|
|
9183
|
-
} else {
|
|
9184
|
-
break;
|
|
9185
|
-
}
|
|
9186
|
-
}
|
|
9187
|
-
return matched === importComponents.length;
|
|
9188
|
-
}
|
|
9189
|
-
function getCanonicalPath(filepath, workspaceRoot) {
|
|
9190
|
-
let canonical = filepath.replace(/\\/g, "/");
|
|
9191
|
-
if (canonical.startsWith(workspaceRoot + "/")) {
|
|
9192
|
-
canonical = canonical.substring(workspaceRoot.length + 1);
|
|
9193
|
-
}
|
|
9194
|
-
return canonical;
|
|
9195
|
-
}
|
|
9196
|
-
function isTestFile(filepath) {
|
|
9197
|
-
return /\.(test|spec)\.[^/]+$/.test(filepath) || /(^|[/\\])(test|tests|__tests__)[/\\]/.test(filepath);
|
|
9122
|
+
return await wrapToolHandler(FindSimilarSchema, async (validatedArgs) => {
|
|
9123
|
+
log(`Finding similar code...`);
|
|
9124
|
+
await checkAndReconnect();
|
|
9125
|
+
const codeEmbedding = await embeddings.embed(validatedArgs.code);
|
|
9126
|
+
const limit = validatedArgs.limit ?? 5;
|
|
9127
|
+
const extraLimit = limit + 10;
|
|
9128
|
+
let results = await vectorDB.search(codeEmbedding, extraLimit, validatedArgs.code);
|
|
9129
|
+
results = deduplicateResults(results);
|
|
9130
|
+
const inputCode = validatedArgs.code.trim();
|
|
9131
|
+
results = results.filter((r) => {
|
|
9132
|
+
if (r.score >= 0.1) return true;
|
|
9133
|
+
return r.content.trim() !== inputCode;
|
|
9134
|
+
});
|
|
9135
|
+
const filtersApplied = { prunedLowRelevance: 0 };
|
|
9136
|
+
if (validatedArgs.language) {
|
|
9137
|
+
filtersApplied.language = validatedArgs.language;
|
|
9138
|
+
results = applyLanguageFilter(results, validatedArgs.language);
|
|
9139
|
+
}
|
|
9140
|
+
if (validatedArgs.pathHint) {
|
|
9141
|
+
filtersApplied.pathHint = validatedArgs.pathHint;
|
|
9142
|
+
results = applyPathHintFilter(results, validatedArgs.pathHint);
|
|
9143
|
+
}
|
|
9144
|
+
const { filtered, prunedCount } = pruneIrrelevantResults(results);
|
|
9145
|
+
filtersApplied.prunedLowRelevance = prunedCount;
|
|
9146
|
+
const finalResults = filtered.slice(0, limit);
|
|
9147
|
+
log(`Found ${finalResults.length} similar chunks`);
|
|
9148
|
+
const hasFilters = filtersApplied.language || filtersApplied.pathHint || filtersApplied.prunedLowRelevance > 0;
|
|
9149
|
+
return {
|
|
9150
|
+
indexInfo: getIndexMetadata(),
|
|
9151
|
+
results: shapeResults(finalResults, "find_similar"),
|
|
9152
|
+
...hasFilters && { filtersApplied },
|
|
9153
|
+
...finalResults.length === 0 && {
|
|
9154
|
+
note: "0 results. Ensure the code snippet is at least 24 characters and representative of the pattern. Try grep for exact string matches."
|
|
9155
|
+
}
|
|
9156
|
+
};
|
|
9157
|
+
})(args);
|
|
9198
9158
|
}
|
|
9199
9159
|
|
|
9200
9160
|
// src/mcp/handlers/get-files-context.ts
|
|
9201
|
-
import {
|
|
9161
|
+
import {
|
|
9162
|
+
normalizePath,
|
|
9163
|
+
matchesFile,
|
|
9164
|
+
getCanonicalPath,
|
|
9165
|
+
isTestFile,
|
|
9166
|
+
MAX_CHUNKS_PER_FILE
|
|
9167
|
+
} from "@liendev/core";
|
|
9202
9168
|
var SCAN_LIMIT = 1e4;
|
|
9203
9169
|
async function searchFileChunks(filepaths, ctx) {
|
|
9204
9170
|
const { vectorDB, workspaceRoot } = ctx;
|
|
@@ -9228,10 +9194,7 @@ async function findRelatedChunks(filepaths, fileChunksMap, ctx) {
|
|
|
9228
9194
|
(embedding, i) => vectorDB.search(embedding, 5, filesWithChunks[i].chunks[0].content)
|
|
9229
9195
|
)
|
|
9230
9196
|
);
|
|
9231
|
-
const relatedChunksMap = Array.from(
|
|
9232
|
-
{ length: filepaths.length },
|
|
9233
|
-
() => []
|
|
9234
|
-
);
|
|
9197
|
+
const relatedChunksMap = Array.from({ length: filepaths.length }, () => []);
|
|
9235
9198
|
filesWithChunks.forEach(({ filepath, index }, i) => {
|
|
9236
9199
|
const related = relatedSearches[i];
|
|
9237
9200
|
const targetCanonical = getCanonicalPath(filepath, workspaceRoot);
|
|
@@ -9246,10 +9209,10 @@ async function findRelatedChunks(filepaths, fileChunksMap, ctx) {
|
|
|
9246
9209
|
}
|
|
9247
9210
|
function createPathCache(workspaceRoot) {
|
|
9248
9211
|
const cache = /* @__PURE__ */ new Map();
|
|
9249
|
-
const normalize = (
|
|
9250
|
-
if (cache.has(
|
|
9251
|
-
const normalized = normalizePath(
|
|
9252
|
-
cache.set(
|
|
9212
|
+
const normalize = (path7) => {
|
|
9213
|
+
if (cache.has(path7)) return cache.get(path7);
|
|
9214
|
+
const normalized = normalizePath(path7, workspaceRoot);
|
|
9215
|
+
cache.set(path7, normalized);
|
|
9253
9216
|
return normalized;
|
|
9254
9217
|
};
|
|
9255
9218
|
return { normalize, cache };
|
|
@@ -9281,10 +9244,7 @@ function deduplicateChunks(fileChunks, relatedChunks) {
|
|
|
9281
9244
|
function buildFilesData(filepaths, fileChunksMap, relatedChunksMap, testAssociationsMap) {
|
|
9282
9245
|
const filesData = {};
|
|
9283
9246
|
filepaths.forEach((filepath, i) => {
|
|
9284
|
-
const dedupedChunks = deduplicateChunks(
|
|
9285
|
-
fileChunksMap[i],
|
|
9286
|
-
relatedChunksMap[i] || []
|
|
9287
|
-
);
|
|
9247
|
+
const dedupedChunks = deduplicateChunks(fileChunksMap[i], relatedChunksMap[i] || []);
|
|
9288
9248
|
filesData[filepath] = {
|
|
9289
9249
|
chunks: dedupedChunks,
|
|
9290
9250
|
testAssociations: testAssociationsMap[i]
|
|
@@ -9321,61 +9281,48 @@ function buildMultiFileResponse(filesData, indexInfo, note) {
|
|
|
9321
9281
|
}
|
|
9322
9282
|
async function handleGetFilesContext(args, ctx) {
|
|
9323
9283
|
const { vectorDB, embeddings, log, checkAndReconnect, getIndexMetadata } = ctx;
|
|
9324
|
-
return await wrapToolHandler(
|
|
9325
|
-
|
|
9326
|
-
|
|
9327
|
-
|
|
9328
|
-
|
|
9329
|
-
|
|
9330
|
-
|
|
9331
|
-
|
|
9332
|
-
|
|
9333
|
-
|
|
9334
|
-
|
|
9335
|
-
|
|
9336
|
-
|
|
9337
|
-
|
|
9338
|
-
|
|
9339
|
-
|
|
9340
|
-
|
|
9341
|
-
|
|
9342
|
-
|
|
9343
|
-
|
|
9344
|
-
|
|
9345
|
-
)
|
|
9346
|
-
|
|
9347
|
-
const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT });
|
|
9348
|
-
const hitScanLimit = allChunks.length === SCAN_LIMIT;
|
|
9349
|
-
if (hitScanLimit) {
|
|
9350
|
-
log(
|
|
9351
|
-
`Scanned ${SCAN_LIMIT} chunks (limit reached). Test associations may be incomplete for large codebases.`,
|
|
9352
|
-
"warning"
|
|
9353
|
-
);
|
|
9354
|
-
}
|
|
9355
|
-
const testAssociationsMap = findTestAssociations(
|
|
9356
|
-
filepaths,
|
|
9357
|
-
allChunks,
|
|
9358
|
-
handlerCtx
|
|
9359
|
-
);
|
|
9360
|
-
const filesData = buildFilesData(
|
|
9361
|
-
filepaths,
|
|
9362
|
-
fileChunksMap,
|
|
9363
|
-
relatedChunksMap,
|
|
9364
|
-
testAssociationsMap
|
|
9365
|
-
);
|
|
9366
|
-
const totalChunks = Object.values(filesData).reduce(
|
|
9367
|
-
(sum, f) => sum + f.chunks.length,
|
|
9368
|
-
0
|
|
9284
|
+
return await wrapToolHandler(GetFilesContextSchema, async (validatedArgs) => {
|
|
9285
|
+
const filepaths = Array.isArray(validatedArgs.filepaths) ? validatedArgs.filepaths : [validatedArgs.filepaths];
|
|
9286
|
+
const isSingleFile = !Array.isArray(validatedArgs.filepaths);
|
|
9287
|
+
log(`Getting context for: ${filepaths.join(", ")}`);
|
|
9288
|
+
await checkAndReconnect();
|
|
9289
|
+
const workspaceRoot = process.cwd().replace(/\\/g, "/");
|
|
9290
|
+
const handlerCtx = {
|
|
9291
|
+
vectorDB,
|
|
9292
|
+
embeddings,
|
|
9293
|
+
log,
|
|
9294
|
+
workspaceRoot
|
|
9295
|
+
};
|
|
9296
|
+
const fileChunksMap = await searchFileChunks(filepaths, handlerCtx);
|
|
9297
|
+
let relatedChunksMap = [];
|
|
9298
|
+
if (validatedArgs.includeRelated !== false) {
|
|
9299
|
+
relatedChunksMap = await findRelatedChunks(filepaths, fileChunksMap, handlerCtx);
|
|
9300
|
+
}
|
|
9301
|
+
const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT });
|
|
9302
|
+
const hitScanLimit = allChunks.length === SCAN_LIMIT;
|
|
9303
|
+
if (hitScanLimit) {
|
|
9304
|
+
log(
|
|
9305
|
+
`Scanned ${SCAN_LIMIT} chunks (limit reached). Test associations may be incomplete for large codebases.`,
|
|
9306
|
+
"warning"
|
|
9369
9307
|
);
|
|
9370
|
-
log(`Found ${totalChunks} total chunks`);
|
|
9371
|
-
const note = buildScanLimitNote(hitScanLimit);
|
|
9372
|
-
const indexInfo = getIndexMetadata();
|
|
9373
|
-
return isSingleFile ? buildSingleFileResponse(filepaths[0], filesData, indexInfo, note) : buildMultiFileResponse(filesData, indexInfo, note);
|
|
9374
9308
|
}
|
|
9375
|
-
|
|
9309
|
+
const testAssociationsMap = findTestAssociations(filepaths, allChunks, handlerCtx);
|
|
9310
|
+
const filesData = buildFilesData(
|
|
9311
|
+
filepaths,
|
|
9312
|
+
fileChunksMap,
|
|
9313
|
+
relatedChunksMap,
|
|
9314
|
+
testAssociationsMap
|
|
9315
|
+
);
|
|
9316
|
+
const totalChunks = Object.values(filesData).reduce((sum, f) => sum + f.chunks.length, 0);
|
|
9317
|
+
log(`Found ${totalChunks} total chunks`);
|
|
9318
|
+
const note = buildScanLimitNote(hitScanLimit);
|
|
9319
|
+
const indexInfo = getIndexMetadata();
|
|
9320
|
+
return isSingleFile ? buildSingleFileResponse(filepaths[0], filesData, indexInfo, note) : buildMultiFileResponse(filesData, indexInfo, note);
|
|
9321
|
+
})(args);
|
|
9376
9322
|
}
|
|
9377
9323
|
|
|
9378
9324
|
// src/mcp/handlers/list-functions.ts
|
|
9325
|
+
import { safeRegex } from "@liendev/core";
|
|
9379
9326
|
async function performContentScan(vectorDB, args, fetchLimit, log) {
|
|
9380
9327
|
log("Falling back to content scan...");
|
|
9381
9328
|
let results = await vectorDB.scanWithFilter({
|
|
@@ -9384,11 +9331,15 @@ async function performContentScan(vectorDB, args, fetchLimit, log) {
|
|
|
9384
9331
|
limit: fetchLimit
|
|
9385
9332
|
});
|
|
9386
9333
|
if (args.pattern) {
|
|
9387
|
-
const regex =
|
|
9388
|
-
|
|
9389
|
-
|
|
9390
|
-
|
|
9391
|
-
|
|
9334
|
+
const regex = safeRegex(args.pattern);
|
|
9335
|
+
if (regex) {
|
|
9336
|
+
results = results.filter((r) => {
|
|
9337
|
+
const symbolName = r.metadata?.symbolName;
|
|
9338
|
+
return symbolName && regex.test(symbolName);
|
|
9339
|
+
});
|
|
9340
|
+
} else {
|
|
9341
|
+
results = results.filter((r) => !!r.metadata?.symbolName);
|
|
9342
|
+
}
|
|
9392
9343
|
}
|
|
9393
9344
|
return {
|
|
9394
9345
|
results,
|
|
@@ -9425,44 +9376,51 @@ function paginateResults(results, offset, limit) {
|
|
|
9425
9376
|
}
|
|
9426
9377
|
async function handleListFunctions(args, ctx) {
|
|
9427
9378
|
const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
|
|
9428
|
-
return await wrapToolHandler(
|
|
9429
|
-
|
|
9430
|
-
|
|
9431
|
-
|
|
9432
|
-
|
|
9433
|
-
|
|
9434
|
-
|
|
9435
|
-
|
|
9436
|
-
|
|
9437
|
-
|
|
9438
|
-
|
|
9439
|
-
|
|
9440
|
-
|
|
9441
|
-
|
|
9442
|
-
|
|
9443
|
-
|
|
9444
|
-
|
|
9445
|
-
|
|
9446
|
-
|
|
9447
|
-
|
|
9448
|
-
|
|
9449
|
-
|
|
9450
|
-
method: queryResult.method,
|
|
9451
|
-
hasMore,
|
|
9452
|
-
...nextOffset !== void 0 ? { nextOffset } : {},
|
|
9453
|
-
results: shapeResults(paginatedResults, "list_functions"),
|
|
9454
|
-
...notes.length > 0 && { note: notes.join(" ") }
|
|
9455
|
-
};
|
|
9379
|
+
return await wrapToolHandler(ListFunctionsSchema, async (validatedArgs) => {
|
|
9380
|
+
log("Listing functions with symbol metadata...");
|
|
9381
|
+
await checkAndReconnect();
|
|
9382
|
+
const limit = validatedArgs.limit ?? 50;
|
|
9383
|
+
const offset = validatedArgs.offset ?? 0;
|
|
9384
|
+
const fetchLimit = limit + offset + 1;
|
|
9385
|
+
const queryResult = await queryWithFallback(vectorDB, validatedArgs, fetchLimit, log);
|
|
9386
|
+
const { paginatedResults, hasMore, nextOffset } = paginateResults(
|
|
9387
|
+
queryResult.results,
|
|
9388
|
+
offset,
|
|
9389
|
+
limit
|
|
9390
|
+
);
|
|
9391
|
+
log(`Found ${paginatedResults.length} matches using ${queryResult.method} method`);
|
|
9392
|
+
const notes = [];
|
|
9393
|
+
if (queryResult.results.length === 0) {
|
|
9394
|
+
notes.push(
|
|
9395
|
+
'0 results. Try a broader regex pattern (e.g. ".*") or omit the symbolType filter. Use semantic_search for behavior-based queries.'
|
|
9396
|
+
);
|
|
9397
|
+
} else if (paginatedResults.length === 0 && offset > 0) {
|
|
9398
|
+
notes.push(
|
|
9399
|
+
"No results for this page. The offset is beyond the available results; try reducing or resetting the offset to 0."
|
|
9400
|
+
);
|
|
9456
9401
|
}
|
|
9457
|
-
|
|
9402
|
+
if (queryResult.method === "content") {
|
|
9403
|
+
notes.push('Using content search. Run "lien reindex" to enable faster symbol-based queries.');
|
|
9404
|
+
}
|
|
9405
|
+
return {
|
|
9406
|
+
indexInfo: getIndexMetadata(),
|
|
9407
|
+
method: queryResult.method,
|
|
9408
|
+
hasMore,
|
|
9409
|
+
...nextOffset !== void 0 ? { nextOffset } : {},
|
|
9410
|
+
results: shapeResults(paginatedResults, "list_functions"),
|
|
9411
|
+
...notes.length > 0 && { note: notes.join(" ") }
|
|
9412
|
+
};
|
|
9413
|
+
})(args);
|
|
9458
9414
|
}
|
|
9459
9415
|
|
|
9460
|
-
// src/mcp/handlers/get-dependents.ts
|
|
9461
|
-
import { QdrantDB as QdrantDB3 } from "@liendev/core";
|
|
9462
|
-
|
|
9463
9416
|
// src/mcp/handlers/dependency-analyzer.ts
|
|
9464
|
-
import {
|
|
9465
|
-
|
|
9417
|
+
import {
|
|
9418
|
+
findTransitiveDependents,
|
|
9419
|
+
normalizePath as normalizePath2,
|
|
9420
|
+
matchesFile as matchesFile2,
|
|
9421
|
+
getCanonicalPath as getCanonicalPath2,
|
|
9422
|
+
isTestFile as isTestFile2
|
|
9423
|
+
} from "@liendev/core";
|
|
9466
9424
|
var COMPLEXITY_THRESHOLDS = {
|
|
9467
9425
|
HIGH_COMPLEXITY_DEPENDENT: 10,
|
|
9468
9426
|
// Individual file is complex
|
|
@@ -9479,81 +9437,283 @@ var COMPLEXITY_THRESHOLDS = {
|
|
|
9479
9437
|
MEDIUM_MAX: 15
|
|
9480
9438
|
// Occasional branching
|
|
9481
9439
|
};
|
|
9482
|
-
|
|
9483
|
-
|
|
9484
|
-
|
|
9485
|
-
|
|
9486
|
-
|
|
9487
|
-
if (
|
|
9488
|
-
|
|
9440
|
+
var scanCache = null;
|
|
9441
|
+
function collectNamedSymbolsFromChunk(chunk, normalizedTarget, normalizePathCached, symbols) {
|
|
9442
|
+
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9443
|
+
if (!importedSymbols || typeof importedSymbols !== "object") return;
|
|
9444
|
+
for (const [importPath, syms] of Object.entries(importedSymbols)) {
|
|
9445
|
+
if (matchesFile2(normalizePathCached(importPath), normalizedTarget)) {
|
|
9446
|
+
for (const sym of syms) symbols.add(sym);
|
|
9489
9447
|
}
|
|
9490
|
-
allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT2 });
|
|
9491
9448
|
}
|
|
9492
|
-
|
|
9493
|
-
|
|
9494
|
-
|
|
9449
|
+
}
|
|
9450
|
+
function collectRawImportSentinel(chunk, normalizedTarget, normalizePathCached, symbols) {
|
|
9451
|
+
const imports = chunk.metadata.imports || [];
|
|
9452
|
+
for (const imp of imports) {
|
|
9453
|
+
if (matchesFile2(normalizePathCached(imp), normalizedTarget)) symbols.add("*");
|
|
9454
|
+
}
|
|
9455
|
+
}
|
|
9456
|
+
function collectSymbolsFromChunk(chunk, normalizedTarget, normalizePathCached, symbols) {
|
|
9457
|
+
collectNamedSymbolsFromChunk(chunk, normalizedTarget, normalizePathCached, symbols);
|
|
9458
|
+
collectRawImportSentinel(chunk, normalizedTarget, normalizePathCached, symbols);
|
|
9459
|
+
}
|
|
9460
|
+
function collectImportedSymbolsFromTarget(chunks, normalizedTarget, normalizePathCached) {
|
|
9461
|
+
const symbols = /* @__PURE__ */ new Set();
|
|
9462
|
+
for (const chunk of chunks) {
|
|
9463
|
+
collectSymbolsFromChunk(chunk, normalizedTarget, normalizePathCached, symbols);
|
|
9464
|
+
}
|
|
9465
|
+
return symbols;
|
|
9466
|
+
}
|
|
9467
|
+
function collectExportsFromChunks(chunks) {
|
|
9468
|
+
const allExports = /* @__PURE__ */ new Set();
|
|
9469
|
+
for (const chunk of chunks) {
|
|
9470
|
+
for (const exp of chunk.metadata.exports || []) allExports.add(exp);
|
|
9471
|
+
}
|
|
9472
|
+
return allExports;
|
|
9473
|
+
}
|
|
9474
|
+
function findReExportedSymbols(importsFromTarget, allExports) {
|
|
9475
|
+
if (importsFromTarget.has("*")) return [...allExports];
|
|
9476
|
+
for (const sym of importsFromTarget) {
|
|
9477
|
+
if (sym.startsWith("* as ")) return [...allExports];
|
|
9478
|
+
}
|
|
9479
|
+
const reExported = [];
|
|
9480
|
+
for (const sym of importsFromTarget) {
|
|
9481
|
+
if (allExports.has(sym)) reExported.push(sym);
|
|
9482
|
+
}
|
|
9483
|
+
return reExported;
|
|
9484
|
+
}
|
|
9485
|
+
function buildReExportGraph(allChunksByFile, normalizedTarget, normalizePathCached) {
|
|
9486
|
+
const reExporters = [];
|
|
9487
|
+
for (const [filepath, chunks] of allChunksByFile.entries()) {
|
|
9488
|
+
if (matchesFile2(filepath, normalizedTarget)) continue;
|
|
9489
|
+
const importsFromTarget = collectImportedSymbolsFromTarget(
|
|
9490
|
+
chunks,
|
|
9491
|
+
normalizedTarget,
|
|
9492
|
+
normalizePathCached
|
|
9493
|
+
);
|
|
9494
|
+
const allExports = collectExportsFromChunks(chunks);
|
|
9495
|
+
if (importsFromTarget.size === 0 || allExports.size === 0) continue;
|
|
9496
|
+
const reExportedSymbols = findReExportedSymbols(importsFromTarget, allExports);
|
|
9497
|
+
if (reExportedSymbols.length > 0) {
|
|
9498
|
+
reExporters.push({ filepath, reExportedSymbols });
|
|
9499
|
+
}
|
|
9500
|
+
}
|
|
9501
|
+
return reExporters;
|
|
9502
|
+
}
|
|
9503
|
+
function fileImportsSymbolFromAny(chunks, targetSymbol, targetPaths, normalizePathCached) {
|
|
9504
|
+
return chunks.some((chunk) => {
|
|
9505
|
+
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9506
|
+
if (!importedSymbols) return false;
|
|
9507
|
+
for (const [importPath, symbols] of Object.entries(importedSymbols)) {
|
|
9508
|
+
const normalizedImport = normalizePathCached(importPath);
|
|
9509
|
+
const matchesAny = targetPaths.some((tp) => matchesFile2(normalizedImport, tp));
|
|
9510
|
+
if (matchesAny) {
|
|
9511
|
+
if (symbols.includes(targetSymbol)) return true;
|
|
9512
|
+
if (symbols.some((s) => s.startsWith("* as "))) return true;
|
|
9513
|
+
}
|
|
9514
|
+
}
|
|
9515
|
+
return false;
|
|
9516
|
+
});
|
|
9517
|
+
}
|
|
9518
|
+
function addChunkToImportIndex(chunk, normalizePathCached, importIndex) {
|
|
9519
|
+
const imports = chunk.metadata.imports || [];
|
|
9520
|
+
for (const imp of imports) {
|
|
9521
|
+
const normalizedImport = normalizePathCached(imp);
|
|
9522
|
+
if (!importIndex.has(normalizedImport)) {
|
|
9523
|
+
importIndex.set(normalizedImport, []);
|
|
9524
|
+
}
|
|
9525
|
+
importIndex.get(normalizedImport).push(chunk);
|
|
9526
|
+
}
|
|
9527
|
+
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9528
|
+
if (importedSymbols && typeof importedSymbols === "object") {
|
|
9529
|
+
for (const modulePath of Object.keys(importedSymbols)) {
|
|
9530
|
+
const normalizedImport = normalizePathCached(modulePath);
|
|
9531
|
+
if (!importIndex.has(normalizedImport)) {
|
|
9532
|
+
importIndex.set(normalizedImport, []);
|
|
9533
|
+
}
|
|
9534
|
+
importIndex.get(normalizedImport).push(chunk);
|
|
9535
|
+
}
|
|
9536
|
+
}
|
|
9537
|
+
}
|
|
9538
|
+
function addChunkToFileMap(chunk, normalizePathCached, fileMap, seenRanges) {
|
|
9539
|
+
const canonical = normalizePathCached(chunk.metadata.file);
|
|
9540
|
+
if (!fileMap.has(canonical)) {
|
|
9541
|
+
fileMap.set(canonical, []);
|
|
9542
|
+
seenRanges.set(canonical, /* @__PURE__ */ new Set());
|
|
9543
|
+
}
|
|
9544
|
+
const rangeKey = `${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
|
|
9545
|
+
const seen = seenRanges.get(canonical);
|
|
9546
|
+
if (seen.has(rangeKey)) return;
|
|
9547
|
+
seen.add(rangeKey);
|
|
9548
|
+
fileMap.get(canonical).push(chunk);
|
|
9549
|
+
}
|
|
9550
|
+
async function scanChunksPaginated(vectorDB, crossRepo, log, normalizePathCached) {
|
|
9551
|
+
const importIndex = /* @__PURE__ */ new Map();
|
|
9552
|
+
const allChunksByFile = /* @__PURE__ */ new Map();
|
|
9553
|
+
const seenRanges = /* @__PURE__ */ new Map();
|
|
9554
|
+
let totalChunks = 0;
|
|
9555
|
+
if (crossRepo && vectorDB.supportsCrossRepo) {
|
|
9556
|
+
const CROSS_REPO_LIMIT = 1e5;
|
|
9557
|
+
const allChunks = await vectorDB.scanCrossRepo({ limit: CROSS_REPO_LIMIT });
|
|
9558
|
+
totalChunks = allChunks.length;
|
|
9559
|
+
const hitLimit = totalChunks >= CROSS_REPO_LIMIT;
|
|
9560
|
+
if (hitLimit) {
|
|
9561
|
+
log(
|
|
9562
|
+
`Warning: cross-repo scan hit ${CROSS_REPO_LIMIT} chunk limit. Results may be incomplete.`,
|
|
9563
|
+
"warning"
|
|
9564
|
+
);
|
|
9565
|
+
}
|
|
9566
|
+
for (const chunk of allChunks) {
|
|
9567
|
+
addChunkToImportIndex(chunk, normalizePathCached, importIndex);
|
|
9568
|
+
addChunkToFileMap(chunk, normalizePathCached, allChunksByFile, seenRanges);
|
|
9569
|
+
}
|
|
9570
|
+
return { importIndex, allChunksByFile, totalChunks, hitLimit };
|
|
9571
|
+
}
|
|
9572
|
+
if (crossRepo) {
|
|
9573
|
+
log(
|
|
9574
|
+
"Warning: crossRepo=true requires a cross-repo-capable backend. Falling back to single-repo paginated scan.",
|
|
9575
|
+
"warning"
|
|
9576
|
+
);
|
|
9577
|
+
}
|
|
9578
|
+
for await (const page of vectorDB.scanPaginated({ pageSize: 1e3 })) {
|
|
9579
|
+
totalChunks += page.length;
|
|
9580
|
+
for (const chunk of page) {
|
|
9581
|
+
addChunkToImportIndex(chunk, normalizePathCached, importIndex);
|
|
9582
|
+
addChunkToFileMap(chunk, normalizePathCached, allChunksByFile, seenRanges);
|
|
9583
|
+
}
|
|
9495
9584
|
}
|
|
9496
|
-
return {
|
|
9585
|
+
return { importIndex, allChunksByFile, totalChunks, hitLimit: false };
|
|
9497
9586
|
}
|
|
9498
9587
|
function createPathNormalizer() {
|
|
9499
9588
|
const workspaceRoot = process.cwd().replace(/\\/g, "/");
|
|
9500
9589
|
const cache = /* @__PURE__ */ new Map();
|
|
9501
|
-
return (
|
|
9502
|
-
if (!cache.has(
|
|
9503
|
-
cache.set(
|
|
9590
|
+
return (path7) => {
|
|
9591
|
+
if (!cache.has(path7)) {
|
|
9592
|
+
cache.set(path7, normalizePath2(path7, workspaceRoot));
|
|
9504
9593
|
}
|
|
9505
|
-
return cache.get(
|
|
9594
|
+
return cache.get(path7);
|
|
9506
9595
|
};
|
|
9507
9596
|
}
|
|
9508
9597
|
function groupChunksByFile(chunks) {
|
|
9509
9598
|
const workspaceRoot = process.cwd().replace(/\\/g, "/");
|
|
9510
9599
|
const chunksByFile = /* @__PURE__ */ new Map();
|
|
9511
9600
|
for (const chunk of chunks) {
|
|
9512
|
-
const canonical =
|
|
9601
|
+
const canonical = getCanonicalPath2(chunk.metadata.file, workspaceRoot);
|
|
9513
9602
|
const existing = chunksByFile.get(canonical) || [];
|
|
9514
9603
|
existing.push(chunk);
|
|
9515
9604
|
chunksByFile.set(canonical, existing);
|
|
9516
9605
|
}
|
|
9517
9606
|
return chunksByFile;
|
|
9518
9607
|
}
|
|
9519
|
-
function buildDependentsList(chunksByFile, symbol, normalizedTarget, normalizePathCached,
|
|
9608
|
+
function buildDependentsList(chunksByFile, symbol, normalizedTarget, normalizePathCached, targetFileChunks, filepath, log, reExporterPaths = []) {
|
|
9520
9609
|
if (symbol) {
|
|
9521
|
-
validateSymbolExport(
|
|
9522
|
-
return findSymbolUsages(
|
|
9610
|
+
validateSymbolExport(targetFileChunks, symbol, filepath, log);
|
|
9611
|
+
return findSymbolUsages(
|
|
9612
|
+
chunksByFile,
|
|
9613
|
+
symbol,
|
|
9614
|
+
normalizedTarget,
|
|
9615
|
+
normalizePathCached,
|
|
9616
|
+
reExporterPaths
|
|
9617
|
+
);
|
|
9523
9618
|
}
|
|
9524
9619
|
const dependents = Array.from(chunksByFile.keys()).map((fp) => ({
|
|
9525
9620
|
filepath: fp,
|
|
9526
|
-
isTestFile:
|
|
9621
|
+
isTestFile: isTestFile2(fp)
|
|
9527
9622
|
}));
|
|
9528
9623
|
return { dependents, totalUsageCount: void 0 };
|
|
9529
9624
|
}
|
|
9530
|
-
function validateSymbolExport(
|
|
9531
|
-
const
|
|
9532
|
-
|
|
9533
|
-
return matchesFile(chunkFile, normalizedTarget) && chunk.metadata.exports?.includes(symbol);
|
|
9534
|
-
});
|
|
9535
|
-
if (!targetFileExportsSymbol) {
|
|
9625
|
+
function validateSymbolExport(targetFileChunks, symbol, filepath, log) {
|
|
9626
|
+
const exportsSymbol = targetFileChunks.some((chunk) => chunk.metadata.exports?.includes(symbol));
|
|
9627
|
+
if (!exportsSymbol) {
|
|
9536
9628
|
log(`Warning: Symbol "${symbol}" not found in exports of ${filepath}`, "warning");
|
|
9537
9629
|
}
|
|
9538
9630
|
}
|
|
9539
|
-
|
|
9540
|
-
const
|
|
9541
|
-
|
|
9631
|
+
function mergeChunksByFile(target, source) {
|
|
9632
|
+
for (const [fp, chunks] of source.entries()) {
|
|
9633
|
+
const existing = target.get(fp);
|
|
9634
|
+
if (existing) {
|
|
9635
|
+
existing.push(...chunks);
|
|
9636
|
+
} else {
|
|
9637
|
+
target.set(fp, chunks);
|
|
9638
|
+
}
|
|
9639
|
+
}
|
|
9640
|
+
}
|
|
9641
|
+
function mergeTransitiveDependents(reExporters, importIndex, normalizedTarget, normalizePathCached, allChunksByFile, chunksByFile, log) {
|
|
9642
|
+
const existingFiles = new Set(chunksByFile.keys());
|
|
9643
|
+
const transitiveChunks = findTransitiveDependents(
|
|
9644
|
+
reExporters.map((r) => r.filepath),
|
|
9645
|
+
importIndex,
|
|
9646
|
+
normalizedTarget,
|
|
9647
|
+
normalizePathCached,
|
|
9648
|
+
allChunksByFile,
|
|
9649
|
+
existingFiles
|
|
9650
|
+
);
|
|
9651
|
+
if (transitiveChunks.length > 0) {
|
|
9652
|
+
const transitiveByFile = groupChunksByFile(transitiveChunks);
|
|
9653
|
+
mergeChunksByFile(chunksByFile, transitiveByFile);
|
|
9654
|
+
log(`Found ${transitiveByFile.size} additional dependents via re-export chains`);
|
|
9655
|
+
}
|
|
9656
|
+
}
|
|
9657
|
+
async function getOrScanChunks(vectorDB, crossRepo, log, normalizePathCached, indexVersion) {
|
|
9658
|
+
if (indexVersion !== void 0 && scanCache !== null && scanCache.indexVersion === indexVersion && scanCache.crossRepo === crossRepo) {
|
|
9659
|
+
log(`Using cached import index (${scanCache.totalChunks} chunks, version ${indexVersion})`);
|
|
9660
|
+
return scanCache;
|
|
9661
|
+
}
|
|
9662
|
+
const scanResult = await scanChunksPaginated(vectorDB, crossRepo, log, normalizePathCached);
|
|
9663
|
+
if (indexVersion !== void 0) {
|
|
9664
|
+
scanCache = { indexVersion, crossRepo, ...scanResult };
|
|
9665
|
+
}
|
|
9666
|
+
log(`Scanned ${scanResult.totalChunks} chunks for imports...`);
|
|
9667
|
+
return scanResult;
|
|
9668
|
+
}
|
|
9669
|
+
function resolveTransitiveDependents(allChunksByFile, normalizedTarget, normalizePathCached, importIndex, chunksByFile, log) {
|
|
9670
|
+
const reExporters = buildReExportGraph(allChunksByFile, normalizedTarget, normalizePathCached);
|
|
9671
|
+
if (reExporters.length > 0) {
|
|
9672
|
+
mergeTransitiveDependents(
|
|
9673
|
+
reExporters,
|
|
9674
|
+
importIndex,
|
|
9675
|
+
normalizedTarget,
|
|
9676
|
+
normalizePathCached,
|
|
9677
|
+
allChunksByFile,
|
|
9678
|
+
chunksByFile,
|
|
9679
|
+
log
|
|
9680
|
+
);
|
|
9681
|
+
}
|
|
9682
|
+
return reExporters;
|
|
9683
|
+
}
|
|
9684
|
+
async function findDependents(vectorDB, filepath, crossRepo, log, symbol, indexVersion) {
|
|
9542
9685
|
const normalizePathCached = createPathNormalizer();
|
|
9543
9686
|
const normalizedTarget = normalizePathCached(filepath);
|
|
9544
|
-
const importIndex =
|
|
9687
|
+
const { importIndex, allChunksByFile, hitLimit } = await getOrScanChunks(
|
|
9688
|
+
vectorDB,
|
|
9689
|
+
crossRepo,
|
|
9690
|
+
log,
|
|
9691
|
+
normalizePathCached,
|
|
9692
|
+
indexVersion
|
|
9693
|
+
);
|
|
9545
9694
|
const dependentChunks = findDependentChunks(importIndex, normalizedTarget);
|
|
9546
9695
|
const chunksByFile = groupChunksByFile(dependentChunks);
|
|
9696
|
+
const reExporters = resolveTransitiveDependents(
|
|
9697
|
+
allChunksByFile,
|
|
9698
|
+
normalizedTarget,
|
|
9699
|
+
normalizePathCached,
|
|
9700
|
+
importIndex,
|
|
9701
|
+
chunksByFile,
|
|
9702
|
+
log
|
|
9703
|
+
);
|
|
9547
9704
|
const fileComplexities = calculateFileComplexities(chunksByFile);
|
|
9548
9705
|
const complexityMetrics = calculateOverallComplexityMetrics(fileComplexities);
|
|
9706
|
+
const targetFileChunks = symbol ? allChunksByFile.get(normalizedTarget) ?? [] : [];
|
|
9707
|
+
const reExporterPaths = reExporters.map((re) => re.filepath);
|
|
9549
9708
|
const { dependents, totalUsageCount } = buildDependentsList(
|
|
9550
9709
|
chunksByFile,
|
|
9551
9710
|
symbol,
|
|
9552
9711
|
normalizedTarget,
|
|
9553
9712
|
normalizePathCached,
|
|
9554
|
-
|
|
9713
|
+
targetFileChunks,
|
|
9555
9714
|
filepath,
|
|
9556
|
-
log
|
|
9715
|
+
log,
|
|
9716
|
+
reExporterPaths
|
|
9557
9717
|
);
|
|
9558
9718
|
dependents.sort((a, b) => {
|
|
9559
9719
|
if (a.isTestFile === b.isTestFile) return 0;
|
|
@@ -9561,6 +9721,7 @@ async function findDependents(vectorDB, filepath, crossRepo, log, symbol) {
|
|
|
9561
9721
|
});
|
|
9562
9722
|
const testDependentCount = dependents.filter((f) => f.isTestFile).length;
|
|
9563
9723
|
const productionDependentCount = dependents.length - testDependentCount;
|
|
9724
|
+
const allChunks = crossRepo ? Array.from(allChunksByFile.values()).flat() : [];
|
|
9564
9725
|
return {
|
|
9565
9726
|
dependents,
|
|
9566
9727
|
productionDependentCount,
|
|
@@ -9573,29 +9734,6 @@ async function findDependents(vectorDB, filepath, crossRepo, log, symbol) {
|
|
|
9573
9734
|
totalUsageCount
|
|
9574
9735
|
};
|
|
9575
9736
|
}
|
|
9576
|
-
function buildImportIndex(allChunks, normalizePathCached) {
|
|
9577
|
-
const importIndex = /* @__PURE__ */ new Map();
|
|
9578
|
-
const addToIndex = (importPath, chunk) => {
|
|
9579
|
-
const normalizedImport = normalizePathCached(importPath);
|
|
9580
|
-
if (!importIndex.has(normalizedImport)) {
|
|
9581
|
-
importIndex.set(normalizedImport, []);
|
|
9582
|
-
}
|
|
9583
|
-
importIndex.get(normalizedImport).push(chunk);
|
|
9584
|
-
};
|
|
9585
|
-
for (const chunk of allChunks) {
|
|
9586
|
-
const imports = chunk.metadata.imports || [];
|
|
9587
|
-
for (const imp of imports) {
|
|
9588
|
-
addToIndex(imp, chunk);
|
|
9589
|
-
}
|
|
9590
|
-
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9591
|
-
if (importedSymbols && typeof importedSymbols === "object") {
|
|
9592
|
-
for (const modulePath of Object.keys(importedSymbols)) {
|
|
9593
|
-
addToIndex(modulePath, chunk);
|
|
9594
|
-
}
|
|
9595
|
-
}
|
|
9596
|
-
}
|
|
9597
|
-
return importIndex;
|
|
9598
|
-
}
|
|
9599
9737
|
function findDependentChunks(importIndex, normalizedTarget) {
|
|
9600
9738
|
const dependentChunks = [];
|
|
9601
9739
|
const seenChunkIds = /* @__PURE__ */ new Set();
|
|
@@ -9612,7 +9750,7 @@ function findDependentChunks(importIndex, normalizedTarget) {
|
|
|
9612
9750
|
}
|
|
9613
9751
|
}
|
|
9614
9752
|
for (const [normalizedImport, chunks] of importIndex.entries()) {
|
|
9615
|
-
if (normalizedImport !== normalizedTarget &&
|
|
9753
|
+
if (normalizedImport !== normalizedTarget && matchesFile2(normalizedImport, normalizedTarget)) {
|
|
9616
9754
|
for (const chunk of chunks) {
|
|
9617
9755
|
addChunk(chunk);
|
|
9618
9756
|
}
|
|
@@ -9651,7 +9789,11 @@ function calculateOverallComplexityMetrics(fileComplexities) {
|
|
|
9651
9789
|
const allMaxes = fileComplexities.map((f) => f.maxComplexity);
|
|
9652
9790
|
const totalAvg = allAvgs.reduce((a, b) => a + b, 0) / allAvgs.length;
|
|
9653
9791
|
const globalMax = Math.max(...allMaxes);
|
|
9654
|
-
const highComplexityDependents = fileComplexities.filter((f) => f.maxComplexity > COMPLEXITY_THRESHOLDS.HIGH_COMPLEXITY_DEPENDENT).sort((a, b) => b.maxComplexity - a.maxComplexity).slice(0, 5).map((f) => ({
|
|
9792
|
+
const highComplexityDependents = fileComplexities.filter((f) => f.maxComplexity > COMPLEXITY_THRESHOLDS.HIGH_COMPLEXITY_DEPENDENT).sort((a, b) => b.maxComplexity - a.maxComplexity).slice(0, 5).map((f) => ({
|
|
9793
|
+
filepath: f.filepath,
|
|
9794
|
+
maxComplexity: f.maxComplexity,
|
|
9795
|
+
avgComplexity: f.avgComplexity
|
|
9796
|
+
}));
|
|
9655
9797
|
const complexityRiskBoost = calculateComplexityRiskBoost(totalAvg, globalMax);
|
|
9656
9798
|
return {
|
|
9657
9799
|
averageComplexity: Math.round(totalAvg * 10) / 10,
|
|
@@ -9713,37 +9855,24 @@ function groupDependentsByRepo(dependents, chunks) {
|
|
|
9713
9855
|
}
|
|
9714
9856
|
return grouped;
|
|
9715
9857
|
}
|
|
9716
|
-
function findSymbolUsages(chunksByFile, targetSymbol, normalizedTarget, normalizePathCached) {
|
|
9858
|
+
function findSymbolUsages(chunksByFile, targetSymbol, normalizedTarget, normalizePathCached, reExporterPaths = []) {
|
|
9717
9859
|
const dependents = [];
|
|
9718
9860
|
let totalUsageCount = 0;
|
|
9861
|
+
const allTargetPaths = [normalizedTarget, ...reExporterPaths];
|
|
9719
9862
|
for (const [filepath, chunks] of chunksByFile.entries()) {
|
|
9720
|
-
if (!
|
|
9863
|
+
if (!fileImportsSymbolFromAny(chunks, targetSymbol, allTargetPaths, normalizePathCached)) {
|
|
9721
9864
|
continue;
|
|
9722
9865
|
}
|
|
9723
9866
|
const usages = extractSymbolUsagesFromChunks(chunks, targetSymbol);
|
|
9724
9867
|
dependents.push({
|
|
9725
9868
|
filepath,
|
|
9726
|
-
isTestFile:
|
|
9869
|
+
isTestFile: isTestFile2(filepath),
|
|
9727
9870
|
usages: usages.length > 0 ? usages : void 0
|
|
9728
9871
|
});
|
|
9729
9872
|
totalUsageCount += usages.length;
|
|
9730
9873
|
}
|
|
9731
9874
|
return { dependents, totalUsageCount };
|
|
9732
9875
|
}
|
|
9733
|
-
function fileImportsSymbol(chunks, targetSymbol, normalizedTarget, normalizePathCached) {
|
|
9734
|
-
return chunks.some((chunk) => {
|
|
9735
|
-
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9736
|
-
if (!importedSymbols) return false;
|
|
9737
|
-
for (const [importPath, symbols] of Object.entries(importedSymbols)) {
|
|
9738
|
-
const normalizedImport = normalizePathCached(importPath);
|
|
9739
|
-
if (matchesFile(normalizedImport, normalizedTarget)) {
|
|
9740
|
-
if (symbols.includes(targetSymbol)) return true;
|
|
9741
|
-
if (symbols.some((s) => s.startsWith("* as "))) return true;
|
|
9742
|
-
}
|
|
9743
|
-
}
|
|
9744
|
-
return false;
|
|
9745
|
-
});
|
|
9746
|
-
}
|
|
9747
9876
|
function extractSymbolUsagesFromChunks(chunks, targetSymbol) {
|
|
9748
9877
|
const usages = [];
|
|
9749
9878
|
for (const chunk of chunks) {
|
|
@@ -9790,12 +9919,14 @@ function extractSnippet(lines, callLine, startLine, symbolName) {
|
|
|
9790
9919
|
|
|
9791
9920
|
// src/mcp/handlers/get-dependents.ts
|
|
9792
9921
|
function checkCrossRepoFallback(crossRepo, vectorDB) {
|
|
9793
|
-
return Boolean(crossRepo && !
|
|
9922
|
+
return Boolean(crossRepo && !vectorDB.supportsCrossRepo);
|
|
9794
9923
|
}
|
|
9795
9924
|
function buildNotes(crossRepoFallback, hitLimit) {
|
|
9796
9925
|
const notes = [];
|
|
9797
9926
|
if (crossRepoFallback) {
|
|
9798
|
-
notes.push(
|
|
9927
|
+
notes.push(
|
|
9928
|
+
"Cross-repo search requires a cross-repo-capable backend. Fell back to single-repo search."
|
|
9929
|
+
);
|
|
9799
9930
|
}
|
|
9800
9931
|
if (hitLimit) {
|
|
9801
9932
|
notes.push("Scanned 10,000 chunks (limit reached). Results may be incomplete.");
|
|
@@ -9815,9 +9946,7 @@ function logRiskAssessment(analysis, riskLevel, symbol, log) {
|
|
|
9815
9946
|
);
|
|
9816
9947
|
}
|
|
9817
9948
|
} else {
|
|
9818
|
-
log(
|
|
9819
|
-
`Found ${analysis.dependents.length} dependents ${prodTest} - risk: ${riskLevel}`
|
|
9820
|
-
);
|
|
9949
|
+
log(`Found ${analysis.dependents.length} dependents ${prodTest} - risk: ${riskLevel}`);
|
|
9821
9950
|
}
|
|
9822
9951
|
}
|
|
9823
9952
|
function buildDependentsResponse(analysis, args, riskLevel, indexInfo, notes, crossRepo, vectorDB) {
|
|
@@ -9841,46 +9970,51 @@ function buildDependentsResponse(analysis, args, riskLevel, indexInfo, notes, cr
|
|
|
9841
9970
|
if (notes.length > 0) {
|
|
9842
9971
|
response.note = notes.join(" ");
|
|
9843
9972
|
}
|
|
9844
|
-
if (crossRepo && vectorDB
|
|
9973
|
+
if (crossRepo && vectorDB.supportsCrossRepo) {
|
|
9845
9974
|
response.groupedByRepo = groupDependentsByRepo(analysis.dependents, analysis.allChunks);
|
|
9846
9975
|
}
|
|
9847
9976
|
return response;
|
|
9848
9977
|
}
|
|
9849
9978
|
async function handleGetDependents(args, ctx) {
|
|
9850
9979
|
const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
|
|
9851
|
-
return await wrapToolHandler(
|
|
9852
|
-
|
|
9853
|
-
|
|
9854
|
-
|
|
9855
|
-
|
|
9856
|
-
|
|
9857
|
-
|
|
9858
|
-
|
|
9859
|
-
|
|
9860
|
-
|
|
9861
|
-
|
|
9862
|
-
|
|
9863
|
-
|
|
9864
|
-
|
|
9865
|
-
|
|
9866
|
-
|
|
9867
|
-
|
|
9868
|
-
|
|
9869
|
-
|
|
9870
|
-
|
|
9871
|
-
|
|
9872
|
-
|
|
9873
|
-
|
|
9874
|
-
|
|
9875
|
-
|
|
9876
|
-
|
|
9877
|
-
|
|
9878
|
-
|
|
9980
|
+
return await wrapToolHandler(GetDependentsSchema, async (validatedArgs) => {
|
|
9981
|
+
const { crossRepo, filepath, symbol } = validatedArgs;
|
|
9982
|
+
const symbolSuffix = symbol ? ` (symbol: ${symbol})` : "";
|
|
9983
|
+
const crossRepoSuffix = crossRepo ? " (cross-repo)" : "";
|
|
9984
|
+
log(`Finding dependents of: ${filepath}${symbolSuffix}${crossRepoSuffix}`);
|
|
9985
|
+
await checkAndReconnect();
|
|
9986
|
+
const indexInfo = getIndexMetadata();
|
|
9987
|
+
const analysis = await findDependents(
|
|
9988
|
+
vectorDB,
|
|
9989
|
+
filepath,
|
|
9990
|
+
crossRepo ?? false,
|
|
9991
|
+
log,
|
|
9992
|
+
symbol,
|
|
9993
|
+
indexInfo.indexVersion
|
|
9994
|
+
);
|
|
9995
|
+
const riskLevel = calculateRiskLevel(
|
|
9996
|
+
analysis.dependents.length,
|
|
9997
|
+
analysis.complexityMetrics.complexityRiskBoost,
|
|
9998
|
+
analysis.productionDependentCount
|
|
9999
|
+
);
|
|
10000
|
+
logRiskAssessment(analysis, riskLevel, symbol, log);
|
|
10001
|
+
const crossRepoFallback = checkCrossRepoFallback(crossRepo, vectorDB);
|
|
10002
|
+
const notes = buildNotes(crossRepoFallback, analysis.hitLimit);
|
|
10003
|
+
return buildDependentsResponse(
|
|
10004
|
+
analysis,
|
|
10005
|
+
validatedArgs,
|
|
10006
|
+
riskLevel,
|
|
10007
|
+
indexInfo,
|
|
10008
|
+
notes,
|
|
10009
|
+
crossRepo,
|
|
10010
|
+
vectorDB
|
|
10011
|
+
);
|
|
10012
|
+
})(args);
|
|
9879
10013
|
}
|
|
9880
10014
|
|
|
9881
10015
|
// src/mcp/handlers/get-complexity.ts
|
|
9882
10016
|
var import_collect = __toESM(require_dist(), 1);
|
|
9883
|
-
import { ComplexityAnalyzer
|
|
10017
|
+
import { ComplexityAnalyzer } from "@liendev/core";
|
|
9884
10018
|
function transformViolation(v, fileData) {
|
|
9885
10019
|
return {
|
|
9886
10020
|
filepath: v.filepath,
|
|
@@ -9919,7 +10053,7 @@ async function fetchCrossRepoChunks(vectorDB, crossRepo, repoIds, log) {
|
|
|
9919
10053
|
if (!crossRepo) {
|
|
9920
10054
|
return { chunks: [], fallback: false };
|
|
9921
10055
|
}
|
|
9922
|
-
if (vectorDB
|
|
10056
|
+
if (vectorDB.supportsCrossRepo) {
|
|
9923
10057
|
const chunks = await vectorDB.scanCrossRepo({ limit: 1e5, repoIds });
|
|
9924
10058
|
log(`Scanned ${chunks.length} chunks across repos`);
|
|
9925
10059
|
return { chunks, fallback: false };
|
|
@@ -9928,7 +10062,11 @@ async function fetchCrossRepoChunks(vectorDB, crossRepo, repoIds, log) {
|
|
|
9928
10062
|
}
|
|
9929
10063
|
function processViolations(report, threshold, top) {
|
|
9930
10064
|
const allViolations = (0, import_collect.default)(Object.entries(report.files)).flatMap(
|
|
9931
|
-
([
|
|
10065
|
+
([
|
|
10066
|
+
,
|
|
10067
|
+
/* filepath unused */
|
|
10068
|
+
fileData
|
|
10069
|
+
]) => fileData.violations.map((v) => transformViolation(v, fileData))
|
|
9932
10070
|
).sortByDesc("complexity").all();
|
|
9933
10071
|
const violations = threshold !== void 0 ? allViolations.filter((v) => v.complexity >= threshold) : allViolations;
|
|
9934
10072
|
const severityCounts = (0, import_collect.default)(violations).countBy("severity").all();
|
|
@@ -9942,61 +10080,61 @@ function processViolations(report, threshold, top) {
|
|
|
9942
10080
|
};
|
|
9943
10081
|
}
|
|
9944
10082
|
function buildCrossRepoFallbackNote(fallback) {
|
|
9945
|
-
return fallback ? "Cross-repo analysis requires
|
|
10083
|
+
return fallback ? "Cross-repo analysis requires a cross-repo-capable backend. Fell back to single-repo analysis." : void 0;
|
|
9946
10084
|
}
|
|
9947
10085
|
/**
 * MCP tool handler for "get_complexity": validates arguments against
 * GetComplexitySchema, optionally gathers cross-repo chunks, runs the
 * ComplexityAnalyzer, and returns a summary plus the top violations
 * (grouped by repo when cross-repo data is available).
 */
async function handleGetComplexity(args, ctx) {
  const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
  const handler = wrapToolHandler(GetComplexitySchema, async (validatedArgs) => {
    const { crossRepo, repoIds, files, top, threshold } = validatedArgs;
    log(`Analyzing complexity${crossRepo ? " (cross-repo)" : ""}...`);
    await checkAndReconnect();
    // `fallback` signals the backend cannot serve cross-repo queries.
    const { chunks: allChunks, fallback } = await fetchCrossRepoChunks(
      vectorDB,
      crossRepo,
      repoIds,
      log
    );
    const analyzer = new ComplexityAnalyzer(vectorDB);
    const report = await analyzer.analyze(files, crossRepo && !fallback, repoIds);
    log(`Analyzed ${report.summary.filesAnalyzed} files`);
    const { violations, topViolations, bySeverity } = processViolations(
      report,
      threshold,
      top ?? 10
    );
    const note = buildCrossRepoFallbackNote(fallback);
    if (note) {
      log(
        "Warning: crossRepo=true requires a cross-repo-capable backend. Falling back to single-repo analysis.",
        "warning"
      );
    }
    const includeRepoGrouping = crossRepo && !fallback && allChunks.length > 0;
    return {
      indexInfo: getIndexMetadata(),
      summary: {
        filesAnalyzed: report.summary.filesAnalyzed,
        avgComplexity: report.summary.avgComplexity,
        maxComplexity: report.summary.maxComplexity,
        violationCount: violations.length,
        bySeverity
      },
      violations: topViolations,
      ...includeRepoGrouping && {
        groupedByRepo: groupViolationsByRepo(topViolations, allChunks)
      },
      ...note && { note }
    };
  });
  return await handler(args);
}
|
|
9991
10129
|
|
|
9992
10130
|
// src/mcp/handlers/index.ts
// Registry mapping MCP tool names (as exposed over the protocol) to their
// handler functions. Dispatch happens by key lookup elsewhere in the server.
var toolHandlers = {
  semantic_search: handleSemanticSearch,
  find_similar: handleFindSimilar,
  get_files_context: handleGetFilesContext,
  list_functions: handleListFunctions,
  get_dependents: handleGetDependents,
  get_complexity: handleGetComplexity
};
|
|
10001
10139
|
|
|
10002
10140
|
// src/mcp/server-config.ts
|
|
@@ -10084,7 +10222,7 @@ function mergePendingFiles(pendingFiles, newFiles) {
|
|
|
10084
10222
|
}
|
|
10085
10223
|
}
|
|
10086
10224
|
function createReindexStateManager() {
|
|
10087
|
-
|
|
10225
|
+
const state = {
|
|
10088
10226
|
inProgress: false,
|
|
10089
10227
|
pendingFiles: [],
|
|
10090
10228
|
lastReindexTimestamp: null,
|
|
@@ -10108,12 +10246,12 @@ function createReindexStateManager() {
|
|
|
10108
10246
|
},
|
|
10109
10247
|
/**
|
|
10110
10248
|
* Start a new reindex operation.
|
|
10111
|
-
*
|
|
10249
|
+
*
|
|
10112
10250
|
* **Important**: Silently ignores empty or null file arrays without incrementing
|
|
10113
10251
|
* activeOperations. This is intentional - if there's no work to do, no operation
|
|
10114
10252
|
* is started. Callers should check for empty arrays before calling if they need
|
|
10115
10253
|
* to track "attempted" operations.
|
|
10116
|
-
*
|
|
10254
|
+
*
|
|
10117
10255
|
* @param files - Array of file paths to reindex. Empty/null arrays are ignored.
|
|
10118
10256
|
*/
|
|
10119
10257
|
startReindex: (files) => {
|
|
@@ -10127,10 +10265,10 @@ function createReindexStateManager() {
|
|
|
10127
10265
|
},
|
|
10128
10266
|
/**
|
|
10129
10267
|
* Mark a reindex operation as complete.
|
|
10130
|
-
*
|
|
10268
|
+
*
|
|
10131
10269
|
* Logs a warning if called without a matching startReindex.
|
|
10132
10270
|
* Only clears state when all concurrent operations finish.
|
|
10133
|
-
*
|
|
10271
|
+
*
|
|
10134
10272
|
* @param durationMs - Duration of the reindex operation in milliseconds
|
|
10135
10273
|
*/
|
|
10136
10274
|
completeReindex: (durationMs) => {
|
|
@@ -10149,7 +10287,7 @@ function createReindexStateManager() {
|
|
|
10149
10287
|
},
|
|
10150
10288
|
/**
|
|
10151
10289
|
* Mark a reindex operation as failed.
|
|
10152
|
-
*
|
|
10290
|
+
*
|
|
10153
10291
|
* Logs a warning if called without a matching startReindex.
|
|
10154
10292
|
* Only clears state when all concurrent operations finish/fail.
|
|
10155
10293
|
*/
|
|
@@ -10167,13 +10305,13 @@ function createReindexStateManager() {
|
|
|
10167
10305
|
},
|
|
10168
10306
|
/**
|
|
10169
10307
|
* Manually reset state if it's stuck.
|
|
10170
|
-
*
|
|
10308
|
+
*
|
|
10171
10309
|
* **WARNING**: Only use this if you're certain operations have crashed without cleanup.
|
|
10172
10310
|
* This will forcibly clear the inProgress flag and reset activeOperations counter.
|
|
10173
|
-
*
|
|
10311
|
+
*
|
|
10174
10312
|
* Use this when getState() health check detects stuck state and you've verified
|
|
10175
10313
|
* no legitimate operations are running.
|
|
10176
|
-
*
|
|
10314
|
+
*
|
|
10177
10315
|
* @returns true if state was reset, false if state was already clean
|
|
10178
10316
|
*/
|
|
10179
10317
|
resetIfStuck: () => {
|
|
@@ -10192,60 +10330,292 @@ function createReindexStateManager() {
|
|
|
10192
10330
|
};
|
|
10193
10331
|
}
|
|
10194
10332
|
|
|
10195
|
-
// src/mcp/
|
|
10196
|
-
|
|
10197
|
-
|
|
10198
|
-
|
|
10199
|
-
|
|
10200
|
-
|
|
10201
|
-
|
|
10202
|
-
|
|
10203
|
-
|
|
10204
|
-
}
|
|
10205
|
-
|
|
10206
|
-
|
|
10333
|
+
// src/mcp/git-detection.ts
|
|
10334
|
+
import fs4 from "fs/promises";
|
|
10335
|
+
import {
|
|
10336
|
+
GitStateTracker,
|
|
10337
|
+
indexMultipleFiles as indexMultipleFiles2,
|
|
10338
|
+
isGitAvailable,
|
|
10339
|
+
isGitRepo as isGitRepo2,
|
|
10340
|
+
DEFAULT_GIT_POLL_INTERVAL_MS as DEFAULT_GIT_POLL_INTERVAL_MS2,
|
|
10341
|
+
createGitignoreFilter as createGitignoreFilter2
|
|
10342
|
+
} from "@liendev/core";
|
|
10343
|
+
|
|
10344
|
+
// src/mcp/file-change-handler.ts
|
|
10345
|
+
import fs3 from "fs/promises";
|
|
10346
|
+
import {
|
|
10347
|
+
indexMultipleFiles,
|
|
10348
|
+
indexSingleFile,
|
|
10349
|
+
ManifestManager,
|
|
10350
|
+
computeContentHash,
|
|
10351
|
+
normalizeToRelativePath as normalizeToRelativePath2,
|
|
10352
|
+
createGitignoreFilter
|
|
10353
|
+
} from "@liendev/core";
|
|
10354
|
+
/**
 * Remove a single deleted file from both the vector index and the manifest.
 * Logs progress and rethrows on failure so the caller can track the error.
 */
async function handleFileDeletion(filepath, vectorDB, log) {
  log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
  const manifestManager = new ManifestManager(vectorDB.dbPath);
  try {
    await vectorDB.deleteByFile(filepath);
    await manifestManager.removeFile(filepath);
    log(`\u2713 Removed ${filepath} from index`);
  } catch (err) {
    log(`Failed to remove ${filepath}: ${err}`, "warning");
    throw err;
  }
}
|
|
10208
|
-
async function
|
|
10209
|
-
const
|
|
10210
|
-
|
|
10211
|
-
const
|
|
10212
|
-
|
|
10213
|
-
|
|
10366
|
+
/**
 * Delete a batch of files from the index and manifest, continuing past
 * individual failures; throws a summary error at the end if any removal
 * failed.
 */
async function handleBatchDeletions(deletedFiles, vectorDB, log) {
  const manifestManager = new ManifestManager(vectorDB.dbPath);
  const failed = [];
  for (const filepath of deletedFiles) {
    log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
    try {
      await vectorDB.deleteByFile(filepath);
      await manifestManager.removeFile(filepath);
      log(`\u2713 Removed ${filepath} from index`);
    } catch (err) {
      // Keep going: one bad file should not block the rest of the batch.
      log(`Failed to remove ${filepath}: ${err}`, "warning");
      failed.push(filepath);
    }
  }
  if (failed.length > 0) {
    throw new Error(`Failed to delete ${failed.length} file(s): ${failed.join(", ")}`);
  }
}
|
|
10225
|
-
async function
|
|
10226
|
-
const
|
|
10227
|
-
|
|
10228
|
-
|
|
10229
|
-
|
|
10384
|
+
/**
 * Determine whether a "change" event can be skipped because the file's
 * content hash is unchanged. When skippable, refreshes the manifest's
 * recorded mtime inside a transaction so future mtime checks stay quiet.
 * @returns true when reindexing can safely be skipped.
 */
async function canSkipReindex(filepath, rootDir, vectorDB, log) {
  const manifestManager = new ManifestManager(vectorDB.dbPath);
  const relPath = normalizeToRelativePath2(filepath, rootDir);
  const data = await manifestManager.load();
  const entry = data?.files[relPath];
  const { shouldReindex, newMtime } = await shouldReindexFile(filepath, entry, log);
  if (shouldReindex || newMtime === void 0 || !entry) {
    return false;
  }
  // Content unchanged: persist the new mtime so we don't re-check next time.
  const updated = await manifestManager.transaction(async (snapshot) => {
    const current = snapshot.files[relPath];
    if (!current) {
      return false;
    }
    current.lastModified = newMtime;
    return true;
  });
  return Boolean(updated);
}
|
|
10403
|
+
/**
 * Reindex one file after an add/change event. For "change" events, first
 * checks whether the content hash is unchanged so a pure mtime touch can
 * be skipped. Reindex state transitions are recorded for observability.
 */
async function handleSingleFileChange(filepath, type, rootDir, vectorDB, embeddings, log, reindexStateManager) {
  const action = type === "add" ? "added" : "changed";
  if (type === "change") {
    try {
      const skippable = await canSkipReindex(filepath, rootDir, vectorDB, log);
      if (skippable) {
        return;
      }
    } catch (err) {
      // A failed hash check is non-fatal: fall through and reindex anyway.
      log(`Content hash check failed, will reindex: ${err}`, "warning");
    }
  }
  const startedAt = Date.now();
  reindexStateManager.startReindex([filepath]);
  log(`\u{1F4DD} File ${action}: ${filepath}`);
  try {
    await indexSingleFile(filepath, vectorDB, embeddings, { verbose: false, rootDir });
    reindexStateManager.completeReindex(Date.now() - startedAt);
  } catch (err) {
    reindexStateManager.failReindex();
    log(`Failed to reindex ${filepath}: ${err}`, "warning");
  }
}
|
|
10424
|
+
/**
 * Decide whether a file needs reindexing by comparing its current content
 * hash with the manifest entry. When the content is unchanged, also returns
 * the fresh mtime so the caller can update the manifest without reindexing.
 * @returns { shouldReindex, newMtime? }
 */
async function shouldReindexFile(filepath, existingEntry, log) {
  // Without a recorded hash we cannot prove the content is unchanged.
  if (!existingEntry?.contentHash) {
    return { shouldReindex: true };
  }
  const currentHash = await computeContentHash(filepath);
  const unchanged = Boolean(currentHash) && currentHash === existingEntry.contentHash;
  if (!unchanged) {
    return { shouldReindex: true };
  }
  log(`\u23ED\uFE0F File mtime changed but content unchanged: ${filepath}`, "debug");
  try {
    const stats = await fs3.stat(filepath);
    return { shouldReindex: false, newMtime: stats.mtimeMs };
  } catch {
    // stat failed (file vanished?) — err on the side of reindexing.
    return { shouldReindex: true };
  }
}
|
|
10440
|
+
/**
 * Run the hash/mtime freshness check for each file against the manifest.
 * Files are processed sequentially so per-file debug logs stay ordered.
 * @returns array of { filepath, normalizedPath, shouldReindex, newMtime }
 */
async function checkFilesAgainstManifest(files, rootDir, manifestFiles, log) {
  const outcomes = [];
  for (const filepath of files) {
    const normalizedPath = normalizeToRelativePath2(filepath, rootDir);
    const entry = manifestFiles[normalizedPath];
    const { shouldReindex, newMtime } = await shouldReindexFile(filepath, entry, log);
    outcomes.push({ filepath, normalizedPath, shouldReindex, newMtime });
  }
  return outcomes;
}
|
|
10450
|
+
/**
 * Persist fresh mtimes for files whose content was unchanged, inside one
 * manifest transaction. No-ops (and opens no transaction) when there is
 * nothing to update.
 */
async function updateUnchangedMtimes(manifest, results) {
  const unchanged = results.filter((r) => !r.shouldReindex && r.newMtime !== void 0);
  if (unchanged.length === 0) {
    return;
  }
  await manifest.transaction(async (data) => {
    for (const { normalizedPath, newMtime } of unchanged) {
      const entry = data.files[normalizedPath];
      if (entry) {
        entry.lastModified = newMtime;
      }
    }
    return null;
  });
}
|
|
10465
|
+
/**
 * Narrow a modified-file list to those whose content actually changed.
 * Unchanged files get their manifest mtime refreshed as a side effect.
 * Without a loadable manifest, everything is treated as changed.
 */
async function filterModifiedFilesByHash(modifiedFiles, rootDir, vectorDB, log) {
  if (modifiedFiles.length === 0) {
    return [];
  }
  const manifestManager = new ManifestManager(vectorDB.dbPath);
  const manifestData = await manifestManager.load();
  if (!manifestData) {
    return modifiedFiles;
  }
  const outcomes = await checkFilesAgainstManifest(
    modifiedFiles,
    rootDir,
    manifestData.files,
    log
  );
  await updateUnchangedMtimes(manifestManager, outcomes);
  const needsReindex = outcomes.filter((o) => o.shouldReindex);
  return needsReindex.map((o) => o.filepath);
}
|
|
10479
|
+
/**
 * Split a batch change event into files to (re)index and files to delete.
 * Modified files are filtered by content hash; when that fails, all
 * modified files are conservatively reindexed.
 */
async function prepareFilesForReindexing(event, rootDir, vectorDB, log) {
  const added = event.added || [];
  const modified = event.modified || [];
  const deletedFiles = event.deleted || [];
  let modifiedToReindex;
  try {
    modifiedToReindex = await filterModifiedFilesByHash(modified, rootDir, vectorDB, log);
  } catch (err) {
    log(`Hash-based filtering failed, will reindex all modified files: ${err}`, "warning");
    modifiedToReindex = modified;
  }
  return { filesToIndex: [...added, ...modifiedToReindex], deletedFiles };
}
|
|
10493
|
+
/**
 * Start the (re)index work and the batch deletions for a change set,
 * running both concurrently and awaiting all of them.
 */
async function executeReindexOperations(filesToIndex, deletedFiles, rootDir, vectorDB, embeddings, log) {
  const pending = [];
  if (filesToIndex.length > 0) {
    log(`\u{1F4C1} ${filesToIndex.length} file(s) changed, reindexing...`);
    pending.push(
      indexMultipleFiles(filesToIndex, vectorDB, embeddings, { verbose: false, rootDir })
    );
  }
  if (deletedFiles.length > 0) {
    pending.push(handleBatchDeletions(deletedFiles, vectorDB, log));
  }
  await Promise.all(pending);
}
|
|
10506
|
+
/**
 * Process a batch file-change event end to end: compute what needs
 * indexing/deletion, execute the work, and record the reindex outcome
 * (duration on success, failure otherwise).
 */
async function handleBatchEvent(event, rootDir, vectorDB, embeddings, log, reindexStateManager) {
  const { filesToIndex, deletedFiles } = await prepareFilesForReindexing(
    event,
    rootDir,
    vectorDB,
    log
  );
  const touched = [...filesToIndex, ...deletedFiles];
  if (touched.length === 0) {
    return;
  }
  const startedAt = Date.now();
  reindexStateManager.startReindex(touched);
  try {
    await executeReindexOperations(filesToIndex, deletedFiles, rootDir, vectorDB, embeddings, log);
    const elapsed = Date.now() - startedAt;
    reindexStateManager.completeReindex(elapsed);
    log(
      `\u2713 Processed ${filesToIndex.length} file(s) + ${deletedFiles.length} deletion(s) in ${elapsed}ms`
    );
  } catch (err) {
    reindexStateManager.failReindex();
    log(`Batch reindex failed: ${err}`, "warning");
  }
}
|
|
10531
|
+
/**
 * Handle a single file deletion ("unlink") event, tracked as a reindex
 * operation so the shared state stays observable.
 */
async function handleUnlinkEvent(filepath, vectorDB, log, reindexStateManager) {
  const startedAt = Date.now();
  reindexStateManager.startReindex([filepath]);
  try {
    await handleFileDeletion(filepath, vectorDB, log);
    reindexStateManager.completeReindex(Date.now() - startedAt);
  } catch (err) {
    reindexStateManager.failReindex();
    log(`Failed to process deletion for ${filepath}: ${err}`, "warning");
  }
}
|
|
10543
|
+
/**
 * True when `filepath`, normalized relative to `rootDir`, matches the
 * gitignore-style predicate `isIgnored`.
 */
function isFileIgnored(filepath, rootDir, isIgnored) {
  const relativePath = normalizeToRelativePath2(filepath, rootDir);
  return isIgnored(relativePath);
}
|
|
10546
|
+
/**
 * Return a copy of a batch event with ignored paths dropped from the
 * added/modified lists. Deletions are kept as-is so stale entries can
 * still be purged from the index.
 */
function filterFileChangeEvent(event, ignoreFilter, rootDir) {
  const keep = (f) => !isFileIgnored(f, rootDir, ignoreFilter);
  return {
    ...event,
    added: (event.added || []).filter(keep),
    modified: (event.modified || []).filter(keep),
    deleted: event.deleted || []
  };
}
|
|
10554
|
+
/**
 * True when the path's basename (split on either / or \) is ".gitignore".
 */
function isGitignoreFile(filepath) {
  const segments = filepath.split(/[/\\]/);
  const basename = segments.pop() ?? filepath;
  return basename === ".gitignore";
}
|
|
10558
|
+
/**
 * Whether a change event touches any .gitignore file (which invalidates a
 * cached ignore filter). Batch events check all three lists; single-file
 * events check their one path.
 */
function hasGitignoreChange(event) {
  if (event.type !== "batch") {
    return event.filepath ? isGitignoreFile(event.filepath) : false;
  }
  const candidates = [
    ...event.added || [],
    ...event.modified || [],
    ...event.deleted || []
  ];
  return candidates.some(isGitignoreFile);
}
|
|
10565
|
+
/**
 * Build the file-watcher callback. Lazily constructs (and rebuilds after a
 * .gitignore edit) the ignore filter, then dispatches each event to the
 * batch, unlink, or single-file handler, reconnecting the DB first.
 */
function createFileChangeHandler(rootDir, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
  let ignoreFilter = null;
  return async (event) => {
    // A .gitignore edit invalidates the cached filter.
    if (hasGitignoreChange(event)) {
      ignoreFilter = null;
    }
    if (!ignoreFilter) {
      ignoreFilter = await createGitignoreFilter(rootDir);
    }
    const { type } = event;
    if (type === "batch") {
      const filtered = filterFileChangeEvent(event, ignoreFilter, rootDir);
      const remaining = filtered.added.length + filtered.modified.length + filtered.deleted.length;
      if (remaining === 0) {
        return;
      }
      await checkAndReconnect();
      await handleBatchEvent(filtered, rootDir, vectorDB, embeddings, log, reindexStateManager);
      return;
    }
    if (type === "unlink") {
      await checkAndReconnect();
      await handleUnlinkEvent(event.filepath, vectorDB, log, reindexStateManager);
      return;
    }
    // "add" / "change" event for a single file.
    if (isFileIgnored(event.filepath, rootDir, ignoreFilter)) {
      return;
    }
    await checkAndReconnect();
    await handleSingleFileChange(
      event.filepath,
      type,
      rootDir,
      vectorDB,
      embeddings,
      log,
      reindexStateManager
    );
  };
}
|
|
10240
|
-
|
|
10599
|
+
|
|
10600
|
+
// src/mcp/git-detection.ts
|
|
10601
|
+
async function handleGitStartup(rootDir, gitTracker, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
|
|
10241
10602
|
log("Checking for git changes...");
|
|
10242
10603
|
const changedFiles = await gitTracker.initialize();
|
|
10243
10604
|
if (changedFiles && changedFiles.length > 0) {
|
|
10605
|
+
const isIgnored = await createGitignoreFilter2(rootDir);
|
|
10606
|
+
const filteredFiles = await filterGitChangedFiles(changedFiles, rootDir, isIgnored);
|
|
10607
|
+
if (filteredFiles.length === 0) {
|
|
10608
|
+
log("\u2713 Index is up to date with git state");
|
|
10609
|
+
return;
|
|
10610
|
+
}
|
|
10244
10611
|
const startTime = Date.now();
|
|
10245
|
-
reindexStateManager.startReindex(
|
|
10246
|
-
log(`\u{1F33F} Git changes detected: ${
|
|
10612
|
+
reindexStateManager.startReindex(filteredFiles);
|
|
10613
|
+
log(`\u{1F33F} Git changes detected: ${filteredFiles.length} files changed`);
|
|
10247
10614
|
try {
|
|
10248
|
-
|
|
10615
|
+
await checkAndReconnect();
|
|
10616
|
+
const count = await indexMultipleFiles2(filteredFiles, vectorDB, embeddings, {
|
|
10617
|
+
verbose: false
|
|
10618
|
+
});
|
|
10249
10619
|
const duration = Date.now() - startTime;
|
|
10250
10620
|
reindexStateManager.completeReindex(duration);
|
|
10251
10621
|
log(`\u2713 Reindexed ${count} files in ${duration}ms`);
|
|
@@ -10257,8 +10627,12 @@ async function handleGitStartup(gitTracker, vectorDB, embeddings, _verbose, log,
|
|
|
10257
10627
|
log("\u2713 Index is up to date with git state");
|
|
10258
10628
|
}
|
|
10259
10629
|
}
|
|
10260
|
-
function createGitPollInterval(gitTracker, vectorDB, embeddings,
|
|
10630
|
+
function createGitPollInterval(rootDir, gitTracker, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
|
|
10631
|
+
let isIgnored = null;
|
|
10632
|
+
let pollInProgress = false;
|
|
10261
10633
|
return setInterval(async () => {
|
|
10634
|
+
if (pollInProgress) return;
|
|
10635
|
+
pollInProgress = true;
|
|
10262
10636
|
try {
|
|
10263
10637
|
const changedFiles = await gitTracker.detectChanges();
|
|
10264
10638
|
if (changedFiles && changedFiles.length > 0) {
|
|
@@ -10270,11 +10644,22 @@ function createGitPollInterval(gitTracker, vectorDB, embeddings, _verbose, log,
|
|
|
10270
10644
|
);
|
|
10271
10645
|
return;
|
|
10272
10646
|
}
|
|
10647
|
+
if (changedFiles.some(isGitignoreFile)) {
|
|
10648
|
+
isIgnored = null;
|
|
10649
|
+
}
|
|
10650
|
+
if (!isIgnored) {
|
|
10651
|
+
isIgnored = await createGitignoreFilter2(rootDir);
|
|
10652
|
+
}
|
|
10653
|
+
const filteredFiles = await filterGitChangedFiles(changedFiles, rootDir, isIgnored);
|
|
10654
|
+
if (filteredFiles.length === 0) return;
|
|
10273
10655
|
const startTime = Date.now();
|
|
10274
|
-
reindexStateManager.startReindex(
|
|
10275
|
-
log(`\u{1F33F} Git change detected: ${
|
|
10656
|
+
reindexStateManager.startReindex(filteredFiles);
|
|
10657
|
+
log(`\u{1F33F} Git change detected: ${filteredFiles.length} files changed`);
|
|
10276
10658
|
try {
|
|
10277
|
-
|
|
10659
|
+
await checkAndReconnect();
|
|
10660
|
+
const count = await indexMultipleFiles2(filteredFiles, vectorDB, embeddings, {
|
|
10661
|
+
verbose: false
|
|
10662
|
+
});
|
|
10278
10663
|
const duration = Date.now() - startTime;
|
|
10279
10664
|
reindexStateManager.completeReindex(duration);
|
|
10280
10665
|
log(`\u2713 Background reindex complete: ${count} files in ${duration}ms`);
|
|
@@ -10285,48 +10670,99 @@ function createGitPollInterval(gitTracker, vectorDB, embeddings, _verbose, log,
|
|
|
10285
10670
|
}
|
|
10286
10671
|
} catch (error) {
|
|
10287
10672
|
log(`Git detection check failed: ${error}`, "warning");
|
|
10673
|
+
} finally {
|
|
10674
|
+
pollInProgress = false;
|
|
10288
10675
|
}
|
|
10289
10676
|
}, DEFAULT_GIT_POLL_INTERVAL_MS2);
|
|
10290
10677
|
}
|
|
10291
|
-
function
|
|
10678
|
+
/**
 * Guard for git-triggered reindexing: skip when a reindex is already
 * running (either this handler's flag or the global state manager) or
 * when the cooldown window since the last git reindex has not elapsed.
 */
function shouldSkipGitReindex(gitReindexInProgress, lastGitReindexTime, cooldownMs, reindexStateManager, log) {
  const globalInProgress = reindexStateManager.getState().inProgress;
  if (gitReindexInProgress || globalInProgress) {
    log("Git reindex already in progress, skipping", "debug");
    return true;
  }
  const elapsed = Date.now() - lastGitReindexTime;
  if (elapsed < cooldownMs) {
    log(`Git change ignored (cooldown: ${cooldownMs - elapsed}ms remaining)`, "debug");
    return true;
  }
  return false;
}
|
|
10691
|
+
/**
 * Detect git-driven file changes and pass them through the (possibly
 * rebuilt) gitignore filter. Returns the relevant file list, or null
 * when nothing relevant changed.
 */
async function detectAndFilterGitChanges(gitTracker, rootDir, getIgnoreFilter, setIgnoreFilter, log) {
  log("\u{1F33F} Git change detected (event-driven)");
  const changed = await gitTracker.detectChanges();
  if (!changed || changed.length === 0) {
    return null;
  }
  // Any .gitignore change invalidates the cached filter.
  if (changed.some(isGitignoreFile)) {
    setIgnoreFilter(null);
  }
  let filter = getIgnoreFilter();
  if (!filter) {
    filter = await createGitignoreFilter2(rootDir);
    setIgnoreFilter(filter);
  }
  const relevant = await filterGitChangedFiles(changed, rootDir, filter);
  return relevant.length > 0 ? relevant : null;
}
|
|
10706
|
+
/**
 * Reindex the given files in response to a git change, recording
 * start/complete/fail state transitions. Rethrows the error so the
 * caller can react.
 */
async function executeGitReindex(filteredFiles, vectorDB, embeddings, reindexStateManager, checkAndReconnect, log) {
  const startedAt = Date.now();
  reindexStateManager.startReindex(filteredFiles);
  log(`Reindexing ${filteredFiles.length} files from git change`);
  try {
    await checkAndReconnect();
    const count = await indexMultipleFiles2(filteredFiles, vectorDB, embeddings, { verbose: false });
    const elapsed = Date.now() - startedAt;
    reindexStateManager.completeReindex(elapsed);
    log(`\u2713 Reindexed ${count} files in ${elapsed}ms`);
  } catch (err) {
    reindexStateManager.failReindex();
    log(`Git reindex failed: ${err}`, "warning");
    throw err;
  }
}
|
|
10722
|
+
/**
 * Build the event-driven git change handler. Serializes runs via an
 * in-progress flag, enforces a cooldown between reindexes, and caches
 * the gitignore filter across invocations.
 */
function createGitChangeHandler(rootDir, gitTracker, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
  const GIT_REINDEX_COOLDOWN_MS = 5e3;
  let isIgnored = null;
  let busy = false;
  let lastRunAt = 0;
  return async () => {
    if (shouldSkipGitReindex(busy, lastRunAt, GIT_REINDEX_COOLDOWN_MS, reindexStateManager, log)) {
      return;
    }
    busy = true;
    try {
      const files = await detectAndFilterGitChanges(
        gitTracker,
        rootDir,
        () => isIgnored,
        (filter) => {
          isIgnored = filter;
        },
        log
      );
      if (!files) {
        return;
      }
      await executeGitReindex(files, vectorDB, embeddings, reindexStateManager, checkAndReconnect, log);
      lastRunAt = Date.now();
    } catch (err) {
      log(`Git change handler failed: ${err}`, "warning");
    } finally {
      busy = false;
    }
  };
}
|
|
10329
|
-
async function setupGitDetection(rootDir, vectorDB, embeddings,
|
|
10765
|
+
async function setupGitDetection(rootDir, vectorDB, embeddings, log, reindexStateManager, fileWatcher, checkAndReconnect) {
|
|
10330
10766
|
const gitAvailable = await isGitAvailable();
|
|
10331
10767
|
const isRepo = await isGitRepo2(rootDir);
|
|
10332
10768
|
if (!gitAvailable) {
|
|
@@ -10340,18 +10776,27 @@ async function setupGitDetection(rootDir, vectorDB, embeddings, verbose, log, re
|
|
|
10340
10776
|
log("\u2713 Detected git repository");
|
|
10341
10777
|
const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);
|
|
10342
10778
|
try {
|
|
10343
|
-
await handleGitStartup(
|
|
10779
|
+
await handleGitStartup(
|
|
10780
|
+
rootDir,
|
|
10781
|
+
gitTracker,
|
|
10782
|
+
vectorDB,
|
|
10783
|
+
embeddings,
|
|
10784
|
+
log,
|
|
10785
|
+
reindexStateManager,
|
|
10786
|
+
checkAndReconnect
|
|
10787
|
+
);
|
|
10344
10788
|
} catch (error) {
|
|
10345
10789
|
log(`Failed to check git state on startup: ${error}`, "warning");
|
|
10346
10790
|
}
|
|
10347
10791
|
if (fileWatcher) {
|
|
10348
10792
|
const gitChangeHandler = createGitChangeHandler(
|
|
10793
|
+
rootDir,
|
|
10349
10794
|
gitTracker,
|
|
10350
10795
|
vectorDB,
|
|
10351
10796
|
embeddings,
|
|
10352
|
-
verbose,
|
|
10353
10797
|
log,
|
|
10354
|
-
reindexStateManager
|
|
10798
|
+
reindexStateManager,
|
|
10799
|
+
checkAndReconnect
|
|
10355
10800
|
);
|
|
10356
10801
|
fileWatcher.watchGit(gitChangeHandler);
|
|
10357
10802
|
log("\u2713 Git detection enabled (event-driven via file watcher)");
|
|
@@ -10359,185 +10804,98 @@ async function setupGitDetection(rootDir, vectorDB, embeddings, verbose, log, re
|
|
|
10359
10804
|
}
|
|
10360
10805
|
const pollIntervalSeconds = DEFAULT_GIT_POLL_INTERVAL_MS2 / 1e3;
|
|
10361
10806
|
log(`\u2713 Git detection enabled (polling fallback every ${pollIntervalSeconds}s)`);
|
|
10362
|
-
const gitPollInterval = createGitPollInterval(
|
|
10807
|
+
const gitPollInterval = createGitPollInterval(
|
|
10808
|
+
rootDir,
|
|
10809
|
+
gitTracker,
|
|
10810
|
+
vectorDB,
|
|
10811
|
+
embeddings,
|
|
10812
|
+
log,
|
|
10813
|
+
reindexStateManager,
|
|
10814
|
+
checkAndReconnect
|
|
10815
|
+
);
|
|
10363
10816
|
return { gitTracker, gitPollInterval };
|
|
10364
10817
|
}
|
|
10365
|
-
async function
|
|
10366
|
-
|
|
10367
|
-
const
|
|
10368
|
-
|
|
10369
|
-
|
|
10370
|
-
|
|
10371
|
-
|
|
10372
|
-
} catch (error) {
|
|
10373
|
-
log(`Failed to remove ${filepath}: ${error}`, "warning");
|
|
10374
|
-
throw error;
|
|
10375
|
-
}
|
|
10376
|
-
}
|
|
10377
|
-
async function handleSingleFileChange(filepath, type, vectorDB, embeddings, _verbose, log, reindexStateManager) {
|
|
10378
|
-
const action = type === "add" ? "added" : "changed";
|
|
10379
|
-
const rootDir = getRootDirFromDbPath(vectorDB.dbPath);
|
|
10380
|
-
if (type === "change") {
|
|
10381
|
-
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
10382
|
-
const normalizedPath = normalizeToRelativePath(filepath, rootDir);
|
|
10818
|
+
/**
 * Keep git-changed files that are not gitignored, plus ignored paths that
 * no longer exist on disk (so their deletions still propagate to the
 * index). Other errors from fs access are swallowed: the file exists but
 * is unreadable, and it stays filtered out.
 */
async function filterGitChangedFiles(changedFiles, rootDir, ignoreFilter) {
  const kept = [];
  for (const filepath of changedFiles) {
    if (!isFileIgnored(filepath, rootDir, ignoreFilter)) {
      kept.push(filepath);
      continue;
    }
    // Ignored path: include it only when the file is gone (a deletion).
    try {
      await fs4.access(filepath);
    } catch (err) {
      if (err.code === "ENOENT") {
        kept.push(filepath);
      }
    }
  }
  return kept;
}
|
|
10417
|
-
|
|
10418
|
-
|
|
10419
|
-
|
|
10420
|
-
|
|
10421
|
-
|
|
10422
|
-
|
|
10423
|
-
|
|
10835
|
+
|
|
10836
|
+
// src/mcp/cleanup.ts
|
|
10837
|
+
function setupCleanupHandlers(server, versionCheckInterval, gitPollInterval, fileWatcher, log) {
|
|
10838
|
+
let cleaningUp = false;
|
|
10839
|
+
return async () => {
|
|
10840
|
+
if (cleaningUp) return;
|
|
10841
|
+
cleaningUp = true;
|
|
10424
10842
|
try {
|
|
10425
|
-
|
|
10426
|
-
|
|
10427
|
-
|
|
10428
|
-
|
|
10429
|
-
|
|
10430
|
-
}
|
|
10431
|
-
|
|
10432
|
-
return { shouldReindex: true };
|
|
10433
|
-
}
|
|
10434
|
-
async function filterModifiedFilesByHash(modifiedFiles, vectorDB, log) {
|
|
10435
|
-
if (modifiedFiles.length === 0) {
|
|
10436
|
-
return [];
|
|
10437
|
-
}
|
|
10438
|
-
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
10439
|
-
const rootDir = getRootDirFromDbPath(vectorDB.dbPath);
|
|
10440
|
-
const manifestData = await manifest.transaction(async (data) => data);
|
|
10441
|
-
if (!manifestData) {
|
|
10442
|
-
return modifiedFiles;
|
|
10443
|
-
}
|
|
10444
|
-
const checkResults = [];
|
|
10445
|
-
for (const filepath of modifiedFiles) {
|
|
10446
|
-
const normalizedPath = normalizeToRelativePath(filepath, rootDir);
|
|
10447
|
-
const existingEntry = manifestData.files[normalizedPath];
|
|
10448
|
-
const { shouldReindex, newMtime } = await shouldReindexFile(filepath, existingEntry, log);
|
|
10449
|
-
checkResults.push({
|
|
10450
|
-
filepath,
|
|
10451
|
-
normalizedPath,
|
|
10452
|
-
shouldReindex,
|
|
10453
|
-
newMtime
|
|
10454
|
-
});
|
|
10455
|
-
}
|
|
10456
|
-
await manifest.transaction(async (data) => {
|
|
10457
|
-
for (const result of checkResults) {
|
|
10458
|
-
if (!result.shouldReindex && result.newMtime) {
|
|
10459
|
-
const entry = data.files[result.normalizedPath];
|
|
10460
|
-
if (entry) {
|
|
10461
|
-
entry.lastModified = result.newMtime;
|
|
10462
|
-
}
|
|
10463
|
-
}
|
|
10843
|
+
log("Shutting down MCP server...");
|
|
10844
|
+
await server.close();
|
|
10845
|
+
clearInterval(versionCheckInterval);
|
|
10846
|
+
if (gitPollInterval) clearInterval(gitPollInterval);
|
|
10847
|
+
if (fileWatcher) await fileWatcher.stop();
|
|
10848
|
+
} finally {
|
|
10849
|
+
process.exit(0);
|
|
10464
10850
|
}
|
|
10465
|
-
|
|
10466
|
-
});
|
|
10467
|
-
return checkResults.filter((r) => r.shouldReindex).map((r) => r.filepath);
|
|
10851
|
+
};
|
|
10468
10852
|
}
|
|
10469
|
-
|
|
10470
|
-
|
|
10471
|
-
|
|
10472
|
-
|
|
10473
|
-
|
|
10474
|
-
|
|
10475
|
-
|
|
10476
|
-
|
|
10477
|
-
|
|
10478
|
-
|
|
10479
|
-
}
|
|
10480
|
-
const filesToIndex = [...addedFiles, ...modifiedFilesToReindex];
|
|
10481
|
-
return { filesToIndex, deletedFiles };
|
|
10853
|
+
|
|
10854
|
+
// src/mcp/server.ts
|
|
10855
|
+
var __filename2 = fileURLToPath2(import.meta.url);
|
|
10856
|
+
var __dirname2 = dirname2(__filename2);
|
|
10857
|
+
var require3 = createRequire2(import.meta.url);
|
|
10858
|
+
var packageJson2;
|
|
10859
|
+
try {
|
|
10860
|
+
packageJson2 = require3(join2(__dirname2, "../package.json"));
|
|
10861
|
+
} catch {
|
|
10862
|
+
packageJson2 = require3(join2(__dirname2, "../../package.json"));
|
|
10482
10863
|
}
|
|
10483
|
-
async function
|
|
10484
|
-
const
|
|
10485
|
-
|
|
10486
|
-
|
|
10487
|
-
|
|
10864
|
+
async function initializeDatabase(rootDir, log) {
|
|
10865
|
+
const embeddings = new WorkerEmbeddings();
|
|
10866
|
+
log("Creating vector database...");
|
|
10867
|
+
const vectorDB = await createVectorDB(rootDir);
|
|
10868
|
+
if (!vectorDB) {
|
|
10869
|
+
throw new Error("createVectorDB returned undefined or null");
|
|
10488
10870
|
}
|
|
10489
|
-
if (
|
|
10490
|
-
|
|
10491
|
-
|
|
10492
|
-
deletedFiles.map((deleted) => handleFileDeletion(deleted, vectorDB, log))
|
|
10493
|
-
)
|
|
10871
|
+
if (typeof vectorDB.initialize !== "function") {
|
|
10872
|
+
throw new Error(
|
|
10873
|
+
`Invalid vectorDB instance: ${vectorDB.constructor?.name || "unknown"}. Expected VectorDBInterface but got: ${JSON.stringify(Object.keys(vectorDB))}`
|
|
10494
10874
|
);
|
|
10495
10875
|
}
|
|
10496
|
-
|
|
10497
|
-
|
|
10498
|
-
|
|
10499
|
-
|
|
10500
|
-
|
|
10501
|
-
|
|
10502
|
-
return;
|
|
10503
|
-
}
|
|
10504
|
-
const startTime = Date.now();
|
|
10505
|
-
reindexStateManager.startReindex(allFiles);
|
|
10506
|
-
try {
|
|
10507
|
-
await executeReindexOperations(filesToIndex, deletedFiles, vectorDB, embeddings, log);
|
|
10508
|
-
const duration = Date.now() - startTime;
|
|
10509
|
-
reindexStateManager.completeReindex(duration);
|
|
10510
|
-
log(`\u2713 Processed ${filesToIndex.length} file(s) + ${deletedFiles.length} deletion(s) in ${duration}ms`);
|
|
10511
|
-
} catch (error) {
|
|
10512
|
-
reindexStateManager.failReindex();
|
|
10513
|
-
log(`Batch reindex failed: ${error}`, "warning");
|
|
10514
|
-
}
|
|
10515
|
-
}
|
|
10516
|
-
async function handleUnlinkEvent(filepath, vectorDB, log, reindexStateManager) {
|
|
10517
|
-
const startTime = Date.now();
|
|
10518
|
-
reindexStateManager.startReindex([filepath]);
|
|
10519
|
-
try {
|
|
10520
|
-
await handleFileDeletion(filepath, vectorDB, log);
|
|
10521
|
-
const duration = Date.now() - startTime;
|
|
10522
|
-
reindexStateManager.completeReindex(duration);
|
|
10523
|
-
} catch (error) {
|
|
10524
|
-
reindexStateManager.failReindex();
|
|
10525
|
-
log(`Failed to process deletion for ${filepath}: ${error}`, "warning");
|
|
10526
|
-
}
|
|
10876
|
+
log("Loading embedding model...");
|
|
10877
|
+
await embeddings.initialize();
|
|
10878
|
+
log("Loading vector database...");
|
|
10879
|
+
await vectorDB.initialize();
|
|
10880
|
+
log("Embeddings and vector DB ready");
|
|
10881
|
+
return { embeddings, vectorDB };
|
|
10527
10882
|
}
|
|
10528
|
-
function
|
|
10529
|
-
|
|
10530
|
-
|
|
10531
|
-
|
|
10532
|
-
|
|
10533
|
-
|
|
10534
|
-
|
|
10535
|
-
|
|
10536
|
-
|
|
10883
|
+
async function handleAutoIndexing(vectorDB, rootDir, log) {
|
|
10884
|
+
const hasIndex = await vectorDB.hasData();
|
|
10885
|
+
if (!hasIndex) {
|
|
10886
|
+
log("\u{1F4E6} No index found - running initial indexing...");
|
|
10887
|
+
log("\u23F1\uFE0F This may take 5-20 minutes depending on project size");
|
|
10888
|
+
try {
|
|
10889
|
+
const { indexCodebase: indexCodebase2 } = await import("@liendev/core");
|
|
10890
|
+
await indexCodebase2({ rootDir, verbose: true });
|
|
10891
|
+
log("\u2705 Initial indexing complete!");
|
|
10892
|
+
} catch (error) {
|
|
10893
|
+
log(`\u26A0\uFE0F Initial indexing failed: ${error}`, "warning");
|
|
10894
|
+
log("You can manually run: lien index", "warning");
|
|
10537
10895
|
}
|
|
10538
|
-
}
|
|
10896
|
+
}
|
|
10539
10897
|
}
|
|
10540
|
-
async function setupFileWatching(watch, rootDir, vectorDB, embeddings,
|
|
10898
|
+
async function setupFileWatching(watch, rootDir, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
|
|
10541
10899
|
const fileWatchingEnabled = watch !== void 0 ? watch : true;
|
|
10542
10900
|
if (!fileWatchingEnabled) {
|
|
10543
10901
|
return null;
|
|
@@ -10545,7 +10903,14 @@ async function setupFileWatching(watch, rootDir, vectorDB, embeddings, verbose,
|
|
|
10545
10903
|
log("\u{1F440} Starting file watcher...");
|
|
10546
10904
|
const fileWatcher = new FileWatcher(rootDir);
|
|
10547
10905
|
try {
|
|
10548
|
-
const handler = createFileChangeHandler(
|
|
10906
|
+
const handler = createFileChangeHandler(
|
|
10907
|
+
rootDir,
|
|
10908
|
+
vectorDB,
|
|
10909
|
+
embeddings,
|
|
10910
|
+
log,
|
|
10911
|
+
reindexStateManager,
|
|
10912
|
+
checkAndReconnect
|
|
10913
|
+
);
|
|
10549
10914
|
await fileWatcher.start(handler);
|
|
10550
10915
|
log(`\u2713 File watching enabled (watching ${fileWatcher.getWatchedFiles().length} files)`);
|
|
10551
10916
|
return fileWatcher;
|
|
@@ -10556,23 +10921,11 @@ async function setupFileWatching(watch, rootDir, vectorDB, embeddings, verbose,
|
|
|
10556
10921
|
}
|
|
10557
10922
|
function setupTransport(log) {
|
|
10558
10923
|
const transport = new StdioServerTransport();
|
|
10559
|
-
transport.onclose = () => {
|
|
10560
|
-
log("Transport closed");
|
|
10561
|
-
};
|
|
10562
10924
|
transport.onerror = (error) => {
|
|
10563
|
-
log(`Transport error: ${error}
|
|
10925
|
+
log(`Transport error: ${error}`, "warning");
|
|
10564
10926
|
};
|
|
10565
10927
|
return transport;
|
|
10566
10928
|
}
|
|
10567
|
-
function setupCleanupHandlers(versionCheckInterval, gitPollInterval, fileWatcher, log) {
|
|
10568
|
-
return async () => {
|
|
10569
|
-
log("Shutting down MCP server...");
|
|
10570
|
-
clearInterval(versionCheckInterval);
|
|
10571
|
-
if (gitPollInterval) clearInterval(gitPollInterval);
|
|
10572
|
-
if (fileWatcher) await fileWatcher.stop();
|
|
10573
|
-
process.exit(0);
|
|
10574
|
-
};
|
|
10575
|
-
}
|
|
10576
10929
|
function setupVersionChecking(vectorDB, log, reindexStateManager) {
|
|
10577
10930
|
const checkAndReconnect = async () => {
|
|
10578
10931
|
try {
|
|
@@ -10623,9 +10976,6 @@ function createMCPLog(server, verbose) {
|
|
|
10623
10976
|
async function initializeComponents(rootDir, earlyLog) {
|
|
10624
10977
|
try {
|
|
10625
10978
|
const result = await initializeDatabase(rootDir, earlyLog);
|
|
10626
|
-
if (!result.vectorDB || typeof result.vectorDB.initialize !== "function") {
|
|
10627
|
-
throw new Error(`Invalid vectorDB instance: ${result.vectorDB?.constructor?.name || "undefined"}. Missing initialize method.`);
|
|
10628
|
-
}
|
|
10629
10979
|
return result;
|
|
10630
10980
|
} catch (error) {
|
|
10631
10981
|
console.error(`Failed to initialize: ${error}`);
|
|
@@ -10643,13 +10993,35 @@ function createMCPServer() {
|
|
|
10643
10993
|
);
|
|
10644
10994
|
}
|
|
10645
10995
|
async function setupAndConnectServer(server, toolContext, log, versionCheckInterval, reindexStateManager, options) {
|
|
10646
|
-
const { rootDir,
|
|
10996
|
+
const { rootDir, watch } = options;
|
|
10647
10997
|
const { vectorDB, embeddings } = toolContext;
|
|
10648
10998
|
registerMCPHandlers(server, toolContext, log);
|
|
10649
10999
|
await handleAutoIndexing(vectorDB, rootDir, log);
|
|
10650
|
-
const fileWatcher = await setupFileWatching(
|
|
10651
|
-
|
|
10652
|
-
|
|
11000
|
+
const fileWatcher = await setupFileWatching(
|
|
11001
|
+
watch,
|
|
11002
|
+
rootDir,
|
|
11003
|
+
vectorDB,
|
|
11004
|
+
embeddings,
|
|
11005
|
+
log,
|
|
11006
|
+
reindexStateManager,
|
|
11007
|
+
toolContext.checkAndReconnect
|
|
11008
|
+
);
|
|
11009
|
+
const { gitPollInterval } = await setupGitDetection(
|
|
11010
|
+
rootDir,
|
|
11011
|
+
vectorDB,
|
|
11012
|
+
embeddings,
|
|
11013
|
+
log,
|
|
11014
|
+
reindexStateManager,
|
|
11015
|
+
fileWatcher,
|
|
11016
|
+
toolContext.checkAndReconnect
|
|
11017
|
+
);
|
|
11018
|
+
const cleanup = setupCleanupHandlers(
|
|
11019
|
+
server,
|
|
11020
|
+
versionCheckInterval,
|
|
11021
|
+
gitPollInterval,
|
|
11022
|
+
fileWatcher,
|
|
11023
|
+
log
|
|
11024
|
+
);
|
|
10653
11025
|
process.on("SIGINT", cleanup);
|
|
10654
11026
|
process.on("SIGTERM", cleanup);
|
|
10655
11027
|
const transport = setupTransport(log);
|
|
@@ -10672,7 +11044,11 @@ async function startMCPServer(options) {
|
|
|
10672
11044
|
const server = createMCPServer();
|
|
10673
11045
|
const log = createMCPLog(server, verbose);
|
|
10674
11046
|
const reindexStateManager = createReindexStateManager();
|
|
10675
|
-
const {
|
|
11047
|
+
const {
|
|
11048
|
+
interval: versionCheckInterval,
|
|
11049
|
+
checkAndReconnect,
|
|
11050
|
+
getIndexMetadata
|
|
11051
|
+
} = setupVersionChecking(vectorDB, log, reindexStateManager);
|
|
10676
11052
|
const toolContext = {
|
|
10677
11053
|
vectorDB,
|
|
10678
11054
|
embeddings,
|
|
@@ -10682,41 +11058,45 @@ async function startMCPServer(options) {
|
|
|
10682
11058
|
getIndexMetadata,
|
|
10683
11059
|
getReindexState: () => reindexStateManager.getState()
|
|
10684
11060
|
};
|
|
10685
|
-
await setupAndConnectServer(server, toolContext, log, versionCheckInterval, reindexStateManager, {
|
|
11061
|
+
await setupAndConnectServer(server, toolContext, log, versionCheckInterval, reindexStateManager, {
|
|
11062
|
+
rootDir,
|
|
11063
|
+
watch
|
|
11064
|
+
});
|
|
10686
11065
|
}
|
|
10687
11066
|
|
|
10688
11067
|
// src/cli/serve.ts
|
|
11068
|
+
init_banner();
|
|
10689
11069
|
async function serveCommand(options) {
|
|
10690
11070
|
const rootDir = options.root ? path4.resolve(options.root) : process.cwd();
|
|
10691
11071
|
try {
|
|
10692
11072
|
if (options.root) {
|
|
10693
11073
|
try {
|
|
10694
|
-
const stats = await
|
|
11074
|
+
const stats = await fs5.stat(rootDir);
|
|
10695
11075
|
if (!stats.isDirectory()) {
|
|
10696
|
-
console.error(
|
|
11076
|
+
console.error(chalk6.red(`Error: --root path is not a directory: ${rootDir}`));
|
|
10697
11077
|
process.exit(1);
|
|
10698
11078
|
}
|
|
10699
11079
|
} catch (error) {
|
|
10700
11080
|
if (error.code === "ENOENT") {
|
|
10701
|
-
console.error(
|
|
11081
|
+
console.error(chalk6.red(`Error: --root directory does not exist: ${rootDir}`));
|
|
10702
11082
|
} else if (error.code === "EACCES") {
|
|
10703
|
-
console.error(
|
|
11083
|
+
console.error(chalk6.red(`Error: --root directory is not accessible: ${rootDir}`));
|
|
10704
11084
|
} else {
|
|
10705
|
-
console.error(
|
|
10706
|
-
console.error(
|
|
11085
|
+
console.error(chalk6.red(`Error: Failed to access --root directory: ${rootDir}`));
|
|
11086
|
+
console.error(chalk6.dim(error.message));
|
|
10707
11087
|
}
|
|
10708
11088
|
process.exit(1);
|
|
10709
11089
|
}
|
|
10710
11090
|
}
|
|
10711
11091
|
showBanner();
|
|
10712
|
-
console.error(
|
|
11092
|
+
console.error(chalk6.bold("Starting MCP server...\n"));
|
|
10713
11093
|
if (options.root) {
|
|
10714
|
-
console.error(
|
|
11094
|
+
console.error(chalk6.dim(`Serving from: ${rootDir}
|
|
10715
11095
|
`));
|
|
10716
11096
|
}
|
|
10717
11097
|
if (options.watch) {
|
|
10718
|
-
console.error(
|
|
10719
|
-
console.error(
|
|
11098
|
+
console.error(chalk6.yellow("\u26A0\uFE0F --watch flag is deprecated (file watching is now default)"));
|
|
11099
|
+
console.error(chalk6.dim(" Use --no-watch to disable file watching\n"));
|
|
10720
11100
|
}
|
|
10721
11101
|
const watch = options.noWatch ? false : options.watch ? true : void 0;
|
|
10722
11102
|
await startMCPServer({
|
|
@@ -10725,14 +11105,14 @@ async function serveCommand(options) {
|
|
|
10725
11105
|
watch
|
|
10726
11106
|
});
|
|
10727
11107
|
} catch (error) {
|
|
10728
|
-
console.error(
|
|
11108
|
+
console.error(chalk6.red("Failed to start MCP server:"), error);
|
|
10729
11109
|
process.exit(1);
|
|
10730
11110
|
}
|
|
10731
11111
|
}
|
|
10732
11112
|
|
|
10733
11113
|
// src/cli/complexity.ts
|
|
10734
|
-
import
|
|
10735
|
-
import
|
|
11114
|
+
import chalk7 from "chalk";
|
|
11115
|
+
import fs6 from "fs";
|
|
10736
11116
|
import path5 from "path";
|
|
10737
11117
|
import { VectorDB } from "@liendev/core";
|
|
10738
11118
|
import { ComplexityAnalyzer as ComplexityAnalyzer2 } from "@liendev/core";
|
|
@@ -10741,13 +11121,17 @@ var VALID_FAIL_ON = ["error", "warning"];
|
|
|
10741
11121
|
var VALID_FORMATS = ["text", "json", "sarif"];
|
|
10742
11122
|
function validateFailOn(failOn) {
|
|
10743
11123
|
if (failOn && !VALID_FAIL_ON.includes(failOn)) {
|
|
10744
|
-
console.error(
|
|
11124
|
+
console.error(
|
|
11125
|
+
chalk7.red(`Error: Invalid --fail-on value "${failOn}". Must be either 'error' or 'warning'`)
|
|
11126
|
+
);
|
|
10745
11127
|
process.exit(1);
|
|
10746
11128
|
}
|
|
10747
11129
|
}
|
|
10748
11130
|
function validateFormat(format) {
|
|
10749
11131
|
if (!VALID_FORMATS.includes(format)) {
|
|
10750
|
-
console.error(
|
|
11132
|
+
console.error(
|
|
11133
|
+
chalk7.red(`Error: Invalid --format value "${format}". Must be one of: text, json, sarif`)
|
|
11134
|
+
);
|
|
10751
11135
|
process.exit(1);
|
|
10752
11136
|
}
|
|
10753
11137
|
}
|
|
@@ -10755,11 +11139,11 @@ function validateFilesExist(files, rootDir) {
|
|
|
10755
11139
|
if (!files || files.length === 0) return;
|
|
10756
11140
|
const missingFiles = files.filter((file) => {
|
|
10757
11141
|
const fullPath = path5.isAbsolute(file) ? file : path5.join(rootDir, file);
|
|
10758
|
-
return !
|
|
11142
|
+
return !fs6.existsSync(fullPath);
|
|
10759
11143
|
});
|
|
10760
11144
|
if (missingFiles.length > 0) {
|
|
10761
|
-
console.error(
|
|
10762
|
-
missingFiles.forEach((file) => console.error(
|
|
11145
|
+
console.error(chalk7.red(`Error: File${missingFiles.length > 1 ? "s" : ""} not found:`));
|
|
11146
|
+
missingFiles.forEach((file) => console.error(chalk7.red(` - ${file}`)));
|
|
10763
11147
|
process.exit(1);
|
|
10764
11148
|
}
|
|
10765
11149
|
}
|
|
@@ -10767,8 +11151,12 @@ async function ensureIndexExists(vectorDB) {
|
|
|
10767
11151
|
try {
|
|
10768
11152
|
await vectorDB.scanWithFilter({ limit: 1 });
|
|
10769
11153
|
} catch {
|
|
10770
|
-
console.error(
|
|
10771
|
-
console.log(
|
|
11154
|
+
console.error(chalk7.red("Error: Index not found"));
|
|
11155
|
+
console.log(
|
|
11156
|
+
chalk7.yellow("\nRun"),
|
|
11157
|
+
chalk7.bold("lien index"),
|
|
11158
|
+
chalk7.yellow("to index your codebase first")
|
|
11159
|
+
);
|
|
10772
11160
|
process.exit(1);
|
|
10773
11161
|
}
|
|
10774
11162
|
}
|
|
@@ -10778,10 +11166,6 @@ async function complexityCommand(options) {
|
|
|
10778
11166
|
validateFailOn(options.failOn);
|
|
10779
11167
|
validateFormat(options.format);
|
|
10780
11168
|
validateFilesExist(options.files, rootDir);
|
|
10781
|
-
if (options.threshold || options.cyclomaticThreshold || options.cognitiveThreshold) {
|
|
10782
|
-
console.warn(chalk6.yellow("Warning: Threshold overrides via CLI flags are not supported."));
|
|
10783
|
-
console.warn(chalk6.yellow("Use the MCP tool with threshold parameter for custom thresholds."));
|
|
10784
|
-
}
|
|
10785
11169
|
const vectorDB = new VectorDB(rootDir);
|
|
10786
11170
|
await vectorDB.initialize();
|
|
10787
11171
|
await ensureIndexExists(vectorDB);
|
|
@@ -10793,11 +11177,102 @@ async function complexityCommand(options) {
|
|
|
10793
11177
|
if (hasViolations) process.exit(1);
|
|
10794
11178
|
}
|
|
10795
11179
|
} catch (error) {
|
|
10796
|
-
console.error(
|
|
11180
|
+
console.error(chalk7.red("Error analyzing complexity:"), error);
|
|
10797
11181
|
process.exit(1);
|
|
10798
11182
|
}
|
|
10799
11183
|
}
|
|
10800
11184
|
|
|
11185
|
+
// src/cli/config.ts
|
|
11186
|
+
import chalk8 from "chalk";
|
|
11187
|
+
import path6 from "path";
|
|
11188
|
+
import os2 from "os";
|
|
11189
|
+
import { loadGlobalConfig, mergeGlobalConfig } from "@liendev/core";
|
|
11190
|
+
var CONFIG_PATH = path6.join(os2.homedir(), ".lien", "config.json");
|
|
11191
|
+
var ALLOWED_KEYS = {
|
|
11192
|
+
backend: {
|
|
11193
|
+
values: ["lancedb", "qdrant"],
|
|
11194
|
+
description: "Vector database backend"
|
|
11195
|
+
},
|
|
11196
|
+
"qdrant.url": {
|
|
11197
|
+
values: [],
|
|
11198
|
+
description: "Qdrant server URL"
|
|
11199
|
+
},
|
|
11200
|
+
"qdrant.apiKey": {
|
|
11201
|
+
values: [],
|
|
11202
|
+
description: "Qdrant API key"
|
|
11203
|
+
}
|
|
11204
|
+
};
|
|
11205
|
+
function getConfigValue(config, key) {
|
|
11206
|
+
const parts = key.split(".");
|
|
11207
|
+
let current = config;
|
|
11208
|
+
for (const part of parts) {
|
|
11209
|
+
if (current == null || typeof current !== "object") return void 0;
|
|
11210
|
+
current = current[part];
|
|
11211
|
+
}
|
|
11212
|
+
return current == null ? void 0 : String(current);
|
|
11213
|
+
}
|
|
11214
|
+
function buildPartialConfig(key, value) {
|
|
11215
|
+
switch (key) {
|
|
11216
|
+
case "backend":
|
|
11217
|
+
return { backend: value };
|
|
11218
|
+
case "qdrant.url":
|
|
11219
|
+
return { qdrant: { url: value } };
|
|
11220
|
+
case "qdrant.apiKey":
|
|
11221
|
+
return { qdrant: { url: "", apiKey: value } };
|
|
11222
|
+
default:
|
|
11223
|
+
return {};
|
|
11224
|
+
}
|
|
11225
|
+
}
|
|
11226
|
+
async function configSetCommand(key, value) {
|
|
11227
|
+
const allowed = ALLOWED_KEYS[key];
|
|
11228
|
+
if (!allowed) {
|
|
11229
|
+
console.error(chalk8.red(`Unknown config key: "${key}"`));
|
|
11230
|
+
console.log(chalk8.dim("Valid keys:"), Object.keys(ALLOWED_KEYS).join(", "));
|
|
11231
|
+
process.exit(1);
|
|
11232
|
+
}
|
|
11233
|
+
if (allowed.values.length > 0 && !allowed.values.includes(value)) {
|
|
11234
|
+
console.error(chalk8.red(`Invalid value "${value}" for ${key}`));
|
|
11235
|
+
console.log(chalk8.dim("Valid values:"), allowed.values.join(", "));
|
|
11236
|
+
process.exit(1);
|
|
11237
|
+
}
|
|
11238
|
+
if (key === "qdrant.apiKey") {
|
|
11239
|
+
const existing = await loadGlobalConfig();
|
|
11240
|
+
if (!existing.qdrant?.url) {
|
|
11241
|
+
console.error(chalk8.red("Set qdrant.url first before setting qdrant.apiKey"));
|
|
11242
|
+
process.exit(1);
|
|
11243
|
+
}
|
|
11244
|
+
}
|
|
11245
|
+
const partial = buildPartialConfig(key, value);
|
|
11246
|
+
await mergeGlobalConfig(partial);
|
|
11247
|
+
console.log(chalk8.green(`Set ${key} = ${value}`));
|
|
11248
|
+
console.log(chalk8.dim(`Config: ${CONFIG_PATH}`));
|
|
11249
|
+
}
|
|
11250
|
+
async function configGetCommand(key) {
|
|
11251
|
+
if (!ALLOWED_KEYS[key]) {
|
|
11252
|
+
console.error(chalk8.red(`Unknown config key: "${key}"`));
|
|
11253
|
+
console.log(chalk8.dim("Valid keys:"), Object.keys(ALLOWED_KEYS).join(", "));
|
|
11254
|
+
process.exit(1);
|
|
11255
|
+
}
|
|
11256
|
+
const config = await loadGlobalConfig();
|
|
11257
|
+
const value = getConfigValue(config, key);
|
|
11258
|
+
if (value === void 0) {
|
|
11259
|
+
console.log(chalk8.dim(`${key}: (not set)`));
|
|
11260
|
+
} else {
|
|
11261
|
+
console.log(`${key}: ${value}`);
|
|
11262
|
+
}
|
|
11263
|
+
}
|
|
11264
|
+
async function configListCommand() {
|
|
11265
|
+
const config = await loadGlobalConfig();
|
|
11266
|
+
console.log(chalk8.bold("Global Configuration"));
|
|
11267
|
+
console.log(chalk8.dim(`File: ${CONFIG_PATH}
|
|
11268
|
+
`));
|
|
11269
|
+
for (const [key, meta] of Object.entries(ALLOWED_KEYS)) {
|
|
11270
|
+
const value = getConfigValue(config, key);
|
|
11271
|
+
const display = value ?? chalk8.dim("(not set)");
|
|
11272
|
+
console.log(` ${chalk8.cyan(key)}: ${display} ${chalk8.dim(`\u2014 ${meta.description}`)}`);
|
|
11273
|
+
}
|
|
11274
|
+
}
|
|
11275
|
+
|
|
10801
11276
|
// src/cli/index.ts
|
|
10802
11277
|
var __filename3 = fileURLToPath3(import.meta.url);
|
|
10803
11278
|
var __dirname3 = dirname3(__filename3);
|
|
@@ -10811,10 +11286,18 @@ try {
|
|
|
10811
11286
|
var program = new Command();
|
|
10812
11287
|
program.name("lien").description("Local semantic code search for AI assistants via MCP").version(packageJson3.version);
|
|
10813
11288
|
program.command("init").description("Initialize Lien in the current directory").option("-u, --upgrade", "Upgrade existing config with new options").option("-y, --yes", "Skip interactive prompts and use defaults").option("-p, --path <path>", "Path to initialize (defaults to current directory)").action(initCommand);
|
|
10814
|
-
program.command("index").description("Index the codebase for semantic search").option("-f, --force", "Force full reindex (skip incremental)").option("-
|
|
10815
|
-
program.command("serve").description(
|
|
11289
|
+
program.command("index").description("Index the codebase for semantic search").option("-f, --force", "Force full reindex (skip incremental)").option("-v, --verbose", "Show detailed logging during indexing").action(indexCommand);
|
|
11290
|
+
program.command("serve").description(
|
|
11291
|
+
"Start the MCP server (works with Cursor, Claude Code, Windsurf, and any MCP client)"
|
|
11292
|
+
).option("-p, --port <port>", "Port number (for future use)", "7133").option("--no-watch", "Disable file watching for this session").option("-w, --watch", "[DEPRECATED] File watching is now enabled by default").option("-r, --root <path>", "Root directory to serve (defaults to current directory)").action(serveCommand);
|
|
10816
11293
|
program.command("status").description("Show indexing status and statistics").action(statusCommand);
|
|
10817
|
-
program.command("complexity").description("Analyze code complexity").option("--files <paths...>", "Specific files to analyze").option("--format <type>", "Output format: text, json, sarif", "text").option("--
|
|
11294
|
+
program.command("complexity").description("Analyze code complexity").option("--files <paths...>", "Specific files to analyze").option("--format <type>", "Output format: text, json, sarif", "text").option("--fail-on <severity>", "Exit 1 if violations: error, warning").action(complexityCommand);
|
|
11295
|
+
var configCmd = program.command("config").description("Manage global configuration (~/.lien/config.json)");
|
|
11296
|
+
configCmd.command("set <key> <value>").description("Set a global config value").action(configSetCommand);
|
|
11297
|
+
configCmd.command("get <key>").description("Get a config value").action(configGetCommand);
|
|
11298
|
+
configCmd.command("list").description("Show all current config").action(configListCommand);
|
|
11299
|
+
program.addHelpText("beforeAll", `Quick start: run 'lien serve' in your project directory
|
|
11300
|
+
`);
|
|
10818
11301
|
|
|
10819
11302
|
// src/index.ts
|
|
10820
11303
|
program.parse();
|