@liendev/lien 0.34.0 → 0.35.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +812 -446
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -3984,7 +3984,7 @@ async function indexCommand(options) {
|
|
|
3984
3984
|
|
|
3985
3985
|
// src/cli/serve.ts
|
|
3986
3986
|
import chalk5 from "chalk";
|
|
3987
|
-
import
|
|
3987
|
+
import fs5 from "fs/promises";
|
|
3988
3988
|
import path4 from "path";
|
|
3989
3989
|
|
|
3990
3990
|
// src/mcp/server.ts
|
|
@@ -3992,26 +3992,17 @@ import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
|
|
3992
3992
|
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
3993
3993
|
import { createRequire as createRequire2 } from "module";
|
|
3994
3994
|
import { fileURLToPath as fileURLToPath2 } from "url";
|
|
3995
|
-
import { dirname as dirname2, join as join2
|
|
3995
|
+
import { dirname as dirname2, join as join2 } from "path";
|
|
3996
3996
|
import {
|
|
3997
|
-
|
|
3998
|
-
GitStateTracker,
|
|
3999
|
-
indexMultipleFiles,
|
|
4000
|
-
indexSingleFile,
|
|
4001
|
-
ManifestManager,
|
|
4002
|
-
isGitAvailable,
|
|
4003
|
-
isGitRepo as isGitRepo2,
|
|
3997
|
+
WorkerEmbeddings,
|
|
4004
3998
|
VERSION_CHECK_INTERVAL_MS,
|
|
4005
|
-
|
|
4006
|
-
createVectorDB,
|
|
4007
|
-
computeContentHash,
|
|
4008
|
-
normalizeToRelativePath
|
|
3999
|
+
createVectorDB
|
|
4009
4000
|
} from "@liendev/core";
|
|
4010
4001
|
|
|
4011
4002
|
// src/watcher/index.ts
|
|
4012
4003
|
import chokidar from "chokidar";
|
|
4013
4004
|
import path3 from "path";
|
|
4014
|
-
import {
|
|
4005
|
+
import { detectEcosystems, getEcosystemExcludePatterns, ALWAYS_IGNORE_PATTERNS } from "@liendev/core";
|
|
4015
4006
|
var FileWatcher = class {
|
|
4016
4007
|
watcher = null;
|
|
4017
4008
|
rootDir;
|
|
@@ -4028,6 +4019,7 @@ var FileWatcher = class {
|
|
|
4028
4019
|
firstChangeTimestamp = null;
|
|
4029
4020
|
// Track when batch started
|
|
4030
4021
|
// Git watching state
|
|
4022
|
+
gitWatcher = null;
|
|
4031
4023
|
gitChangeTimer = null;
|
|
4032
4024
|
gitChangeHandler = null;
|
|
4033
4025
|
GIT_DEBOUNCE_MS = 1e3;
|
|
@@ -4036,38 +4028,15 @@ var FileWatcher = class {
|
|
|
4036
4028
|
this.rootDir = rootDir;
|
|
4037
4029
|
}
|
|
4038
4030
|
/**
|
|
4039
|
-
* Detect watch patterns from
|
|
4031
|
+
* Detect watch patterns from ecosystem presets or use defaults.
|
|
4040
4032
|
*/
|
|
4041
4033
|
async getWatchPatterns() {
|
|
4042
4034
|
try {
|
|
4043
|
-
const
|
|
4044
|
-
|
|
4045
|
-
|
|
4046
|
-
|
|
4047
|
-
|
|
4048
|
-
if (!detector) {
|
|
4049
|
-
return null;
|
|
4050
|
-
}
|
|
4051
|
-
const config = await detector.generateConfig(this.rootDir, detection.path);
|
|
4052
|
-
return {
|
|
4053
|
-
name: detection.name,
|
|
4054
|
-
path: detection.path,
|
|
4055
|
-
enabled: true,
|
|
4056
|
-
config
|
|
4057
|
-
};
|
|
4058
|
-
})
|
|
4059
|
-
);
|
|
4060
|
-
const validFrameworks = frameworks.filter((f) => f !== null);
|
|
4061
|
-
const includePatterns = validFrameworks.flatMap((f) => f.config.include);
|
|
4062
|
-
const excludePatterns = validFrameworks.flatMap((f) => f.config.exclude);
|
|
4063
|
-
if (includePatterns.length === 0) {
|
|
4064
|
-
return this.getDefaultPatterns();
|
|
4065
|
-
}
|
|
4066
|
-
return { include: includePatterns, exclude: excludePatterns };
|
|
4067
|
-
} else {
|
|
4068
|
-
return this.getDefaultPatterns();
|
|
4069
|
-
}
|
|
4070
|
-
} catch (error) {
|
|
4035
|
+
const ecosystems = await detectEcosystems(this.rootDir);
|
|
4036
|
+
const ecosystemExcludes = getEcosystemExcludePatterns(ecosystems);
|
|
4037
|
+
const mergedExcludes = [.../* @__PURE__ */ new Set([...ALWAYS_IGNORE_PATTERNS, ...ecosystemExcludes])];
|
|
4038
|
+
return { include: ["**/*"], exclude: mergedExcludes };
|
|
4039
|
+
} catch {
|
|
4071
4040
|
return this.getDefaultPatterns();
|
|
4072
4041
|
}
|
|
4073
4042
|
}
|
|
@@ -4077,13 +4046,7 @@ var FileWatcher = class {
|
|
|
4077
4046
|
getDefaultPatterns() {
|
|
4078
4047
|
return {
|
|
4079
4048
|
include: ["**/*"],
|
|
4080
|
-
exclude: [
|
|
4081
|
-
"**/node_modules/**",
|
|
4082
|
-
"**/vendor/**",
|
|
4083
|
-
"**/dist/**",
|
|
4084
|
-
"**/build/**",
|
|
4085
|
-
"**/.git/**"
|
|
4086
|
-
]
|
|
4049
|
+
exclude: [...ALWAYS_IGNORE_PATTERNS]
|
|
4087
4050
|
};
|
|
4088
4051
|
}
|
|
4089
4052
|
/**
|
|
@@ -4134,17 +4097,17 @@ var FileWatcher = class {
|
|
|
4134
4097
|
}
|
|
4135
4098
|
let readyFired = false;
|
|
4136
4099
|
await Promise.race([
|
|
4137
|
-
new Promise((
|
|
4100
|
+
new Promise((resolve) => {
|
|
4138
4101
|
const readyHandler = () => {
|
|
4139
4102
|
readyFired = true;
|
|
4140
|
-
|
|
4103
|
+
resolve();
|
|
4141
4104
|
};
|
|
4142
4105
|
this.watcher.once("ready", readyHandler);
|
|
4143
4106
|
}),
|
|
4144
|
-
new Promise((
|
|
4107
|
+
new Promise((resolve) => {
|
|
4145
4108
|
setTimeout(() => {
|
|
4146
4109
|
if (!readyFired) {
|
|
4147
|
-
|
|
4110
|
+
resolve();
|
|
4148
4111
|
}
|
|
4149
4112
|
}, 1e3);
|
|
4150
4113
|
})
|
|
@@ -4175,29 +4138,47 @@ var FileWatcher = class {
|
|
|
4175
4138
|
if (!this.watcher) {
|
|
4176
4139
|
throw new Error("Cannot watch git - watcher not started");
|
|
4177
4140
|
}
|
|
4141
|
+
if (this.gitWatcher) {
|
|
4142
|
+
void this.gitWatcher.close().catch(() => {
|
|
4143
|
+
});
|
|
4144
|
+
this.gitWatcher = null;
|
|
4145
|
+
}
|
|
4178
4146
|
this.gitChangeHandler = onGitChange;
|
|
4179
|
-
|
|
4180
|
-
|
|
4181
|
-
|
|
4182
|
-
|
|
4183
|
-
|
|
4184
|
-
|
|
4185
|
-
|
|
4186
|
-
|
|
4187
|
-
|
|
4188
|
-
|
|
4147
|
+
const gitPaths = [
|
|
4148
|
+
"HEAD",
|
|
4149
|
+
"index",
|
|
4150
|
+
"refs/**",
|
|
4151
|
+
"MERGE_HEAD",
|
|
4152
|
+
"REBASE_HEAD",
|
|
4153
|
+
"CHERRY_PICK_HEAD",
|
|
4154
|
+
"logs/refs/stash"
|
|
4155
|
+
].map((p) => path3.join(this.rootDir, ".git", p).replace(/\\/g, "/"));
|
|
4156
|
+
this.gitWatcher = chokidar.watch(gitPaths, {
|
|
4157
|
+
persistent: true,
|
|
4158
|
+
ignoreInitial: true
|
|
4159
|
+
});
|
|
4160
|
+
this.gitWatcher.on("add", () => this.handleGitChange()).on("change", () => this.handleGitChange()).on("unlink", () => this.handleGitChange()).on("error", (error) => {
|
|
4161
|
+
try {
|
|
4162
|
+
const message = "[FileWatcher] Git watcher error: " + (error instanceof Error ? error.stack || error.message : String(error)) + "\n";
|
|
4163
|
+
process.stderr.write(message);
|
|
4164
|
+
} catch {
|
|
4165
|
+
}
|
|
4166
|
+
});
|
|
4189
4167
|
}
|
|
4190
4168
|
/**
|
|
4191
4169
|
* Check if a filepath is a git-related change
|
|
4192
4170
|
*/
|
|
4193
4171
|
isGitChange(filepath) {
|
|
4194
4172
|
const normalized = filepath.replace(/\\/g, "/");
|
|
4195
|
-
return normalized.
|
|
4173
|
+
return normalized.startsWith(".git/") || normalized.includes("/.git/");
|
|
4196
4174
|
}
|
|
4197
4175
|
/**
|
|
4198
4176
|
* Handle git-related file changes with debouncing
|
|
4199
4177
|
*/
|
|
4200
4178
|
handleGitChange() {
|
|
4179
|
+
if (!this.gitChangeHandler) {
|
|
4180
|
+
return;
|
|
4181
|
+
}
|
|
4201
4182
|
if (this.gitChangeTimer) {
|
|
4202
4183
|
clearTimeout(this.gitChangeTimer);
|
|
4203
4184
|
}
|
|
@@ -4218,8 +4199,7 @@ var FileWatcher = class {
|
|
|
4218
4199
|
* before starting a new batch to prevent race conditions.
|
|
4219
4200
|
*/
|
|
4220
4201
|
handleChange(type, filepath) {
|
|
4221
|
-
if (this.
|
|
4222
|
-
this.handleGitChange();
|
|
4202
|
+
if (this.isGitChange(filepath)) {
|
|
4223
4203
|
return;
|
|
4224
4204
|
}
|
|
4225
4205
|
if (!this.onChangeHandler) {
|
|
@@ -4368,6 +4348,13 @@ var FileWatcher = class {
|
|
|
4368
4348
|
if (!this.watcher) {
|
|
4369
4349
|
return;
|
|
4370
4350
|
}
|
|
4351
|
+
if (this.gitWatcher) {
|
|
4352
|
+
try {
|
|
4353
|
+
await this.gitWatcher.close();
|
|
4354
|
+
} catch {
|
|
4355
|
+
}
|
|
4356
|
+
this.gitWatcher = null;
|
|
4357
|
+
}
|
|
4371
4358
|
const handler = this.onChangeHandler;
|
|
4372
4359
|
this.onChangeHandler = null;
|
|
4373
4360
|
this.gitChangeHandler = null;
|
|
@@ -4376,7 +4363,7 @@ var FileWatcher = class {
|
|
|
4376
4363
|
this.gitChangeTimer = null;
|
|
4377
4364
|
}
|
|
4378
4365
|
while (this.batchInProgress) {
|
|
4379
|
-
await new Promise((
|
|
4366
|
+
await new Promise((resolve) => setTimeout(resolve, 50));
|
|
4380
4367
|
}
|
|
4381
4368
|
if (this.batchTimer) {
|
|
4382
4369
|
clearTimeout(this.batchTimer);
|
|
@@ -4909,8 +4896,8 @@ function getErrorMap() {
|
|
|
4909
4896
|
|
|
4910
4897
|
// ../../node_modules/zod/v3/helpers/parseUtil.js
|
|
4911
4898
|
var makeIssue = (params) => {
|
|
4912
|
-
const { data, path:
|
|
4913
|
-
const fullPath = [...
|
|
4899
|
+
const { data, path: path7, errorMaps, issueData } = params;
|
|
4900
|
+
const fullPath = [...path7, ...issueData.path || []];
|
|
4914
4901
|
const fullIssue = {
|
|
4915
4902
|
...issueData,
|
|
4916
4903
|
path: fullPath
|
|
@@ -5026,11 +5013,11 @@ var errorUtil;
|
|
|
5026
5013
|
|
|
5027
5014
|
// ../../node_modules/zod/v3/types.js
|
|
5028
5015
|
var ParseInputLazyPath = class {
|
|
5029
|
-
constructor(parent, value,
|
|
5016
|
+
constructor(parent, value, path7, key) {
|
|
5030
5017
|
this._cachedPath = [];
|
|
5031
5018
|
this.parent = parent;
|
|
5032
5019
|
this.data = value;
|
|
5033
|
-
this._path =
|
|
5020
|
+
this._path = path7;
|
|
5034
5021
|
this._key = key;
|
|
5035
5022
|
}
|
|
5036
5023
|
get path() {
|
|
@@ -8744,30 +8731,33 @@ function applyResponseBudget(result, maxChars = MAX_RESPONSE_CHARS) {
|
|
|
8744
8731
|
if (arrays.length === 0) {
|
|
8745
8732
|
return { result };
|
|
8746
8733
|
}
|
|
8747
|
-
|
|
8748
|
-
|
|
8749
|
-
|
|
8750
|
-
|
|
8734
|
+
const originalItemCount = arrays.reduce((sum, arr) => sum + arr.length, 0);
|
|
8735
|
+
truncateArrays(arrays, 10);
|
|
8736
|
+
if (measureSize(cloned) <= maxChars) {
|
|
8737
|
+
return buildResult(cloned, originalChars, 1, arrays, originalItemCount);
|
|
8751
8738
|
}
|
|
8739
|
+
dropArrayItems(arrays, cloned, maxChars);
|
|
8752
8740
|
if (measureSize(cloned) <= maxChars) {
|
|
8753
|
-
return buildResult(cloned, originalChars,
|
|
8741
|
+
return buildResult(cloned, originalChars, 2, arrays, originalItemCount);
|
|
8754
8742
|
}
|
|
8755
|
-
|
|
8743
|
+
truncateArrays(arrays, 3);
|
|
8744
|
+
return buildResult(cloned, originalChars, 3, arrays, originalItemCount);
|
|
8745
|
+
}
|
|
8746
|
+
function truncateArrays(arrays, maxLines) {
|
|
8756
8747
|
for (const arr of arrays) {
|
|
8757
|
-
|
|
8758
|
-
|
|
8759
|
-
currentSize = measureSize(cloned);
|
|
8748
|
+
for (const item of arr) {
|
|
8749
|
+
item.content = truncateContent(item.content, maxLines);
|
|
8760
8750
|
}
|
|
8761
8751
|
}
|
|
8762
|
-
|
|
8763
|
-
|
|
8764
|
-
|
|
8752
|
+
}
|
|
8753
|
+
function dropArrayItems(arrays, root, maxChars) {
|
|
8754
|
+
let currentSize = measureSize(root);
|
|
8765
8755
|
for (const arr of arrays) {
|
|
8766
|
-
|
|
8767
|
-
|
|
8756
|
+
while (arr.length > 1 && currentSize > maxChars) {
|
|
8757
|
+
arr.pop();
|
|
8758
|
+
currentSize = measureSize(root);
|
|
8768
8759
|
}
|
|
8769
8760
|
}
|
|
8770
|
-
return buildResult(cloned, originalChars, 3);
|
|
8771
8761
|
}
|
|
8772
8762
|
function truncateContent(content, maxLines) {
|
|
8773
8763
|
const lines = content.split("\n");
|
|
@@ -8796,15 +8786,19 @@ function walk(node, found) {
|
|
|
8796
8786
|
walk(value, found);
|
|
8797
8787
|
}
|
|
8798
8788
|
}
|
|
8799
|
-
function buildResult(cloned, originalChars, phase) {
|
|
8789
|
+
function buildResult(cloned, originalChars, phase, arrays, originalItemCount) {
|
|
8800
8790
|
const finalChars = measureSize(cloned);
|
|
8791
|
+
const finalItemCount = arrays.reduce((sum, arr) => sum + arr.length, 0);
|
|
8792
|
+
const message = finalItemCount < originalItemCount ? `Showing ${finalItemCount} of ${originalItemCount} results (truncated). Use narrower filters or smaller limit for complete results.` : `Showing all ${finalItemCount} results (content trimmed to fit). Use narrower filters or smaller limit for complete results.`;
|
|
8801
8793
|
return {
|
|
8802
8794
|
result: cloned,
|
|
8803
8795
|
truncation: {
|
|
8804
8796
|
originalChars,
|
|
8805
8797
|
finalChars,
|
|
8798
|
+
originalItemCount,
|
|
8799
|
+
finalItemCount,
|
|
8806
8800
|
phase,
|
|
8807
|
-
message
|
|
8801
|
+
message
|
|
8808
8802
|
}
|
|
8809
8803
|
};
|
|
8810
8804
|
}
|
|
@@ -9106,9 +9100,21 @@ async function handleFindSimilar(args, ctx) {
|
|
|
9106
9100
|
}
|
|
9107
9101
|
|
|
9108
9102
|
// src/mcp/utils/path-matching.ts
|
|
9109
|
-
|
|
9110
|
-
|
|
9111
|
-
|
|
9103
|
+
import { getSupportedExtensions } from "@liendev/core";
|
|
9104
|
+
function escapeRegex(str) {
|
|
9105
|
+
return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
|
9106
|
+
}
|
|
9107
|
+
var extensionRegex = null;
|
|
9108
|
+
function getExtensionRegex() {
|
|
9109
|
+
if (!extensionRegex) {
|
|
9110
|
+
const extPattern = getSupportedExtensions().map(escapeRegex).join("|");
|
|
9111
|
+
extensionRegex = new RegExp(`\\.(${extPattern})$`);
|
|
9112
|
+
}
|
|
9113
|
+
return extensionRegex;
|
|
9114
|
+
}
|
|
9115
|
+
function normalizePath(path7, workspaceRoot) {
|
|
9116
|
+
let normalized = path7.replace(/['"]/g, "").trim().replace(/\\/g, "/");
|
|
9117
|
+
normalized = normalized.replace(getExtensionRegex(), "");
|
|
9112
9118
|
if (normalized.startsWith(workspaceRoot + "/")) {
|
|
9113
9119
|
normalized = normalized.substring(workspaceRoot.length + 1);
|
|
9114
9120
|
}
|
|
@@ -9246,10 +9252,10 @@ async function findRelatedChunks(filepaths, fileChunksMap, ctx) {
|
|
|
9246
9252
|
}
|
|
9247
9253
|
function createPathCache(workspaceRoot) {
|
|
9248
9254
|
const cache = /* @__PURE__ */ new Map();
|
|
9249
|
-
const normalize = (
|
|
9250
|
-
if (cache.has(
|
|
9251
|
-
const normalized = normalizePath(
|
|
9252
|
-
cache.set(
|
|
9255
|
+
const normalize = (path7) => {
|
|
9256
|
+
if (cache.has(path7)) return cache.get(path7);
|
|
9257
|
+
const normalized = normalizePath(path7, workspaceRoot);
|
|
9258
|
+
cache.set(path7, normalized);
|
|
9253
9259
|
return normalized;
|
|
9254
9260
|
};
|
|
9255
9261
|
return { normalize, cache };
|
|
@@ -9462,7 +9468,9 @@ import { QdrantDB as QdrantDB3 } from "@liendev/core";
|
|
|
9462
9468
|
|
|
9463
9469
|
// src/mcp/handlers/dependency-analyzer.ts
|
|
9464
9470
|
import { QdrantDB as QdrantDB2 } from "@liendev/core";
|
|
9465
|
-
|
|
9471
|
+
import {
|
|
9472
|
+
findTransitiveDependents
|
|
9473
|
+
} from "@liendev/core";
|
|
9466
9474
|
var COMPLEXITY_THRESHOLDS = {
|
|
9467
9475
|
HIGH_COMPLEXITY_DEPENDENT: 10,
|
|
9468
9476
|
// Individual file is complex
|
|
@@ -9479,30 +9487,144 @@ var COMPLEXITY_THRESHOLDS = {
|
|
|
9479
9487
|
MEDIUM_MAX: 15
|
|
9480
9488
|
// Occasional branching
|
|
9481
9489
|
};
|
|
9482
|
-
|
|
9483
|
-
|
|
9490
|
+
function collectNamedSymbolsFromChunk(chunk, normalizedTarget, normalizePathCached, symbols) {
|
|
9491
|
+
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9492
|
+
if (!importedSymbols || typeof importedSymbols !== "object") return;
|
|
9493
|
+
for (const [importPath, syms] of Object.entries(importedSymbols)) {
|
|
9494
|
+
if (matchesFile(normalizePathCached(importPath), normalizedTarget)) {
|
|
9495
|
+
for (const sym of syms) symbols.add(sym);
|
|
9496
|
+
}
|
|
9497
|
+
}
|
|
9498
|
+
}
|
|
9499
|
+
function collectRawImportSentinel(chunk, normalizedTarget, normalizePathCached, symbols) {
|
|
9500
|
+
const imports = chunk.metadata.imports || [];
|
|
9501
|
+
for (const imp of imports) {
|
|
9502
|
+
if (matchesFile(normalizePathCached(imp), normalizedTarget)) symbols.add("*");
|
|
9503
|
+
}
|
|
9504
|
+
}
|
|
9505
|
+
function collectSymbolsFromChunk(chunk, normalizedTarget, normalizePathCached, symbols) {
|
|
9506
|
+
collectNamedSymbolsFromChunk(chunk, normalizedTarget, normalizePathCached, symbols);
|
|
9507
|
+
collectRawImportSentinel(chunk, normalizedTarget, normalizePathCached, symbols);
|
|
9508
|
+
}
|
|
9509
|
+
function collectImportedSymbolsFromTarget(chunks, normalizedTarget, normalizePathCached) {
|
|
9510
|
+
const symbols = /* @__PURE__ */ new Set();
|
|
9511
|
+
for (const chunk of chunks) {
|
|
9512
|
+
collectSymbolsFromChunk(chunk, normalizedTarget, normalizePathCached, symbols);
|
|
9513
|
+
}
|
|
9514
|
+
return symbols;
|
|
9515
|
+
}
|
|
9516
|
+
function collectExportsFromChunks(chunks) {
|
|
9517
|
+
const allExports = /* @__PURE__ */ new Set();
|
|
9518
|
+
for (const chunk of chunks) {
|
|
9519
|
+
for (const exp of chunk.metadata.exports || []) allExports.add(exp);
|
|
9520
|
+
}
|
|
9521
|
+
return allExports;
|
|
9522
|
+
}
|
|
9523
|
+
function findReExportedSymbols(importsFromTarget, allExports) {
|
|
9524
|
+
if (importsFromTarget.has("*")) return [...allExports];
|
|
9525
|
+
for (const sym of importsFromTarget) {
|
|
9526
|
+
if (sym.startsWith("* as ")) return [...allExports];
|
|
9527
|
+
}
|
|
9528
|
+
const reExported = [];
|
|
9529
|
+
for (const sym of importsFromTarget) {
|
|
9530
|
+
if (allExports.has(sym)) reExported.push(sym);
|
|
9531
|
+
}
|
|
9532
|
+
return reExported;
|
|
9533
|
+
}
|
|
9534
|
+
function buildReExportGraph(allChunksByFile, normalizedTarget, normalizePathCached) {
|
|
9535
|
+
const reExporters = [];
|
|
9536
|
+
for (const [filepath, chunks] of allChunksByFile.entries()) {
|
|
9537
|
+
if (matchesFile(filepath, normalizedTarget)) continue;
|
|
9538
|
+
const importsFromTarget = collectImportedSymbolsFromTarget(chunks, normalizedTarget, normalizePathCached);
|
|
9539
|
+
const allExports = collectExportsFromChunks(chunks);
|
|
9540
|
+
if (importsFromTarget.size === 0 || allExports.size === 0) continue;
|
|
9541
|
+
const reExportedSymbols = findReExportedSymbols(importsFromTarget, allExports);
|
|
9542
|
+
if (reExportedSymbols.length > 0) {
|
|
9543
|
+
reExporters.push({ filepath, reExportedSymbols });
|
|
9544
|
+
}
|
|
9545
|
+
}
|
|
9546
|
+
return reExporters;
|
|
9547
|
+
}
|
|
9548
|
+
function fileImportsSymbolFromAny(chunks, targetSymbol, targetPaths, normalizePathCached) {
|
|
9549
|
+
return chunks.some((chunk) => {
|
|
9550
|
+
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9551
|
+
if (!importedSymbols) return false;
|
|
9552
|
+
for (const [importPath, symbols] of Object.entries(importedSymbols)) {
|
|
9553
|
+
const normalizedImport = normalizePathCached(importPath);
|
|
9554
|
+
const matchesAny = targetPaths.some((tp) => matchesFile(normalizedImport, tp));
|
|
9555
|
+
if (matchesAny) {
|
|
9556
|
+
if (symbols.includes(targetSymbol)) return true;
|
|
9557
|
+
if (symbols.some((s) => s.startsWith("* as "))) return true;
|
|
9558
|
+
}
|
|
9559
|
+
}
|
|
9560
|
+
return false;
|
|
9561
|
+
});
|
|
9562
|
+
}
|
|
9563
|
+
function addChunkToImportIndex(chunk, normalizePathCached, importIndex) {
|
|
9564
|
+
const imports = chunk.metadata.imports || [];
|
|
9565
|
+
for (const imp of imports) {
|
|
9566
|
+
const normalizedImport = normalizePathCached(imp);
|
|
9567
|
+
if (!importIndex.has(normalizedImport)) {
|
|
9568
|
+
importIndex.set(normalizedImport, []);
|
|
9569
|
+
}
|
|
9570
|
+
importIndex.get(normalizedImport).push(chunk);
|
|
9571
|
+
}
|
|
9572
|
+
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9573
|
+
if (importedSymbols && typeof importedSymbols === "object") {
|
|
9574
|
+
for (const modulePath of Object.keys(importedSymbols)) {
|
|
9575
|
+
const normalizedImport = normalizePathCached(modulePath);
|
|
9576
|
+
if (!importIndex.has(normalizedImport)) {
|
|
9577
|
+
importIndex.set(normalizedImport, []);
|
|
9578
|
+
}
|
|
9579
|
+
importIndex.get(normalizedImport).push(chunk);
|
|
9580
|
+
}
|
|
9581
|
+
}
|
|
9582
|
+
}
|
|
9583
|
+
function addChunkToFileMap(chunk, normalizePathCached, fileMap) {
|
|
9584
|
+
const canonical = normalizePathCached(chunk.metadata.file);
|
|
9585
|
+
if (!fileMap.has(canonical)) {
|
|
9586
|
+
fileMap.set(canonical, []);
|
|
9587
|
+
}
|
|
9588
|
+
fileMap.get(canonical).push(chunk);
|
|
9589
|
+
}
|
|
9590
|
+
async function scanChunksPaginated(vectorDB, crossRepo, log, normalizePathCached) {
|
|
9591
|
+
const importIndex = /* @__PURE__ */ new Map();
|
|
9592
|
+
const allChunksByFile = /* @__PURE__ */ new Map();
|
|
9593
|
+
let totalChunks = 0;
|
|
9484
9594
|
if (crossRepo && vectorDB instanceof QdrantDB2) {
|
|
9485
|
-
|
|
9486
|
-
|
|
9487
|
-
|
|
9488
|
-
|
|
9595
|
+
const CROSS_REPO_LIMIT = 1e5;
|
|
9596
|
+
const allChunks = await vectorDB.scanCrossRepo({ limit: CROSS_REPO_LIMIT });
|
|
9597
|
+
totalChunks = allChunks.length;
|
|
9598
|
+
const hitLimit = totalChunks >= CROSS_REPO_LIMIT;
|
|
9599
|
+
if (hitLimit) {
|
|
9600
|
+
log(`Warning: cross-repo scan hit ${CROSS_REPO_LIMIT} chunk limit. Results may be incomplete.`, "warning");
|
|
9601
|
+
}
|
|
9602
|
+
for (const chunk of allChunks) {
|
|
9603
|
+
addChunkToImportIndex(chunk, normalizePathCached, importIndex);
|
|
9604
|
+
addChunkToFileMap(chunk, normalizePathCached, allChunksByFile);
|
|
9489
9605
|
}
|
|
9490
|
-
|
|
9606
|
+
return { importIndex, allChunksByFile, totalChunks, hitLimit };
|
|
9491
9607
|
}
|
|
9492
|
-
|
|
9493
|
-
|
|
9494
|
-
|
|
9608
|
+
if (crossRepo) {
|
|
9609
|
+
log("Warning: crossRepo=true requires Qdrant backend. Falling back to single-repo paginated scan.", "warning");
|
|
9610
|
+
}
|
|
9611
|
+
for await (const page of vectorDB.scanPaginated({ pageSize: 1e3 })) {
|
|
9612
|
+
totalChunks += page.length;
|
|
9613
|
+
for (const chunk of page) {
|
|
9614
|
+
addChunkToImportIndex(chunk, normalizePathCached, importIndex);
|
|
9615
|
+
addChunkToFileMap(chunk, normalizePathCached, allChunksByFile);
|
|
9616
|
+
}
|
|
9495
9617
|
}
|
|
9496
|
-
return {
|
|
9618
|
+
return { importIndex, allChunksByFile, totalChunks, hitLimit: false };
|
|
9497
9619
|
}
|
|
9498
9620
|
function createPathNormalizer() {
|
|
9499
9621
|
const workspaceRoot = process.cwd().replace(/\\/g, "/");
|
|
9500
9622
|
const cache = /* @__PURE__ */ new Map();
|
|
9501
|
-
return (
|
|
9502
|
-
if (!cache.has(
|
|
9503
|
-
cache.set(
|
|
9623
|
+
return (path7) => {
|
|
9624
|
+
if (!cache.has(path7)) {
|
|
9625
|
+
cache.set(path7, normalizePath(path7, workspaceRoot));
|
|
9504
9626
|
}
|
|
9505
|
-
return cache.get(
|
|
9627
|
+
return cache.get(path7);
|
|
9506
9628
|
};
|
|
9507
9629
|
}
|
|
9508
9630
|
function groupChunksByFile(chunks) {
|
|
@@ -9516,10 +9638,10 @@ function groupChunksByFile(chunks) {
|
|
|
9516
9638
|
}
|
|
9517
9639
|
return chunksByFile;
|
|
9518
9640
|
}
|
|
9519
|
-
function buildDependentsList(chunksByFile, symbol, normalizedTarget, normalizePathCached,
|
|
9641
|
+
function buildDependentsList(chunksByFile, symbol, normalizedTarget, normalizePathCached, targetFileChunks, filepath, log, reExporterPaths = []) {
|
|
9520
9642
|
if (symbol) {
|
|
9521
|
-
validateSymbolExport(
|
|
9522
|
-
return findSymbolUsages(chunksByFile, symbol, normalizedTarget, normalizePathCached);
|
|
9643
|
+
validateSymbolExport(targetFileChunks, symbol, filepath, log);
|
|
9644
|
+
return findSymbolUsages(chunksByFile, symbol, normalizedTarget, normalizePathCached, reExporterPaths);
|
|
9523
9645
|
}
|
|
9524
9646
|
const dependents = Array.from(chunksByFile.keys()).map((fp) => ({
|
|
9525
9647
|
filepath: fp,
|
|
@@ -9527,33 +9649,69 @@ function buildDependentsList(chunksByFile, symbol, normalizedTarget, normalizePa
|
|
|
9527
9649
|
}));
|
|
9528
9650
|
return { dependents, totalUsageCount: void 0 };
|
|
9529
9651
|
}
|
|
9530
|
-
function validateSymbolExport(
|
|
9531
|
-
const
|
|
9532
|
-
|
|
9533
|
-
|
|
9534
|
-
|
|
9535
|
-
if (!targetFileExportsSymbol) {
|
|
9652
|
+
function validateSymbolExport(targetFileChunks, symbol, filepath, log) {
|
|
9653
|
+
const exportsSymbol = targetFileChunks.some(
|
|
9654
|
+
(chunk) => chunk.metadata.exports?.includes(symbol)
|
|
9655
|
+
);
|
|
9656
|
+
if (!exportsSymbol) {
|
|
9536
9657
|
log(`Warning: Symbol "${symbol}" not found in exports of ${filepath}`, "warning");
|
|
9537
9658
|
}
|
|
9538
9659
|
}
|
|
9660
|
+
function mergeChunksByFile(target, source) {
|
|
9661
|
+
for (const [fp, chunks] of source.entries()) {
|
|
9662
|
+
const existing = target.get(fp);
|
|
9663
|
+
if (existing) {
|
|
9664
|
+
existing.push(...chunks);
|
|
9665
|
+
} else {
|
|
9666
|
+
target.set(fp, chunks);
|
|
9667
|
+
}
|
|
9668
|
+
}
|
|
9669
|
+
}
|
|
9670
|
+
function mergeTransitiveDependents(reExporters, importIndex, normalizedTarget, normalizePathCached, allChunksByFile, chunksByFile, log) {
|
|
9671
|
+
const existingFiles = new Set(chunksByFile.keys());
|
|
9672
|
+
const transitiveChunks = findTransitiveDependents(
|
|
9673
|
+
reExporters.map((r) => r.filepath),
|
|
9674
|
+
importIndex,
|
|
9675
|
+
normalizedTarget,
|
|
9676
|
+
normalizePathCached,
|
|
9677
|
+
allChunksByFile,
|
|
9678
|
+
existingFiles
|
|
9679
|
+
);
|
|
9680
|
+
if (transitiveChunks.length > 0) {
|
|
9681
|
+
const transitiveByFile = groupChunksByFile(transitiveChunks);
|
|
9682
|
+
mergeChunksByFile(chunksByFile, transitiveByFile);
|
|
9683
|
+
log(`Found ${transitiveByFile.size} additional dependents via re-export chains`);
|
|
9684
|
+
}
|
|
9685
|
+
}
|
|
9539
9686
|
async function findDependents(vectorDB, filepath, crossRepo, log, symbol) {
|
|
9540
|
-
const { allChunks, hitLimit } = await scanChunks(vectorDB, crossRepo, log);
|
|
9541
|
-
log(`Scanning ${allChunks.length} chunks for imports...`);
|
|
9542
9687
|
const normalizePathCached = createPathNormalizer();
|
|
9543
9688
|
const normalizedTarget = normalizePathCached(filepath);
|
|
9544
|
-
const importIndex =
|
|
9689
|
+
const { importIndex, allChunksByFile, totalChunks, hitLimit } = await scanChunksPaginated(
|
|
9690
|
+
vectorDB,
|
|
9691
|
+
crossRepo,
|
|
9692
|
+
log,
|
|
9693
|
+
normalizePathCached
|
|
9694
|
+
);
|
|
9695
|
+
log(`Scanned ${totalChunks} chunks for imports...`);
|
|
9545
9696
|
const dependentChunks = findDependentChunks(importIndex, normalizedTarget);
|
|
9546
9697
|
const chunksByFile = groupChunksByFile(dependentChunks);
|
|
9698
|
+
const reExporters = buildReExportGraph(allChunksByFile, normalizedTarget, normalizePathCached);
|
|
9699
|
+
if (reExporters.length > 0) {
|
|
9700
|
+
mergeTransitiveDependents(reExporters, importIndex, normalizedTarget, normalizePathCached, allChunksByFile, chunksByFile, log);
|
|
9701
|
+
}
|
|
9547
9702
|
const fileComplexities = calculateFileComplexities(chunksByFile);
|
|
9548
9703
|
const complexityMetrics = calculateOverallComplexityMetrics(fileComplexities);
|
|
9704
|
+
const targetFileChunks = symbol ? allChunksByFile.get(normalizedTarget) ?? [] : [];
|
|
9705
|
+
const reExporterPaths = reExporters.map((re) => re.filepath);
|
|
9549
9706
|
const { dependents, totalUsageCount } = buildDependentsList(
|
|
9550
9707
|
chunksByFile,
|
|
9551
9708
|
symbol,
|
|
9552
9709
|
normalizedTarget,
|
|
9553
9710
|
normalizePathCached,
|
|
9554
|
-
|
|
9711
|
+
targetFileChunks,
|
|
9555
9712
|
filepath,
|
|
9556
|
-
log
|
|
9713
|
+
log,
|
|
9714
|
+
reExporterPaths
|
|
9557
9715
|
);
|
|
9558
9716
|
dependents.sort((a, b) => {
|
|
9559
9717
|
if (a.isTestFile === b.isTestFile) return 0;
|
|
@@ -9561,6 +9719,7 @@ async function findDependents(vectorDB, filepath, crossRepo, log, symbol) {
|
|
|
9561
9719
|
});
|
|
9562
9720
|
const testDependentCount = dependents.filter((f) => f.isTestFile).length;
|
|
9563
9721
|
const productionDependentCount = dependents.length - testDependentCount;
|
|
9722
|
+
const allChunks = crossRepo ? Array.from(allChunksByFile.values()).flat() : [];
|
|
9564
9723
|
return {
|
|
9565
9724
|
dependents,
|
|
9566
9725
|
productionDependentCount,
|
|
@@ -9573,29 +9732,6 @@ async function findDependents(vectorDB, filepath, crossRepo, log, symbol) {
|
|
|
9573
9732
|
totalUsageCount
|
|
9574
9733
|
};
|
|
9575
9734
|
}
|
|
9576
|
-
function buildImportIndex(allChunks, normalizePathCached) {
|
|
9577
|
-
const importIndex = /* @__PURE__ */ new Map();
|
|
9578
|
-
const addToIndex = (importPath, chunk) => {
|
|
9579
|
-
const normalizedImport = normalizePathCached(importPath);
|
|
9580
|
-
if (!importIndex.has(normalizedImport)) {
|
|
9581
|
-
importIndex.set(normalizedImport, []);
|
|
9582
|
-
}
|
|
9583
|
-
importIndex.get(normalizedImport).push(chunk);
|
|
9584
|
-
};
|
|
9585
|
-
for (const chunk of allChunks) {
|
|
9586
|
-
const imports = chunk.metadata.imports || [];
|
|
9587
|
-
for (const imp of imports) {
|
|
9588
|
-
addToIndex(imp, chunk);
|
|
9589
|
-
}
|
|
9590
|
-
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9591
|
-
if (importedSymbols && typeof importedSymbols === "object") {
|
|
9592
|
-
for (const modulePath of Object.keys(importedSymbols)) {
|
|
9593
|
-
addToIndex(modulePath, chunk);
|
|
9594
|
-
}
|
|
9595
|
-
}
|
|
9596
|
-
}
|
|
9597
|
-
return importIndex;
|
|
9598
|
-
}
|
|
9599
9735
|
function findDependentChunks(importIndex, normalizedTarget) {
|
|
9600
9736
|
const dependentChunks = [];
|
|
9601
9737
|
const seenChunkIds = /* @__PURE__ */ new Set();
|
|
@@ -9713,11 +9849,12 @@ function groupDependentsByRepo(dependents, chunks) {
|
|
|
9713
9849
|
}
|
|
9714
9850
|
return grouped;
|
|
9715
9851
|
}
|
|
9716
|
-
function findSymbolUsages(chunksByFile, targetSymbol, normalizedTarget, normalizePathCached) {
|
|
9852
|
+
function findSymbolUsages(chunksByFile, targetSymbol, normalizedTarget, normalizePathCached, reExporterPaths = []) {
|
|
9717
9853
|
const dependents = [];
|
|
9718
9854
|
let totalUsageCount = 0;
|
|
9855
|
+
const allTargetPaths = [normalizedTarget, ...reExporterPaths];
|
|
9719
9856
|
for (const [filepath, chunks] of chunksByFile.entries()) {
|
|
9720
|
-
if (!
|
|
9857
|
+
if (!fileImportsSymbolFromAny(chunks, targetSymbol, allTargetPaths, normalizePathCached)) {
|
|
9721
9858
|
continue;
|
|
9722
9859
|
}
|
|
9723
9860
|
const usages = extractSymbolUsagesFromChunks(chunks, targetSymbol);
|
|
@@ -9730,20 +9867,6 @@ function findSymbolUsages(chunksByFile, targetSymbol, normalizedTarget, normaliz
|
|
|
9730
9867
|
}
|
|
9731
9868
|
return { dependents, totalUsageCount };
|
|
9732
9869
|
}
|
|
9733
|
-
function fileImportsSymbol(chunks, targetSymbol, normalizedTarget, normalizePathCached) {
|
|
9734
|
-
return chunks.some((chunk) => {
|
|
9735
|
-
const importedSymbols = chunk.metadata.importedSymbols;
|
|
9736
|
-
if (!importedSymbols) return false;
|
|
9737
|
-
for (const [importPath, symbols] of Object.entries(importedSymbols)) {
|
|
9738
|
-
const normalizedImport = normalizePathCached(importPath);
|
|
9739
|
-
if (matchesFile(normalizedImport, normalizedTarget)) {
|
|
9740
|
-
if (symbols.includes(targetSymbol)) return true;
|
|
9741
|
-
if (symbols.some((s) => s.startsWith("* as "))) return true;
|
|
9742
|
-
}
|
|
9743
|
-
}
|
|
9744
|
-
return false;
|
|
9745
|
-
});
|
|
9746
|
-
}
|
|
9747
9870
|
function extractSymbolUsagesFromChunks(chunks, targetSymbol) {
|
|
9748
9871
|
const usages = [];
|
|
9749
9872
|
for (const chunk of chunks) {
|
|
@@ -10192,238 +10315,106 @@ function createReindexStateManager() {
|
|
|
10192
10315
|
};
|
|
10193
10316
|
}
|
|
10194
10317
|
|
|
10195
|
-
// src/mcp/
|
|
10196
|
-
|
|
10197
|
-
|
|
10198
|
-
|
|
10199
|
-
|
|
10200
|
-
|
|
10201
|
-
|
|
10202
|
-
|
|
10203
|
-
|
|
10204
|
-
}
|
|
10205
|
-
|
|
10206
|
-
|
|
10207
|
-
|
|
10208
|
-
|
|
10209
|
-
|
|
10210
|
-
|
|
10211
|
-
|
|
10212
|
-
|
|
10213
|
-
|
|
10214
|
-
|
|
10215
|
-
|
|
10216
|
-
|
|
10318
|
+
// src/mcp/git-detection.ts
|
|
10319
|
+
import fs4 from "fs/promises";
|
|
10320
|
+
import {
|
|
10321
|
+
GitStateTracker,
|
|
10322
|
+
indexMultipleFiles as indexMultipleFiles2,
|
|
10323
|
+
isGitAvailable,
|
|
10324
|
+
isGitRepo as isGitRepo2,
|
|
10325
|
+
DEFAULT_GIT_POLL_INTERVAL_MS as DEFAULT_GIT_POLL_INTERVAL_MS2,
|
|
10326
|
+
createGitignoreFilter as createGitignoreFilter2
|
|
10327
|
+
} from "@liendev/core";
|
|
10328
|
+
|
|
10329
|
+
// src/mcp/file-change-handler.ts
|
|
10330
|
+
import fs3 from "fs/promises";
|
|
10331
|
+
import {
|
|
10332
|
+
indexMultipleFiles,
|
|
10333
|
+
indexSingleFile,
|
|
10334
|
+
ManifestManager,
|
|
10335
|
+
computeContentHash,
|
|
10336
|
+
normalizeToRelativePath,
|
|
10337
|
+
createGitignoreFilter
|
|
10338
|
+
} from "@liendev/core";
|
|
10339
|
+
async function handleFileDeletion(filepath, vectorDB, log) {
|
|
10340
|
+
log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
|
|
10341
|
+
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
10342
|
+
try {
|
|
10343
|
+
await vectorDB.deleteByFile(filepath);
|
|
10344
|
+
await manifest.removeFile(filepath);
|
|
10345
|
+
log(`\u2713 Removed ${filepath} from index`);
|
|
10346
|
+
} catch (error) {
|
|
10347
|
+
log(`Failed to remove ${filepath}: ${error}`, "warning");
|
|
10348
|
+
throw error;
|
|
10217
10349
|
}
|
|
10218
|
-
log("Loading embedding model...");
|
|
10219
|
-
await embeddings.initialize();
|
|
10220
|
-
log("Loading vector database...");
|
|
10221
|
-
await vectorDB.initialize();
|
|
10222
|
-
log("Embeddings and vector DB ready");
|
|
10223
|
-
return { embeddings, vectorDB };
|
|
10224
10350
|
}
|
|
10225
|
-
async function
|
|
10226
|
-
const
|
|
10227
|
-
|
|
10228
|
-
|
|
10229
|
-
log(
|
|
10351
|
+
async function handleBatchDeletions(deletedFiles, vectorDB, log) {
|
|
10352
|
+
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
10353
|
+
const failures = [];
|
|
10354
|
+
for (const filepath of deletedFiles) {
|
|
10355
|
+
log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
|
|
10230
10356
|
try {
|
|
10231
|
-
|
|
10232
|
-
await
|
|
10233
|
-
log(
|
|
10357
|
+
await vectorDB.deleteByFile(filepath);
|
|
10358
|
+
await manifest.removeFile(filepath);
|
|
10359
|
+
log(`\u2713 Removed ${filepath} from index`);
|
|
10234
10360
|
} catch (error) {
|
|
10235
|
-
log(
|
|
10236
|
-
|
|
10361
|
+
log(`Failed to remove ${filepath}: ${error}`, "warning");
|
|
10362
|
+
failures.push(filepath);
|
|
10237
10363
|
}
|
|
10238
10364
|
}
|
|
10365
|
+
if (failures.length > 0) {
|
|
10366
|
+
throw new Error(`Failed to delete ${failures.length} file(s): ${failures.join(", ")}`);
|
|
10367
|
+
}
|
|
10239
10368
|
}
|
|
10240
|
-
async function
|
|
10241
|
-
|
|
10242
|
-
const
|
|
10243
|
-
|
|
10244
|
-
|
|
10245
|
-
|
|
10246
|
-
|
|
10369
|
+
async function canSkipReindex(filepath, rootDir, vectorDB, log) {
|
|
10370
|
+
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
10371
|
+
const normalizedPath = normalizeToRelativePath(filepath, rootDir);
|
|
10372
|
+
const manifestData = await manifest.load();
|
|
10373
|
+
const existingEntry = manifestData?.files[normalizedPath];
|
|
10374
|
+
const { shouldReindex, newMtime } = await shouldReindexFile(filepath, existingEntry, log);
|
|
10375
|
+
if (!shouldReindex && newMtime !== void 0 && existingEntry) {
|
|
10376
|
+
const skipped = await manifest.transaction(async (data) => {
|
|
10377
|
+
const entry = data.files[normalizedPath];
|
|
10378
|
+
if (entry) {
|
|
10379
|
+
entry.lastModified = newMtime;
|
|
10380
|
+
return true;
|
|
10381
|
+
}
|
|
10382
|
+
return false;
|
|
10383
|
+
});
|
|
10384
|
+
return !!skipped;
|
|
10385
|
+
}
|
|
10386
|
+
return false;
|
|
10387
|
+
}
|
|
10388
|
+
async function handleSingleFileChange(filepath, type, rootDir, vectorDB, embeddings, log, reindexStateManager) {
|
|
10389
|
+
const action = type === "add" ? "added" : "changed";
|
|
10390
|
+
if (type === "change") {
|
|
10247
10391
|
try {
|
|
10248
|
-
|
|
10249
|
-
const duration = Date.now() - startTime;
|
|
10250
|
-
reindexStateManager.completeReindex(duration);
|
|
10251
|
-
log(`\u2713 Reindexed ${count} files in ${duration}ms`);
|
|
10392
|
+
if (await canSkipReindex(filepath, rootDir, vectorDB, log)) return;
|
|
10252
10393
|
} catch (error) {
|
|
10253
|
-
|
|
10254
|
-
throw error;
|
|
10394
|
+
log(`Content hash check failed, will reindex: ${error}`, "warning");
|
|
10255
10395
|
}
|
|
10256
|
-
}
|
|
10257
|
-
|
|
10396
|
+
}
|
|
10397
|
+
const startTime = Date.now();
|
|
10398
|
+
reindexStateManager.startReindex([filepath]);
|
|
10399
|
+
log(`\u{1F4DD} File ${action}: ${filepath}`);
|
|
10400
|
+
try {
|
|
10401
|
+
await indexSingleFile(filepath, vectorDB, embeddings, { verbose: false, rootDir });
|
|
10402
|
+
const duration = Date.now() - startTime;
|
|
10403
|
+
reindexStateManager.completeReindex(duration);
|
|
10404
|
+
} catch (error) {
|
|
10405
|
+
reindexStateManager.failReindex();
|
|
10406
|
+
log(`Failed to reindex ${filepath}: ${error}`, "warning");
|
|
10258
10407
|
}
|
|
10259
10408
|
}
|
|
10260
|
-
function
|
|
10261
|
-
|
|
10409
|
+
async function shouldReindexFile(filepath, existingEntry, log) {
|
|
10410
|
+
if (!existingEntry?.contentHash) {
|
|
10411
|
+
return { shouldReindex: true };
|
|
10412
|
+
}
|
|
10413
|
+
const currentHash = await computeContentHash(filepath);
|
|
10414
|
+
if (currentHash && currentHash === existingEntry.contentHash) {
|
|
10415
|
+
log(`\u23ED\uFE0F File mtime changed but content unchanged: ${filepath}`, "debug");
|
|
10262
10416
|
try {
|
|
10263
|
-
const
|
|
10264
|
-
if (changedFiles && changedFiles.length > 0) {
|
|
10265
|
-
const currentState = reindexStateManager.getState();
|
|
10266
|
-
if (currentState.inProgress) {
|
|
10267
|
-
log(
|
|
10268
|
-
`Background reindex already in progress (${currentState.pendingFiles.length} files pending), skipping git poll cycle`,
|
|
10269
|
-
"debug"
|
|
10270
|
-
);
|
|
10271
|
-
return;
|
|
10272
|
-
}
|
|
10273
|
-
const startTime = Date.now();
|
|
10274
|
-
reindexStateManager.startReindex(changedFiles);
|
|
10275
|
-
log(`\u{1F33F} Git change detected: ${changedFiles.length} files changed`);
|
|
10276
|
-
try {
|
|
10277
|
-
const count = await indexMultipleFiles(changedFiles, vectorDB, embeddings, { verbose: false });
|
|
10278
|
-
const duration = Date.now() - startTime;
|
|
10279
|
-
reindexStateManager.completeReindex(duration);
|
|
10280
|
-
log(`\u2713 Background reindex complete: ${count} files in ${duration}ms`);
|
|
10281
|
-
} catch (error) {
|
|
10282
|
-
reindexStateManager.failReindex();
|
|
10283
|
-
log(`Git background reindex failed: ${error}`, "warning");
|
|
10284
|
-
}
|
|
10285
|
-
}
|
|
10286
|
-
} catch (error) {
|
|
10287
|
-
log(`Git detection check failed: ${error}`, "warning");
|
|
10288
|
-
}
|
|
10289
|
-
}, DEFAULT_GIT_POLL_INTERVAL_MS2);
|
|
10290
|
-
}
|
|
10291
|
-
function createGitChangeHandler(gitTracker, vectorDB, embeddings, _verbose, log, reindexStateManager) {
|
|
10292
|
-
let gitReindexInProgress = false;
|
|
10293
|
-
let lastGitReindexTime = 0;
|
|
10294
|
-
const GIT_REINDEX_COOLDOWN_MS = 5e3;
|
|
10295
|
-
return async () => {
|
|
10296
|
-
const { inProgress: globalInProgress } = reindexStateManager.getState();
|
|
10297
|
-
if (gitReindexInProgress || globalInProgress) {
|
|
10298
|
-
log("Git reindex already in progress, skipping", "debug");
|
|
10299
|
-
return;
|
|
10300
|
-
}
|
|
10301
|
-
const timeSinceLastReindex = Date.now() - lastGitReindexTime;
|
|
10302
|
-
if (timeSinceLastReindex < GIT_REINDEX_COOLDOWN_MS) {
|
|
10303
|
-
log(`Git change ignored (cooldown: ${GIT_REINDEX_COOLDOWN_MS - timeSinceLastReindex}ms remaining)`, "debug");
|
|
10304
|
-
return;
|
|
10305
|
-
}
|
|
10306
|
-
log("\u{1F33F} Git change detected (event-driven)");
|
|
10307
|
-
const changedFiles = await gitTracker.detectChanges();
|
|
10308
|
-
if (!changedFiles || changedFiles.length === 0) {
|
|
10309
|
-
return;
|
|
10310
|
-
}
|
|
10311
|
-
gitReindexInProgress = true;
|
|
10312
|
-
const startTime = Date.now();
|
|
10313
|
-
reindexStateManager.startReindex(changedFiles);
|
|
10314
|
-
log(`Reindexing ${changedFiles.length} files from git change`);
|
|
10315
|
-
try {
|
|
10316
|
-
const count = await indexMultipleFiles(changedFiles, vectorDB, embeddings, { verbose: false });
|
|
10317
|
-
const duration = Date.now() - startTime;
|
|
10318
|
-
reindexStateManager.completeReindex(duration);
|
|
10319
|
-
log(`\u2713 Reindexed ${count} files in ${duration}ms`);
|
|
10320
|
-
lastGitReindexTime = Date.now();
|
|
10321
|
-
} catch (error) {
|
|
10322
|
-
reindexStateManager.failReindex();
|
|
10323
|
-
log(`Git reindex failed: ${error}`, "warning");
|
|
10324
|
-
} finally {
|
|
10325
|
-
gitReindexInProgress = false;
|
|
10326
|
-
}
|
|
10327
|
-
};
|
|
10328
|
-
}
|
|
10329
|
-
async function setupGitDetection(rootDir, vectorDB, embeddings, verbose, log, reindexStateManager, fileWatcher) {
|
|
10330
|
-
const gitAvailable = await isGitAvailable();
|
|
10331
|
-
const isRepo = await isGitRepo2(rootDir);
|
|
10332
|
-
if (!gitAvailable) {
|
|
10333
|
-
log("Git not available - git detection disabled");
|
|
10334
|
-
return { gitTracker: null, gitPollInterval: null };
|
|
10335
|
-
}
|
|
10336
|
-
if (!isRepo) {
|
|
10337
|
-
log("Not a git repository - git detection disabled");
|
|
10338
|
-
return { gitTracker: null, gitPollInterval: null };
|
|
10339
|
-
}
|
|
10340
|
-
log("\u2713 Detected git repository");
|
|
10341
|
-
const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);
|
|
10342
|
-
try {
|
|
10343
|
-
await handleGitStartup(gitTracker, vectorDB, embeddings, verbose, log, reindexStateManager);
|
|
10344
|
-
} catch (error) {
|
|
10345
|
-
log(`Failed to check git state on startup: ${error}`, "warning");
|
|
10346
|
-
}
|
|
10347
|
-
if (fileWatcher) {
|
|
10348
|
-
const gitChangeHandler = createGitChangeHandler(
|
|
10349
|
-
gitTracker,
|
|
10350
|
-
vectorDB,
|
|
10351
|
-
embeddings,
|
|
10352
|
-
verbose,
|
|
10353
|
-
log,
|
|
10354
|
-
reindexStateManager
|
|
10355
|
-
);
|
|
10356
|
-
fileWatcher.watchGit(gitChangeHandler);
|
|
10357
|
-
log("\u2713 Git detection enabled (event-driven via file watcher)");
|
|
10358
|
-
return { gitTracker, gitPollInterval: null };
|
|
10359
|
-
}
|
|
10360
|
-
const pollIntervalSeconds = DEFAULT_GIT_POLL_INTERVAL_MS2 / 1e3;
|
|
10361
|
-
log(`\u2713 Git detection enabled (polling fallback every ${pollIntervalSeconds}s)`);
|
|
10362
|
-
const gitPollInterval = createGitPollInterval(gitTracker, vectorDB, embeddings, verbose, log, reindexStateManager);
|
|
10363
|
-
return { gitTracker, gitPollInterval };
|
|
10364
|
-
}
|
|
10365
|
-
async function handleFileDeletion(filepath, vectorDB, log) {
|
|
10366
|
-
log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
|
|
10367
|
-
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
10368
|
-
try {
|
|
10369
|
-
await vectorDB.deleteByFile(filepath);
|
|
10370
|
-
await manifest.removeFile(filepath);
|
|
10371
|
-
log(`\u2713 Removed ${filepath} from index`);
|
|
10372
|
-
} catch (error) {
|
|
10373
|
-
log(`Failed to remove ${filepath}: ${error}`, "warning");
|
|
10374
|
-
throw error;
|
|
10375
|
-
}
|
|
10376
|
-
}
|
|
10377
|
-
async function handleSingleFileChange(filepath, type, vectorDB, embeddings, _verbose, log, reindexStateManager) {
|
|
10378
|
-
const action = type === "add" ? "added" : "changed";
|
|
10379
|
-
const rootDir = getRootDirFromDbPath(vectorDB.dbPath);
|
|
10380
|
-
if (type === "change") {
|
|
10381
|
-
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
10382
|
-
const normalizedPath = normalizeToRelativePath(filepath, rootDir);
|
|
10383
|
-
try {
|
|
10384
|
-
const existingEntry = await manifest.transaction(async (manifestData) => {
|
|
10385
|
-
return manifestData.files[normalizedPath];
|
|
10386
|
-
});
|
|
10387
|
-
const { shouldReindex, newMtime } = await shouldReindexFile(filepath, existingEntry, log);
|
|
10388
|
-
if (!shouldReindex && newMtime && existingEntry) {
|
|
10389
|
-
const skipReindex = await manifest.transaction(async (manifestData) => {
|
|
10390
|
-
const entry = manifestData.files[normalizedPath];
|
|
10391
|
-
if (entry) {
|
|
10392
|
-
entry.lastModified = newMtime;
|
|
10393
|
-
return true;
|
|
10394
|
-
}
|
|
10395
|
-
return false;
|
|
10396
|
-
});
|
|
10397
|
-
if (skipReindex) {
|
|
10398
|
-
return;
|
|
10399
|
-
}
|
|
10400
|
-
}
|
|
10401
|
-
} catch (error) {
|
|
10402
|
-
log(`Content hash check failed, will reindex: ${error}`, "warning");
|
|
10403
|
-
}
|
|
10404
|
-
}
|
|
10405
|
-
const startTime = Date.now();
|
|
10406
|
-
reindexStateManager.startReindex([filepath]);
|
|
10407
|
-
log(`\u{1F4DD} File ${action}: ${filepath}`);
|
|
10408
|
-
try {
|
|
10409
|
-
await indexSingleFile(filepath, vectorDB, embeddings, { verbose: false, rootDir });
|
|
10410
|
-
const duration = Date.now() - startTime;
|
|
10411
|
-
reindexStateManager.completeReindex(duration);
|
|
10412
|
-
} catch (error) {
|
|
10413
|
-
reindexStateManager.failReindex();
|
|
10414
|
-
log(`Failed to reindex ${filepath}: ${error}`, "warning");
|
|
10415
|
-
}
|
|
10416
|
-
}
|
|
10417
|
-
async function shouldReindexFile(filepath, existingEntry, log) {
|
|
10418
|
-
if (!existingEntry?.contentHash) {
|
|
10419
|
-
return { shouldReindex: true };
|
|
10420
|
-
}
|
|
10421
|
-
const currentHash = await computeContentHash(filepath);
|
|
10422
|
-
if (currentHash && currentHash === existingEntry.contentHash) {
|
|
10423
|
-
log(`\u23ED\uFE0F File mtime changed but content unchanged: ${filepath}`, "debug");
|
|
10424
|
-
try {
|
|
10425
|
-
const fs5 = await import("fs/promises");
|
|
10426
|
-
const stats = await fs5.stat(filepath);
|
|
10417
|
+
const stats = await fs3.stat(filepath);
|
|
10427
10418
|
return { shouldReindex: false, newMtime: stats.mtimeMs };
|
|
10428
10419
|
} catch {
|
|
10429
10420
|
return { shouldReindex: true };
|
|
@@ -10431,31 +10422,22 @@ async function shouldReindexFile(filepath, existingEntry, log) {
|
|
|
10431
10422
|
}
|
|
10432
10423
|
return { shouldReindex: true };
|
|
10433
10424
|
}
|
|
10434
|
-
async function
|
|
10435
|
-
|
|
10436
|
-
|
|
10437
|
-
}
|
|
10438
|
-
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
10439
|
-
const rootDir = getRootDirFromDbPath(vectorDB.dbPath);
|
|
10440
|
-
const manifestData = await manifest.transaction(async (data) => data);
|
|
10441
|
-
if (!manifestData) {
|
|
10442
|
-
return modifiedFiles;
|
|
10443
|
-
}
|
|
10444
|
-
const checkResults = [];
|
|
10445
|
-
for (const filepath of modifiedFiles) {
|
|
10425
|
+
async function checkFilesAgainstManifest(files, rootDir, manifestFiles, log) {
|
|
10426
|
+
const results = [];
|
|
10427
|
+
for (const filepath of files) {
|
|
10446
10428
|
const normalizedPath = normalizeToRelativePath(filepath, rootDir);
|
|
10447
|
-
const existingEntry =
|
|
10429
|
+
const existingEntry = manifestFiles[normalizedPath];
|
|
10448
10430
|
const { shouldReindex, newMtime } = await shouldReindexFile(filepath, existingEntry, log);
|
|
10449
|
-
|
|
10450
|
-
filepath,
|
|
10451
|
-
normalizedPath,
|
|
10452
|
-
shouldReindex,
|
|
10453
|
-
newMtime
|
|
10454
|
-
});
|
|
10431
|
+
results.push({ filepath, normalizedPath, shouldReindex, newMtime });
|
|
10455
10432
|
}
|
|
10433
|
+
return results;
|
|
10434
|
+
}
|
|
10435
|
+
async function updateUnchangedMtimes(manifest, results) {
|
|
10436
|
+
const hasUpdates = results.some((r) => !r.shouldReindex && r.newMtime !== void 0);
|
|
10437
|
+
if (!hasUpdates) return;
|
|
10456
10438
|
await manifest.transaction(async (data) => {
|
|
10457
|
-
for (const result of
|
|
10458
|
-
if (!result.shouldReindex && result.newMtime) {
|
|
10439
|
+
for (const result of results) {
|
|
10440
|
+
if (!result.shouldReindex && result.newMtime !== void 0) {
|
|
10459
10441
|
const entry = data.files[result.normalizedPath];
|
|
10460
10442
|
if (entry) {
|
|
10461
10443
|
entry.lastModified = result.newMtime;
|
|
@@ -10464,15 +10446,23 @@ async function filterModifiedFilesByHash(modifiedFiles, vectorDB, log) {
|
|
|
10464
10446
|
}
|
|
10465
10447
|
return null;
|
|
10466
10448
|
});
|
|
10449
|
+
}
|
|
10450
|
+
async function filterModifiedFilesByHash(modifiedFiles, rootDir, vectorDB, log) {
|
|
10451
|
+
if (modifiedFiles.length === 0) return [];
|
|
10452
|
+
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
10453
|
+
const manifestData = await manifest.load();
|
|
10454
|
+
if (!manifestData) return modifiedFiles;
|
|
10455
|
+
const checkResults = await checkFilesAgainstManifest(modifiedFiles, rootDir, manifestData.files, log);
|
|
10456
|
+
await updateUnchangedMtimes(manifest, checkResults);
|
|
10467
10457
|
return checkResults.filter((r) => r.shouldReindex).map((r) => r.filepath);
|
|
10468
10458
|
}
|
|
10469
|
-
async function prepareFilesForReindexing(event, vectorDB, log) {
|
|
10459
|
+
async function prepareFilesForReindexing(event, rootDir, vectorDB, log) {
|
|
10470
10460
|
const addedFiles = event.added || [];
|
|
10471
10461
|
const modifiedFiles = event.modified || [];
|
|
10472
10462
|
const deletedFiles = event.deleted || [];
|
|
10473
10463
|
let modifiedFilesToReindex = [];
|
|
10474
10464
|
try {
|
|
10475
|
-
modifiedFilesToReindex = await filterModifiedFilesByHash(modifiedFiles, vectorDB, log);
|
|
10465
|
+
modifiedFilesToReindex = await filterModifiedFilesByHash(modifiedFiles, rootDir, vectorDB, log);
|
|
10476
10466
|
} catch (error) {
|
|
10477
10467
|
log(`Hash-based filtering failed, will reindex all modified files: ${error}`, "warning");
|
|
10478
10468
|
modifiedFilesToReindex = modifiedFiles;
|
|
@@ -10480,23 +10470,19 @@ async function prepareFilesForReindexing(event, vectorDB, log) {
|
|
|
10480
10470
|
const filesToIndex = [...addedFiles, ...modifiedFilesToReindex];
|
|
10481
10471
|
return { filesToIndex, deletedFiles };
|
|
10482
10472
|
}
|
|
10483
|
-
async function executeReindexOperations(filesToIndex, deletedFiles, vectorDB, embeddings, log) {
|
|
10473
|
+
async function executeReindexOperations(filesToIndex, deletedFiles, rootDir, vectorDB, embeddings, log) {
|
|
10484
10474
|
const operations = [];
|
|
10485
10475
|
if (filesToIndex.length > 0) {
|
|
10486
10476
|
log(`\u{1F4C1} ${filesToIndex.length} file(s) changed, reindexing...`);
|
|
10487
|
-
operations.push(indexMultipleFiles(filesToIndex, vectorDB, embeddings, { verbose: false }));
|
|
10477
|
+
operations.push(indexMultipleFiles(filesToIndex, vectorDB, embeddings, { verbose: false, rootDir }));
|
|
10488
10478
|
}
|
|
10489
10479
|
if (deletedFiles.length > 0) {
|
|
10490
|
-
operations.push(
|
|
10491
|
-
Promise.all(
|
|
10492
|
-
deletedFiles.map((deleted) => handleFileDeletion(deleted, vectorDB, log))
|
|
10493
|
-
)
|
|
10494
|
-
);
|
|
10480
|
+
operations.push(handleBatchDeletions(deletedFiles, vectorDB, log));
|
|
10495
10481
|
}
|
|
10496
10482
|
await Promise.all(operations);
|
|
10497
10483
|
}
|
|
10498
|
-
async function handleBatchEvent(event, vectorDB, embeddings,
|
|
10499
|
-
const { filesToIndex, deletedFiles } = await prepareFilesForReindexing(event, vectorDB, log);
|
|
10484
|
+
async function handleBatchEvent(event, rootDir, vectorDB, embeddings, log, reindexStateManager) {
|
|
10485
|
+
const { filesToIndex, deletedFiles } = await prepareFilesForReindexing(event, rootDir, vectorDB, log);
|
|
10500
10486
|
const allFiles = [...filesToIndex, ...deletedFiles];
|
|
10501
10487
|
if (allFiles.length === 0) {
|
|
10502
10488
|
return;
|
|
@@ -10504,7 +10490,7 @@ async function handleBatchEvent(event, vectorDB, embeddings, _verbose, log, rein
|
|
|
10504
10490
|
const startTime = Date.now();
|
|
10505
10491
|
reindexStateManager.startReindex(allFiles);
|
|
10506
10492
|
try {
|
|
10507
|
-
await executeReindexOperations(filesToIndex, deletedFiles, vectorDB, embeddings, log);
|
|
10493
|
+
await executeReindexOperations(filesToIndex, deletedFiles, rootDir, vectorDB, embeddings, log);
|
|
10508
10494
|
const duration = Date.now() - startTime;
|
|
10509
10495
|
reindexStateManager.completeReindex(duration);
|
|
10510
10496
|
log(`\u2713 Processed ${filesToIndex.length} file(s) + ${deletedFiles.length} deletion(s) in ${duration}ms`);
|
|
@@ -10525,19 +10511,316 @@ async function handleUnlinkEvent(filepath, vectorDB, log, reindexStateManager) {
|
|
|
10525
10511
|
log(`Failed to process deletion for ${filepath}: ${error}`, "warning");
|
|
10526
10512
|
}
|
|
10527
10513
|
}
|
|
10528
|
-
function
|
|
10514
|
+
function isFileIgnored(filepath, rootDir, isIgnored) {
|
|
10515
|
+
return isIgnored(normalizeToRelativePath(filepath, rootDir));
|
|
10516
|
+
}
|
|
10517
|
+
function filterFileChangeEvent(event, ignoreFilter, rootDir) {
|
|
10518
|
+
return {
|
|
10519
|
+
...event,
|
|
10520
|
+
added: (event.added || []).filter((f) => !isFileIgnored(f, rootDir, ignoreFilter)),
|
|
10521
|
+
modified: (event.modified || []).filter((f) => !isFileIgnored(f, rootDir, ignoreFilter)),
|
|
10522
|
+
deleted: event.deleted || []
|
|
10523
|
+
};
|
|
10524
|
+
}
|
|
10525
|
+
function isGitignoreFile(filepath) {
|
|
10526
|
+
const name = filepath.split(/[/\\]/).pop() ?? filepath;
|
|
10527
|
+
return name === ".gitignore";
|
|
10528
|
+
}
|
|
10529
|
+
function hasGitignoreChange(event) {
|
|
10530
|
+
if (event.type === "batch") {
|
|
10531
|
+
const allFiles = [...event.added || [], ...event.modified || [], ...event.deleted || []];
|
|
10532
|
+
return allFiles.some(isGitignoreFile);
|
|
10533
|
+
}
|
|
10534
|
+
return event.filepath ? isGitignoreFile(event.filepath) : false;
|
|
10535
|
+
}
|
|
10536
|
+
function createFileChangeHandler(rootDir, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
|
|
10537
|
+
let ignoreFilter = null;
|
|
10529
10538
|
return async (event) => {
|
|
10539
|
+
if (hasGitignoreChange(event)) {
|
|
10540
|
+
ignoreFilter = null;
|
|
10541
|
+
}
|
|
10542
|
+
if (!ignoreFilter) {
|
|
10543
|
+
ignoreFilter = await createGitignoreFilter(rootDir);
|
|
10544
|
+
}
|
|
10530
10545
|
const { type } = event;
|
|
10531
10546
|
if (type === "batch") {
|
|
10532
|
-
|
|
10547
|
+
const filtered = filterFileChangeEvent(event, ignoreFilter, rootDir);
|
|
10548
|
+
const totalToProcess = filtered.added.length + filtered.modified.length + filtered.deleted.length;
|
|
10549
|
+
if (totalToProcess === 0) return;
|
|
10550
|
+
await checkAndReconnect();
|
|
10551
|
+
await handleBatchEvent(filtered, rootDir, vectorDB, embeddings, log, reindexStateManager);
|
|
10533
10552
|
} else if (type === "unlink") {
|
|
10553
|
+
await checkAndReconnect();
|
|
10534
10554
|
await handleUnlinkEvent(event.filepath, vectorDB, log, reindexStateManager);
|
|
10535
10555
|
} else {
|
|
10536
|
-
|
|
10556
|
+
if (isFileIgnored(event.filepath, rootDir, ignoreFilter)) return;
|
|
10557
|
+
await checkAndReconnect();
|
|
10558
|
+
await handleSingleFileChange(event.filepath, type, rootDir, vectorDB, embeddings, log, reindexStateManager);
|
|
10537
10559
|
}
|
|
10538
10560
|
};
|
|
10539
10561
|
}
|
|
10540
|
-
|
|
10562
|
+
|
|
10563
|
+
// src/mcp/git-detection.ts
|
|
10564
|
+
async function handleGitStartup(rootDir, gitTracker, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
|
|
10565
|
+
log("Checking for git changes...");
|
|
10566
|
+
const changedFiles = await gitTracker.initialize();
|
|
10567
|
+
if (changedFiles && changedFiles.length > 0) {
|
|
10568
|
+
const isIgnored = await createGitignoreFilter2(rootDir);
|
|
10569
|
+
const filteredFiles = await filterGitChangedFiles(changedFiles, rootDir, isIgnored);
|
|
10570
|
+
if (filteredFiles.length === 0) {
|
|
10571
|
+
log("\u2713 Index is up to date with git state");
|
|
10572
|
+
return;
|
|
10573
|
+
}
|
|
10574
|
+
const startTime = Date.now();
|
|
10575
|
+
reindexStateManager.startReindex(filteredFiles);
|
|
10576
|
+
log(`\u{1F33F} Git changes detected: ${filteredFiles.length} files changed`);
|
|
10577
|
+
try {
|
|
10578
|
+
await checkAndReconnect();
|
|
10579
|
+
const count = await indexMultipleFiles2(filteredFiles, vectorDB, embeddings, { verbose: false });
|
|
10580
|
+
const duration = Date.now() - startTime;
|
|
10581
|
+
reindexStateManager.completeReindex(duration);
|
|
10582
|
+
log(`\u2713 Reindexed ${count} files in ${duration}ms`);
|
|
10583
|
+
} catch (error) {
|
|
10584
|
+
reindexStateManager.failReindex();
|
|
10585
|
+
throw error;
|
|
10586
|
+
}
|
|
10587
|
+
} else {
|
|
10588
|
+
log("\u2713 Index is up to date with git state");
|
|
10589
|
+
}
|
|
10590
|
+
}
|
|
10591
|
+
function createGitPollInterval(rootDir, gitTracker, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
|
|
10592
|
+
let isIgnored = null;
|
|
10593
|
+
let pollInProgress = false;
|
|
10594
|
+
return setInterval(async () => {
|
|
10595
|
+
if (pollInProgress) return;
|
|
10596
|
+
pollInProgress = true;
|
|
10597
|
+
try {
|
|
10598
|
+
const changedFiles = await gitTracker.detectChanges();
|
|
10599
|
+
if (changedFiles && changedFiles.length > 0) {
|
|
10600
|
+
const currentState = reindexStateManager.getState();
|
|
10601
|
+
if (currentState.inProgress) {
|
|
10602
|
+
log(
|
|
10603
|
+
`Background reindex already in progress (${currentState.pendingFiles.length} files pending), skipping git poll cycle`,
|
|
10604
|
+
"debug"
|
|
10605
|
+
);
|
|
10606
|
+
return;
|
|
10607
|
+
}
|
|
10608
|
+
if (changedFiles.some(isGitignoreFile)) {
|
|
10609
|
+
isIgnored = null;
|
|
10610
|
+
}
|
|
10611
|
+
if (!isIgnored) {
|
|
10612
|
+
isIgnored = await createGitignoreFilter2(rootDir);
|
|
10613
|
+
}
|
|
10614
|
+
const filteredFiles = await filterGitChangedFiles(changedFiles, rootDir, isIgnored);
|
|
10615
|
+
if (filteredFiles.length === 0) return;
|
|
10616
|
+
const startTime = Date.now();
|
|
10617
|
+
reindexStateManager.startReindex(filteredFiles);
|
|
10618
|
+
log(`\u{1F33F} Git change detected: ${filteredFiles.length} files changed`);
|
|
10619
|
+
try {
|
|
10620
|
+
await checkAndReconnect();
|
|
10621
|
+
const count = await indexMultipleFiles2(filteredFiles, vectorDB, embeddings, { verbose: false });
|
|
10622
|
+
const duration = Date.now() - startTime;
|
|
10623
|
+
reindexStateManager.completeReindex(duration);
|
|
10624
|
+
log(`\u2713 Background reindex complete: ${count} files in ${duration}ms`);
|
|
10625
|
+
} catch (error) {
|
|
10626
|
+
reindexStateManager.failReindex();
|
|
10627
|
+
log(`Git background reindex failed: ${error}`, "warning");
|
|
10628
|
+
}
|
|
10629
|
+
}
|
|
10630
|
+
} catch (error) {
|
|
10631
|
+
log(`Git detection check failed: ${error}`, "warning");
|
|
10632
|
+
} finally {
|
|
10633
|
+
pollInProgress = false;
|
|
10634
|
+
}
|
|
10635
|
+
}, DEFAULT_GIT_POLL_INTERVAL_MS2);
|
|
10636
|
+
}
|
|
10637
|
+
function shouldSkipGitReindex(gitReindexInProgress, lastGitReindexTime, cooldownMs, reindexStateManager, log) {
|
|
10638
|
+
const { inProgress: globalInProgress } = reindexStateManager.getState();
|
|
10639
|
+
if (gitReindexInProgress || globalInProgress) {
|
|
10640
|
+
log("Git reindex already in progress, skipping", "debug");
|
|
10641
|
+
return true;
|
|
10642
|
+
}
|
|
10643
|
+
const timeSinceLastReindex = Date.now() - lastGitReindexTime;
|
|
10644
|
+
if (timeSinceLastReindex < cooldownMs) {
|
|
10645
|
+
log(`Git change ignored (cooldown: ${cooldownMs - timeSinceLastReindex}ms remaining)`, "debug");
|
|
10646
|
+
return true;
|
|
10647
|
+
}
|
|
10648
|
+
return false;
|
|
10649
|
+
}
|
|
10650
|
+
async function detectAndFilterGitChanges(gitTracker, rootDir, getIgnoreFilter, setIgnoreFilter, log) {
|
|
10651
|
+
log("\u{1F33F} Git change detected (event-driven)");
|
|
10652
|
+
const changedFiles = await gitTracker.detectChanges();
|
|
10653
|
+
if (!changedFiles || changedFiles.length === 0) return null;
|
|
10654
|
+
if (changedFiles.some(isGitignoreFile)) {
|
|
10655
|
+
setIgnoreFilter(null);
|
|
10656
|
+
}
|
|
10657
|
+
let filter = getIgnoreFilter();
|
|
10658
|
+
if (!filter) {
|
|
10659
|
+
filter = await createGitignoreFilter2(rootDir);
|
|
10660
|
+
setIgnoreFilter(filter);
|
|
10661
|
+
}
|
|
10662
|
+
const filteredFiles = await filterGitChangedFiles(changedFiles, rootDir, filter);
|
|
10663
|
+
return filteredFiles.length > 0 ? filteredFiles : null;
|
|
10664
|
+
}
|
|
10665
|
+
async function executeGitReindex(filteredFiles, vectorDB, embeddings, reindexStateManager, checkAndReconnect, log) {
|
|
10666
|
+
const startTime = Date.now();
|
|
10667
|
+
reindexStateManager.startReindex(filteredFiles);
|
|
10668
|
+
log(`Reindexing ${filteredFiles.length} files from git change`);
|
|
10669
|
+
try {
|
|
10670
|
+
await checkAndReconnect();
|
|
10671
|
+
const count = await indexMultipleFiles2(filteredFiles, vectorDB, embeddings, { verbose: false });
|
|
10672
|
+
const duration = Date.now() - startTime;
|
|
10673
|
+
reindexStateManager.completeReindex(duration);
|
|
10674
|
+
log(`\u2713 Reindexed ${count} files in ${duration}ms`);
|
|
10675
|
+
} catch (error) {
|
|
10676
|
+
reindexStateManager.failReindex();
|
|
10677
|
+
log(`Git reindex failed: ${error}`, "warning");
|
|
10678
|
+
throw error;
|
|
10679
|
+
}
|
|
10680
|
+
}
|
|
10681
|
+
function createGitChangeHandler(rootDir, gitTracker, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
|
|
10682
|
+
let isIgnored = null;
|
|
10683
|
+
let gitReindexInProgress = false;
|
|
10684
|
+
let lastGitReindexTime = 0;
|
|
10685
|
+
const GIT_REINDEX_COOLDOWN_MS = 5e3;
|
|
10686
|
+
return async () => {
|
|
10687
|
+
if (shouldSkipGitReindex(gitReindexInProgress, lastGitReindexTime, GIT_REINDEX_COOLDOWN_MS, reindexStateManager, log)) {
|
|
10688
|
+
return;
|
|
10689
|
+
}
|
|
10690
|
+
gitReindexInProgress = true;
|
|
10691
|
+
try {
|
|
10692
|
+
const filteredFiles = await detectAndFilterGitChanges(
|
|
10693
|
+
gitTracker,
|
|
10694
|
+
rootDir,
|
|
10695
|
+
() => isIgnored,
|
|
10696
|
+
(f) => {
|
|
10697
|
+
isIgnored = f;
|
|
10698
|
+
},
|
|
10699
|
+
log
|
|
10700
|
+
);
|
|
10701
|
+
if (!filteredFiles) return;
|
|
10702
|
+
await executeGitReindex(filteredFiles, vectorDB, embeddings, reindexStateManager, checkAndReconnect, log);
|
|
10703
|
+
lastGitReindexTime = Date.now();
|
|
10704
|
+
} catch (error) {
|
|
10705
|
+
log(`Git change handler failed: ${error}`, "warning");
|
|
10706
|
+
} finally {
|
|
10707
|
+
gitReindexInProgress = false;
|
|
10708
|
+
}
|
|
10709
|
+
};
|
|
10710
|
+
}
|
|
10711
|
+
async function setupGitDetection(rootDir, vectorDB, embeddings, log, reindexStateManager, fileWatcher, checkAndReconnect) {
|
|
10712
|
+
const gitAvailable = await isGitAvailable();
|
|
10713
|
+
const isRepo = await isGitRepo2(rootDir);
|
|
10714
|
+
if (!gitAvailable) {
|
|
10715
|
+
log("Git not available - git detection disabled");
|
|
10716
|
+
return { gitTracker: null, gitPollInterval: null };
|
|
10717
|
+
}
|
|
10718
|
+
if (!isRepo) {
|
|
10719
|
+
log("Not a git repository - git detection disabled");
|
|
10720
|
+
return { gitTracker: null, gitPollInterval: null };
|
|
10721
|
+
}
|
|
10722
|
+
log("\u2713 Detected git repository");
|
|
10723
|
+
const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);
|
|
10724
|
+
try {
|
|
10725
|
+
await handleGitStartup(rootDir, gitTracker, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect);
|
|
10726
|
+
} catch (error) {
|
|
10727
|
+
log(`Failed to check git state on startup: ${error}`, "warning");
|
|
10728
|
+
}
|
|
10729
|
+
if (fileWatcher) {
|
|
10730
|
+
const gitChangeHandler = createGitChangeHandler(
|
|
10731
|
+
rootDir,
|
|
10732
|
+
gitTracker,
|
|
10733
|
+
vectorDB,
|
|
10734
|
+
embeddings,
|
|
10735
|
+
log,
|
|
10736
|
+
reindexStateManager,
|
|
10737
|
+
checkAndReconnect
|
|
10738
|
+
);
|
|
10739
|
+
fileWatcher.watchGit(gitChangeHandler);
|
|
10740
|
+
log("\u2713 Git detection enabled (event-driven via file watcher)");
|
|
10741
|
+
return { gitTracker, gitPollInterval: null };
|
|
10742
|
+
}
|
|
10743
|
+
const pollIntervalSeconds = DEFAULT_GIT_POLL_INTERVAL_MS2 / 1e3;
|
|
10744
|
+
log(`\u2713 Git detection enabled (polling fallback every ${pollIntervalSeconds}s)`);
|
|
10745
|
+
const gitPollInterval = createGitPollInterval(rootDir, gitTracker, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect);
|
|
10746
|
+
return { gitTracker, gitPollInterval };
|
|
10747
|
+
}
|
|
10748
|
+
// Decides which git-reported paths should flow into reindexing.
// A path is kept when it is not ignored, or when it is ignored but has been
// deleted from disk — deleted paths must still pass through so their stale
// chunks can be purged from the index.
async function filterGitChangedFiles(changedFiles, rootDir, ignoreFilter) {
  // True when the path no longer exists (ENOENT). Any other fs error
  // (e.g. permission denied) counts as "still present".
  const isDeleted = async (candidate) => {
    try {
      await fs4.access(candidate);
      return false;
    } catch (error) {
      return error.code === "ENOENT";
    }
  };
  const kept = [];
  for (const filepath of changedFiles) {
    // Short-circuit: the on-disk check only runs for ignored paths,
    // matching the original's control flow.
    if (!isFileIgnored(filepath, rootDir, ignoreFilter) || await isDeleted(filepath)) {
      kept.push(filepath);
    }
  }
  return kept;
}
|
|
10765
|
+
|
|
10766
|
+
// src/mcp/cleanup.ts
|
|
10767
|
+
// Builds the async shutdown handler installed on SIGINT/SIGTERM.
// Teardown is best-effort: every step runs even if an earlier one fails,
// and the process always exits with code 0.
function setupCleanupHandlers(server, versionCheckInterval, gitPollInterval, fileWatcher, log) {
  return async () => {
    try {
      log("Shutting down MCP server...");
      // Clear timers first: clearInterval is synchronous and cannot throw,
      // so a failing server.close() can no longer leave them running.
      clearInterval(versionCheckInterval);
      if (gitPollInterval) clearInterval(gitPollInterval);
      try {
        await server.close();
      } catch (error) {
        // Previously a rejection here was swallowed silently and skipped
        // the interval cleanup and watcher stop below.
        log(`Error closing server: ${error}`, "warning");
      }
      if (fileWatcher) {
        try {
          await fileWatcher.stop();
        } catch (error) {
          log(`Error stopping file watcher: ${error}`, "warning");
        }
      }
    } finally {
      process.exit(0);
    }
  };
}
|
|
10780
|
+
|
|
10781
|
+
// src/mcp/server.ts
|
|
10782
|
+
// src/mcp/server.ts module bootstrap: locate this package's own package.json
// via CommonJS require so the server can read its metadata (e.g. version).
var __filename2 = fileURLToPath2(import.meta.url);
var __dirname2 = dirname2(__filename2);
var require3 = createRequire2(import.meta.url);
var packageJson2;
try {
  // First try one directory up — presumably the published dist/ layout.
  packageJson2 = require3(join2(__dirname2, "../package.json"));
} catch {
  // Fall back two directories up — presumably a dev/unbundled layout.
  // TODO confirm both paths against the actual build output.
  packageJson2 = require3(join2(__dirname2, "../../package.json"));
}
|
|
10791
|
+
// Spins up the worker-based embeddings and the project vector database,
// validating the DB factory's result before touching it.
// Returns { embeddings, vectorDB } once both are fully initialized.
async function initializeDatabase(rootDir, log) {
  const embeddings = new WorkerEmbeddings();
  log("Creating vector database...");
  const vectorDB = await createVectorDB(rootDir);
  // Guard against a bad factory result with a descriptive error.
  if (!vectorDB) {
    throw new Error("createVectorDB returned undefined or null");
  }
  const hasInitialize = typeof vectorDB.initialize === "function";
  if (!hasInitialize) {
    const ctorName = vectorDB.constructor?.name || "unknown";
    throw new Error(`Invalid vectorDB instance: ${ctorName}. Expected VectorDBInterface but got: ${JSON.stringify(Object.keys(vectorDB))}`);
  }
  log("Loading embedding model...");
  await embeddings.initialize();
  log("Loading vector database...");
  await vectorDB.initialize();
  log("Embeddings and vector DB ready");
  return { embeddings, vectorDB };
}
|
|
10808
|
+
// Runs a one-time full index when the vector DB contains no data yet.
// Indexing failures are logged as warnings, never thrown — the server
// stays usable and the user can run `lien index` manually.
async function handleAutoIndexing(vectorDB, rootDir, log) {
  const hasIndex = await vectorDB.hasData();
  if (hasIndex) return;
  log("\u{1F4E6} No index found - running initial indexing...");
  log("\u23F1\uFE0F This may take 5-20 minutes depending on project size");
  try {
    // Lazy import keeps server startup cheap when an index already exists.
    const { indexCodebase: indexCodebase2 } = await import("@liendev/core");
    await indexCodebase2({ rootDir, verbose: true });
    log("\u2705 Initial indexing complete!");
  } catch (error) {
    log(`\u26A0\uFE0F Initial indexing failed: ${error}`, "warning");
    log("You can manually run: lien index", "warning");
  }
}
|
|
10823
|
+
async function setupFileWatching(watch, rootDir, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect) {
|
|
10541
10824
|
const fileWatchingEnabled = watch !== void 0 ? watch : true;
|
|
10542
10825
|
if (!fileWatchingEnabled) {
|
|
10543
10826
|
return null;
|
|
@@ -10545,7 +10828,7 @@ async function setupFileWatching(watch, rootDir, vectorDB, embeddings, verbose,
|
|
|
10545
10828
|
log("\u{1F440} Starting file watcher...");
|
|
10546
10829
|
const fileWatcher = new FileWatcher(rootDir);
|
|
10547
10830
|
try {
|
|
10548
|
-
const handler = createFileChangeHandler(vectorDB, embeddings,
|
|
10831
|
+
const handler = createFileChangeHandler(rootDir, vectorDB, embeddings, log, reindexStateManager, checkAndReconnect);
|
|
10549
10832
|
await fileWatcher.start(handler);
|
|
10550
10833
|
log(`\u2713 File watching enabled (watching ${fileWatcher.getWatchedFiles().length} files)`);
|
|
10551
10834
|
return fileWatcher;
|
|
@@ -10556,23 +10839,11 @@ async function setupFileWatching(watch, rootDir, vectorDB, embeddings, verbose,
|
|
|
10556
10839
|
}
|
|
10557
10840
|
// Creates the stdio transport for the MCP server.
// Transport errors are logged as warnings rather than thrown.
function setupTransport(log) {
  const transport = new StdioServerTransport();
  transport.onerror = (err) => log(`Transport error: ${err}`, "warning");
  return transport;
}
|
|
10567
|
-
function setupCleanupHandlers(versionCheckInterval, gitPollInterval, fileWatcher, log) {
|
|
10568
|
-
return async () => {
|
|
10569
|
-
log("Shutting down MCP server...");
|
|
10570
|
-
clearInterval(versionCheckInterval);
|
|
10571
|
-
if (gitPollInterval) clearInterval(gitPollInterval);
|
|
10572
|
-
if (fileWatcher) await fileWatcher.stop();
|
|
10573
|
-
process.exit(0);
|
|
10574
|
-
};
|
|
10575
|
-
}
|
|
10576
10847
|
function setupVersionChecking(vectorDB, log, reindexStateManager) {
|
|
10577
10848
|
const checkAndReconnect = async () => {
|
|
10578
10849
|
try {
|
|
@@ -10623,9 +10894,6 @@ function createMCPLog(server, verbose) {
|
|
|
10623
10894
|
async function initializeComponents(rootDir, earlyLog) {
|
|
10624
10895
|
try {
|
|
10625
10896
|
const result = await initializeDatabase(rootDir, earlyLog);
|
|
10626
|
-
if (!result.vectorDB || typeof result.vectorDB.initialize !== "function") {
|
|
10627
|
-
throw new Error(`Invalid vectorDB instance: ${result.vectorDB?.constructor?.name || "undefined"}. Missing initialize method.`);
|
|
10628
|
-
}
|
|
10629
10897
|
return result;
|
|
10630
10898
|
} catch (error) {
|
|
10631
10899
|
console.error(`Failed to initialize: ${error}`);
|
|
@@ -10643,13 +10911,13 @@ function createMCPServer() {
|
|
|
10643
10911
|
);
|
|
10644
10912
|
}
|
|
10645
10913
|
async function setupAndConnectServer(server, toolContext, log, versionCheckInterval, reindexStateManager, options) {
|
|
10646
|
-
const { rootDir,
|
|
10914
|
+
const { rootDir, watch } = options;
|
|
10647
10915
|
const { vectorDB, embeddings } = toolContext;
|
|
10648
10916
|
registerMCPHandlers(server, toolContext, log);
|
|
10649
10917
|
await handleAutoIndexing(vectorDB, rootDir, log);
|
|
10650
|
-
const fileWatcher = await setupFileWatching(watch, rootDir, vectorDB, embeddings,
|
|
10651
|
-
const { gitPollInterval } = await setupGitDetection(rootDir, vectorDB, embeddings,
|
|
10652
|
-
const cleanup = setupCleanupHandlers(versionCheckInterval, gitPollInterval, fileWatcher, log);
|
|
10918
|
+
const fileWatcher = await setupFileWatching(watch, rootDir, vectorDB, embeddings, log, reindexStateManager, toolContext.checkAndReconnect);
|
|
10919
|
+
const { gitPollInterval } = await setupGitDetection(rootDir, vectorDB, embeddings, log, reindexStateManager, fileWatcher, toolContext.checkAndReconnect);
|
|
10920
|
+
const cleanup = setupCleanupHandlers(server, versionCheckInterval, gitPollInterval, fileWatcher, log);
|
|
10653
10921
|
process.on("SIGINT", cleanup);
|
|
10654
10922
|
process.on("SIGTERM", cleanup);
|
|
10655
10923
|
const transport = setupTransport(log);
|
|
@@ -10682,7 +10950,7 @@ async function startMCPServer(options) {
|
|
|
10682
10950
|
getIndexMetadata,
|
|
10683
10951
|
getReindexState: () => reindexStateManager.getState()
|
|
10684
10952
|
};
|
|
10685
|
-
await setupAndConnectServer(server, toolContext, log, versionCheckInterval, reindexStateManager, { rootDir,
|
|
10953
|
+
await setupAndConnectServer(server, toolContext, log, versionCheckInterval, reindexStateManager, { rootDir, watch });
|
|
10686
10954
|
}
|
|
10687
10955
|
|
|
10688
10956
|
// src/cli/serve.ts
|
|
@@ -10691,7 +10959,7 @@ async function serveCommand(options) {
|
|
|
10691
10959
|
try {
|
|
10692
10960
|
if (options.root) {
|
|
10693
10961
|
try {
|
|
10694
|
-
const stats = await
|
|
10962
|
+
const stats = await fs5.stat(rootDir);
|
|
10695
10963
|
if (!stats.isDirectory()) {
|
|
10696
10964
|
console.error(chalk5.red(`Error: --root path is not a directory: ${rootDir}`));
|
|
10697
10965
|
process.exit(1);
|
|
@@ -10732,7 +11000,7 @@ async function serveCommand(options) {
|
|
|
10732
11000
|
|
|
10733
11001
|
// src/cli/complexity.ts
|
|
10734
11002
|
import chalk6 from "chalk";
|
|
10735
|
-
import
|
|
11003
|
+
import fs6 from "fs";
|
|
10736
11004
|
import path5 from "path";
|
|
10737
11005
|
import { VectorDB } from "@liendev/core";
|
|
10738
11006
|
import { ComplexityAnalyzer as ComplexityAnalyzer2 } from "@liendev/core";
|
|
@@ -10755,7 +11023,7 @@ function validateFilesExist(files, rootDir) {
|
|
|
10755
11023
|
if (!files || files.length === 0) return;
|
|
10756
11024
|
const missingFiles = files.filter((file) => {
|
|
10757
11025
|
const fullPath = path5.isAbsolute(file) ? file : path5.join(rootDir, file);
|
|
10758
|
-
return !
|
|
11026
|
+
return !fs6.existsSync(fullPath);
|
|
10759
11027
|
});
|
|
10760
11028
|
if (missingFiles.length > 0) {
|
|
10761
11029
|
console.error(chalk6.red(`Error: File${missingFiles.length > 1 ? "s" : ""} not found:`));
|
|
@@ -10798,6 +11066,100 @@ async function complexityCommand(options) {
|
|
|
10798
11066
|
}
|
|
10799
11067
|
}
|
|
10800
11068
|
|
|
11069
|
+
// src/cli/config.ts
|
|
11070
|
+
import chalk7 from "chalk";
|
|
11071
|
+
import path6 from "path";
|
|
11072
|
+
import os2 from "os";
|
|
11073
|
+
import {
|
|
11074
|
+
loadGlobalConfig,
|
|
11075
|
+
mergeGlobalConfig
|
|
11076
|
+
} from "@liendev/core";
|
|
11077
|
+
// Absolute path of the user-global config file (~/.lien/config.json).
var CONFIG_PATH = path6.join(os2.homedir(), ".lien", "config.json");
// Whitelist of keys accepted by `lien config set/get`.
// An empty `values` array means the value is free-form (no enum validation).
var ALLOWED_KEYS = {
  "backend": {
    values: ["lancedb", "qdrant"],
    description: "Vector database backend"
  },
  "qdrant.url": {
    values: [],
    description: "Qdrant server URL"
  },
  "qdrant.apiKey": {
    values: [],
    description: "Qdrant API key"
  }
};
|
|
11092
|
+
// Resolves a dotted key (e.g. "qdrant.url") against a nested config object.
// Returns the value coerced to a string, or undefined when any path segment
// is missing or a non-object is reached mid-path.
function getConfigValue(config, key) {
  const resolved = key.split(".").reduce(
    (node, segment) => node != null && typeof node === "object" ? node[segment] : void 0,
    config
  );
  return resolved == null ? void 0 : String(resolved);
}
|
|
11101
|
+
// Maps a flat CLI key to the nested partial-config shape consumed by
// mergeGlobalConfig. Unknown keys yield an empty object (callers are
// expected to validate keys against ALLOWED_KEYS beforehand).
function buildPartialConfig(key, value) {
  if (key === "backend") {
    return { backend: value };
  }
  if (key === "qdrant.url") {
    return { qdrant: { url: value } };
  }
  if (key === "qdrant.apiKey") {
    // NOTE(review): url is set to "" alongside the apiKey — presumably to
    // satisfy the config shape; verify mergeGlobalConfig does not clobber
    // an already-set qdrant.url with this empty string.
    return { qdrant: { url: "", apiKey: value } };
  }
  return {};
}
|
|
11113
|
+
// Implements `lien config set <key> <value>`: validates the key against the
// whitelist, enforces enum values where declared, requires qdrant.url before
// qdrant.apiKey, then merges the change into the global config file.
// Exits the process with code 1 on any validation failure.
async function configSetCommand(key, value) {
  // Print an error (plus an optional dimmed hint line) and terminate.
  const fail = (message, hintLabel, hint) => {
    console.error(chalk7.red(message));
    if (hintLabel) console.log(chalk7.dim(hintLabel), hint);
    process.exit(1);
  };
  const spec = ALLOWED_KEYS[key];
  if (!spec) {
    fail(`Unknown config key: "${key}"`, "Valid keys:", Object.keys(ALLOWED_KEYS).join(", "));
  }
  const restricted = spec.values.length > 0;
  if (restricted && !spec.values.includes(value)) {
    fail(`Invalid value "${value}" for ${key}`, "Valid values:", spec.values.join(", "));
  }
  // An API key is useless without a server URL, so require the URL first.
  if (key === "qdrant.apiKey") {
    const existing = await loadGlobalConfig();
    if (!existing.qdrant?.url) {
      fail("Set qdrant.url first before setting qdrant.apiKey");
    }
  }
  await mergeGlobalConfig(buildPartialConfig(key, value));
  console.log(chalk7.green(`Set ${key} = ${value}`));
  console.log(chalk7.dim(`Config: ${CONFIG_PATH}`));
}
|
|
11137
|
+
// Implements `lien config get <key>`: prints the current value, or a dimmed
// "(not set)" placeholder when absent. Exits with code 1 for keys outside
// the ALLOWED_KEYS whitelist.
async function configGetCommand(key) {
  if (!ALLOWED_KEYS[key]) {
    console.error(chalk7.red(`Unknown config key: "${key}"`));
    console.log(chalk7.dim("Valid keys:"), Object.keys(ALLOWED_KEYS).join(", "));
    process.exit(1);
  }
  const value = getConfigValue(await loadGlobalConfig(), key);
  console.log(value === void 0 ? chalk7.dim(`${key}: (not set)`) : `${key}: ${value}`);
}
|
|
11151
|
+
// Implements `lien config list`: prints a header, the config file location,
// then one line per whitelisted key with its current value (or a dimmed
// placeholder) and a short description.
async function configListCommand() {
  const config = await loadGlobalConfig();
  console.log(chalk7.bold("Global Configuration"));
  console.log(chalk7.dim(`File: ${CONFIG_PATH}\n`));
  Object.entries(ALLOWED_KEYS).forEach(([key, meta]) => {
    const display = getConfigValue(config, key) ?? chalk7.dim("(not set)");
    const note = chalk7.dim(`\u2014 ${meta.description}`);
    console.log(`  ${chalk7.cyan(key)}: ${display} ${note}`);
  });
}
|
|
11162
|
+
|
|
10801
11163
|
// src/cli/index.ts
|
|
10802
11164
|
var __filename3 = fileURLToPath3(import.meta.url);
|
|
10803
11165
|
var __dirname3 = dirname3(__filename3);
|
|
@@ -10815,6 +11177,10 @@ program.command("index").description("Index the codebase for semantic search").o
|
|
|
10815
11177
|
program.command("serve").description("Start the MCP server for Cursor integration").option("-p, --port <port>", "Port number (for future use)", "7133").option("--no-watch", "Disable file watching for this session").option("-w, --watch", "[DEPRECATED] File watching is now enabled by default").option("-r, --root <path>", "Root directory to serve (defaults to current directory)").action(serveCommand);
|
|
10816
11178
|
program.command("status").description("Show indexing status and statistics").action(statusCommand);
|
|
10817
11179
|
program.command("complexity").description("Analyze code complexity").option("--files <paths...>", "Specific files to analyze").option("--format <type>", "Output format: text, json, sarif", "text").option("--threshold <n>", "Override both complexity thresholds (cyclomatic & cognitive)").option("--cyclomatic-threshold <n>", "Override cyclomatic complexity threshold only").option("--cognitive-threshold <n>", "Override cognitive complexity threshold only").option("--fail-on <severity>", "Exit 1 if violations: error, warning").action(complexityCommand);
|
|
11180
|
+
// Register the `lien config` command group (backed by ~/.lien/config.json)
// with its set/get/list subcommands.
var configCmd = program.command("config").description("Manage global configuration (~/.lien/config.json)");
configCmd.command("set <key> <value>").description("Set a global config value").action(configSetCommand);
configCmd.command("get <key>").description("Get a config value").action(configGetCommand);
configCmd.command("list").description("Show all current config").action(configListCommand);
|
|
10818
11184
|
|
|
10819
11185
|
// src/index.ts
|
|
10820
11186
|
program.parse();
|