@liendev/lien 0.26.0 → 0.28.0
This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- package/dist/index.js +961 -140
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -3708,8 +3708,7 @@ import {
   DEFAULT_EMBEDDING_BATCH_SIZE,
   DEFAULT_CHUNK_SIZE,
   DEFAULT_CHUNK_OVERLAP,
-  DEFAULT_GIT_POLL_INTERVAL_MS,
-  DEFAULT_DEBOUNCE_MS
+  DEFAULT_GIT_POLL_INTERVAL_MS
 } from "@liendev/core";
 async function statusCommand() {
   const rootDir = process.cwd();
@@ -3766,7 +3765,7 @@ async function statusCommand() {
     console.log(chalk3.dim("Git detection:"), chalk3.yellow("Not a git repo"));
   }
   console.log(chalk3.dim("File watching:"), chalk3.green("\u2713 Enabled (default)"));
-  console.log(chalk3.dim("
+  console.log(chalk3.dim(" Batch window:"), "500ms (collects rapid changes, force-flush after 5s)");
   console.log(chalk3.dim(" Disable with:"), chalk3.bold("lien serve --no-watch"));
   console.log(chalk3.bold("\nIndexing Settings (defaults):"));
   console.log(chalk3.dim("Concurrency:"), DEFAULT_CONCURRENCY);
@@ -3993,7 +3992,7 @@ import { Server } from "@modelcontextprotocol/sdk/server/index.js";
 import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
 import { createRequire as createRequire2 } from "module";
 import { fileURLToPath as fileURLToPath2 } from "url";
-import { dirname as dirname2, join as join2 } from "path";
+import { dirname as dirname2, join as join2, resolve } from "path";
 import {
   LocalEmbeddings,
   GitStateTracker,
@@ -4004,18 +4003,35 @@ import {
   isGitRepo as isGitRepo2,
   VERSION_CHECK_INTERVAL_MS,
   DEFAULT_GIT_POLL_INTERVAL_MS as DEFAULT_GIT_POLL_INTERVAL_MS2,
-  createVectorDB
+  createVectorDB,
+  computeContentHash,
+  normalizeToRelativePath
 } from "@liendev/core";
 
 // src/watcher/index.ts
 import chokidar from "chokidar";
 import path3 from "path";
-import { detectAllFrameworks, getFrameworkDetector
+import { detectAllFrameworks, getFrameworkDetector } from "@liendev/core";
 var FileWatcher = class {
   watcher = null;
-  debounceTimers = /* @__PURE__ */ new Map();
   rootDir;
   onChangeHandler = null;
+  // Batch state for aggregating rapid changes
+  pendingChanges = /* @__PURE__ */ new Map();
+  batchTimer = null;
+  batchInProgress = false;
+  // Track if handler is currently processing a batch
+  BATCH_WINDOW_MS = 500;
+  // Collect changes for 500ms before processing
+  MAX_BATCH_WAIT_MS = 5e3;
+  // Force flush after 5s even if changes keep coming
+  firstChangeTimestamp = null;
+  // Track when batch started
+  // Git watching state
+  gitChangeTimer = null;
+  gitChangeHandler = null;
+  GIT_DEBOUNCE_MS = 1e3;
+  // Git operations touch multiple files
   constructor(rootDir) {
     this.rootDir = rootDir;
   }
@@ -4102,7 +4118,11 @@ var FileWatcher = class {
       return;
     }
    this.watcher.on("add", (filepath) => this.handleChange("add", filepath)).on("change", (filepath) => this.handleChange("change", filepath)).on("unlink", (filepath) => this.handleChange("unlink", filepath)).on("error", (error) => {
-
+      try {
+        const message = "[FileWatcher] Error: " + (error instanceof Error ? error.stack || error.message : String(error)) + "\n";
+        process.stderr.write(message);
+      } catch {
+      }
     });
   }
   /**
@@ -4114,17 +4134,17 @@ var FileWatcher = class {
     }
     let readyFired = false;
     await Promise.race([
-      new Promise((
+      new Promise((resolve2) => {
        const readyHandler = () => {
          readyFired = true;
-
+          resolve2();
        };
        this.watcher.once("ready", readyHandler);
      }),
-      new Promise((
+      new Promise((resolve2) => {
        setTimeout(() => {
          if (!readyFired) {
-
+            resolve2();
          }
        }, 1e3);
      })
@@ -4146,34 +4166,200 @@ var FileWatcher = class {
     await this.waitForReady();
   }
   /**
-   *
-   *
+   * Enable watching .git directory for git operations.
+   * Call this after start() to enable event-driven git detection.
+   *
+   * @param onGitChange - Callback invoked when git operations detected
+   */
+  watchGit(onGitChange) {
+    if (!this.watcher) {
+      throw new Error("Cannot watch git - watcher not started");
+    }
+    this.gitChangeHandler = onGitChange;
+    this.watcher.add([
+      path3.join(this.rootDir, ".git/HEAD"),
+      path3.join(this.rootDir, ".git/index"),
+      path3.join(this.rootDir, ".git/refs/**"),
+      path3.join(this.rootDir, ".git/MERGE_HEAD"),
+      path3.join(this.rootDir, ".git/REBASE_HEAD"),
+      path3.join(this.rootDir, ".git/CHERRY_PICK_HEAD"),
+      path3.join(this.rootDir, ".git/logs/refs/stash")
+      // git stash operations
+    ]);
+  }
+  /**
+   * Check if a filepath is a git-related change
+   */
+  isGitChange(filepath) {
+    const normalized = filepath.replace(/\\/g, "/");
+    return normalized.includes(".git/");
+  }
+  /**
+   * Handle git-related file changes with debouncing
+   */
+  handleGitChange() {
+    if (this.gitChangeTimer) {
+      clearTimeout(this.gitChangeTimer);
+    }
+    this.gitChangeTimer = setTimeout(async () => {
+      try {
+        await this.gitChangeHandler?.();
+      } catch (error) {
+      }
+      this.gitChangeTimer = null;
+    }, this.GIT_DEBOUNCE_MS);
+  }
+  /**
+   * Handles a file change event with smart batching.
+   * Collects rapid changes across multiple files and processes them together.
+   * Forces flush after MAX_BATCH_WAIT_MS even if changes keep arriving.
+   *
+   * If a batch is currently being processed by an async handler, waits for completion
+   * before starting a new batch to prevent race conditions.
    */
   handleChange(type, filepath) {
-
-
-
-    }
-
-
-
-
-
-
-
-
-
-
-
-
-    }
-    } catch (error) {
-      console.error(`[Lien] Error handling file change: ${error}`);
-    }
+    if (this.gitChangeHandler && this.isGitChange(filepath)) {
+      this.handleGitChange();
+      return;
+    }
+    if (!this.onChangeHandler) {
+      return;
+    }
+    if (this.pendingChanges.size === 0) {
+      this.firstChangeTimestamp = Date.now();
+    }
+    this.pendingChanges.set(filepath, type);
+    const now = Date.now();
+    const elapsed = now - this.firstChangeTimestamp;
+    if (elapsed >= this.MAX_BATCH_WAIT_MS) {
+      if (this.batchTimer) {
+        clearTimeout(this.batchTimer);
+        this.batchTimer = null;
       }
-
-
+      this.flushBatch();
+      return;
+    }
+    if (this.batchTimer) {
+      clearTimeout(this.batchTimer);
+    }
+    if (!this.batchInProgress) {
+      this.batchTimer = setTimeout(() => {
+        this.flushBatch();
+      }, this.BATCH_WINDOW_MS);
+    }
+  }
+  /**
+   * Group pending changes by type and convert to absolute paths.
+   * Returns arrays of added, modified, and deleted files.
+   */
+  groupPendingChanges(changes) {
+    const added = [];
+    const modified = [];
+    const deleted = [];
+    for (const [filepath, type] of changes) {
+      const absolutePath = path3.isAbsolute(filepath) ? filepath : path3.join(this.rootDir, filepath);
+      switch (type) {
+        case "add":
+          added.push(absolutePath);
+          break;
+        case "change":
+          modified.push(absolutePath);
+          break;
+        case "unlink":
+          deleted.push(absolutePath);
+          break;
+      }
+    }
+    return { added, modified, deleted };
+  }
+  /**
+   * Handle completion of async batch handler.
+   * Triggers flush of accumulated changes if any.
+   */
+  handleBatchComplete() {
+    this.batchInProgress = false;
+    if (this.pendingChanges.size > 0 && !this.batchTimer) {
+      this.batchTimer = setTimeout(() => {
+        this.flushBatch();
+      }, this.BATCH_WINDOW_MS);
+    }
+  }
+  /**
+   * Dispatch batch event to handler and track async state.
+   * Caller must ensure at least one of added/modified/deleted is non-empty.
+   */
+  dispatchBatch(added, modified, deleted) {
+    if (!this.onChangeHandler) return;
+    const allFiles = [...added, ...modified];
+    const firstFile = allFiles.length > 0 ? allFiles[0] : deleted[0];
+    if (!firstFile) {
+      return;
+    }
+    try {
+      this.batchInProgress = true;
+      const result = this.onChangeHandler({
+        type: "batch",
+        filepath: firstFile,
+        added,
+        modified,
+        deleted
+      });
+      if (result instanceof Promise) {
+        result.catch(() => {
+        }).finally(() => this.handleBatchComplete());
+      } else {
+        this.handleBatchComplete();
+      }
+    } catch (error) {
+      this.handleBatchComplete();
+    }
+  }
+  /**
+   * Flush pending changes and dispatch batch event.
+   * Tracks async handler state to prevent race conditions.
+   */
+  flushBatch() {
+    if (this.batchTimer) {
+      clearTimeout(this.batchTimer);
+      this.batchTimer = null;
+    }
+    if (this.pendingChanges.size === 0) return;
+    const changes = new Map(this.pendingChanges);
+    this.pendingChanges.clear();
+    this.firstChangeTimestamp = null;
+    const { added, modified, deleted } = this.groupPendingChanges(changes);
+    if (added.length === 0 && modified.length === 0 && deleted.length === 0) {
+      return;
+    }
+    this.dispatchBatch(added, modified, deleted);
+  }
+  /**
+   * Flush final batch during shutdown.
+   * Handles edge case where watcher is stopped while batch is pending.
+   */
+  async flushFinalBatch(handler) {
+    if (this.pendingChanges.size === 0) return;
+    const changes = new Map(this.pendingChanges);
+    this.pendingChanges.clear();
+    const { added, modified, deleted } = this.groupPendingChanges(changes);
+    if (added.length === 0 && modified.length === 0 && deleted.length === 0) {
+      return;
+    }
+    try {
+      const allFiles = [...added, ...modified];
+      const firstFile = allFiles.length > 0 ? allFiles[0] : deleted[0];
+      if (!firstFile) {
+        return;
+      }
+      await handler({
+        type: "batch",
+        filepath: firstFile,
+        added,
+        modified,
+        deleted
+      });
+    } catch (error) {
+    }
   }
   /**
    * Stops the file watcher and cleans up resources.
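Note: the hunk above replaces the old per-file debounce timers with a single batching window. A minimal sketch of that policy in isolation, decoupled from chokidar (the Batcher name and onFlush callback are illustrative, not part of the package's API):

    class Batcher {
      constructor(onFlush, windowMs = 500, maxWaitMs = 5000) {
        this.onFlush = onFlush;
        this.windowMs = windowMs;
        this.maxWaitMs = maxWaitMs;
        this.pending = new Map(); // filepath -> latest event type
        this.timer = null;
        this.firstChangeAt = null;
      }
      add(filepath, type) {
        if (this.pending.size === 0) this.firstChangeAt = Date.now();
        this.pending.set(filepath, type);
        if (Date.now() - this.firstChangeAt >= this.maxWaitMs) {
          this.flush(); // force flush: changes kept arriving for >= 5s
          return;
        }
        clearTimeout(this.timer); // restart the 500ms quiet-period window
        this.timer = setTimeout(() => this.flush(), this.windowMs);
      }
      flush() {
        clearTimeout(this.timer);
        this.timer = null;
        if (this.pending.size === 0) return;
        const changes = new Map(this.pending);
        this.pending.clear();
        this.firstChangeAt = null;
        this.onFlush(changes);
      }
    }

    // Ten rapid saves of one file collapse into a single flush:
    const b = new Batcher((changes) => console.log([...changes.entries()]));
    for (let i = 0; i < 10; i++) b.add("src/app.ts", "change");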
@@ -4182,13 +4368,25 @@ var FileWatcher = class {
     if (!this.watcher) {
       return;
     }
-
-
+    const handler = this.onChangeHandler;
+    this.onChangeHandler = null;
+    this.gitChangeHandler = null;
+    if (this.gitChangeTimer) {
+      clearTimeout(this.gitChangeTimer);
+      this.gitChangeTimer = null;
+    }
+    while (this.batchInProgress) {
+      await new Promise((resolve2) => setTimeout(resolve2, 50));
+    }
+    if (this.batchTimer) {
+      clearTimeout(this.batchTimer);
+      this.batchTimer = null;
+    }
+    if (handler && this.pendingChanges.size > 0) {
+      await this.flushFinalBatch(handler);
     }
-    this.debounceTimers.clear();
     await this.watcher.close();
     this.watcher = null;
-    this.onChangeHandler = null;
   }
   /**
    * Gets the list of files currently being watched.
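Note: a lifecycle sketch of the watcher as it is wired up elsewhere in this diff (start() receives the change handler; stop() now waits for any in-flight batch, flushes pending changes, then closes chokidar). The handler body here is illustrative:

    const watcher = new FileWatcher(process.cwd());
    await watcher.start(async (event) => {
      if (event.type === "batch") {
        console.log(event.added, event.modified, event.deleted);
      }
    });
    watcher.watchGit(async () => console.log("git operation detected"));
    // ... later, on shutdown:
    await watcher.stop();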
@@ -8334,6 +8532,9 @@ var GetDependentsSchema = external_exports.object({
   filepath: external_exports.string().min(1, "Filepath cannot be empty").describe(
     "Path to file to find dependents for (relative to workspace root).\n\nExample: 'src/utils/validate.ts'\n\nReturns all files that import or depend on this file.\n\nNote: Scans up to 10,000 code chunks. For very large codebases,\nresults may be incomplete (a warning will be included if truncated)."
   ),
+  symbol: external_exports.string().min(1, "Symbol cannot be an empty string").optional().describe(
+    "Optional: specific exported symbol to find usages of.\n\nWhen provided, returns call sites instead of just importing files.\n\nExample: 'validateEmail' to find where validateEmail() is called.\n\nResponse includes 'usages' array showing which functions call this symbol."
+  ),
   depth: external_exports.number().int().min(1).max(1).default(1).describe(
     "Depth of transitive dependencies. Only depth=1 (direct dependents) is currently supported.\n\n1 = Direct dependents only"
   ),
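Note: example arguments for the get_dependents tool with the new optional symbol field (the MCP call envelope is omitted; values are illustrative):

    const args = {
      filepath: "src/utils/validate.ts",
      symbol: "validateEmail", // returns call sites, not just importing files
      depth: 1
    };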
@@ -8643,7 +8844,7 @@ async function handleFindSimilar(args, ctx) {
 // src/mcp/utils/path-matching.ts
 function normalizePath(path6, workspaceRoot) {
   let normalized = path6.replace(/['"]/g, "").trim().replace(/\\/g, "/");
-  normalized = normalized.replace(/\.(ts|tsx|js|jsx)$/, "");
+  normalized = normalized.replace(/\.(ts|tsx|js|jsx|php|py)$/, "");
   if (normalized.startsWith(workspaceRoot + "/")) {
     normalized = normalized.substring(workspaceRoot.length + 1);
   }
@@ -8671,8 +8872,56 @@ function matchesFile(normalizedImport, normalizedTarget) {
   if (matchesAtBoundary(cleanedImport, normalizedTarget) || matchesAtBoundary(normalizedTarget, cleanedImport)) {
     return true;
   }
+  if (matchesPHPNamespace(normalizedImport, normalizedTarget)) {
+    return true;
+  }
+  if (matchesPythonModule(normalizedImport, normalizedTarget)) {
+    return true;
+  }
   return false;
 }
+function matchesDirectPythonModule(moduleAsPath, targetWithoutPy) {
+  return targetWithoutPy === moduleAsPath || targetWithoutPy === moduleAsPath + "/__init__" || targetWithoutPy.replace(/\/__init__$/, "") === moduleAsPath;
+}
+function matchesParentPythonPackage(moduleAsPath, targetWithoutPy) {
+  return targetWithoutPy.startsWith(moduleAsPath + "/");
+}
+function matchesSuffixPythonModule(moduleAsPath, targetWithoutPy) {
+  return targetWithoutPy.endsWith("/" + moduleAsPath) || targetWithoutPy.endsWith("/" + moduleAsPath + "/__init__");
+}
+function matchesWithSourcePrefix(moduleAsPath, targetWithoutPy) {
+  const moduleIndex = targetWithoutPy.indexOf(moduleAsPath);
+  if (moduleIndex < 0) return false;
+  const prefix = targetWithoutPy.substring(0, moduleIndex);
+  const prefixSlashes = (prefix.match(/\//g) || []).length;
+  return prefixSlashes <= 1 && (prefix === "" || prefix.endsWith("/"));
+}
+function matchesPythonModule(importPath, targetPath) {
+  if (!importPath.includes(".")) {
+    return false;
+  }
+  const moduleAsPath = importPath.replace(/\./g, "/");
+  const targetWithoutPy = targetPath.replace(/\.py$/, "");
+  return matchesDirectPythonModule(moduleAsPath, targetWithoutPy) || matchesParentPythonPackage(moduleAsPath, targetWithoutPy) || matchesSuffixPythonModule(moduleAsPath, targetWithoutPy) || matchesWithSourcePrefix(moduleAsPath, targetWithoutPy);
+}
+function matchesPHPNamespace(importPath, targetPath) {
+  const importComponents = importPath.split("/").filter(Boolean);
+  const targetComponents = targetPath.split("/").filter(Boolean);
+  if (importComponents.length === 0 || targetComponents.length === 0) {
+    return false;
+  }
+  let matched = 0;
+  for (let i = 1; i <= importComponents.length && i <= targetComponents.length; i++) {
+    const impComp = importComponents[importComponents.length - i].toLowerCase();
+    const targetComp = targetComponents[targetComponents.length - i].toLowerCase();
+    if (impComp === targetComp) {
+      matched++;
+    } else {
+      break;
+    }
+  }
+  return matched === importComponents.length;
+}
 function getCanonicalPath(filepath, workspaceRoot) {
   let canonical = filepath.replace(/\\/g, "/");
   if (canonical.startsWith(workspaceRoot + "/")) {
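Note: worked examples for the matchers added above. matchesPythonModule only treats dotted imports as module paths, converts dots to slashes, and accepts at most one source-prefix path segment (e.g. "src/"); matchesPHPNamespace compares path components right-to-left, case-insensitively:

    matchesPythonModule("app.utils.helpers", "src/app/utils/helpers.py"); // true (suffix match under "src/")
    matchesPythonModule("app.utils", "src/app/utils/__init__.py");        // true (package __init__)
    matchesPythonModule("helpers", "src/app/utils/helpers.py");           // false (no dot, not a module path)
    matchesPHPNamespace("app/services/mailer", "src/App/Services/Mailer"); // true (all 3 import components match)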
@@ -8942,7 +9191,7 @@ var COMPLEXITY_THRESHOLDS = {
   MEDIUM_MAX: 15
   // Occasional branching
 };
-async function
+async function scanChunks(vectorDB, crossRepo, log) {
   let allChunks;
   if (crossRepo && vectorDB instanceof QdrantDB2) {
     allChunks = await vectorDB.scanCrossRepo({ limit: SCAN_LIMIT2 });
@@ -8956,48 +9205,105 @@ async function findDependents(vectorDB, filepath, crossRepo, log) {
   if (hitLimit) {
     log(`Scanned ${SCAN_LIMIT2} chunks (limit reached). Results may be incomplete.`, "warning");
   }
-
+  return { allChunks, hitLimit };
+}
+function createPathNormalizer() {
   const workspaceRoot = process.cwd().replace(/\\/g, "/");
-  const
-
-  if (!
-
+  const cache = /* @__PURE__ */ new Map();
+  return (path6) => {
+    if (!cache.has(path6)) {
+      cache.set(path6, normalizePath(path6, workspaceRoot));
+    }
+    return cache.get(path6);
   };
-
-
-  const
+}
+function groupChunksByFile(chunks) {
+  const workspaceRoot = process.cwd().replace(/\\/g, "/");
   const chunksByFile = /* @__PURE__ */ new Map();
-  for (const chunk of
+  for (const chunk of chunks) {
     const canonical = getCanonicalPath(chunk.metadata.file, workspaceRoot);
     const existing = chunksByFile.get(canonical) || [];
     existing.push(chunk);
     chunksByFile.set(canonical, existing);
   }
+  return chunksByFile;
+}
+function buildDependentsList(chunksByFile, symbol, normalizedTarget, normalizePathCached, allChunks, filepath, log) {
+  if (symbol) {
+    validateSymbolExport(allChunks, normalizedTarget, normalizePathCached, symbol, filepath, log);
+    return findSymbolUsages(chunksByFile, symbol, normalizedTarget, normalizePathCached);
+  }
+  const dependents = Array.from(chunksByFile.keys()).map((fp) => ({
+    filepath: fp,
+    isTestFile: isTestFile(fp)
+  }));
+  return { dependents, totalUsageCount: void 0 };
+}
+function validateSymbolExport(allChunks, normalizedTarget, normalizePathCached, symbol, filepath, log) {
+  const targetFileExportsSymbol = allChunks.some((chunk) => {
+    const chunkFile = normalizePathCached(chunk.metadata.file);
+    return matchesFile(chunkFile, normalizedTarget) && chunk.metadata.exports?.includes(symbol);
+  });
+  if (!targetFileExportsSymbol) {
+    log(`Warning: Symbol "${symbol}" not found in exports of ${filepath}`, "warning");
+  }
+}
+async function findDependents(vectorDB, filepath, crossRepo, log, symbol) {
+  const { allChunks, hitLimit } = await scanChunks(vectorDB, crossRepo, log);
+  log(`Scanning ${allChunks.length} chunks for imports...`);
+  const normalizePathCached = createPathNormalizer();
+  const normalizedTarget = normalizePathCached(filepath);
+  const importIndex = buildImportIndex(allChunks, normalizePathCached);
+  const dependentChunks = findDependentChunks(importIndex, normalizedTarget);
+  const chunksByFile = groupChunksByFile(dependentChunks);
   const fileComplexities = calculateFileComplexities(chunksByFile);
   const complexityMetrics = calculateOverallComplexityMetrics(fileComplexities);
-  const
-
-
-
+  const { dependents, totalUsageCount } = buildDependentsList(
+    chunksByFile,
+    symbol,
+    normalizedTarget,
+    normalizePathCached,
+    allChunks,
+    filepath,
+    log
+  );
+  dependents.sort((a, b) => {
+    if (a.isTestFile === b.isTestFile) return 0;
+    return a.isTestFile ? 1 : -1;
+  });
+  const testDependentCount = dependents.filter((f) => f.isTestFile).length;
+  const productionDependentCount = dependents.length - testDependentCount;
   return {
-    dependents
+    dependents,
+    productionDependentCount,
+    testDependentCount,
     chunksByFile,
     fileComplexities,
     complexityMetrics,
     hitLimit,
-    allChunks
+    allChunks,
+    totalUsageCount
   };
 }
 function buildImportIndex(allChunks, normalizePathCached) {
   const importIndex = /* @__PURE__ */ new Map();
+  const addToIndex = (importPath, chunk) => {
+    const normalizedImport = normalizePathCached(importPath);
+    if (!importIndex.has(normalizedImport)) {
+      importIndex.set(normalizedImport, []);
+    }
+    importIndex.get(normalizedImport).push(chunk);
+  };
   for (const chunk of allChunks) {
     const imports = chunk.metadata.imports || [];
     for (const imp of imports) {
-
-
-
+      addToIndex(imp, chunk);
+    }
+    const importedSymbols = chunk.metadata.importedSymbols;
+    if (importedSymbols && typeof importedSymbols === "object") {
+      for (const modulePath of Object.keys(importedSymbols)) {
+        addToIndex(modulePath, chunk);
       }
-    importIndex.get(normalizedImport).push(chunk);
     }
   }
   return importIndex;
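Note: buildImportIndex now also indexes the keys of chunk.metadata.importedSymbols, so a file is found as a dependent even when only its symbol-level imports were recorded. An assumed illustration of that metadata shape (its producer is not shown in this diff; the "* as" form is implied by fileImportsSymbol below):

    // chunk.metadata.importedSymbols (assumed shape):
    // { "./utils/validate": ["validateEmail"], "path": ["* as path"] }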
@@ -9079,14 +9385,15 @@ function calculateComplexityRiskBoost(avgComplexity, maxComplexity) {
   }
   return "low";
 }
-function calculateRiskLevel(dependentCount, complexityRiskBoost) {
+function calculateRiskLevel(dependentCount, complexityRiskBoost, productionDependentCount) {
   const DEPENDENT_COUNT_THRESHOLDS = {
     LOW: 5,
     MEDIUM: 15,
     HIGH: 30
   };
   const RISK_ORDER = { low: 0, medium: 1, high: 2, critical: 3 };
-
+  const effectiveCount = productionDependentCount ?? dependentCount;
+  let riskLevel = effectiveCount === 0 ? "low" : effectiveCount <= DEPENDENT_COUNT_THRESHOLDS.LOW ? "low" : effectiveCount <= DEPENDENT_COUNT_THRESHOLDS.MEDIUM ? "medium" : effectiveCount <= DEPENDENT_COUNT_THRESHOLDS.HIGH ? "high" : "critical";
   if (RISK_ORDER[complexityRiskBoost] > RISK_ORDER[riskLevel]) {
     riskLevel = complexityRiskBoost;
   }
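Note: a worked example of the new third parameter. With 20 total dependents but only 4 in production code, effectiveCount = 4 <= LOW (5), so the base risk is "low" before any complexity boost; omitting the argument falls back to the old total-count behavior (20 > MEDIUM, so "high"):

    calculateRiskLevel(20, "low", 4); // "low"  (test-only dependents no longer inflate risk)
    calculateRiskLevel(20, "low");    // "high" (effectiveCount defaults to dependentCount)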
@@ -9118,45 +9425,167 @@ function groupDependentsByRepo(dependents, chunks) {
   }
   return grouped;
 }
+function findSymbolUsages(chunksByFile, targetSymbol, normalizedTarget, normalizePathCached) {
+  const dependents = [];
+  let totalUsageCount = 0;
+  for (const [filepath, chunks] of chunksByFile.entries()) {
+    if (!fileImportsSymbol(chunks, targetSymbol, normalizedTarget, normalizePathCached)) {
+      continue;
+    }
+    const usages = extractSymbolUsagesFromChunks(chunks, targetSymbol);
+    dependents.push({
+      filepath,
+      isTestFile: isTestFile(filepath),
+      usages: usages.length > 0 ? usages : void 0
+    });
+    totalUsageCount += usages.length;
+  }
+  return { dependents, totalUsageCount };
+}
+function fileImportsSymbol(chunks, targetSymbol, normalizedTarget, normalizePathCached) {
+  return chunks.some((chunk) => {
+    const importedSymbols = chunk.metadata.importedSymbols;
+    if (!importedSymbols) return false;
+    for (const [importPath, symbols] of Object.entries(importedSymbols)) {
+      const normalizedImport = normalizePathCached(importPath);
+      if (matchesFile(normalizedImport, normalizedTarget)) {
+        if (symbols.includes(targetSymbol)) return true;
+        if (symbols.some((s) => s.startsWith("* as "))) return true;
+      }
+    }
+    return false;
+  });
+}
+function extractSymbolUsagesFromChunks(chunks, targetSymbol) {
+  const usages = [];
+  for (const chunk of chunks) {
+    const callSites = chunk.metadata.callSites;
+    if (!callSites) continue;
+    const lines = chunk.content.split("\n");
+    for (const call of callSites) {
+      if (call.symbol === targetSymbol) {
+        usages.push({
+          callerSymbol: chunk.metadata.symbolName || "unknown",
+          line: call.line,
+          snippet: extractSnippet(lines, call.line, chunk.metadata.startLine, targetSymbol)
+        });
+      }
+    }
+  }
+  return usages;
+}
+function extractSnippet(lines, callLine, startLine, symbolName) {
+  const lineIndex = callLine - startLine;
+  const placeholder = `${symbolName}(...)`;
+  if (lineIndex < 0 || lineIndex >= lines.length) {
+    return placeholder;
+  }
+  const directLine = lines[lineIndex].trim();
+  if (directLine) {
+    return directLine;
+  }
+  const searchRadius = 5;
+  for (let i = lineIndex - 1; i >= Math.max(0, lineIndex - searchRadius); i--) {
+    const candidate = lines[i].trim();
+    if (candidate) {
+      return candidate;
+    }
+  }
+  for (let i = lineIndex + 1; i < Math.min(lines.length, lineIndex + searchRadius + 1); i++) {
+    const candidate = lines[i].trim();
+    if (candidate) {
+      return candidate;
+    }
+  }
+  return placeholder;
+}
 
 // src/mcp/handlers/get-dependents.ts
+function checkCrossRepoFallback(crossRepo, vectorDB) {
+  return Boolean(crossRepo && !(vectorDB instanceof QdrantDB3));
+}
+function buildNotes(crossRepoFallback, hitLimit) {
+  const notes = [];
+  if (crossRepoFallback) {
+    notes.push("Cross-repo search requires Qdrant backend. Fell back to single-repo search.");
+  }
+  if (hitLimit) {
+    notes.push("Scanned 10,000 chunks (limit reached). Results may be incomplete.");
+  }
+  return notes;
+}
+function logRiskAssessment(analysis, riskLevel, symbol, log) {
+  const prodTest = `(${analysis.productionDependentCount} prod, ${analysis.testDependentCount} test)`;
+  if (symbol && analysis.totalUsageCount !== void 0) {
+    if (analysis.totalUsageCount > 0) {
+      log(
+        `Found ${analysis.totalUsageCount} tracked call sites across ${analysis.dependents.length} files ${prodTest} - risk: ${riskLevel}`
+      );
+    } else {
+      log(
+        `Found ${analysis.dependents.length} files importing '${symbol}' ${prodTest} - risk: ${riskLevel} (Note: Call site tracking unavailable for these chunks)`
+      );
+    }
+  } else {
+    log(
+      `Found ${analysis.dependents.length} dependents ${prodTest} - risk: ${riskLevel}`
+    );
+  }
+}
+function buildDependentsResponse(analysis, args, riskLevel, indexInfo, notes, crossRepo, vectorDB) {
+  const { symbol, filepath } = args;
+  const response = {
+    indexInfo,
+    filepath,
+    dependentCount: analysis.dependents.length,
+    productionDependentCount: analysis.productionDependentCount,
+    testDependentCount: analysis.testDependentCount,
+    riskLevel,
+    dependents: analysis.dependents,
+    complexityMetrics: analysis.complexityMetrics
+  };
+  if (symbol) {
+    response.symbol = symbol;
+  }
+  if (analysis.totalUsageCount !== void 0) {
+    response.totalUsageCount = analysis.totalUsageCount;
+  }
+  if (notes.length > 0) {
+    response.note = notes.join(" ");
+  }
+  if (crossRepo && vectorDB instanceof QdrantDB3) {
+    response.groupedByRepo = groupDependentsByRepo(analysis.dependents, analysis.allChunks);
+  }
+  return response;
+}
 async function handleGetDependents(args, ctx) {
   const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
   return await wrapToolHandler(
     GetDependentsSchema,
     async (validatedArgs) => {
-      const { crossRepo, filepath } = validatedArgs;
-
+      const { crossRepo, filepath, symbol } = validatedArgs;
+      const symbolSuffix = symbol ? ` (symbol: ${symbol})` : "";
+      const crossRepoSuffix = crossRepo ? " (cross-repo)" : "";
+      log(`Finding dependents of: ${filepath}${symbolSuffix}${crossRepoSuffix}`);
       await checkAndReconnect();
-      const analysis = await findDependents(vectorDB, filepath, crossRepo ?? false, log);
+      const analysis = await findDependents(vectorDB, filepath, crossRepo ?? false, log, symbol);
       const riskLevel = calculateRiskLevel(
        analysis.dependents.length,
-        analysis.complexityMetrics.complexityRiskBoost
-
-      log(
-        `Found ${analysis.dependents.length} dependent files (risk: ${riskLevel}${analysis.complexityMetrics.filesWithComplexityData > 0 ? ", complexity-boosted" : ""})`
+        analysis.complexityMetrics.complexityRiskBoost,
+        analysis.productionDependentCount
       );
-
-      const crossRepoFallback = crossRepo
-
-
-
-
-        notes.push("Scanned 10,000 chunks (limit reached). Results may be incomplete.");
-      }
-      const response = {
-        indexInfo: getIndexMetadata(),
-        filepath: validatedArgs.filepath,
-        dependentCount: analysis.dependents.length,
+      logRiskAssessment(analysis, riskLevel, symbol, log);
+      const crossRepoFallback = checkCrossRepoFallback(crossRepo, vectorDB);
+      const notes = buildNotes(crossRepoFallback, analysis.hitLimit);
+      return buildDependentsResponse(
+        analysis,
+        validatedArgs,
        riskLevel,
-
-
-
-
-
-        response.groupedByRepo = groupDependentsByRepo(analysis.dependents, analysis.allChunks);
-      }
-      return response;
+        getIndexMetadata(),
+        notes,
+        crossRepo,
+        vectorDB
+      );
     }
   )(args);
 }
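Note: with a symbol argument, the handler above now returns per-file call sites. A sketch of the response shape, with illustrative values (field names taken from buildDependentsResponse and extractSymbolUsagesFromChunks):

    {
      filepath: "src/utils/validate.ts",
      symbol: "validateEmail",
      dependentCount: 2,
      productionDependentCount: 1,
      testDependentCount: 1,
      riskLevel: "low",
      totalUsageCount: 3,
      dependents: [
        {
          filepath: "src/signup/form.ts",
          isTestFile: false,
          usages: [{ callerSymbol: "submitForm", line: 42, snippet: "if (!validateEmail(email)) {" }]
        }
      ]
    }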
@@ -9348,6 +9777,134 @@ function registerMCPHandlers(server, toolContext, log) {
   });
 }
 
+// src/mcp/reindex-state-manager.ts
+function checkForStuckState(inProgress, lastStateChangeTimestamp, activeOperations, pendingFilesCount) {
+  if (!inProgress) return;
+  const STUCK_STATE_THRESHOLD_MS = 5 * 60 * 1e3;
+  const stuckDuration = Date.now() - lastStateChangeTimestamp;
+  if (stuckDuration > STUCK_STATE_THRESHOLD_MS) {
+    console.warn(
+      `[Lien] HEALTH CHECK: Reindex stuck in progress for ${Math.round(stuckDuration / 1e3)}s. This indicates an operation crashed without cleanup. Active operations: ${activeOperations}, Pending files: ${pendingFilesCount}. Consider using resetIfStuck() to recover.`
+    );
+  }
+}
+function mergePendingFiles(pendingFiles, newFiles) {
+  const existing = new Set(pendingFiles);
+  for (const file of newFiles) {
+    if (!existing.has(file)) {
+      pendingFiles.push(file);
+    }
+  }
+}
+function createReindexStateManager() {
+  let state = {
+    inProgress: false,
+    pendingFiles: [],
+    lastReindexTimestamp: null,
+    lastReindexDurationMs: null
+  };
+  let activeOperations = 0;
+  let lastStateChangeTimestamp = Date.now();
+  return {
+    /**
+     * Get a copy of the current reindex state.
+     * Returns a deep copy to prevent external mutation of nested arrays.
+     */
+    getState: () => {
+      checkForStuckState(
+        state.inProgress,
+        lastStateChangeTimestamp,
+        activeOperations,
+        state.pendingFiles.length
+      );
+      return { ...state, pendingFiles: [...state.pendingFiles] };
+    },
+    /**
+     * Start a new reindex operation.
+     *
+     * **Important**: Silently ignores empty or null file arrays without incrementing
+     * activeOperations. This is intentional - if there's no work to do, no operation
+     * is started. Callers should check for empty arrays before calling if they need
+     * to track "attempted" operations.
+     *
+     * @param files - Array of file paths to reindex. Empty/null arrays are ignored.
+     */
+    startReindex: (files) => {
+      if (!files || files.length === 0) {
+        return;
+      }
+      activeOperations += 1;
+      state.inProgress = true;
+      lastStateChangeTimestamp = Date.now();
+      mergePendingFiles(state.pendingFiles, files);
+    },
+    /**
+     * Mark a reindex operation as complete.
+     *
+     * Logs a warning if called without a matching startReindex.
+     * Only clears state when all concurrent operations finish.
+     *
+     * @param durationMs - Duration of the reindex operation in milliseconds
+     */
+    completeReindex: (durationMs) => {
+      if (activeOperations === 0) {
+        console.warn("[Lien] completeReindex called without matching startReindex");
+        return;
+      }
+      activeOperations -= 1;
+      if (activeOperations === 0) {
+        state.inProgress = false;
+        state.pendingFiles = [];
+        state.lastReindexTimestamp = Date.now();
+        state.lastReindexDurationMs = durationMs;
+        lastStateChangeTimestamp = Date.now();
+      }
+    },
+    /**
+     * Mark a reindex operation as failed.
+     *
+     * Logs a warning if called without a matching startReindex.
+     * Only clears state when all concurrent operations finish/fail.
+     */
+    failReindex: () => {
+      if (activeOperations === 0) {
+        console.warn("[Lien] failReindex called without matching startReindex");
+        return;
+      }
+      activeOperations -= 1;
+      if (activeOperations === 0) {
+        state.inProgress = false;
+        state.pendingFiles = [];
+        lastStateChangeTimestamp = Date.now();
+      }
+    },
+    /**
+     * Manually reset state if it's stuck.
+     *
+     * **WARNING**: Only use this if you're certain operations have crashed without cleanup.
+     * This will forcibly clear the inProgress flag and reset activeOperations counter.
+     *
+     * Use this when getState() health check detects stuck state and you've verified
+     * no legitimate operations are running.
+     *
+     * @returns true if state was reset, false if state was already clean
+     */
+    resetIfStuck: () => {
+      if (state.inProgress && activeOperations > 0) {
+        console.warn(
+          `[Lien] Manually resetting stuck reindex state. Active operations: ${activeOperations}, Pending files: ${state.pendingFiles.length}`
+        );
+        activeOperations = 0;
+        state.inProgress = false;
+        state.pendingFiles = [];
+        lastStateChangeTimestamp = Date.now();
+        return true;
+      }
+      return false;
+    }
+  };
+}
+
 // src/mcp/server.ts
 var __filename2 = fileURLToPath2(import.meta.url);
 var __dirname2 = dirname2(__filename2);
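Note: usage sketch for the state manager defined above; concurrent operations are reference-counted, so state only clears when the last one finishes:

    const mgr = createReindexStateManager();
    mgr.startReindex(["src/a.ts", "src/b.ts"]);
    mgr.startReindex(["src/b.ts", "src/c.ts"]); // merged; pendingFiles deduped to a, b, c
    mgr.completeReindex(120);
    mgr.getState().inProgress;                  // true: one operation still active
    mgr.completeReindex(95);
    mgr.getState();                             // { inProgress: false, pendingFiles: [], lastReindexDurationMs: 95, ... }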
@@ -9358,6 +9915,9 @@ try {
 } catch {
   packageJson2 = require3(join2(__dirname2, "../../package.json"));
 }
+function getRootDirFromDbPath(dbPath) {
+  return resolve(dbPath, "../../..");
+}
 async function initializeDatabase(rootDir, log) {
   const embeddings = new LocalEmbeddings();
   log("Creating vector database...");
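Note: getRootDirFromDbPath simply resolves three levels up from the database path. Assuming a layout like <workspace>/.lien/<index>/<db> (the layout itself is not spelled out in this diff), that recovers the workspace root:

    resolve("/home/me/project/.lien/index/vectors", "../../.."); // "/home/me/project"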
@@ -9390,7 +9950,96 @@ async function handleAutoIndexing(vectorDB, rootDir, log) {
     }
   }
 }
-async function
+async function handleGitStartup(gitTracker, vectorDB, embeddings, _verbose, log, reindexStateManager) {
+  log("Checking for git changes...");
+  const changedFiles = await gitTracker.initialize();
+  if (changedFiles && changedFiles.length > 0) {
+    const startTime = Date.now();
+    reindexStateManager.startReindex(changedFiles);
+    log(`\u{1F33F} Git changes detected: ${changedFiles.length} files changed`);
+    try {
+      const count = await indexMultipleFiles(changedFiles, vectorDB, embeddings, { verbose: false });
+      const duration = Date.now() - startTime;
+      reindexStateManager.completeReindex(duration);
+      log(`\u2713 Reindexed ${count} files in ${duration}ms`);
+    } catch (error) {
+      reindexStateManager.failReindex();
+      throw error;
+    }
+  } else {
+    log("\u2713 Index is up to date with git state");
+  }
+}
+function createGitPollInterval(gitTracker, vectorDB, embeddings, _verbose, log, reindexStateManager) {
+  return setInterval(async () => {
+    try {
+      const changedFiles = await gitTracker.detectChanges();
+      if (changedFiles && changedFiles.length > 0) {
+        const currentState = reindexStateManager.getState();
+        if (currentState.inProgress) {
+          log(
+            `Background reindex already in progress (${currentState.pendingFiles.length} files pending), skipping git poll cycle`,
+            "debug"
+          );
+          return;
+        }
+        const startTime = Date.now();
+        reindexStateManager.startReindex(changedFiles);
+        log(`\u{1F33F} Git change detected: ${changedFiles.length} files changed`);
+        try {
+          const count = await indexMultipleFiles(changedFiles, vectorDB, embeddings, { verbose: false });
+          const duration = Date.now() - startTime;
+          reindexStateManager.completeReindex(duration);
+          log(`\u2713 Background reindex complete: ${count} files in ${duration}ms`);
+        } catch (error) {
+          reindexStateManager.failReindex();
+          log(`Git background reindex failed: ${error}`, "warning");
+        }
+      }
+    } catch (error) {
+      log(`Git detection check failed: ${error}`, "warning");
+    }
+  }, DEFAULT_GIT_POLL_INTERVAL_MS2);
+}
+function createGitChangeHandler(gitTracker, vectorDB, embeddings, _verbose, log, reindexStateManager) {
+  let gitReindexInProgress = false;
+  let lastGitReindexTime = 0;
+  const GIT_REINDEX_COOLDOWN_MS = 5e3;
+  return async () => {
+    const { inProgress: globalInProgress } = reindexStateManager.getState();
+    if (gitReindexInProgress || globalInProgress) {
+      log("Git reindex already in progress, skipping", "debug");
+      return;
+    }
+    const timeSinceLastReindex = Date.now() - lastGitReindexTime;
+    if (timeSinceLastReindex < GIT_REINDEX_COOLDOWN_MS) {
+      log(`Git change ignored (cooldown: ${GIT_REINDEX_COOLDOWN_MS - timeSinceLastReindex}ms remaining)`, "debug");
+      return;
+    }
+    log("\u{1F33F} Git change detected (event-driven)");
+    const changedFiles = await gitTracker.detectChanges();
+    if (!changedFiles || changedFiles.length === 0) {
+      return;
+    }
+    gitReindexInProgress = true;
+    const startTime = Date.now();
+    reindexStateManager.startReindex(changedFiles);
+    log(`Reindexing ${changedFiles.length} files from git change`);
+    try {
+      const count = await indexMultipleFiles(changedFiles, vectorDB, embeddings, { verbose: false });
+      const duration = Date.now() - startTime;
+      reindexStateManager.completeReindex(duration);
+      log(`\u2713 Reindexed ${count} files in ${duration}ms`);
+      lastGitReindexTime = Date.now();
+    } catch (error) {
+      reindexStateManager.failReindex();
+      log(`Git reindex failed: ${error}`, "warning");
+    } finally {
+      gitReindexInProgress = false;
+    }
+  };
+}
+async function setupGitDetection(rootDir, vectorDB, embeddings, verbose, log, reindexStateManager, fileWatcher) {
   const gitAvailable = await isGitAvailable();
   const isRepo = await isGitRepo2(rootDir);
   if (!gitAvailable) {
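Note: createGitChangeHandler combines two guards: skip while any reindex is in flight, and a 5s cooldown after a successful git-triggered reindex. The pattern in isolation (names here are illustrative, not the package's API):

    function makeGuardedHandler(run, isBusy, cooldownMs = 5000) {
      let last = 0;
      return async () => {
        if (isBusy() || Date.now() - last < cooldownMs) return;
        await run();
        last = Date.now(); // only advances on success, like lastGitReindexTime above
      };
    }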
@@ -9404,34 +10053,204 @@ async function setupGitDetection(rootDir, vectorDB, embeddings, verbose, log) {
   log("\u2713 Detected git repository");
   const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);
   try {
-
-    const changedFiles = await gitTracker.initialize();
-    if (changedFiles && changedFiles.length > 0) {
-      log(`\u{1F33F} Git changes detected: ${changedFiles.length} files changed`);
-      const count = await indexMultipleFiles(changedFiles, vectorDB, embeddings, { verbose });
-      log(`\u2713 Reindexed ${count} files`);
-    } else {
-      log("\u2713 Index is up to date with git state");
-    }
+    await handleGitStartup(gitTracker, vectorDB, embeddings, verbose, log, reindexStateManager);
   } catch (error) {
     log(`Failed to check git state on startup: ${error}`, "warning");
   }
+  if (fileWatcher) {
+    const gitChangeHandler = createGitChangeHandler(
+      gitTracker,
+      vectorDB,
+      embeddings,
+      verbose,
+      log,
+      reindexStateManager
+    );
+    fileWatcher.watchGit(gitChangeHandler);
+    log("\u2713 Git detection enabled (event-driven via file watcher)");
+    return { gitTracker, gitPollInterval: null };
+  }
   const pollIntervalSeconds = DEFAULT_GIT_POLL_INTERVAL_MS2 / 1e3;
-  log(`\u2713 Git detection enabled (
-  const gitPollInterval =
+  log(`\u2713 Git detection enabled (polling fallback every ${pollIntervalSeconds}s)`);
+  const gitPollInterval = createGitPollInterval(gitTracker, vectorDB, embeddings, verbose, log, reindexStateManager);
+  return { gitTracker, gitPollInterval };
+}
+async function handleFileDeletion(filepath, vectorDB, log) {
+  log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
+  const manifest = new ManifestManager(vectorDB.dbPath);
+  try {
+    await vectorDB.deleteByFile(filepath);
+    await manifest.removeFile(filepath);
+    log(`\u2713 Removed ${filepath} from index`);
+  } catch (error) {
+    log(`Failed to remove ${filepath}: ${error}`, "warning");
+    throw error;
+  }
+}
+async function handleSingleFileChange(filepath, type, vectorDB, embeddings, _verbose, log, reindexStateManager) {
+  const action = type === "add" ? "added" : "changed";
+  const rootDir = getRootDirFromDbPath(vectorDB.dbPath);
+  if (type === "change") {
+    const manifest = new ManifestManager(vectorDB.dbPath);
+    const normalizedPath = normalizeToRelativePath(filepath, rootDir);
    try {
-      const
-
-
-
+      const existingEntry = await manifest.transaction(async (manifestData) => {
+        return manifestData.files[normalizedPath];
+      });
+      const { shouldReindex, newMtime } = await shouldReindexFile(filepath, existingEntry, log);
+      if (!shouldReindex && newMtime && existingEntry) {
+        const skipReindex = await manifest.transaction(async (manifestData) => {
+          const entry = manifestData.files[normalizedPath];
+          if (entry) {
+            entry.lastModified = newMtime;
+            return true;
+          }
+          return false;
+        });
+        if (skipReindex) {
+          return;
+        }
      }
    } catch (error) {
-      log(`
+      log(`Content hash check failed, will reindex: ${error}`, "warning");
    }
-  }
-
+  }
+  const startTime = Date.now();
+  reindexStateManager.startReindex([filepath]);
+  log(`\u{1F4DD} File ${action}: ${filepath}`);
+  try {
+    await indexSingleFile(filepath, vectorDB, embeddings, { verbose: false, rootDir });
+    const duration = Date.now() - startTime;
+    reindexStateManager.completeReindex(duration);
+  } catch (error) {
+    reindexStateManager.failReindex();
+    log(`Failed to reindex ${filepath}: ${error}`, "warning");
+  }
 }
-async function
+async function shouldReindexFile(filepath, existingEntry, log) {
+  if (!existingEntry?.contentHash) {
+    return { shouldReindex: true };
+  }
+  const currentHash = await computeContentHash(filepath);
+  if (currentHash && currentHash === existingEntry.contentHash) {
+    log(`\u23ED\uFE0F File mtime changed but content unchanged: ${filepath}`, "debug");
+    try {
+      const fs5 = await import("fs/promises");
+      const stats = await fs5.stat(filepath);
+      return { shouldReindex: false, newMtime: stats.mtimeMs };
+    } catch {
+      return { shouldReindex: true };
+    }
+  }
+  return { shouldReindex: true };
+}
+async function filterModifiedFilesByHash(modifiedFiles, vectorDB, log) {
+  if (modifiedFiles.length === 0) {
+    return [];
+  }
+  const manifest = new ManifestManager(vectorDB.dbPath);
+  const rootDir = getRootDirFromDbPath(vectorDB.dbPath);
+  const manifestData = await manifest.transaction(async (data) => data);
+  if (!manifestData) {
+    return modifiedFiles;
+  }
+  const checkResults = [];
+  for (const filepath of modifiedFiles) {
+    const normalizedPath = normalizeToRelativePath(filepath, rootDir);
+    const existingEntry = manifestData.files[normalizedPath];
+    const { shouldReindex, newMtime } = await shouldReindexFile(filepath, existingEntry, log);
+    checkResults.push({
+      filepath,
+      normalizedPath,
+      shouldReindex,
+      newMtime
+    });
+  }
+  await manifest.transaction(async (data) => {
+    for (const result of checkResults) {
+      if (!result.shouldReindex && result.newMtime) {
+        const entry = data.files[result.normalizedPath];
+        if (entry) {
+          entry.lastModified = result.newMtime;
+        }
+      }
+    }
+    return null;
+  });
+  return checkResults.filter((r) => r.shouldReindex).map((r) => r.filepath);
+}
+async function prepareFilesForReindexing(event, vectorDB, log) {
+  const addedFiles = event.added || [];
+  const modifiedFiles = event.modified || [];
+  const deletedFiles = event.deleted || [];
+  let modifiedFilesToReindex = [];
+  try {
+    modifiedFilesToReindex = await filterModifiedFilesByHash(modifiedFiles, vectorDB, log);
+  } catch (error) {
+    log(`Hash-based filtering failed, will reindex all modified files: ${error}`, "warning");
+    modifiedFilesToReindex = modifiedFiles;
+  }
+  const filesToIndex = [...addedFiles, ...modifiedFilesToReindex];
+  return { filesToIndex, deletedFiles };
+}
+async function executeReindexOperations(filesToIndex, deletedFiles, vectorDB, embeddings, log) {
+  const operations = [];
+  if (filesToIndex.length > 0) {
+    log(`\u{1F4C1} ${filesToIndex.length} file(s) changed, reindexing...`);
+    operations.push(indexMultipleFiles(filesToIndex, vectorDB, embeddings, { verbose: false }));
+  }
+  if (deletedFiles.length > 0) {
+    operations.push(
+      Promise.all(
+        deletedFiles.map((deleted) => handleFileDeletion(deleted, vectorDB, log))
+      )
+    );
+  }
+  await Promise.all(operations);
+}
+async function handleBatchEvent(event, vectorDB, embeddings, _verbose, log, reindexStateManager) {
+  const { filesToIndex, deletedFiles } = await prepareFilesForReindexing(event, vectorDB, log);
+  const allFiles = [...filesToIndex, ...deletedFiles];
+  if (allFiles.length === 0) {
+    return;
+  }
+  const startTime = Date.now();
+  reindexStateManager.startReindex(allFiles);
+  try {
+    await executeReindexOperations(filesToIndex, deletedFiles, vectorDB, embeddings, log);
+    const duration = Date.now() - startTime;
+    reindexStateManager.completeReindex(duration);
+    log(`\u2713 Processed ${filesToIndex.length} file(s) + ${deletedFiles.length} deletion(s) in ${duration}ms`);
+  } catch (error) {
+    reindexStateManager.failReindex();
+    log(`Batch reindex failed: ${error}`, "warning");
+  }
+}
+async function handleUnlinkEvent(filepath, vectorDB, log, reindexStateManager) {
+  const startTime = Date.now();
+  reindexStateManager.startReindex([filepath]);
+  try {
+    await handleFileDeletion(filepath, vectorDB, log);
+    const duration = Date.now() - startTime;
+    reindexStateManager.completeReindex(duration);
+  } catch (error) {
+    reindexStateManager.failReindex();
+    log(`Failed to process deletion for ${filepath}: ${error}`, "warning");
+  }
+}
+function createFileChangeHandler(vectorDB, embeddings, verbose, log, reindexStateManager) {
+  return async (event) => {
+    const { type } = event;
+    if (type === "batch") {
+      await handleBatchEvent(event, vectorDB, embeddings, verbose, log, reindexStateManager);
+    } else if (type === "unlink") {
+      await handleUnlinkEvent(event.filepath, vectorDB, log, reindexStateManager);
+    } else {
+      await handleSingleFileChange(event.filepath, type, vectorDB, embeddings, verbose, log, reindexStateManager);
+    }
+  };
+}
+async function setupFileWatching(watch, rootDir, vectorDB, embeddings, verbose, log, reindexStateManager) {
   const fileWatchingEnabled = watch !== void 0 ? watch : true;
   if (!fileWatchingEnabled) {
     return null;
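Note: shouldReindexFile relies on computeContentHash from @liendev/core, whose body is not shown in this diff. A plausible stand-in (an assumption, not the package's code) is a SHA-256 over the file contents; an unreadable file returns null so the caller falls back to reindexing:

    import { createHash } from "crypto";
    import { readFile } from "fs/promises";

    async function computeContentHashSketch(filepath) {
      try {
        return createHash("sha256").update(await readFile(filepath)).digest("hex");
      } catch {
        return null; // let the caller treat the file as changed
      }
    }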
@@ -9439,24 +10258,8 @@ async function setupFileWatching(watch, rootDir, vectorDB, embeddings, verbose,
   log("\u{1F440} Starting file watcher...");
   const fileWatcher = new FileWatcher(rootDir);
   try {
-
-
-      if (type === "unlink") {
-        log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
-        try {
-          await vectorDB.deleteByFile(filepath);
-          const manifest = new ManifestManager(vectorDB.dbPath);
-          await manifest.removeFile(filepath);
-          log(`\u2713 Removed ${filepath} from index`);
-        } catch (error) {
-          log(`Failed to remove ${filepath}: ${error}`, "warning");
-        }
-      } else {
-        const action = type === "add" ? "added" : "changed";
-        log(`\u{1F4DD} File ${action}: ${filepath}`);
-        indexSingleFile(filepath, vectorDB, embeddings, { verbose }).catch((error) => log(`Failed to reindex ${filepath}: ${error}`, "warning"));
-      }
-    });
+    const handler = createFileChangeHandler(vectorDB, embeddings, verbose, log, reindexStateManager);
+    await fileWatcher.start(handler);
     log(`\u2713 File watching enabled (watching ${fileWatcher.getWatchedFiles().length} files)`);
     return fileWatcher;
   } catch (error) {
@@ -9483,7 +10286,7 @@ function setupCleanupHandlers(versionCheckInterval, gitPollInterval, fileWatcher
     process.exit(0);
   };
 }
-function setupVersionChecking(vectorDB, log) {
+function setupVersionChecking(vectorDB, log, reindexStateManager) {
   const checkAndReconnect = async () => {
     try {
       if (await vectorDB.checkVersion()) {
@@ -9494,10 +10297,19 @@ function setupVersionChecking(vectorDB, log) {
       log(`Version check failed: ${error}`, "warning");
     }
   };
-  const getIndexMetadata = () =>
-
-
-
+  const getIndexMetadata = () => {
+    const reindex = reindexStateManager.getState();
+    return {
+      indexVersion: vectorDB.getCurrentVersion(),
+      indexDate: vectorDB.getVersionDate(),
+      reindexInProgress: reindex.inProgress,
+      pendingFileCount: reindex.pendingFiles.length,
+      lastReindexDurationMs: reindex.lastReindexDurationMs,
+      // Note: msSinceLastReindex is computed at call time, not cached.
+      // This ensures AI assistants always get current freshness info.
+      msSinceLastReindex: reindex.lastReindexTimestamp ? Date.now() - reindex.lastReindexTimestamp : null
+    };
+  };
   const interval = setInterval(checkAndReconnect, VERSION_CHECK_INTERVAL_MS);
   return { interval, checkAndReconnect, getIndexMetadata };
 }
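Note: getIndexMetadata now folds reindex freshness into every tool response. Illustrative shape (indexVersion/indexDate come from the vector DB; the rest from the state manager; all values here are made up):

    {
      indexVersion: 42,
      indexDate: "2025-01-01T00:00:00Z",
      reindexInProgress: false,
      pendingFileCount: 0,
      lastReindexDurationMs: 95,
      msSinceLastReindex: 12000 // computed at call time
    }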
@@ -9543,13 +10355,13 @@ function createMCPServer() {
     { capabilities: serverConfig.capabilities }
   );
 }
-async function setupAndConnectServer(server, toolContext, log, versionCheckInterval, options) {
+async function setupAndConnectServer(server, toolContext, log, versionCheckInterval, reindexStateManager, options) {
   const { rootDir, verbose, watch } = options;
   const { vectorDB, embeddings } = toolContext;
   registerMCPHandlers(server, toolContext, log);
   await handleAutoIndexing(vectorDB, rootDir, log);
-  const
-  const
+  const fileWatcher = await setupFileWatching(watch, rootDir, vectorDB, embeddings, verbose, log, reindexStateManager);
+  const { gitPollInterval } = await setupGitDetection(rootDir, vectorDB, embeddings, verbose, log, reindexStateManager, fileWatcher);
   const cleanup = setupCleanupHandlers(versionCheckInterval, gitPollInterval, fileWatcher, log);
   process.on("SIGINT", cleanup);
   process.on("SIGTERM", cleanup);
@@ -9572,9 +10384,18 @@ async function startMCPServer(options) {
   const { embeddings, vectorDB } = await initializeComponents(rootDir, earlyLog);
   const server = createMCPServer();
   const log = createMCPLog(server, verbose);
-  const
-  const
-
+  const reindexStateManager = createReindexStateManager();
+  const { interval: versionCheckInterval, checkAndReconnect, getIndexMetadata } = setupVersionChecking(vectorDB, log, reindexStateManager);
+  const toolContext = {
+    vectorDB,
+    embeddings,
+    rootDir,
+    log,
+    checkAndReconnect,
+    getIndexMetadata,
+    getReindexState: () => reindexStateManager.getState()
+  };
+  await setupAndConnectServer(server, toolContext, log, versionCheckInterval, reindexStateManager, { rootDir, verbose, watch });
 }
 
 // src/cli/serve.ts