bluera-knowledge 0.29.0 → 0.30.0
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between versions as they appear in the public registry.
- package/CHANGELOG.md +16 -0
- package/README.md +1 -1
- package/dist/{chunk-T7J5RB6F.js → chunk-AEXFPA57.js} +380 -93
- package/dist/chunk-AEXFPA57.js.map +1 -0
- package/dist/{chunk-U27UECDZ.js → chunk-B335UOU7.js} +9 -6
- package/dist/chunk-B335UOU7.js.map +1 -0
- package/dist/{chunk-H465AZXC.js → chunk-KCI4U6FH.js} +2 -2
- package/dist/{chunk-7JTPAQFO.js → chunk-N3XYMAU3.js} +2 -1
- package/dist/index.js +5 -5
- package/dist/mcp/server.js +3 -3
- package/dist/{watch.service-3ZP35WTM.js → watch.service-LRFCT52P.js} +2 -2
- package/dist/workers/background-worker-cli.js +3 -3
- package/package.json +1 -1
- package/dist/chunk-T7J5RB6F.js.map +0 -1
- package/dist/chunk-U27UECDZ.js.map +0 -1
- package/dist/{chunk-H465AZXC.js.map → chunk-KCI4U6FH.js.map} +0 -0
- package/dist/{chunk-7JTPAQFO.js.map → chunk-N3XYMAU3.js.map} +0 -0
- package/dist/{watch.service-3ZP35WTM.js.map → watch.service-LRFCT52P.js.map} +0 -0
@@ -2,17 +2,21 @@ import {
   AdapterRegistry,
   JobService,
   StoreDefinitionService,
+  TEXT_EXTENSIONS,
   createLazyServices,
   createLogger,
   destroyServices,
+  err,
   isFileStoreDefinition,
   isRepoStoreDefinition,
   isWebStoreDefinition,
+  ok,
   summarizePayload
-} from "./chunk-U27UECDZ.js";
+} from "./chunk-B335UOU7.js";
 import {
+  DEFAULT_IGNORE_DIRS,
   checkStoreModelCompatibility
-} from "./chunk-7JTPAQFO.js";
+} from "./chunk-N3XYMAU3.js";
 
 // src/mcp/server.ts
 import { Server } from "@modelcontextprotocol/sdk/server/index.js";
@@ -698,6 +702,9 @@ var ListJobsArgsSchema = z.object({
 var CancelJobArgsSchema = z.object({
   jobId: z.string().min(1, "Job ID must be a non-empty string")
 });
+var StoresPullArgsSchema = z.object({
+  store: z.string().min(1, "Store name or ID must be a non-empty string")
+});
 var ExecuteArgsSchema = z.object({
   command: z.string().min(1, "Command name is required"),
   args: z.record(z.string(), z.unknown()).optional()
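The new `StoresPullArgsSchema` follows the same zod convention as the surrounding schemas: `parse` either returns the typed value or throws a `ZodError` carrying the custom message. A minimal standalone sketch of that behavior (the sample store name is illustrative, not from the package):

```ts
import { z } from "zod";

// Mirrors the schema added in this release.
const StoresPullArgsSchema = z.object({
  store: z.string().min(1, "Store name or ID must be a non-empty string")
});

// Valid input passes through, typed:
const parsed = StoresPullArgsSchema.parse({ store: "my-repo" });
console.log(parsed.store); // "my-repo"

// An empty string throws a ZodError carrying the custom message:
try {
  StoresPullArgsSchema.parse({ store: "" });
} catch (e) {
  if (e instanceof z.ZodError) console.log(e.issues[0]?.message);
}
```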
@@ -960,15 +967,173 @@ var metaCommands = [
 // src/mcp/commands/store.commands.ts
 import { z as z5 } from "zod";
 
-// src/
-import {
-
+// src/plugin/git-pull.ts
+import { spawn } from "child_process";
+var logger3 = createLogger("git-pull");
+var DEFAULT_PULL_TIMEOUT_MS = 6e4;
+var FORCE_KILL_DELAY_MS = 5e3;
+async function pullRepository(options) {
+  const { repoPath, branch, timeoutMs = DEFAULT_PULL_TIMEOUT_MS } = options;
+  const args = ["pull", "origin"];
+  if (branch !== void 0) {
+    args.push(branch);
+  }
+  logger3.info({ repoPath, branch }, "Starting git pull");
+  return new Promise((resolve) => {
+    const git = spawn("git", args, { cwd: repoPath, stdio: ["ignore", "pipe", "pipe"] });
+    let timedOut = false;
+    let forceKillTimeout = null;
+    const timeout = setTimeout(() => {
+      timedOut = true;
+      git.kill("SIGTERM");
+      forceKillTimeout = setTimeout(() => {
+        if (!git.killed) {
+          git.kill("SIGKILL");
+        }
+      }, FORCE_KILL_DELAY_MS);
+    }, timeoutMs);
+    let stderr = "";
+    git.stderr.on("data", (data) => {
+      stderr += data.toString();
+    });
+    let stdout = "";
+    git.stdout.on("data", (data) => {
+      stdout += data.toString();
+    });
+    git.on("error", (error) => {
+      clearTimeout(timeout);
+      if (forceKillTimeout) clearTimeout(forceKillTimeout);
+      resolve(err(error));
+    });
+    git.on("close", (code) => {
+      clearTimeout(timeout);
+      if (forceKillTimeout) clearTimeout(forceKillTimeout);
+      if (timedOut) {
+        resolve(err(new Error(`Git pull timed out after ${String(timeoutMs)}ms in: ${repoPath}`)));
+      } else if (code === 0) {
+        logger3.info({ repoPath, output: stdout.trim() }, "Git pull completed");
+        resolve(ok(stdout.trim()));
+      } else {
+        resolve(err(new Error(`Git pull failed: ${stderr}`)));
+      }
+    });
+  });
+}
+
+// src/utils/eta.ts
+function estimateIndexingTime(fileCount) {
+  if (fileCount <= 0) {
+    return { etaSeconds: 0, etaDisplay: "a few seconds" };
+  }
+  const etaSeconds = Math.ceil(fileCount / 5) + 10;
+  return { etaSeconds, etaDisplay: formatDuration(etaSeconds) };
+}
+function formatDuration(seconds) {
+  if (seconds < 60) {
+    return `~${String(seconds)} seconds`;
+  }
+  const minutes = Math.round(seconds / 60);
+  if (minutes === 1) {
+    return "~1 minute";
+  }
+  return `~${String(minutes)} minutes`;
+}
+
+// src/utils/file-count.ts
+import { execFile } from "child_process";
+import { readdir, stat } from "fs/promises";
+import { extname, join } from "path";
+import { promisify } from "util";
+var execFileAsync = promisify(execFile);
+var TIMEOUT_MS = 5e3;
+var IGNORE_DIRS = new Set(DEFAULT_IGNORE_DIRS);
+async function estimateFileCount(storePath) {
+  try {
+    return await withTimeout(estimateFileCountInner(storePath), TIMEOUT_MS);
+  } catch {
+    return 0;
+  }
+}
+async function estimateFileCountInner(storePath) {
+  const gitFiles = await gitLsFiles(storePath);
+  if (gitFiles !== null) {
+    return countTextFiles(gitFiles);
+  }
+  return walkAndCount(storePath);
+}
+async function gitLsFiles(repoPath) {
+  try {
+    await stat(join(repoPath, ".git"));
+    const cleanEnv = { ...process.env };
+    delete cleanEnv["GIT_DIR"];
+    delete cleanEnv["GIT_WORK_TREE"];
+    delete cleanEnv["GIT_INDEX_FILE"];
+    const { stdout } = await execFileAsync("git", ["ls-files", "-z"], {
+      cwd: repoPath,
+      maxBuffer: 50 * 1024 * 1024,
+      timeout: TIMEOUT_MS,
+      env: cleanEnv
+    });
+    return stdout.split("\0").filter(Boolean);
+  } catch {
+    return null;
+  }
+}
+function countTextFiles(files) {
+  let count = 0;
+  for (const file of files) {
+    const ext = extname(file).toLowerCase();
+    if (TEXT_EXTENSIONS.has(ext)) {
+      count++;
+    }
+  }
+  return count;
+}
+async function walkAndCount(dirPath, depth = 0) {
+  if (depth > 10) return 0;
+  let count = 0;
+  let entries;
+  try {
+    entries = await readdir(dirPath, { withFileTypes: true });
+  } catch {
+    return 0;
+  }
+  for (const entry of entries) {
+    if (entry.isDirectory()) {
+      if (IGNORE_DIRS.has(entry.name)) continue;
+      count += await walkAndCount(join(dirPath, entry.name), depth + 1);
+    } else if (entry.isFile()) {
+      const ext = extname(entry.name).toLowerCase();
+      if (TEXT_EXTENSIONS.has(ext)) {
+        count++;
+      }
+    }
+  }
+  return count;
+}
+function withTimeout(promise, ms) {
+  return new Promise((resolve, reject) => {
+    const timer = setTimeout(() => {
+      reject(new Error("Timeout"));
+    }, ms);
+    promise.then(
+      (value) => {
+        clearTimeout(timer);
+        resolve(value);
+      },
+      (error) => {
+        clearTimeout(timer);
+        reject(error instanceof Error ? error : new Error(String(error)));
+      }
+    );
+  });
+}
 
 // src/workers/spawn-worker.ts
-import { spawn } from "child_process";
+import { spawn as spawn2 } from "child_process";
 import path from "path";
 import { fileURLToPath } from "url";
-var
+var logger4 = createLogger("spawn-worker");
 function spawnBackgroundWorker(jobId, dataDir) {
   const currentFilePath = fileURLToPath(import.meta.url);
   const currentDir = path.dirname(currentFilePath);
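The ETA heuristic in `src/utils/eta.ts` assumes roughly five files indexed per second plus a ten-second fixed cost, so 500 files gives ceil(500/5) + 10 = 110 s, displayed as "~2 minutes". A standalone TypeScript restatement of the same arithmetic, for checking the numbers:

```ts
// Standalone restatement of the heuristic from src/utils/eta.ts:
// ~5 files per second plus a 10-second fixed cost.
function estimateIndexingTime(fileCount: number): { etaSeconds: number; etaDisplay: string } {
  if (fileCount <= 0) return { etaSeconds: 0, etaDisplay: "a few seconds" };
  const etaSeconds = Math.ceil(fileCount / 5) + 10;
  return { etaSeconds, etaDisplay: formatDuration(etaSeconds) };
}

function formatDuration(seconds: number): string {
  if (seconds < 60) return `~${seconds} seconds`;
  const minutes = Math.round(seconds / 60);
  return minutes === 1 ? "~1 minute" : `~${minutes} minutes`;
}

console.log(estimateIndexingTime(40));  // { etaSeconds: 18, etaDisplay: "~18 seconds" }
console.log(estimateIndexingTime(500)); // { etaSeconds: 110, etaDisplay: "~2 minutes" }
```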
@@ -982,15 +1147,15 @@ function spawnBackgroundWorker(jobId, dataDir) {
     const workerScript = path.join(distDir, "workers", "background-worker-cli.js");
     command = process.execPath;
     args = [workerScript, jobId];
-
+    logger4.debug({ workerScript, distDir, currentFilePath }, "Production worker path");
   } else {
     const workerScript = path.join(currentDir, "background-worker-cli.ts");
     command = "npx";
     args = ["tsx", workerScript, jobId];
-
+    logger4.debug({ workerScript, currentDir }, "Development worker path");
   }
-
-  const worker =
+  logger4.info({ jobId, command, args, dataDir, isProduction }, "Spawning background worker");
+  const worker = spawn2(command, args, {
     detached: true,
     // Detach from parent process
     stdio: "ignore",
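The added logging makes the spawn path visible; the underlying fire-and-forget pattern is Node's standard detached-spawn idiom. A minimal sketch of that idiom (paths and job ID are illustrative, not from the package):

```ts
import { spawn } from "child_process";

// Fire-and-forget child, as in spawnBackgroundWorker: detached so it
// survives the parent, stdio ignored so no pipes keep the parent alive.
function launchDetached(command: string, args: string[]): number | undefined {
  const child = spawn(command, args, {
    detached: true,  // own process group, outlives the parent
    stdio: "ignore"  // no inherited or piped stdio holding the event loop
  });
  child.on("error", (e) => {
    console.error(`spawn failed: ${e.message}`);
  });
  child.unref(); // let the parent exit without waiting on the child
  return child.pid;
}

// Illustrative usage (worker path and job ID hypothetical):
launchDetached(process.execPath, ["dist/workers/background-worker-cli.js", "job-123"]);
```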
@@ -1002,22 +1167,104 @@ function spawnBackgroundWorker(jobId, dataDir) {
     // Only set if provided
     }
   });
-worker.on("error", (
-
+  worker.on("error", (err2) => {
+    logger4.error({ jobId, error: err2.message }, "Failed to spawn background worker");
   });
-
+  logger4.info({ jobId, pid: worker.pid }, "Background worker spawned");
   worker.unref();
 }
 
+// src/mcp/handlers/pull.handler.ts
+var logger5 = createLogger("mcp-pull");
+var handleStoresPull = async (args, context) => {
+  const validated = StoresPullArgsSchema.parse(args);
+  logger5.info({ store: validated.store }, "Stores pull started");
+  const { services, options } = context;
+  const store = await services.store.getByIdOrName(validated.store);
+  if (store === void 0) {
+    throw new Error(`Store not found: ${validated.store}`);
+  }
+  if (store.type !== "repo") {
+    throw new Error(
+      `stores:pull only works on repo-type stores. Store "${store.name}" is type "${store.type}".`
+    );
+  }
+  if (store.url === void 0) {
+    throw new Error(`Store "${store.name}" has no git remote URL. Cannot pull without a remote.`);
+  }
+  const pullResult = await pullRepository({
+    repoPath: store.path,
+    branch: store.branch
+  });
+  if (!pullResult.success) {
+    logger5.error({ store: store.name, error: pullResult.error.message }, "Git pull failed");
+    throw new Error(`Git pull failed for "${store.name}": ${pullResult.error.message}`);
+  }
+  const pullOutput = pullResult.data;
+  logger5.info({ store: store.name, pullOutput }, "Git pull succeeded");
+  const jobService = new JobService(options.dataDir);
+  const job = jobService.createJob({
+    type: "index",
+    details: {
+      storeName: store.name,
+      storeId: store.id,
+      phase: "indexing",
+      phaseStep: 1,
+      phaseTotalSteps: 1,
+      path: store.path
+    },
+    message: `Re-indexing ${store.name} after git pull...`
+  });
+  spawnBackgroundWorker(job.id, options.dataDir);
+  logger5.info({ storeId: store.id, storeName: store.name, jobId: job.id }, "Stores pull completed");
+  const fileCount = await estimateFileCount(store.path);
+  const eta = estimateIndexingTime(fileCount);
+  const fileCountLabel = fileCount > 0 ? `~${String(fileCount)} files` : "files";
+  return {
+    content: [
+      {
+        type: "text",
+        text: JSON.stringify(
+          {
+            store: {
+              id: store.id,
+              name: store.name
+            },
+            pull: {
+              output: pullOutput
+            },
+            search: {
+              immediate: `Files available for Grep/Read at: ${store.path}`,
+              pending: `Similarity search re-indexing ${fileCountLabel}`,
+              eta: eta.etaDisplay,
+              monitor: `check job status with job:status ${job.id}`
+            },
+            job: {
+              id: job.id,
+              status: job.status,
+              message: `Re-indexing ${store.name} after git pull (${fileCountLabel}, ${eta.etaDisplay})...`
+            },
+            message: `Pulled latest changes. Files available for Grep/Read at ${store.path}. Similarity search re-indexing ${fileCountLabel} (${eta.etaDisplay}).`
+          },
+          null,
+          2
+        )
+      }
+    ]
+  };
+};
+
 // src/mcp/handlers/store.handler.ts
-
+import { rm, access } from "fs/promises";
+import { join as join2 } from "path";
+var logger6 = createLogger("mcp-store");
 var handleListStores = async (args, context) => {
   const validated = ListStoresArgsSchema.parse(args);
-
+  logger6.info({ type: validated.type }, "List stores started");
   const { services } = context;
   const stores = await services.store.list();
   const filtered = validated.type !== void 0 ? stores.filter((s) => s.type === validated.type) : stores;
-
+  logger6.info({ count: filtered.length, type: validated.type }, "List stores completed");
   return {
     content: [
       {
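`handleStoresPull` consumes `pullRepository` through the package's `ok`/`err` Result convention rather than exceptions. A minimal sketch of that convention, assuming the Result shape implied by the `pullResult.success` / `.data` / `.error` usage above (the shape itself is a reconstruction, not confirmed source):

```ts
// Result shape implied by the ok/err helpers used above (an assumption,
// reconstructed from pullResult.success / .data / .error usage).
type Result<T> = { success: true; data: T } | { success: false; error: Error };

const ok = <T>(data: T): Result<T> => ({ success: true, data });
const err = <T>(error: Error): Result<T> => ({ success: false, error });

// Consuming a Result the way handleStoresPull does:
function report(pullResult: Result<string>): string {
  if (!pullResult.success) {
    // Error branch: surface the message instead of throwing from the helper.
    return `Git pull failed: ${pullResult.error.message}`;
  }
  return `Pulled: ${pullResult.data}`;
}

console.log(report(ok("Already up to date.")));
console.log(report(err(new Error("could not resolve host"))));
```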
@@ -1048,14 +1295,14 @@ var handleListStores = async (args, context) => {
 };
 var handleGetStoreInfo = async (args, context) => {
   const validated = GetStoreInfoArgsSchema.parse(args);
-
+  logger6.info({ store: validated.store }, "Get store info started");
   const { services } = context;
   const store = await services.store.getByIdOrName(validated.store);
   if (store === void 0) {
-
+    logger6.warn({ store: validated.store }, "Store not found");
     throw new Error(`Store not found: ${validated.store}`);
   }
-
+  logger6.info({ storeId: store.id, storeName: store.name }, "Get store info completed");
   return {
     content: [
       {
@@ -1088,7 +1335,7 @@ var handleGetStoreInfo = async (args, context) => {
 };
 var handleCreateStore = async (args, context) => {
   const validated = CreateStoreArgsSchema.parse(args);
-
+  logger6.info(
     { name: validated.name, type: validated.type, source: validated.source },
     "Create store started"
   );
@@ -1107,7 +1354,7 @@ var handleCreateStore = async (args, context) => {
     ...validated.extractInstructions !== void 0 ? { extractInstructions: validated.extractInstructions } : {}
   });
   if (!result.success) {
-
+    logger6.error({ name: validated.name, error: result.error.message }, "Create store failed");
     throw new Error(result.error.message);
   }
   const jobService = new JobService(options.dataDir);
@@ -1152,10 +1399,20 @@ var handleCreateStore = async (args, context) => {
     message: validated.type === "web" ? `Crawling ${result.data.name}...` : `Indexing ${result.data.name}...`
   });
   spawnBackgroundWorker(job.id, options.dataDir);
-
+  logger6.info(
     { storeId: result.data.id, storeName: result.data.name, jobId: job.id },
     "Create store completed"
   );
+  const storePath = "path" in result.data ? result.data.path : void 0;
+  let fileCount = 0;
+  if (validated.type === "web") {
+    fileCount = validated.maxPages ?? 0;
+  } else if (storePath !== void 0) {
+    fileCount = await estimateFileCount(storePath);
+  }
+  const eta = estimateIndexingTime(fileCount);
+  const fileCountLabel = fileCount > 0 ? `~${String(fileCount)} files` : "files";
+  const jobMessage = fileCount > 0 ? `Indexing ${result.data.name} (${fileCountLabel}, ${eta.etaDisplay})...` : job.message;
   return {
     content: [
       {
@@ -1166,14 +1423,20 @@ var handleCreateStore = async (args, context) => {
             id: result.data.id,
             name: result.data.name,
             type: result.data.type,
-            path:
+            path: storePath
+          },
+          search: {
+            immediate: storePath !== void 0 ? `Files available for Grep/Read at: ${storePath}` : void 0,
+            pending: `Similarity search indexing ${fileCountLabel}`,
+            eta: eta.etaDisplay,
+            monitor: `check job status with job:status ${job.id}`
           },
           job: {
             id: job.id,
             status: job.status,
-            message:
+            message: jobMessage
           },
-          message: `Store created.
+          message: storePath !== void 0 ? `Store created. Files available for Grep/Read now at ${storePath}. Similarity search indexing ${fileCountLabel} (${eta.etaDisplay}).` : `Store created. Similarity search indexing ${fileCountLabel} (${eta.etaDisplay}).`
         },
         null,
         2
@@ -1184,11 +1447,11 @@ var handleCreateStore = async (args, context) => {
 };
 var handleIndexStore = async (args, context) => {
   const validated = IndexStoreArgsSchema.parse(args);
-
+  logger6.info({ store: validated.store }, "Index store started");
   const { services, options } = context;
   const store = await services.store.getByIdOrName(validated.store);
   if (store === void 0) {
-
+    logger6.warn({ store: validated.store }, "Store not found for indexing");
     throw new Error(`Store not found: ${validated.store}`);
   }
   const jobService = new JobService(options.dataDir);
@@ -1209,7 +1472,12 @@ var handleIndexStore = async (args, context) => {
     message: `Re-indexing ${store.name}...`
   });
   spawnBackgroundWorker(job.id, options.dataDir);
-
+  logger6.info({ storeId: store.id, storeName: store.name, jobId: job.id }, "Index store completed");
+  const storePath = "path" in store ? store.path : void 0;
+  const fileCount = storePath !== void 0 ? await estimateFileCount(storePath) : 0;
+  const eta = estimateIndexingTime(fileCount);
+  const fileCountLabel = fileCount > 0 ? `~${String(fileCount)} files` : "files";
+  const jobMessage = fileCount > 0 ? `Re-indexing ${store.name} (${fileCountLabel}, ${eta.etaDisplay})...` : job.message;
   return {
     content: [
       {
@@ -1220,12 +1488,18 @@ var handleIndexStore = async (args, context) => {
             id: store.id,
             name: store.name
           },
+          search: {
+            immediate: storePath !== void 0 ? `Files available for Grep/Read at: ${storePath}` : void 0,
+            pending: `Similarity search re-indexing ${fileCountLabel}`,
+            eta: eta.etaDisplay,
+            monitor: `check job status with job:status ${job.id}`
+          },
           job: {
             id: job.id,
             status: job.status,
-            message:
+            message: jobMessage
          },
-          message: `
+          message: storePath !== void 0 ? `Re-indexing started. Files available for Grep/Read at ${storePath}. Similarity search indexing ${fileCountLabel} (${eta.etaDisplay}).` : `Re-indexing started. Similarity search indexing ${fileCountLabel} (${eta.etaDisplay}).`
         },
         null,
         2
@@ -1236,34 +1510,34 @@ var handleIndexStore = async (args, context) => {
 };
 var handleDeleteStore = async (args, context) => {
   const validated = DeleteStoreArgsSchema.parse(args);
-
+  logger6.info({ store: validated.store }, "Delete store started");
   const { services, options } = context;
   const store = await services.store.getByIdOrName(validated.store);
   if (store === void 0) {
-
+    logger6.warn({ store: validated.store }, "Store not found for deletion");
     throw new Error(`Store not found: ${validated.store}`);
   }
-
+  logger6.debug({ storeId: store.id, storeName: store.name }, "Deleting LanceDB table");
   await services.lance.deleteStore(store.id);
-
+  logger6.debug({ storeId: store.id }, "Deleting code graph");
   await services.codeGraph.deleteGraph(store.id);
-
+  logger6.debug({ storeId: store.id }, "Deleting manifest");
   await services.manifest.delete(store.id);
   if (store.type === "repo" && "url" in store && store.url !== void 0) {
     if (options.dataDir === void 0) {
       throw new Error("dataDir is required to delete cloned repository files");
     }
-    const repoPath =
-
+    const repoPath = join2(options.dataDir, "repos", store.id);
+    logger6.debug({ storeId: store.id, repoPath }, "Removing cloned repository");
     await rm(repoPath, { recursive: true, force: true });
   }
-
+  logger6.debug({ storeId: store.id }, "Removing from registry");
   const result = await services.store.delete(store.id);
   if (!result.success) {
-
+    logger6.error({ storeId: store.id, error: result.error.message }, "Delete store failed");
     throw new Error(result.error.message);
   }
-
+  logger6.info(
     { storeId: store.id, storeName: store.name, storeType: store.type },
     "Delete store completed"
   );
@@ -1289,7 +1563,7 @@ var handleDeleteStore = async (args, context) => {
   };
 };
 var handleCheckModels = async (_args, context) => {
-
+  logger6.info("Check models started");
   const { services } = context;
   const stores = await services.store.list();
   const currentModelId = services.store.getCurrentModelId();
@@ -1305,7 +1579,7 @@ var handleCheckModels = async (_args, context) => {
     };
   });
   const needsReindex = storeStatuses.filter((s) => s.status === "needs-reindex");
-
+  logger6.info(
     { total: stores.length, needsReindex: needsReindex.length },
     "Check models completed"
   );
@@ -1376,7 +1650,7 @@ async function checkStoreHealth(store, currentModelId) {
   };
 }
 var handleStoreHealth = async (args, context) => {
-
+  logger6.info({ store: args.store }, "Store health check started");
   const { services } = context;
   const currentModelId = services.store.getCurrentModelId();
   let storesToCheck;
@@ -1400,7 +1674,7 @@ var handleStoreHealth = async (args, context) => {
   const healthy = results.filter((r) => r.status === "healthy").length;
   const warnings = results.filter((r) => r.status === "warning").length;
   const errors = results.filter((r) => r.status === "error").length;
-
+  logger6.info(
     { total: results.length, healthy, warnings, errors, exitCode },
     "Store health check completed"
   );
@@ -1478,6 +1752,19 @@ var storeCommands = [
     }),
     handler: (args, context) => handleDeleteStore(args, context)
   },
+  {
+    name: "stores:pull",
+    description: "Pull latest changes from git remote and re-index a repo-type store",
+    argsSchema: z5.object({
+      store: z5.string().min(1).describe("Store name or ID")
+    }),
+    handler: (args, context) => {
+      if (typeof args["store"] !== "string" || args["store"].length === 0) {
+        throw new Error("Store name or ID is required");
+      }
+      return handleStoresPull({ store: args["store"] }, context);
+    }
+  },
   {
     name: "stores:check-models",
     description: "Check embedding model compatibility for all stores",
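Commands registered this way are reachable through the MCP execute handler, whose payload shape is fixed by `ExecuteArgsSchema` above (`{ command, args? }`). An illustrative payload for the new command (the store name is hypothetical, and "execute" refers to the handleExecute path shown later in this diff):

```ts
// Payload accepted per ExecuteArgsSchema:
// { command: string, args?: Record<string, unknown> }.
const request = {
  command: "stores:pull",
  args: { store: "my-docs-repo" } // hypothetical store name
};
// The registered handler validates args["store"] and delegates to
// handleStoresPull, which runs git pull and queues a re-index job.
console.log(JSON.stringify(request, null, 2));
```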
@@ -1499,11 +1786,11 @@ var storeCommands = [
 
 // src/mcp/commands/sync.commands.ts
 import { rm as rm2 } from "fs/promises";
-import { join as
+import { join as join3 } from "path";
 import { z as z6 } from "zod";
-var
+var logger7 = createLogger("mcp-sync");
 async function handleStoresSync(args, context) {
-
+  logger7.info(
     { prune: args.prune, dryRun: args.dryRun, reindex: args.reindex },
     "Stores sync started"
   );
@@ -1562,7 +1849,7 @@ async function handleStoresSync(args, context) {
       await services.manifest.delete(store.id);
       if (store.type === "repo" && "url" in store && store.url !== void 0) {
         const dataDir = services.config.resolveDataDir();
-        const repoPath =
+        const repoPath = join3(dataDir, "repos", store.id);
         await rm2(repoPath, { recursive: true, force: true });
       }
       const deleteResult = await services.store.delete(store.id, { skipDefinitionSync: true });
@@ -1594,7 +1881,7 @@ async function handleStoresSync(args, context) {
       }
     }
   }
-
+  logger7.info(
     {
       created: result.created.length,
       skipped: result.skipped.length,
@@ -1708,24 +1995,24 @@ import { z as z7 } from "zod";
 
 // src/mcp/handlers/uninstall.handler.ts
 import { existsSync } from "fs";
-import { readdir, rm as rm3 } from "fs/promises";
+import { readdir as readdir2, rm as rm3 } from "fs/promises";
 import { homedir } from "os";
-import { join as
-var
+import { join as join4 } from "path";
+var logger8 = createLogger("uninstall-handler");
 var handleUninstall = async (args, context) => {
   const { global: includeGlobal = false, keepDefinitions = true } = args;
   const deleted = [];
   const kept = [];
   const errors = [];
   const projectRoot = context.options.projectRoot ?? process.cwd();
-  const projectDataDir =
-
+  const projectDataDir = join4(projectRoot, ".bluera", "bluera-knowledge");
+  logger8.info({ projectDataDir, includeGlobal, keepDefinitions }, "Starting uninstall");
   if (existsSync(projectDataDir)) {
     if (keepDefinitions) {
       try {
-        const entries = await
+        const entries = await readdir2(projectDataDir, { withFileTypes: true });
         for (const entry of entries) {
-          const entryPath =
+          const entryPath = join4(projectDataDir, entry.name);
           if (entry.name === "stores.config.json") {
             kept.push(entryPath);
             continue;
@@ -1733,42 +2020,42 @@ var handleUninstall = async (args, context) => {
           try {
             await rm3(entryPath, { recursive: true, force: true });
             deleted.push(entryPath);
-          } catch (
-            const msg =
+          } catch (err2) {
+            const msg = err2 instanceof Error ? err2.message : String(err2);
             errors.push(`Failed to delete ${entryPath}: ${msg}`);
-
+            logger8.error({ error: msg, path: entryPath }, "Failed to delete");
           }
         }
-      } catch (
-        const msg =
+      } catch (err2) {
+        const msg = err2 instanceof Error ? err2.message : String(err2);
         errors.push(`Failed to read ${projectDataDir}: ${msg}`);
-
+        logger8.error({ error: msg, path: projectDataDir }, "Failed to read directory");
       }
     } else {
       try {
         await rm3(projectDataDir, { recursive: true, force: true });
         deleted.push(projectDataDir);
-      } catch (
-        const msg =
+      } catch (err2) {
+        const msg = err2 instanceof Error ? err2.message : String(err2);
         errors.push(`Failed to delete ${projectDataDir}: ${msg}`);
-
+        logger8.error({ error: msg, path: projectDataDir }, "Failed to delete");
       }
     }
   }
   if (includeGlobal) {
-    const globalDir =
+    const globalDir = join4(homedir(), ".local", "share", "bluera-knowledge");
     if (existsSync(globalDir)) {
       try {
         await rm3(globalDir, { recursive: true, force: true });
         deleted.push(globalDir);
-      } catch (
-        const msg =
+      } catch (err2) {
+        const msg = err2 instanceof Error ? err2.message : String(err2);
         errors.push(`Failed to delete ${globalDir}: ${msg}`);
-
+        logger8.error({ error: msg, path: globalDir }, "Failed to delete global data");
       }
     }
   }
-
+  logger8.info({ deleted, kept, errors }, "Uninstall complete");
   const lines = [];
   if (deleted.length > 0) {
     lines.push("## Deleted:");
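The repeated catch-block pattern above, `err2 instanceof Error ? err2.message : String(err2)`, is the standard narrowing for caught values, which TypeScript types as `unknown`. A minimal sketch:

```ts
// TypeScript types a caught value as `unknown`, so the handlers above
// narrow it before reading .message:
function errorMessage(e: unknown): string {
  return e instanceof Error ? e.message : String(e);
}

try {
  throw new Error("disk full");
} catch (e) {
  console.log(errorMessage(e)); // "disk full"
}

try {
  throw "plain string"; // non-Error throws are still handled
} catch (e) {
  console.log(errorMessage(e)); // "plain string"
}
```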
@@ -1829,11 +2116,11 @@ commandRegistry.registerAll(syncCommands);
 commandRegistry.registerAll(uninstallCommands);
 
 // src/mcp/handlers/execute.handler.ts
-var
+var logger9 = createLogger("mcp-execute");
 var handleExecute = async (args, context) => {
   const validated = ExecuteArgsSchema.parse(args);
   const commandArgs = validated.args ?? {};
-
+  logger9.info(
     { command: validated.command, args: JSON.stringify(commandArgs) },
     "Execute command started"
   );
@@ -1841,11 +2128,11 @@ var handleExecute = async (args, context) => {
   try {
     const result = await executeCommand(validated.command, commandArgs, context);
     const durationMs = Date.now() - startTime;
-
+    logger9.info({ command: validated.command, durationMs }, "Execute command completed");
     return result;
   } catch (error) {
     const durationMs = Date.now() - startTime;
-
+    logger9.error(
       {
         command: validated.command,
         durationMs,
@@ -1952,11 +2239,11 @@ var LRUCache = class {
 };
 
 // src/mcp/handlers/search.handler.ts
-var
+var logger10 = createLogger("mcp-search");
 var resultCache = new LRUCache(1e3);
 var handleSearch = async (args, context) => {
   const validated = SearchArgsSchema.parse(args);
-
+  logger10.info(
     {
       query: validated.query,
       stores: validated.stores,
@@ -1996,7 +2283,7 @@ var handleSearch = async (args, context) => {
     );
   }
   if (skippedStores.length > 0) {
-
+    logger10.warn({ skippedStores }, "Some stores skipped due to model incompatibility");
   }
   const storeIds = compatibleStores.map((s) => s.id);
   try {
@@ -2062,7 +2349,7 @@ var handleSearch = async (args, context) => {
   const header = `Search: "${validated.query}" | Results: ${String(results.totalResults)} | ${formatTokenCount(responseTokens)} tokens | ${String(results.timeMs)}ms${confidenceInfo}${rerankInfo}
 
 `;
-
+  logger10.info(
     {
       query: validated.query,
       totalResults: results.totalResults,
@@ -2083,7 +2370,7 @@ var handleSearch = async (args, context) => {
 };
 var handleGetFullContext = async (args, context) => {
   const validated = GetFullContextArgsSchema.parse(args);
-
+  logger10.info({ resultId: validated.resultId }, "Get full context requested");
   const resultId = validated.resultId;
   const cachedResult = resultCache.get(resultId);
   if (!cachedResult) {
@@ -2101,7 +2388,7 @@ var handleGetFullContext = async (args, context) => {
       null,
       2
    );
-
+    logger10.info(
      {
        resultId,
        cached: true,
@@ -2168,7 +2455,7 @@ var handleGetFullContext = async (args, context) => {
     null,
     2
   );
-
+  logger10.info(
     {
       resultId,
       cached: false,
@@ -2204,7 +2491,7 @@ var tools = [
 ];
 
 // src/mcp/server.ts
-var
+var logger11 = createLogger("mcp-server");
 var registry = AdapterRegistry.getInstance();
 if (!registry.hasExtension(".zil")) {
   registry.register(new ZilAdapter());
@@ -2320,7 +2607,7 @@ function createMCPServer(options, services) {
   server.setRequestHandler(CallToolRequestSchema, async (request) => {
     const { name, arguments: args } = request.params;
     const startTime = Date.now();
-
+    logger11.info({ tool: name, args: JSON.stringify(args) }, "Tool invoked");
     const context = { services, options };
     try {
       let result;
@@ -2336,11 +2623,11 @@ function createMCPServer(options, services) {
         result = await tool.handler(validated, context);
       }
       const durationMs = Date.now() - startTime;
-
+      logger11.info({ tool: name, durationMs }, "Tool completed");
       return result;
     } catch (error) {
       const durationMs = Date.now() - startTime;
-
+      logger11.error(
         {
           tool: name,
           durationMs,
@@ -2354,7 +2641,7 @@ function createMCPServer(options, services) {
   return server;
 }
 async function runMCPServer(options) {
-
+  logger11.info(
     {
       dataDir: options.dataDir,
       projectRoot: options.projectRoot
@@ -2365,12 +2652,12 @@ async function runMCPServer(options) {
   const server = createMCPServer(options, services);
   const transport = new StdioServerTransport();
   const shutdown = async (signal) => {
-
+    logger11.info({ signal }, "Shutdown signal received");
     try {
       await destroyServices(services);
-
+      logger11.info("Services destroyed, exiting");
     } catch (error) {
-
+      logger11.error(
         { error: error instanceof Error ? error.message : String(error) },
         "Error during shutdown"
       );
@@ -2379,7 +2666,7 @@ async function runMCPServer(options) {
   process.on("SIGINT", () => void shutdown("SIGINT"));
   process.on("SIGTERM", () => void shutdown("SIGTERM"));
   await server.connect(transport);
-
+  logger11.info("MCP server connected to stdio transport");
 }
 var scriptPath = process.argv[1] ?? "";
 var isMCPServerEntry = scriptPath.endsWith("mcp/server.js") || scriptPath.endsWith("mcp/server");
@@ -2393,7 +2680,7 @@ if (isMCPServerEntry) {
     config: process.env["CONFIG_PATH"],
     projectRoot
   }).catch((error) => {
-
+    logger11.error(
       { error: error instanceof Error ? error.message : String(error) },
       "Failed to start MCP server"
     );
@@ -2407,4 +2694,4 @@ export {
   createMCPServer,
   runMCPServer
 };
-//# sourceMappingURL=chunk-T7J5RB6F.js.map
+//# sourceMappingURL=chunk-AEXFPA57.js.map